_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
d7e901e0979a883f7966e8250949c37829c36c2459be7201480c0ff2791029be | jarohen/yoyo | flow.cljs | (ns {{name}}.ui.app
(:require [flow.core :as f :include-macros true]
[clojure.string :as s]
[nrepl.embed :refer [connect-brepl!]]))
(enable-console-print!)
(connect-brepl!)
(set! (.-onload js/window)
(fn []
(f/root js/document.body
(f/el
[:p "Hello world!"]))))
;; ------------------------------------------------------------
;; Below this line is only required for the Yo-yo welcome page, feel
;; free to just delete all of it when you want to get cracking on your
;; own project!
(defn code [s]
(f/el
[:strong {::f/style {:font-family "'Courier New', 'monospace'"}}
s]))
(set! (.-onload js/window)
(fn []
(f/root js/document.body
(f/el
[:div.container
[:h2 {::f/style {:margin-top "1em"}}
"Hello from Yo-yo!"]
[:h3 "Things to try:"]
[:ul
[:li [:p "In your Clojure REPL, run " [code "(yoyo/reload!)"] " to completely reload the webapp without restarting the JVM."]]
[:li [:p "Start making your webapp!"]
[:ul
[:li [:p "The CLJS entry point is in " [code "ui-src/{{sanitized}}/ui/app.cljs"]]]
[:li [:p "The Clojure system entry point is in " [code "src/{{sanitized}}/service/system.clj"]]]
[:li [:p "The Clojure Ring handler is in " [code "src/{{sanitized}}/service/handler.clj"]]]]]
[:li [:p "Connect to the CLJS browser REPL"]
[:ol
[:li "Connect to the normal server-side REPL (port 7888, by default)"]
[:li "Evaluate: " [code "(nrepl.embed/->brepl)"]]
[:li "Refresh this page"]
[:li "When you get a " [code "cljs.user =>"] " prompt, you can test it with:"
[:ul
[:li [code "(+ 1 1)"]]
[:li [code "(js/window.alert \"Hello world!\")"]]
[:li [code "(set! (.-backgroundColor js/document.body.style) \"green\")"]]]]]]
[:li [:p "Any trouble, let me know - either through GitHub or on Twitter at " [:a {:href ""} "@jarohen"]]]
[:li [:p "Good luck!"]]]
[:div {::f/style {:text-align "right"
:font-weight "bold"}}
[:p
[:span {::f/style {:font-size "1.3em"}} "James Henderson"]
[:br]
"Twitter: " [:a {:href ""} "@jarohen"]
[:br]
"GitHub: " [:a {:href "-henderson"} "james-henderson"]]]]))))
| null | https://raw.githubusercontent.com/jarohen/yoyo/b579d21becd06b5330dee9f5963708db03ce1e25/templates/yoyo-webapp/src/leiningen/new/yoyo_webapp/cljs/flow.cljs | clojure | ------------------------------------------------------------
Below this line is only required for the Yo-yo welcome page, feel
free to just delete all of it when you want to get cracking on your
own project! | (ns {{name}}.ui.app
(:require [flow.core :as f :include-macros true]
[clojure.string :as s]
[nrepl.embed :refer [connect-brepl!]]))
(enable-console-print!)
(connect-brepl!)
(set! (.-onload js/window)
(fn []
(f/root js/document.body
(f/el
[:p "Hello world!"]))))
(defn code [s]
(f/el
[:strong {::f/style {:font-family "'Courier New', 'monospace'"}}
s]))
(set! (.-onload js/window)
(fn []
(f/root js/document.body
(f/el
[:div.container
[:h2 {::f/style {:margin-top "1em"}}
"Hello from Yo-yo!"]
[:h3 "Things to try:"]
[:ul
[:li [:p "In your Clojure REPL, run " [code "(yoyo/reload!)"] " to completely reload the webapp without restarting the JVM."]]
[:li [:p "Start making your webapp!"]
[:ul
[:li [:p "The CLJS entry point is in " [code "ui-src/{{sanitized}}/ui/app.cljs"]]]
[:li [:p "The Clojure system entry point is in " [code "src/{{sanitized}}/service/system.clj"]]]
[:li [:p "The Clojure Ring handler is in " [code "src/{{sanitized}}/service/handler.clj"]]]]]
[:li [:p "Connect to the CLJS browser REPL"]
[:ol
[:li "Connect to the normal server-side REPL (port 7888, by default)"]
[:li "Evaluate: " [code "(nrepl.embed/->brepl)"]]
[:li "Refresh this page"]
[:li "When you get a " [code "cljs.user =>"] " prompt, you can test it with:"
[:ul
[:li [code "(+ 1 1)"]]
[:li [code "(js/window.alert \"Hello world!\")"]]
[:li [code "(set! (.-backgroundColor js/document.body.style) \"green\")"]]]]]]
[:li [:p "Any trouble, let me know - either through GitHub or on Twitter at " [:a {:href ""} "@jarohen"]]]
[:li [:p "Good luck!"]]]
[:div {::f/style {:text-align "right"
:font-weight "bold"}}
[:p
[:span {::f/style {:font-size "1.3em"}} "James Henderson"]
[:br]
"Twitter: " [:a {:href ""} "@jarohen"]
[:br]
"GitHub: " [:a {:href "-henderson"} "james-henderson"]]]]))))
|
4a3e30c9f54651e0bf338d92a6c57596b2f08158159ef9489da8e917e8654375 | yahoojapan/big3store | mj_query_node.erl | %%
%% Main-memory Join Query Node processes
%%
2014 - 2016 UP FAMNIT and Yahoo Japan Corporation
%% @version 0.3
@since February , 2016
@author < >
@author < >
%%
%% @doc Main-memory join query node is implemented as independent gen_process. Main-memory
%% join query node is a state-machine realizing main-memory join algorithm as protocol among
%% join query node and the outer and inner query nodes. It is expected that there is enough
%% room that all graphs from outer and inner query nodes are stored in main memory.
%%
%% <table bgcolor="lemonchiffon">
%% <tr><th>Section Index</th></tr>
%% <tr><td>{@section main-memory join algorithm}</td></tr>
%% <tr><td>{@section property list}</td></tr>
%% <tr><td>{@section handle_call (synchronous) message API}</td></tr>
%% <tr><td>{@section handle_cast (asynchronous) message API}</td></tr>
%% </table>
%%
%% == main-memory join algorithm ==
%%
%% (LINK: {@section main-memory join algorithm})
%%
%% Main-memory join query node is independent gen_server process that can have multiple
%% outer query nodes as well as multiple inner query nodes--each of them is implemented as
%% separate gen_server process.
%%
%% Main-memory join algorithm loads results of inner query nodes into main memory creating
%% main memory hash-index. At the same time results of outer query nodes are pre-loaded into
%% the queue. After all inner results are loaded into the main-memory index, process starts to
%% compute join between outer graphs and inner triples as well as further loading of outer
%% graphs.
%%
State - machine has the following states : inactive , active , wait_next_outer , wait_next_inner ,
and eos . Message start set state of protocol to active . Message eval starts the evaluation
%% of all inner and outer query nodes, and, moves state
to wait_next_inner , the first phase of algorithm . In this phase main - memory join reads
%% results from inner query nodes and creates main-memory index on join attributes.
%% All the resulted graphs from outer query nodes are in this pahase stored in the queue.
%%
After reading all results from inner query node protocol enters second phase and state
is changed wait_next_outer . In the second phase of the algorithm graphs from outer
%% query nodes are joined with triples from inner query nodes. After end_of_stream is received
from all outer query nodes , state moves to eos .
%%
%% == property list ==
%%
%% (LINK: {@section property list})
%%
%% The gen_server process uses following properties holded by {@link
jqn_state ( ) } .
%%
< table " >
%% <tr><th>Name</th><th>Type</th><th>Description</th></tr>
%%
< tr > < td > > < td > boolean()</td > < td > true denotes that
%% process dictionary was created and used. false denotes that
%% completely new process.</td> </tr>
%%
%% <tr> <td>id</td> <td>string()</td> <td>query node identifier</td> </tr>
%%
%% <tr> <td>pid</td> <td>pid()</td> <td>process id</td> </tr>
%%
%% <tr> <td>state</td> <td>atom()</td> <td>active | inactive | wait_next_outer |
wait_next_inner | eos</td > < /tr >
%%
< tr > < td > gp</td > < td > maps : > < td > graph represented as
mapping from { @type query_node : qn_id ( ) } to { @type : qn_triple_pattern()}</td > < /tr >
%%
< tr > < td > select_pred</td > < td > : qn_select_predicate()</td > < td > selection
predicate in the form of abstract syntax tree of type { @type : qn_select_predicate ( ) }
%% </td> </tr>
%%
< tr > < td > project_list</td > < td > : > < td > list of
%% variables to be projected</td> </tr>
%%
%% <tr> <td>project_out</td> <td>[query_node::qn_id()]</td> <td>list of
%% query node id-s identifying triples to be projected out of resulting graph
%% </td> </tr>
%%
< tr > < td > > < td>{@link
%% triple_distributor:td_node_location()}</td> <td>location of query
%% node process</td> </tr>
%%
%% <tr> <td>parent</td> <td>pid()</td> <td>process id of parent query
%% node</td> </tr>
%%
%% <tr> <td>outer</td> <td>[pid()]</td> <td>process ids of outer
%% children query nodes</td> </tr>
%%
%% <tr> <td>inner</td> <td>[pid()]</td> <td>process ids of inner
%% children query nodes</td> </tr>
%%
< tr > < td > join_vars</td > < td>[{@link : qn_var()}]</td > < td > List of
variables used for joining.</td > < /tr >
%%
< tr > < td > vars_pos</td > < td > maps : > < td > mapping from { @link
%% query_node:qn_var()} to {@link jqn_var_position()}</td> </tr>
%%
< tr > < td > vars_values</td > < td > maps : > < td > mapping from
{ @link : qn_var ( ) } to string ( ) ( not used)</td > < /tr >
%%
%% <tr> <td>wait</td> <td>boolean()</td> <td>indicate whether the
%% process is in wait state or not.</td> </tr>
%%
%% <tr> <td>inner_outer</td> <td>inner | outer</td> <td> Position to
%% its parent query node.</td> </tr>
%%
< tr > < td > inner_graph</td > < td>{@link : qn_graph()}</td > < td > current
graph data from inner child</td > < /tr >
%%
< tr > < td > outer_graph</td > < td>{@link : qn_graph()}</td > < td > current
graph data from outer child</td > < /tr >
%%
< tr > < td > state_of_outer_streams</td > < td > maps : > < td > Map
%% structure from outer child pid() to atom() (alive | eos).</td> </tr>
%%
%% <tr> <td>empty_outer_sent</td> <td>boolean()</td> <td>N empty messages
%% are sent to each of outer processes when eval message of mj_query_node
%% is processed.</td> </tr>
%%
< tr > < td > state_of_inner_streams</td > < td > maps : > < td > Map
%% structure from inner child pid() to atom() (alive | eos).</td> </tr>
%%
%% <tr> <td>empty_inner_sent</td> <td>boolean()</td> <td>N empty messages
are sent to each of inner processes after first eval message is sent
%% to them.
%% </td> </tr>
%%
< tr > < td > queue_from_outer</td > < td > queue : > < td > Queue storing
graphs from outer child query node while processing one of previous
outer > < /tr >
%%
< tr > < td > queue_from_parpent</td > < td > queue : > < td > Queue storing
%% empty messages from parent when graph to be sent to parent is not
%% yet available.</td> </tr>
%%
< tr > < td > queue_to_parent</td > < td > queue : > < td > Queue storing
%% graphs (complete messages) to be sent to parent but there is no empty message
%% available.</td> </tr>
%%
%% <tr> <td>pause</td> <td>boolean()</td> <td>query stops evaluating
%% if true and evaluates normally if false</td> </tr>
%%
%% <tr> <td>start_date_time</td> <td>calendar:datetime()</td>
%% <td>started date and time of the process.</td> </tr>
%%
< tr > < td > b3s_state_pid</td > < td>{@type node_state : ns_pid()}</td >
%% <td>process id of b3s_state.</td> </tr>
%%
< tr > < td > benchmark_task_pid</td > < td>{@type
%% node_state:ns_pid()}</td> <td>process id of executing benchmark
%% task.</td> </tr>
%%
< tr > < td > result_record_max</td > < td > integer()</td > < td > number
%% of records to be reported.</td> </tr>
%%
%% </table>
%%
%% == handle_call (synchronous) message API ==
%%
%% (LINK: {@section handle_call (synchronous) message API})
%%
= = = { start , QueryNodeId , QueryId , SessionId , Self , GraphPattern , SelectPred , ProjectList , ParentPid , OuterPids , InnerPids , VarsPositions , JoinVars } = = =
%%
%% Initialization of join query node process. All parameters are
%% saved to process dictionary.
( LINK : { @section @{start , QueryNodeId , QueryId , SessionId , Self , GraphPattern , SelectPred , ProjectList , ParentPid , OuterPids , InnerPids , VarsPositions , JoinVars@ } } )
%%
QueryNodeId is { @link : qn_id ( ) } , QueryId is string ( ) , SessionId is string ( ) ,
Self is { @link node_state : ns_pid ( ) } , GraphPattern is { @link : ( ) } ,
SelectPred is { @link : qn_select_predicate ( ) } ,
ProjectList is { @link : qn_project_list ( ) } , ParentPid is pid ( ) ,
OuterPids is [ pid ( ) ] , InnerPids is [ pid ( ) ] , VarsPositions is { @link
jqn_var_position ( ) } , JoinVars is [ { @link : qn_var ( ) } ] .
%%
%% This request is implemented by {@link hc_start/10}.
%%
%% === {eval, VarsValues} ===
%%
%% Initiate evaluation of query node. The state of
query node must be either active or eos so that eval message is executed .
%%
%% Firstly, initiate evaluation in all children, and, then send N empty
%% messages to each child so that they can begin sending results.
%% Note than message passing for eval message is synchronous. This means that
%% complete query tree is locked while evaluation is initiated.
%%
VarsValues is : qn_var_val_map ( ) .
%% It includes variables and values to be set in graph pattern
of query node . In the case value of VarsValues is [ ] then graph pattern
%% of query node is not changed.
%%
%% Message eval can be sent to query node multiple times. In each instance,
%% process dictionary is initialized to the initial state. After eval is executed
%% query node can expect empty messages from parent.
%% (LINK: {@section @{eval, VarsValues@}})
%%
VarsValues is { @link : qn_var_val_map ( ) } . This request is implemented by { @link hc_eval/1 } .
%%
%% === {get_property, Name} ===
%%
%% Return the value of specified property name. Variable Name is an
%% atom(). This request is implemented by {@link
%% hc_get_property/2}.
%%
%% == handle_cast (asynchronous) message API ==
%%
%% (LINK: {@section handle_cast (asynchronous) message API})
%%
= = = { data_outer , ParentPid , Graph } = = =
%%
%% Processing data message from outer child. In the case join query node is in state
wait_next_inner , complete data message is stored in to be processed
%% later.
%%
%% In the case join query node is in state wait_next_outer then data message from outer
%% children is set as current outer message. Inner query nodes are reset using join values
%% of common variables that are set in graph-pattern of outer query nodes.
%%
When all outer query nodes are in state eos ( end of stream ) then end_of_stream is sent
to parent and state of this query node is set to eos .
( LINK : { @section @{data_outer , Pid , Graph@ } } )
%%
Pid is pid ( ) and is : qn_graph ( ) . This request is implemented by { @link hc_data_outer/1 } .
%%
= = = { data_inner , Pid , Graph } = = =
%%
%% Processing data message from inner children. Inner graph is joined with
%% current outer graph stored as outer_graph in process dictionary. Resulted graph
%% is sent to parent as outer data message. While inner graphs are comming from
inner children , query node is in state wait_next_inner .
%%
More graphs from outer child may be stored in queue_from_outer . State may change to
wait_next_outer in the case all inner streams are terminated and
%% is not empty. In this case function hc_data_outer is called (from hc_data_inner)
for outer graph from queue . ( LINK : { @section @{data_inner , Pid , Graph@ } } )
%%
Pid is pid ( ) , is : qn_graph ( ) . This request is implemented by { @link hc_data_inner/3 } .
%%
%% === {empty, ParentPid} ===
%%
( LINK : { @section @{empty , ParentPid@ } } ) .
%%
%% Processing empty message from parent. If state of query node is
eos or inactive then simply ignore empty message . If
%% does not include any data message prepared for parent then empty
message is stored in queue_from_parent and used later . Finally , if
there is a message in then send it to parent .
%%
%% ParentPid is pid().
%%
%% This request is implemented by {@link hc_empty/1}.
%%
%% @type jqn_state() = maps:map(). Map
%% structure that manages properties for operating the gen_server
%% process.
%%
@type jqn_var_position ( ) = maps : map ( ) . Mapping from { @link : qn_var ( ) }
to [ { { @link : qn_id ( ) } , integer ( ) } ] . List of pairs represent
%% positions of some variable in a triple pattern of given query. Pairs include
%% query node id of triple pattern, and, position of variable in triple pattern
( 1 : i d , 2 : sub , 3 : prd , 4 : obj ) .
%%
-module(mj_query_node).
-behavior(gen_server).
-export(
[
child_spec/1, spawn_process/2, receive_empty/0, hcst_sne/0,
init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3
]).
-include_lib("stdlib/include/qlc.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("record.hrl").
%% ======================================================================
%%
%% gen_server behavior
%%
%%
%% init/1
%%
%% @doc Initialize a mj_query_node process.
%%
@spec init ( [ ] ) - > { ok , jqn_state ( ) }
%%
init([]) ->
process_flag(trap_exit, true),
%% set main pd keys
put(wait, true),
put(pid, self()),
put(start_date_time, calendar:local_time()),
put(mq_debug, gen_server:call(node_state, {get, mq_debug})),
%% init queues
query_node:queue_init(from_parent, plain, empty),
query_node:queue_init(to_parent, output, data_outer),
query_node:queue_init(from_inner, input, data_inner),
query_node:queue_init(from_outer, input, data_outer),
info_msg(init, [{state,hc_save_pd()}], done, -1),
{ok, hc_save_pd()}.
%%
%% handle_call/3
%%
%% @doc Handle synchronous query requests.
%%
handle_call(term ( ) , { pid ( ) , term ( ) } , jqn_state ( ) ) - > { reply , term ( ) , jqn_state ( ) }
%%
handle_call({start, QueryNodeId, QueryId, SessionId, Self, GraphPattern, SelectPred, ProjectList, ParentPid, OuterPid, InnerPid,
VarsPositions, JoinVars}, _, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_call, [Self, {message,start}, {all,get()}, get(state)], message_received, 10),
hc_start(QueryNodeId, QueryId, SessionId, Self, GraphPattern, SelectPred, ProjectList, ParentPid, OuterPid, InnerPid, VarsPositions, JoinVars),
{reply, ok, hc_save_pd()};
handle_call({get_property, all}, _, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_call, [get(self), {message,get_property}, {name,all}, {value,get()}, get(state)], message_received, 10),
{reply, get(), hc_save_pd()};
handle_call({get_property, Name}, _, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_call, [get(self), {message,get_property}, {name,Name}, {value,get(Name)}, get(state)], message_received, 10),
{reply, get(Name), hc_save_pd()};
handle_call({get, Name}, _, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_call, [get(self), {message,get}, {name,Name}, {value,get(Name)}, get(state)], message_received, 10),
{reply, get(Name), hc_save_pd()};
handle_call({eval, VarsValues}, _, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_call, [get(self), {message,eval}, {vars_values,VarsValues}, {all,get()}, get(state)], message_received, 10),
hc_eval(VarsValues, get(state)),
{reply, ok, hc_save_pd()};
%% default
handle_call(Request, From, State) ->
R = {unknown_request, Request},
error_msg(handle_call, [get(self), Request, From, get()], R),
{reply, R, State}.
%%
%% handle_cast/2
%%
%% @doc Handle asynchronous query requests.
%%
@spec handle_cast(term ( ) , jqn_state ( ) ) - > { noreply , jqn_state ( ) }
%%
handle_cast({empty, From}, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
%% insert into queue
query_node:queue_write(from_parent, {empty, From}),
%% process empty message
info_msg(handle_cast, [get(self), {message,empty}, {from,From}, {queue_from_parent,get(queue_from_parent)}, get(state)], message_received, 30),
hc_empty(get(state)),
{noreply, hc_save_pd()};
handle_cast({data_inner, From, Block}, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_cast, [get(self), {message,data_inner}, {from,From}, {block,Block}, get(state)], message_received, 30),
%% insert into queue
query_node:queue_write(from_inner, {data_inner, From, Block}),
hc_data_inner(get(state)),
{noreply, hc_save_pd()};
handle_cast({data_outer, From, Block}, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_cast, [get(self), {message,data_outer}, {from,From}, {block,Block}, get(state)], message_received, 30),
%% insert into queue
query_node:queue_write(from_outer, {data_outer, From, Block}),
%% process outer block
hc_data_outer(get(state)),
{noreply, hc_save_pd()};
handle_cast({stop, From}, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_cast, [get(self), {message,stop}, {from,From}, get(state)], message_received, 10),
%% erase complete PD
erase(),
{noreply, hc_save_pd()};
%% default
handle_cast(Request, State) ->
R = {unknown_request, Request},
error_msg(handle_cast, [get(self), {request,Request}, {state,State}, get()], R),
{noreply, hc_save_pd()}.
%%
%% @doc Restore process dictionaries from state map structure.
%%
@spec hc_restore_pd([{atom ( ) , term ( ) } ] | undefined , jqn_state ( ) ) - > ok
%%
hc_restore_pd(undefined, State) ->
hc_restore_pd_1(maps:to_list(State));
hc_restore_pd(_, _) ->
ok.
hc_restore_pd_1([]) ->
ok;
hc_restore_pd_1([{K, V} | T]) ->
put(K, V),
hc_restore_pd_1(T).
%%
%% @doc Save process all dictionary contents into state map structure.
%%
( ) - > jqn_state ( )
%%
hc_save_pd() ->
maps:from_list(get()).
%%
%% handle_info/2
%%
%% @doc Handle exceptional query requests.
%%
@spec handle_info(term ( ) , jqn_state ( ) ) - > { noreply , jqn_state ( ) }
%%
handle_info(_Info, State) ->
{noreply, State}.
%%
%% terminate/2
%%
%% @doc Process termination.
%%
terminate(term ( ) , jqn_state ( ) ) - > none ( )
%%
terminate(Reason, State) ->
P = pid_to_list(self()),
info_msg(terminate, [get(self), {reason,Reason}, {state,State}, {pid,P}, get(state)], done, -1),
ok.
%%
%% code_change/3
%%
%% @doc Process code change action.
%%
@spec code_change(term ( ) , jqn_state ( ) , term ( ) ) - > { ok , jqn_state ( ) }
%%
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%% ======================================================================
%%
%% utility
%%
%%
%% @doc Report an error issue to the error_logger.
%%
%% @spec error_msg(atom(), term(), term()) -> ok
%%
error_msg(FunName, Argument, Result) ->
node_state:error_msg(?MODULE, FunName, Argument, Result).
%%
%% @doc Report an information issue to the error_logger if current
%% debug level is greater than ThresholdDL.
%%
info_msg(atom ( ) , term ( ) , term ( ) , integer ( ) ) - > ok
%%
info_msg(FunName, Argument, Result, ThresholdDL) ->
node_state:info_msg(?MODULE, FunName, Argument, Result, ThresholdDL).
%% ======================================================================
%%
%% api
%%
%%
%% @doc Return child spec for this process. It can be used in
%% supervisor:init/0 callback implementation.
%%
( ) ) - > supervisor : child_spec ( )
%%
child_spec(Id) ->
GSOpt = [{local, Id}, mj_query_node, [], []],
StartFunc = {gen_server, start_link, GSOpt},
Restart = permanent,
Shutdwon = 1000,
Type = worker,
Modules = [mj_query_node],
{Id, StartFunc, Restart, Shutdwon, Type, Modules}.
%%
%% @doc Spawn tp_query_node process with given local identifier at given node.
%%
spawn_process ( Id::atom ( ) , Node::node ( ) ) - > node_state : ns_pid ( )
%%
spawn_process(Id, Node ) ->
ChildSpec = mj_query_node:child_spec(Id),
supervisor:start_child({b3s, Node}, ChildSpec),
{Id, Node}.
%% ======================================================================
%%
%% handle call/cast implementation
%%
%%
%% hc_start/10
%%
%% @doc Initialize mj_query_node process.
%%
: qn_id ( ) , string ( ) , string ( ) , node_state : ns_pid ( ) , [ : qn_triple_pattern ( ) ] ,
: qn_select_predicate ( ) , : qn_project_list ( ) ,
%% node_state:ns_pid(), [node_state:ns_pid()], [node_state:ns_pid()],
jqn_var_position ( ) , [ query_node : qn_var ( ) ] ) - > ok
%%
hc_start(QueryNodeId, QueryId, SessionId, Self, GraphPattern, SelectPred, ProjectList, ParentPid, OuterPids, InnerPids, VarsPositions, JoinVars) ->
put(created, true),
put(qnode, join),
put(node_id, QueryNodeId),
put(query_id, QueryId),
put(session_id, SessionId),
put(self, Self),
put(state, active),
put(gp, GraphPattern),
put(select_pred, SelectPred),
put(project_list, ProjectList),
put(parent, ParentPid),
put(outer, OuterPids),
put(inner, InnerPids),
put(vars_pos, VarsPositions),
put(join_vars, JoinVars),
put(empty_outer_sent, false),
put(empty_inner_sent, false),
put(index_inner, maps:new()),
put(wait, false),
put(pause, false),
erase(sid_table_name),
erase(sid_max_id),
erase(di_cursor__),
erase(di_ets__),
%% benchmark stuff
BSP = b3s_state_pid,
BMT = benchmark_task,
BTP = benchmark_task_pid,
put(BSP, gen_server:call(node_state, {get, BSP})),
{_, FSN} = get(BSP),
put(BTP, {gen_server:call(get(BSP), {get, BMT}), FSN}),
%% store num-of-empty-msgs in PD
{ok, N} = application:get_env(b3s, num_of_empty_msgs),
put(num_of_empty_msgs, N),
%% store block-size in PD
BSZ = block_size,
put(BSZ, gen_server:call(get(BSP), {get, BSZ})).
@doc Send N empty messages to Pid . N is stored in config .
send_N_empty(Pid) ->
N = get(num_of_empty_msgs),
send_N_empty_1(Pid, N),
info_msg(send_N_empty, [get(self), {send_to, Pid}, {num, N}], done, 50).
send_N_empty_1(_, 0) ->
ok;
send_N_empty_1(Pid, N) ->
gen_server:cast(Pid, {empty, get(self)}),
info_msg(send_cast, [get(self), {message,empty}, {to,Pid}, {invoker,send_N_empty}, get(state)], message_sent, 30),
send_N_empty_1(Pid, N-1).
receive_empty() ->
receive
{_, M} -> M
end,
info_msg(receive_empty, [get(self), {message, M}], done, 50),
M.
%%
%% @doc Test function for hc_start.
%%
hc_start_test_() ->
b3s:start(),
b3s:stop(),
b3s:start(),
b3s:bootstrap(),
{inorder,
[
? _ , b3s : start ( ) ) ,
% {generator, fun()-> hcst_sne() end},
{generator, fun()-> hcst_q01() end},
?_assertMatch(ok, b3s:stop())
]}.
hcst_sne() ->
info_msg(hcst_sne, [get(self)], start, 50),
S = self(),
{inorder,
[
?_assertMatch(ok, send_N_empty(S)),
?_assertMatch({empty, S}, receive_empty()),
?_assertMatch({empty, S}, receive_empty())
]}.
hcst_q01() ->
info_msg(hcst_q01, [get(self)], start, 50),
QueryNodeId = "3",
QueryId = "1",
SessionId = "1",
Id = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
JQN1 = mj_query_node:spawn_process(Id, node()),
QNState = active,
GraphPattern = #{"1" => {"?id1", "<Japanese>", "?prd", "?obj1"},
"2" => {"?id2", "<Slovenian>", "?prd", "?obj2"}},
SelectPred = none,
ProjectList = none,
ParentPid = self(),
OuterPids = [self()],
InnerPids = [self()],
VarsPositions = #{"?id1" => [{"1", 1}],
"?id2" => [{"2", 1}],
"?prd" => [{"1", 3}, {"2", 3}],
"?obj1" => [{"1", 4}],
"?obj2" => [{"2", 4}]},
JoinVars = ["?prd"],
M1 = {start, QueryNodeId, QueryId, SessionId, JQN1,
GraphPattern, SelectPred, ProjectList,
ParentPid, OuterPids, InnerPids, VarsPositions, JoinVars},
GP = get_property,
{inorder,
[
?_assertMatch(true, gen_server:call(JQN1, {GP, wait})),
?_assertMatch(ok, gen_server:call(JQN1, M1)),
?_assertMatch(QueryNodeId, gen_server:call(JQN1, {GP, node_id})),
?_assertMatch(GraphPattern, gen_server:call(JQN1, {GP, gp})),
?_assertMatch(QNState, gen_server:call(JQN1, {GP, state})),
?_assertMatch(ParentPid, gen_server:call(JQN1, {GP, parent})),
?_assertMatch(OuterPids, gen_server:call(JQN1, {GP, outer})),
?_assertMatch(InnerPids, gen_server:call(JQN1, {GP, inner})),
?_assertMatch(VarsPositions, gen_server:call(JQN1, {GP, vars_pos})),
?_assertMatch(JoinVars, gen_server:call(JQN1, {GP, join_vars})),
?_assertMatch(false, gen_server:call(JQN1, {GP, wait})),
?_assertMatch(undefined, gen_server:call(JQN1, {GP, inner_outer}))
]}.
%%
%% hc_eval/2
%%
%% @doc Initiate evaluation of join query node.
%%
%% @spec hc_eval(jqn_var_position(), atom()) -> ok
%%
ignoring VarsValues since we have currently only left - deep trees [ TODO ]
hc_eval(_, State)
when (State =:= eos) or (State =:= active) ->
%% send eval to each pid in outer children and mark state of outer stream 'alive'.
put(state_of_outer_streams, #{}),
F1 = fun (Pid) ->
gen_server:call(Pid, {eval, []}),
info_msg(send_call, [get(self), {message,eval}, {invoker,hc_eval}, {to,Pid}, {gp,get(gp)},
{vars_values,[]}, {invoker,hc_eval}, get(state)], message_sent, 30),
M = get(state_of_outer_streams),
put(state_of_outer_streams, maps:put(Pid, alive, M))
end,
lists:map(F1, get(outer)),
%% send empty messages
case get(empty_outer_sent) of
false -> lists:map(fun send_N_empty/1, get(outer));
true -> ok
end,
put(empty_outer_sent, true),
%% send eval and empty messages to inner nodes and update state of inner streams
put(state_of_inner_streams, #{}),
F2 = fun (Pid) ->
%% reset inner child
gen_server:call(Pid, {eval, []}),
info_msg(send_call, [get(self), {message,eval}, {invoker,hc_eval}, {to,Pid}, {gp,get(gp)},
{join_var_values,[]}, get(state)], message_sent, 30),
%% send empty messages (eval is done once?)
case get(empty_inner_sent) of
false -> send_N_empty(Pid);
true -> ok
end,
%% remember state of inner child
M = get(state_of_inner_streams),
put(state_of_inner_streams, maps:put(Pid, alive, M))
end,
lists:map(F2, get(inner)),
put(empty_inner_sent, true),
%% compute list of qn id-s to be projected out
query_node:project_prepare(get(project_list)),
%% update parameters
BSP = b3s_state_pid,
BSZ = block_size,
put(BSZ, gen_server:call(get(BSP), {get, BSZ})),
%% waiting for data from outer
put(state, wait_next_inner);
hc_eval(_, State) ->
error_msg(hc_eval, [get(self), {all,get()}, State], wrong_state),
ok.
%%
%% hc_empty/2
%%
%% @doc Co-routine for processing empty message from parent. It is expected that there is
at least one empty message in queue from_parent , either the one that has just arrived
%% as message, or, some other procedure has checked that the message is in the queue.
%%
%% @spec hc_empty( State::atom() ) -> ok
%%
hc_empty(undefined) ->
%% leave empty message in queue from parent
info_msg(hc_empty, [get(self), {from,get(parent)}, get(state)], empty_before_start, 50);
hc_empty(active) ->
%% leave empty message in queue from parent
info_msg(hc_empty, [get(self), {from,get(parent)}, get(state)], empty_before_eval, 50);
hc_empty(State)
when (State =:= wait_next_outer) or (State =:= wait_next_inner) or
(State =:= eos) ->
%% check if there are messages to parent and send data message to parent
case query_node:queue_prepared(to_parent) of
true ->
read empty message from queue ( there must be at least one msg )
{empty, _} = query_node:queue_read(from_parent),
get data message from and send it
Msg = query_node:queue_read(to_parent),
gen_server:cast(get(parent), Msg),
info_msg(send_cast, [get(self), {message,Msg}, {to,get(parent)}, {invoker,hc_empty}, get(state)], message_sent, 30);
false ->
info_msg(hc_empty, [get(self), {to,get(parent)}, get(state)], no_messages_to_parent, 50)
end,
%% next actions to be done
%% state==wait_next_outer
QEFO = query_node:queue_empty(from_outer),
case {get(state), QEFO} of
{wait_next_outer, false} ->
%% if state is wait_next_outer and there are messages waiting from outer query nodes
%% than empty message wakes up processing of another outer graph
hc_data_outer(get(state));
_ -> ok
end,
%% state==wait_next_inner
QEFI = query_node:queue_empty(from_inner),
case {get(state), QEFI} of
{wait_next_inner, false} ->
if state is wait_next_inner and there are messages waiting from inner query nodes
%% than empty message wakes up processing of inner graphs
hc_data_inner(get(state));
_ -> ok
end,
if there is another pair of empty - data messages run hc_empty again
case {query_node:queue_prepared(from_parent), query_node:queue_prepared(to_parent)} of
{true,true} -> hc_empty(get(state));
_ -> ok
end;
hc_empty(State) ->
error_msg(hc_empty, [get(self), {all,get()}, State], wrong_state).
%%
%% hc_data_inner/1
%%
%% @doc Co-routine for processing a data message from inner children.
%%
%% @spec hc_data_inner(atom()) -> ok
%%
hc_data_inner(State) when State =/= wait_next_inner ->
    %% something is wrong; state should be wait_next_inner
    error_msg(hc_data_inner, [get(self), {all,get()}, get(state)], inner_msg_in_wrong_state);
hc_data_inner(State) when State == wait_next_inner ->
    %% get inner graph from queue from_inner
    {From, Graph} = query_node:queue_get(from_inner),
    %% get status for end of block
    BE = query_node:queue_block_end(from_inner),
    %% send empty if at the end of block
    case BE of
        true -> %% send empty message back to the inner child
            EMsg = {empty, get(self)},
            gen_server:cast(From, EMsg),
            info_msg(send_cast, [get(self), {message,EMsg}, {to,From}, {invoker,hc_data_inner}, get(state)], message_sent, 30);
        _ -> ok
    end,
    %% inner loop actions for message read from inner queue
    case Graph of
        end_of_stream ->
            hcdi_process_eos(From),
            info_msg(hc_data_inner, [get(self), {from,From}, {graph,Graph}, get(state)], inner_eos_processed, 50);
        _ -> hcdi_process_graph(Graph),
             info_msg(hc_data_inner, [get(self), {from,From}, {graph,Graph}, get(state)], inner_graph_processed, 50)
    end,
    %% next protocol actions
    %% state = wait_next_outer?
    QEFO = query_node:queue_empty(from_outer),
    case {get(state), QEFO} of
        {wait_next_outer, false} ->
            %% state changed from wait_next_inner to wait_next_outer.
            %% inner graphs have been processed. start processing outer.
            hc_data_outer(get(state)),
            info_msg(hc_data_inner, [get(self), get(state)], call_outer_loop, 50);
        _ -> ok
    end,
    %% state = wait_next_inner?
    QEFI = query_node:queue_empty(from_inner),
    case {get(state), QEFI} of
        {wait_next_inner, false} ->
            %% inner queue not empty yet, continue with next inner graph
            hc_data_inner(get(state)),
            info_msg(hc_data_inner, [get(self), get(state)], call_inner_loop, 50);
        _ -> ok
    end;
hc_data_inner(State) ->
    %% defensive catch-all (the guards above already cover all values)
    error_msg(hc_data_inner, [get(self), {all,get()}, {state,State}, get(state)], wrong_state ).
%% Mark the inner stream From as finished; when every inner stream has
%% reached end_of_stream, switch the node state back to wait_next_outer.
hcdi_process_eos(From) ->
    %% mark end_of_stream of inner process
    M = get(state_of_inner_streams),
    put(state_of_inner_streams, maps:put(From, eos, M)),
    %% count still-alive inner streams; switch state when none are left
    F3 = fun (alive) -> true;
             (eos) -> false
         end,
    NumAlive = length(lists:filter(F3, maps:values(get(state_of_inner_streams)))),
    case NumAlive of
        0 ->
            %% all inner streams finished: resume the outer loop
            put(state, wait_next_outer),
            info_msg(hc_data_inner, [get(self), get(state)], query_evaluation_inner_completed, 50);
        _ -> ok
    end.
%% Insert the inner graph into index_inner, keyed by the list of
%% join-variable values extracted from the graph.
hcdi_process_graph(Graph) ->
    %% make the join-variable value list (the index key)
    F1 = fun (V) ->
             hce_get_var_value(V, Graph)
         end,
    JV = lists:map(F1, get(join_vars)),
    info_msg(hcdi_process_graph, [get(self), {graph,Graph}, {join_vars,get(join_vars)}, {join_var_values,JV}, get(state)], debug_join_var_values, 50),
    %% insert Graph with key JV into the inner index
    II = get(index_inner),
    case maps:is_key(JV, II) of
        true -> L = maps:get(JV, II),
                put(index_inner, maps:put(JV, [Graph|L], II));
        false -> put(index_inner, maps:put(JV, [Graph], II))
    end.
%% Return the evaluated value of Variable within Graph, using the
%% variable-position map stored under vars_pos.
hce_get_var_value(Variable, Graph) ->
    VP = get(vars_pos),
    %% get position candidates and the tuple holding the variable;
    %% LVP is [{NodeId,Pos}]
    LVP = maps:get(Variable, VP),
    {NodeId,Pos} = hce_get_node_id(LVP, Graph),
    Tuple = maps:get(NodeId, Graph),
    %% Pos+1 since the first tuple component is the table name
    query_node:eval_attribute(element(Pos+1, Tuple)).
%% Scan the {NodeId,Pos} candidates and return the first whose node id
%% appears in Graph; report an error if no candidate matches.
hce_get_node_id([{NID,Pos}|Rest], Graph) ->
    Present = maps:is_key(NID, Graph),
    if Present -> {NID,Pos};
       true -> hce_get_node_id(Rest, Graph)
    end;
hce_get_node_id([], Graph) ->
    error_msg(hce_get_node_id, [get(self), {graph,Graph}, {all,get()}, get(state)], cant_find_var_val).
%%
%% hc_data_outer/1
%%
%% @doc Co-routine for processing data block from outer children.
%%
%% @spec hc_data_outer(State::atom()) -> ok|fail
%%
hc_data_outer(State) when State =/= wait_next_outer ->
    %% data message left in queue from_outer to be processed later
    info_msg(hc_data_outer, [get(self), get(state)], message_left_in_queue_from_outer, 50);
hc_data_outer(State) when State =:= wait_next_outer ->
    %% retrieve outer graph from queue from_outer and save it in PD
    {From, Graph} = query_node:queue_get(from_outer),
    put(outer_graph, Graph),
    %% get status for end of block
    BE = query_node:queue_block_end(from_outer),
    %% send empty if at the end of block
    case BE of
        true -> %% send empty message back to the outer child
            EMsg = {empty, get(self)},
            gen_server:cast(From, EMsg),
            info_msg(send_cast, [get(self), {message,EMsg}, {to,From}, {invoker,hc_data_outer}, get(state)], message_sent, 30);
        _ -> ok
    end,
    %% outer loop actions for outer graph read from queue
    case Graph of
        end_of_stream ->
            %% we are at the end of some outer stream
            hcdo_process_eos(From),
            info_msg(hc_data_outer, [get(self), {from,From}, {graph,Graph}, get(state)], outer_eos_processed, 50);
        _ ->
            %% join outer graph with inner graphs and send results to parent
            hcdo_match_inner_graphs(),
            info_msg(hc_data_outer, [get(self), {from,From}, {graph,Graph}, get(state)], outer_graph_processed, 50)
    end,
    %% next protocol actions for outer loop
    %% state = wait_next_outer?
    QPFP = query_node:queue_prepared(from_parent),
    QEFO = query_node:queue_empty(from_outer),
    case {get(state), QPFP, QEFO} of
        {wait_next_outer, true, false} ->
            %% continue outer loop if we stayed in wait_next_outer and
            %% there is another outer graph to process and there is an empty
            %% message to be used
            hc_data_outer(get(state)),
            info_msg(hc_data_outer, [get(self), get(state)], call_outer_loop, 50);
        _ -> ok
    end.
%% Mark the outer stream From as finished; when all outer streams have
%% reached end_of_stream, send end_of_stream towards the parent.
hcdo_process_eos(From) ->
    %% mark outer stream not alive
    M = get(state_of_outer_streams),
    put(state_of_outer_streams, maps:put(From, eos, M)),
    %% check if all outer streams are dead
    F1 = fun (alive) -> true;
             (eos) -> false
         end,
    NumAlive = length(lists:filter(F1, maps:values(get(state_of_outer_streams)))),
    %% all outer streams dead? move to eos.
    if NumAlive == 0 -> hcdo_send_parent_eos();
       true -> ok
    end,
    info_msg(hcdo_process_eos, [get(self), {from,From}, {numAlive,NumAlive}, get(state)], outer_eos_processed, 50).
%% Put end_of_stream into the queue to_parent, flush it, and forward the
%% resulting block to the parent if an empty message is available.
hcdo_send_parent_eos() ->
    %% store eos in queue to_parent and flush it
    query_node:queue_put(to_parent, end_of_stream),
    query_node:queue_flush(to_parent),
    %% check if queue from_parent includes empty messages
    case query_node:queue_prepared(from_parent) of
        true -> %% there is an empty message from the parent
            {empty, _} = query_node:queue_read(from_parent),
            Msg = query_node:queue_read(to_parent),
            %% send parent the last block of to_parent
            gen_server:cast(get(parent), Msg),
            info_msg(send_cast, [get(self), {message,Msg}, {to,get(parent)}, {invoker,hcdo_send_parent_eos}, get(state)], message_sent, 30);
        false -> %% empty queue from_parent, so leave message in queue to_parent.
            %% msg will be processed when the first empty message comes from_parent.
            ok
    end,
    %% move state to eos
    put(state, eos).
%% Join the current outer graph (stored in the process dictionary under
%% outer_graph) against all matching inner graphs in index_inner.
hcdo_match_inner_graphs() ->
    %% retrieve outer graph from pd
    Graph = get(outer_graph),
    %% make the join-variable value list (the index key)
    F1 = fun (V) ->
             hce_get_var_value(V, Graph)
         end,
    JV = lists:map(F1, get(join_vars)),
    info_msg(hcdo_match_inner_graphs, [get(self), {outer_graph,Graph}, {join_vars,get(join_vars)}, {join_var_values,JV}, {vars_pos,get(vars_pos)}, get(state)], debug_join_var_values, 20),
    %% attempt to join outer Graph to index_inner
    II = get(index_inner),
    case maps:is_key(JV, II) of
        true -> hcdo_process_graph(maps:get(JV, II));
        false -> ok
    end.
%% Merge the outer graph with each inner graph in the list, apply the
%% select predicate and the projection, and forward ready blocks to the
%% parent.
hcdo_process_graph([IG|GList]) ->
    %% compute join (inner graph components win on key clashes)
    OG = get(outer_graph),
    G = maps:merge(OG, IG),
    info_msg(hcdo_process_graph, [get(self), {outer_graph,OG}, {joined_graph,G}, get(state)], join_computed, 50),
    %% set current graph G and compute value of select predicate
    put(gp_val, G),
    SP = query_node:eval_select(get(select_pred)),
    info_msg(hcdo_process_graph, [get(self), {graph,G}, {select_pred,get(select_pred)}, {select_pred_value,SP}, get(state)], select_pred_computed, 50),
    %% skip graph G if SP==false
    case SP of
        true ->
            %% compute projection and put result in queue to_parent
            query_node:eval_project(get(project_list)),
            G1 = get(gp_val),
            query_node:queue_put(to_parent, G1),
            info_msg(hcdo_process_graph, [get(self), {graph_in,G}, {project_list,get(project_list)}, {graph_out,G1}, get(state)], project_computed, 50),
            %% send block to parent if everything is prepared
            case query_node:queue_prepared(from_parent) and query_node:queue_prepared(to_parent) of
                true ->
                    %% get empty message from queue from_parent
                    {empty, _} = query_node:queue_read(from_parent),
                    %% get block and create message
                    Msg = query_node:queue_read(to_parent),
                    %% send it to parent
                    gen_server:cast(get(parent), Msg),
                    info_msg(send_cast, [get(self), {message,Msg}, {to,get(parent)}, {invoker,hcdo_process_graph}, get(state)], message_sent, 30);
                false-> ok
            end;
        false -> ok
    end,
    %% loop on list of graphs
    hcdo_process_graph(GList);
hcdo_process_graph([]) -> ok.
%%
%% hc_eval_test_/0
%%
%% @doc Main test function of module.
%%
%% Main EUnit test generator of the module; dispatches on the configured
%% test mode.
hc_eval_test_() ->
    TestMode = b3s_state:get(test_mode),
    hcet_site(TestMode).
%% Build the EUnit fixture list for the given test mode.
hcet_site(local1) ->
    Attrs = {attributes, record_info(fields, triple_store)},
    TabDef = [Attrs, {disc_copies, [node()]}],
    info_msg(hcet_load_db, [get(self), TabDef], display_table, 50),
    NDS = node(),
    BSS = {b3s_state, NDS},
    CRC = clm_row_conf,
    RMS = #{1 => NDS},
    CM1 = #{1 => RMS, 2 => RMS},
    put(self, {'1-1-1', node()}),
    {inorder,
     [
      ?_assertMatch(ok, b3s:start()),
      ?_assertMatch(ok, b3s:bootstrap()),
      ?_assertMatch(ok, gen_server:call(BSS, {put, CRC, CM1})),
      %% NOTE(review): the expected value of propagate was lost in this
      %% clause; only the completion of the call is checked here. Verify
      %% the exact expected result against b3s_state:propagate.
      ?_assertMatch(_, gen_server:call(BSS, propagate)),
      {generator, fun()-> tp_query_node:hcet_load_db() end},
      {generator, fun()-> hcet_q02() end},
      ?_assertMatch(ok, b3s:stop()),
      ?_assertMatch(ok, b3s:start()),
      ?_assertMatch(ok, b3s:bootstrap()),
      {generator, fun()-> hcet_load_db() end},
      {generator, fun()-> hcet_q03() end},
      {generator, fun()-> hcet_q05() end},
      {generator, fun()-> hcet_q06() end},
      %% finish
      ?_assertMatch(ok, timer:sleep(1000)),
      ?_assertMatch(stopped, mnesia:stop()),
      ?_assertMatch(ok, b3s:stop())
      %% ?_assertMatch(ok, mnesia:start()),
      %% ?_assertMatch({atomic, ok}, mnesia:create_table(triple_store, TabDef)),
      %% ?_assertMatch({atomic, ok}, mnesia:delete_table(triple_store)),
      %% ?_assertMatch(stopped, mnesia:stop())
     ]};
hcet_site(local_two) ->
    [];
hcet_site(_) ->
    [].
%% Test query 2: one merge-join node (JQN3) over two tp_query_node
%% children, joining on the shared variable "?prd".
hcet_q02() ->
    info_msg(hcet_q02, [get(self)], start, 50),
    %% resolve the triple-table name through the b3s_state server
    BS = gen_server:call(node_state, {get, b3s_state_pid}),
    Tab = gen_server:call(BS, {get, name_of_triple_table}),
    QueryNodeId = "3",
    QueryId = "2",
    SessionId = "1",
    %% process id of the join node: "1-2-3"
    Id3 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
    JQN3 = mj_query_node:spawn_process(Id3, node()),
    %% outer and inner tp query nodes, both reporting to JQN3
    TPQN1 = hcet_tpqn1(JQN3),
    TPQN2 = hcet_tpqn2(JQN3),
    GraphPattern = maps:from_list(
                     [{"1", {"?id1", eI("<Japanese>"), "?prd", "?obj1"}},
                      {"2", {"?id2", eI("<Slovenian>"), "?prd", "?obj2"}}]),
    SelectPred = none,
    ProjectList = none,
    ParentPid = self(),
    OuterPids = [TPQN1],
    InnerPids = [TPQN2],
    %% variable -> [{GraphPatternKey, TupleComponent}]
    VarsPositions = #{"?id1" => [{"1", 1}],
                      "?id2" => [{"2", 1}],
                      "?prd" => [{"1", 3}, {"2", 3}],
                      "?obj1" => [{"1", 4}],
                      "?obj2" => [{"2", 4}]},
    JoinVars = ["?prd"],
    GP = get_property,
    DFO = data_outer,
    EOS = end_of_stream,
    %% triples expected to survive the join (encoded via string_id)
    T1 = eT({Tab,"<triple_id_0002>","<Japanese>","<eat>","<fishes>"}),
    T2 = eT({Tab,"<triple_id_0003>","<Slovenian>","<eat>","<potatoes>"}),
    TP1 = {"?id2", eI("<Slovenian>"), "?prd", "?obj2"},
    M1 = {start, QueryNodeId, QueryId, SessionId, JQN3, GraphPattern,
          SelectPred, ProjectList, ParentPid, OuterPids, InnerPids,
          VarsPositions, JoinVars},
    M2 = {eval, []},
    %% expected result block: one solution graph followed by end_of_stream
    R1Map = maps:put("1", T1, maps:new()),
    R2Map = maps:put("2", T2, R1Map),
    R = {DFO, JQN3, [R2Map,EOS]},
    {inorder,
     [
      ?_assertMatch(true, gen_server:call(JQN3, {GP, wait})),
      ?_assertMatch(ok, gen_server:call(JQN3, M1)),
      ?_assertMatch(outer, gen_server:call(TPQN1, {GP, inner_outer})),
      ?_assertMatch(inner, gen_server:call(TPQN2, {GP, inner_outer})),
      ?_assertMatch(false, gen_server:call(JQN3, {GP, wait})),
      ?_assertMatch(undefined, gen_server:call(JQN3, {GP, inner_outer})),
      ?_assertMatch(ok, gen_server:call(JQN3, M2)),
      ?_assertMatch({_, R}, hcet_send_empty(JQN3, R)),
      ?_assertMatch(TP1, gen_server:call(TPQN2, {GP, tp})),
      ?_assertMatch(OuterPids, gen_server:call(JQN3, {GP, outer})),
      ?_assertMatch(InnerPids, gen_server:call(JQN3, {GP, inner})),
      ?_assertMatch(GraphPattern, gen_server:call(JQN3, {GP, gp})),
      ?_assertMatch(ok, gen_server:cast(TPQN1, {stop, self()})),
      ?_assertMatch(ok, gen_server:cast(TPQN2, {stop, self()})),
      ?_assertMatch(ok, gen_server:cast(JQN3, {stop, self()}))
     ]}.
%% Spawn and start the outer tp_query_node ("1-2-1") used by hcet_q02;
%% Pid is its parent (the join node).
hcet_tpqn1(Pid) ->
    NodeId = "1",
    QId = "2",
    SId = "1",
    ProcId = list_to_atom(SId ++ "-" ++ QId ++ "-" ++ NodeId),
    QN = tp_query_node:spawn_process(ProcId, node()),
    Pattern = {"?id1", eI("<Japanese>"), "?prd", "?obj1"},
    VarPos = #{"?id1" => 1, "?prd" => 3, "?obj1" => 4},
    StartMsg = {start, NodeId, QId, SId, QN,
                Pattern, none, none,
                Pid, VarPos, outer},
    gen_server:call(QN, StartMsg),
    QN.
%% Spawn and start the inner tp_query_node ("1-2-2") used by hcet_q02;
%% Pid is its parent (the join node).
hcet_tpqn2(Pid) ->
    NodeId = "2",
    QId = "2",
    SId = "1",
    ProcId = list_to_atom(SId ++ "-" ++ QId ++ "-" ++ NodeId),
    QN = tp_query_node:spawn_process(ProcId, node()),
    Pattern = {"?id2", eI("<Slovenian>"), "?prd", "?obj2"},
    VarPos = #{"?id2" => 1, "?prd" => 3, "?obj2" => 4},
    StartMsg = {start, NodeId, QId, SId, QN,
                Pattern, none, none,
                Pid, VarPos, inner},
    gen_server:call(QN, StartMsg),
    QN.
%%
%% @doc Creation of triple-store used in examples.
%%
%% Static example triple table used by the load-db tests.
%% NOTE(review): the rows id8, id11 and id14 were reconstructed from the
%% surviving end-of-line comments and the expectations in the
%% hcet_load_db_* fixtures (R02 checks {"id11","yj","type","corporation"});
%% verify them against the original data set.
example_table() ->
    [%% country
     {triple_store, "id1",  "japan",    "type", "country"},
     {triple_store, "id2",  "slovenia", "type", "country"},
     %% cities
     {triple_store, "id3",  "koper",    "type", "city"},
     {triple_store, "id4",  "ljubljana","type", "city"},
     {triple_store, "id5",  "tokyo",    "type", "city"},
     {triple_store, "id6",  "kyoto",    "type", "city"},
     {triple_store, "id7",  "osaka",    "type", "city"},
     %% organizations
     {triple_store, "id8",  "up",  "type", "university"},  % uni primorska
     {triple_store, "id9",  "ul",  "type", "university"},  % uni ljubljana
     {triple_store, "id10", "ijs", "type", "institute"},   % institute jozef stefan
     {triple_store, "id11", "yj",  "type", "corporation"}, % yahoo japan
     {triple_store, "id12", "tu",  "type", "university"},  % tokyo uni
     {triple_store, "id13", "ku",  "type", "university"},  % kyoto uni
     {triple_store, "id14", "ou",  "type", "university"},  % osaka uni
     %% persons
     {triple_store, "id15", "shou",   "type", "person"},
     {triple_store, "id16", "yoshio", "type", "person"},
     {triple_store, "id17", "sakura", "type", "person"},
     {triple_store, "id18", "luka",   "type", "person"},
     {triple_store, "id19", "jan",    "type", "person"},
     {triple_store, "id20", "nika",   "type", "person"},
     {triple_store, "id57", "marko",  "type", "person"},
     %% hasCapital
     {triple_store, "id21", "japan",    "hasCapital", "tokyo"},
     {triple_store, "id22", "slovenia", "hasCapital", "ljubljana"},
     %% isLocatedIn
     {triple_store, "id23", "tokyo",    "isLocatedIn", "japan"},
     {triple_store, "id24", "kyoto",    "isLocatedIn", "japan"},
     {triple_store, "id25", "osaka",    "isLocatedIn", "japan"},
     {triple_store, "id26", "koper",    "isLocatedIn", "slovenia"},
     {triple_store, "id27", "ljubljana","isLocatedIn", "slovenia"},
     {triple_store, "id28", "up",  "isLocatedIn", "koper"},
     {triple_store, "id29", "ul",  "isLocatedIn", "ljubljana"},
     {triple_store, "id30", "ijs", "isLocatedIn", "ljubljana"},
     {triple_store, "id31", "yj",  "isLocatedIn", "tokyo"},
     {triple_store, "id32", "ku",  "isLocatedIn", "kyoto"},
     {triple_store, "id33", "ou",  "isLocatedIn", "osaka"},
     {triple_store, "id34", "tu",  "isLocatedIn", "tokyo"},
     %% livesIn
     {triple_store, "id35", "shou",   "livesIn", "tokyo"},
     {triple_store, "id36", "yoshio", "livesIn", "tokyo"},
     {triple_store, "id37", "sakura", "livesIn", "kyoto"},
     {triple_store, "id38", "luka",   "livesIn", "ljubljana"},
     {triple_store, "id39", "jan",    "livesIn", "koper"},
     {triple_store, "id40", "nika",   "livesIn", "ljubljana"},
     {triple_store, "id41", "marko",  "livesIn", "ljubljana"},
     %% worksAt
     {triple_store, "id42", "shou",   "worksAt", "yj"},
     {triple_store, "id43", "shou",   "worksAt", "ku"},
     {triple_store, "id44", "yoshio", "worksAt", "yj"},
     {triple_store, "id45", "sakura", "worksAt", "ku"},
     {triple_store, "id46", "luka",   "worksAt", "up"},
     {triple_store, "id47", "luka",   "worksAt", "ijs"},
     {triple_store, "id48", "jan",    "worksAt", "up"},
     {triple_store, "id49", "nika",   "worksAt", "ijs"},
     {triple_store, "id50", "marko",  "worksAt", "ijs"},
     %% graduatedFrom
     {triple_store, "id51", "shou",   "graduatedFrom", "ou"},
     {triple_store, "id52", "yoshio", "graduatedFrom", "tu"},
     {triple_store, "id53", "sakura", "graduatedFrom", "ku"},
     {triple_store, "id54", "luka",   "graduatedFrom", "ul"},
     {triple_store, "id55", "jan",    "graduatedFrom", "up"},
     {triple_store, "id56", "nika",   "graduatedFrom", "ul"},
     %% age
     {triple_store, "id58", "shou",   "age", "25"},
     {triple_store, "id59", "yoshio", "age", "36"},
     {triple_store, "id60", "sakura", "age", "27"},
     {triple_store, "id61", "luka",   "age", "38"},
     {triple_store, "id62", "jan",    "age", "45"},
     {triple_store, "id63", "nika",   "age", "22"},
     {triple_store, "id64", "marko",  "age", "30"}].
%% Dispatch to a DB loader implementation. The selector is hard-coded:
%% 2 = PostgreSQL, 1 = BDB-NIF, otherwise mnesia/qlc.
hcet_load_db() ->
    case 2 of
	2 -> hcet_load_db_postgres();
	1 -> hcet_load_db_bdbnif();
	_ -> hcet_load_db_mnesia_qlc()
    end.
%% Load the example triple table into the postgres backend via
%% db_interface, rebuild the string_id table, and return EUnit fixtures
%% checking three triple-pattern lookups.
hcet_load_db_postgres() ->
    info_msg(hcet_load_db_postgres, [get(self)], start, 50),
    BS = gen_server:call(node_state, {get, b3s_state_pid}),
    Tab = db_interface:dot_get_tn(),
    %% encode one raw example row and store it
    F1 = fun (X) ->
             {_, Tid, Sbj, Prd, Obj} = X,
             D = eT({Tab, Tid, Sbj, Prd, Obj}),
             db_interface:db_write(D)
         end,
    %% reset and rebuild the string_id table
    SI = string_id,
    SIT = gen_server:call(BS, {get, name_of_string_id_table}),
    gen_server:call(SI, {put, sid_table_name, SIT}),
    gen_server:call(SI, {put, di_cursor__, undefined}),
    gen_server:call(SI, delete_table),
    gen_server:call(SI, {create_table, SIT}),
    gen_server:call(SI, make_index),
    erase(sid_table_name),
    erase(sid_max_id),
    ok = db_interface:db_close(),
    ok = db_interface:db_init(),
    ok = db_interface:db_close(),
    ok = lists:foreach(F1, example_table()),
    ok = db_interface:db_add_index(),
    ok = db_interface:db_close(),
    %% triple patterns and their expected single results
    TP01 = eTP({"id1", "?s", "?p", "?o"}),
    TP02 = eTP({"id11", "?s", "?p", "?o"}),
    TP03 = eTP({"id56", "?s", "?p", "?o"}),
    R01 = eT({Tab, "id1", "japan", "type", "country"}),
    R02 = eT({Tab, "id11", "yj", "type", "corporation"}),
    R03 = eT({Tab, "id56", "nika", "graduatedFrom", "ul"}),
    EOS = end_of_stream,
    {inorder,
     [
      %% ?_assertMatch(ok, db_interface:db_close()),
      %% ?_assertMatch(ok, db_interface:db_init()),
      %% ?_assertMatch(ok, lists:foreach(F1, example_table())),
      %% ?_assertMatch(ok, db_interface:db_add_index()),
      %% ?_assertMatch(ok, db_interface:db_close()),
      ?_assertMatch(ok, db_interface:db_open_tp(TP01)),
      ?_assertMatch(R01, db_interface:db_next()),
      ?_assertMatch(EOS, db_interface:db_next()),
      ?_assertMatch(ok, db_interface:db_open_tp(TP02)),
      ?_assertMatch(R02, db_interface:db_next()),
      ?_assertMatch(EOS, db_interface:db_next()),
      ?_assertMatch(ok, db_interface:db_open_tp(TP03)),
      ?_assertMatch(R03, db_interface:db_next()),
      ?_assertMatch(EOS, db_interface:db_next()),
      ?_assertMatch(ok, db_interface:db_close())
     ]}.
%% Load the example triple table through the BDB-NIF db_interface and
%% return EUnit fixtures checking three triple-pattern lookups.
hcet_load_db_bdbnif() ->
    info_msg(hcet_load_db_bdbnif, [get(self)], start, 50),
    Tab = db_interface:dot_get_tn(),
    Store = fun ({_, Tid, Sbj, Prd, Obj}) ->
                db_interface:db_write({Tab, Tid, Sbj, Prd, Obj})
            end,
    ok = db_interface:db_init(),
    ok = db_interface:db_add_index(),
    lists:foreach(Store, example_table()),
    ok = db_interface:db_close(),
    %% triple patterns and their expected single results
    Pat1 = {"id1", "?s", "?p", "?o"},
    Pat2 = {"id11", "?s", "?p", "?o"},
    Pat3 = {"id56", "?s", "?p", "?o"},
    Exp1 = {Tab, "id1", "japan", "type", "country"},
    Exp2 = {Tab, "id11", "yj", "type", "corporation"},
    Exp3 = {Tab, "id56", "nika", "graduatedFrom", "ul"},
    Eos = end_of_stream,
    {inorder,
     [
      ?_assertMatch(ok, db_interface:db_open_tp(Pat1)),
      ?_assertMatch(Exp1, db_interface:db_next()),
      ?_assertMatch(Eos, db_interface:db_next()),
      ?_assertMatch(ok, db_interface:db_open_tp(Pat2)),
      ?_assertMatch(Exp2, db_interface:db_next()),
      ?_assertMatch(Eos, db_interface:db_next()),
      ?_assertMatch(ok, db_interface:db_open_tp(Pat3)),
      ?_assertMatch(Exp3, db_interface:db_next()),
      ?_assertMatch(Eos, db_interface:db_next()),
      ?_assertMatch(ok, db_interface:db_close())
     ]}.
%% Load the example triple table into mnesia and return EUnit fixtures
%% checking several qlc queries. The lines below that lost their `%'
%% comment markers have been restored as comments (they were inactive
%% scratch code; verify against revision history).
hcet_load_db_mnesia_qlc() ->
    info_msg(hcet_load_db, [get(self)], start, 50),
    Attrs = {attributes, record_info(fields, triple_store)},
    TabDef = [Attrs, {disc_copies, [node()]}],
    info_msg(hcet_load_db, [get(self), TabDef], display_table, 50),
    ET = example_table(),
    %% AGE = fun(X) -> application:get_env(b3s, X) end,
    F = fun() ->
            lists:foreach(fun mnesia:write/1, ET)
        end,
    MW = fun() -> mnesia:transaction(F) end,
    %% F1 = fun(QLC, List) ->
    %%          Result = tm:do(QLC),
    %%          SetR = sets:from_list(Result),
    %%          SetL = sets:from_list(List),
    %%          RL = sets:to_list(sets:subtract(SetR, SetL)),
    %%          LR = sets:to_list(sets:subtract(SetL, SetR)),
    %%          {RL, LR}
    %%      end,
    Q1 = qlc:q([X||X<-mnesia:table(triple_store)]),
    Q2 = qlc:q([X||X<-mnesia:table(triple_store), X#triple_store.s=="koper"]),
    Q3 = qlc:q([X||X<-mnesia:table(triple_store), X#triple_store.p=="livesIn"]),
    Q4 = qlc:q([X||X<-mnesia:table(triple_store),
                   X#triple_store.p=="worksAt",
                   X#triple_store.o=="yj"]),
    %% R2 = [{triple_store,"id26","koper","isLocatedIn","slovenia"},
    %%       {triple_store,"id3","koper","type","city"}],
    %% R3 = [{triple_store,"id35","shou","livesIn","tokyo"},
    %%       {triple_store,"id40","nika","livesIn","ljubljana"},
    %%       {triple_store,"id37","sakura","livesIn","kyoto"},
    %%       {triple_store,"id36","yoshio","livesIn","tokyo"},
    %%       {triple_store,"id38","luka","livesIn","koper"},
    %%       {triple_store,"id39","jan","livesIn","koper"}],
    %% R4 = [{triple_store,"id41","shou","worksAt","yj"},
    %%       {triple_store,"id43","yoshio","worksAt","yj"}],
    %% rr("record.hrl").
    %% c("tm.erl"), tm:do(qlc:q([X||X<-mnesia:table(triple_store)])).
    info_msg(hcet_load_db, [get(self)], testing, 50),
    {inorder,
     [
      %% ?_assertMatch(stopped, mnesia:stop()),
      %% ?_assertMatch(ok, mnesia:create_schema([node()])),
      ?_assertMatch(ok, mnesia:start()),
      ?_assertMatch(ok, timer:sleep(1000)),
      %% ?_assertMatch('b3ss01@shoo', node()),
      %% ?_assertMatch({ok, triple_store}, AGE(name_of_triple_table)),
      ?_assertMatch({atomic, ok}, mnesia:delete_table(triple_store)),
      ?_assertMatch({atomic, ok}, mnesia:create_table(triple_store, TabDef)),
      %% ?_assertMatch(ok, db_interface:db_init()),
      ?_assertMatch(ok, db_interface:db_add_index()),
      ?_assertMatch({atomic, ok}, MW()),
      ?_assertMatch(57, length(tm:do(Q1))),
      ?_assertMatch(2, length(tm:do(Q2))),
      ?_assertMatch(7, length(tm:do(Q3))),
      ?_assertMatch(2, length(tm:do(Q4))),
      ?_assertMatch(stopped, mnesia:stop())
     ]}.
%% Send an empty message to query node QN and return the first message
%% received back; R is the expected reply, used only for logging.
hcet_send_empty(QN, R) ->
    gen_server:cast(QN, {empty, self()}),
    Received = receive
                   Any -> Any
               end,
    info_msg(hcet_send_empty, [get(self), {from,QN}, {received, Received}, {expected,R}, get(state)], data_received, 30),
    Received.
%% Fetch the complete property list (process dictionary) of query node QN.
hcet_get_PD(QN) ->
    Props = gen_server:call(QN, {get_property, all}),
    info_msg(hcet_get_PD, [get(self), {pid,QN}, {all,Props}, length(Props)], response_property_all_received, 50),
    Props.
%% Encode a string value into its integer id via the string_id server.
eI(X) -> string_id:get_id(X).
%% Encode a raw triple {Tab,Id,S,P,O}: the table name is kept, the four
%% remaining components are encoded by string_id.
eT({T,I,S,P,O}) ->
    ET = string_id:encode_triple({I, S, P, O}),
    list_to_tuple([T | tuple_to_list(ET)]).
%% Encode a triple pattern via the string_id server.
eTP(X) -> string_id:encode_triple_pattern(X).
%% Test query 3: two chained merge-join nodes over single tp query nodes.
hcet_q03() ->
    %%
    %% query: using single tp query nodes
    %%
    %% slovenia hasCapital ?x
    %% ?y livesIn ?x
    %% ?y worksAt ijs
    %%
    info_msg(hcet_q03, [get(self)], start, 50),
    Tab = db_interface:dot_get_tn(),
    %% creating processes
    QueryNodeId3 = "3",
    QueryId = "3",
    SessionId = "1",
    Id3 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId3),
    JQN3 = mj_query_node:spawn_process(Id3, node()),
    QueryNodeId5 = "5",
    Id5 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId5),
    JQN5 = mj_query_node:spawn_process(Id5, node()),
    TPQN1 = hcet_tpqn3(JQN3),
    TPQN2 = hcet_tpqn4(JQN3),
    TPQN4 = hcet_tpqn5(JQN5),
    %% first join
    GraphPattern3 = maps:from_list(
                      [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
                       {"2", {"?i2", "?y", eI("livesIn"), "?x"}}]),
    SelectPred3 = none,
    ProjectList3 = none,
    ParentPid3 = JQN5,
    OuterPids3 = [TPQN1],
    InnerPids3 = [TPQN2],
    VarsPositions3 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}]},
    JoinVars3 = ["?x"],
    %% second join
    GraphPattern5 = maps:from_list(
                      [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
                       {"2", {"?i2", "?y", eI("livesIn"), "?x"}},
                       {"4", {"?i4", "?y", eI("worksAt"), eI("ijs")}}]),
    SelectPred5 = {lnot, {"?y", equal, eI("luka")}},
    ProjectList5 = ["?y"],
    ParentPid5 = self(),
    OuterPids5 = [JQN3],
    InnerPids5 = [TPQN4],
    VarsPositions5 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}, {"4", 2}]},
    JoinVars5 = ["?y"],
    %% data to be returned
    T4 = eT({Tab, "id40", "nika", "livesIn", "ljubljana"}),
    T5 = eT({Tab, "id49", "nika", "worksAt", "ijs"}),
    T6 = eT({Tab, "id41", "marko", "livesIn", "ljubljana"}),
    T7 = eT({Tab, "id50", "marko", "worksAt", "ijs"}),
    %% messages for JQN3 and JQN5
    DFO = data_outer,
    EOS = end_of_stream,
    S3 = {start, QueryNodeId3, QueryId, SessionId, JQN3, GraphPattern3,
          SelectPred3, ProjectList3, ParentPid3, OuterPids3, InnerPids3,
          VarsPositions3, JoinVars3},
    S5 = {start, QueryNodeId5, QueryId, SessionId, JQN5, GraphPattern5,
          SelectPred5, ProjectList5, ParentPid5, OuterPids5, InnerPids5,
          VarsPositions5, JoinVars5},
    E5 = {eval, []},
    %% tuples to be returned
    R2Map = maps:put("4", T5, maps:put("2", T4, maps:new())),
    R3Map = maps:put("4", T7, maps:put("2", T6, maps:new())),
    R1 = {DFO, JQN5, [R2Map,R3Map,EOS]},
    info_msg(hcet_q03, [get(self), {r1,R1}], before_tests, 50),
    {inorder,
     [
      ?_assertMatch(ok, mnesia:start()),
      ?_assertMatch(ok, timer:sleep(1000)),
      ?_assertMatch(ok, gen_server:call(JQN3, S3)),
      ?_assertMatch(ok, gen_server:call(JQN5, S5)),
      %% check state of qn-s
      ?_assertMatch(35, length(hcet_get_PD(TPQN1))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN2))),
      ?_assertMatch(43, length(hcet_get_PD(JQN3))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN4))),
      ?_assertMatch(43, length(hcet_get_PD(JQN5))),
      %% start evaluation
      ?_assertMatch(ok, gen_server:call(JQN5, E5)),
      %% send empty messages to JQN5
      ?_assertMatch({'$gen_cast', R1}, hcet_send_empty(JQN5, R1))
     ]}.
%% Spawn and start the outer tp_query_node ("1-3-1") used by hcet_q03;
%% Pid is its parent (the first join node).
hcet_tpqn3(Pid) ->
    NodeId = "1",
    QId = "3",
    SId = "1",
    ProcId = list_to_atom(SId ++ "-" ++ QId ++ "-" ++ NodeId),
    QN = tp_query_node:spawn_process(ProcId, node()),
    Pattern = {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
    VarPos = #{"?i1" => 1, "?x" => 4},
    StartMsg = {start, NodeId, QId, SId, QN, Pattern,
                none, none, Pid, VarPos, outer},
    gen_server:call(QN, StartMsg),
    QN.
%% Spawn and start the inner tp_query_node ("1-3-2") used by hcet_q03;
%% Pid is its parent (the first join node).
hcet_tpqn4(Pid) ->
    NodeId = "2",
    QId = "3",
    SId = "1",
    ProcId = list_to_atom(SId ++ "-" ++ QId ++ "-" ++ NodeId),
    QN = tp_query_node:spawn_process(ProcId, node()),
    Pattern = {"?i2", "?y", eI("livesIn"), "?x"},
    VarPos = #{"?i2" => 1, "?y" => 2, "?x" => 4},
    StartMsg = {start, NodeId, QId, SId, QN, Pattern,
                none, none, Pid, VarPos, inner},
    gen_server:call(QN, StartMsg),
    QN.
%% Spawn and start the inner tp_query_node ("1-3-4") used by hcet_q03;
%% Pid is its parent (the second join node).
hcet_tpqn5(Pid) ->
    NodeId = "4",
    QId = "3",
    SId = "1",
    ProcId = list_to_atom(SId ++ "-" ++ QId ++ "-" ++ NodeId),
    QN = tp_query_node:spawn_process(ProcId, node()),
    Pattern = {"?i4", "?y", eI("worksAt"), eI("ijs")},
    VarPos = #{"?i4" => 1, "?y" => 2},
    StartMsg = {start, NodeId, QId, SId, QN, Pattern,
                none, none, Pid, VarPos, inner},
    gen_server:call(QN, StartMsg),
    QN.
%% Test query 5: the same query as hcet_q03, but each triple pattern is
%% served by two tp query nodes.
hcet_q05() ->
    %%
    %% query: using two tp query nodes for each tp
    %%
    %% slovenia hasCapital ?x
    %% ?y livesIn ?x
    %% ?y worksAt ijs
    %%
    info_msg(hcet_q05, [get(self)], start, 50),
    Tab = db_interface:dot_get_tn(),
    %% creating processes
    QueryNodeId3 = "3",
    QueryId = "5",
    SessionId = "1",
    Id3 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId3),
    JQN3 = mj_query_node:spawn_process(Id3, node()),
    QueryNodeId5 = "5",
    Id5 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId5),
    JQN5 = mj_query_node:spawn_process(Id5, node()),
    TPQN1 = hcet_tp5qn3(JQN3),
    TPQN1a = hcet_tp5qn3a(JQN3),
    TPQN2 = hcet_tp5qn4(JQN3),
    TPQN2a = hcet_tp5qn4a(JQN3),
    TPQN4 = hcet_tp5qn5(JQN5),
    TPQN4a = hcet_tp5qn5a(JQN5),
    %% first join
    GraphPattern3 = maps:from_list(
                      [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
                       {"2", {"?i2", "?y", eI("livesIn"), "?x"}}]),
    SelectPred = none,
    ProjectList = none,
    ParentPid3 = JQN5,
    OuterPids3 = [TPQN1,TPQN1a],
    InnerPids3 = [TPQN2,TPQN2a],
    VarsPositions3 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}]},
    JoinVars3 = ["?x"],
    %% second join
    GraphPattern5 = maps:from_list(
                      [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
                       {"2", {"?i2", "?y", eI("livesIn"), "?x"}},
                       {"4", {"?i4", "?y", eI("worksAt"), eI("ijs")}}]),
    ParentPid5 = self(),
    OuterPids5 = [JQN3],
    InnerPids5 = [TPQN4,TPQN4a],
    VarsPositions5 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}, {"4", 2}]},
    JoinVars5 = ["?y"],
    %% data to be returned
    T1 = eT({Tab, "id22", "slovenia", "hasCapital", "ljubljana"}),
    T2 = eT({Tab, "id38", "luka", "livesIn", "ljubljana"}),
    T3 = eT({Tab, "id47", "luka", "worksAt", "ijs"}),
    T4 = eT({Tab, "id40", "nika", "livesIn", "ljubljana"}),
    T5 = eT({Tab, "id49", "nika", "worksAt", "ijs"}),
    T6 = eT({Tab, "id41", "marko", "livesIn", "ljubljana"}),
    T7 = eT({Tab, "id50", "marko", "worksAt", "ijs"}),
    %% messages for JQN3 and JQN5
    DFO = data_outer,
    EOS = end_of_stream,
    S3 = {start, QueryNodeId3, QueryId, SessionId, JQN3, GraphPattern3,
          SelectPred, ProjectList, ParentPid3, OuterPids3, InnerPids3,
          VarsPositions3, JoinVars3},
    S5 = {start, QueryNodeId5, QueryId, SessionId, JQN5, GraphPattern5,
          SelectPred, ProjectList, ParentPid5, OuterPids5, InnerPids5,
          VarsPositions5, JoinVars5},
    E5 = {eval, []},
    %% tuples to be returned
    R1Map = maps:put("4", T3, maps:put("2", T2, maps:put("1", T1, maps:new()))),
    R2Map = maps:put("4", T5, maps:put("2", T4, maps:put("1", T1, maps:new()))),
    R3Map = maps:put("4", T7, maps:put("2", T6, maps:put("1", T1, maps:new()))),
    R1 = {DFO, JQN5, [R2Map,R2Map,R3Map,R3Map,R1Map]},
    R2 = {DFO, JQN5, [R1Map,R2Map,R2Map,R3Map,R3Map]},
    R3 = {DFO, JQN5, [R1Map,R1Map,R2Map,R2Map,R3Map]},
    R4 = {DFO, JQN5, [R3Map,R1Map,R1Map,R2Map,R2Map]},
    R5 = {DFO, JQN5, [R3Map,R3Map,R1Map,R1Map,EOS]},
    info_msg(hcet_q05, [get(self), {r1,R1}, {r2,R2}], before_tests, 50),
    {inorder,
     [
      ?_assertMatch(ok, mnesia:start()),
      ?_assertMatch(ok, timer:sleep(1000)),
      ?_assertMatch(ok, gen_server:call(JQN3, S3)),
      ?_assertMatch(ok, gen_server:call(JQN5, S5)),
      %% check state of qn-s
      ?_assertMatch(35, length(hcet_get_PD(TPQN1))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN2))),
      ?_assertMatch(43, length(hcet_get_PD(JQN3))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN4))),
      ?_assertMatch(43, length(hcet_get_PD(JQN5))),
      %% start evaluation
      ?_assertMatch(ok, gen_server:call(JQN5, E5)),
      %% send empty messages to JQN5
      %% works only with block_size=5 !!! (iztok, 2016/01/31)
      ?_assertMatch({'$gen_cast', R1}, hcet_send_empty(JQN5, R1)),
      ?_assertMatch({'$gen_cast', R2}, hcet_send_empty(JQN5, R2)),
      ?_assertMatch({'$gen_cast', R3}, hcet_send_empty(JQN5, R3)),
      ?_assertMatch({'$gen_cast', R4}, hcet_send_empty(JQN5, R4)),
      ?_assertMatch({'$gen_cast', R5}, hcet_send_empty(JQN5, R5))
     ]}.
%% Spawn and start the first outer tp_query_node ("1-5-1") for hcet_q05.
hcet_tp5qn3(Pid) ->
    NodeId = "1",
    QId = "5",
    SId = "1",
    ProcId = list_to_atom(SId ++ "-" ++ QId ++ "-" ++ NodeId),
    QN = tp_query_node:spawn_process(ProcId, node()),
    Pattern = {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
    VarPos = #{"?i1" => 1, "?x" => 4},
    StartMsg = {start, NodeId, QId, SId, QN, Pattern,
                none, none, Pid, VarPos, outer},
    gen_server:call(QN, StartMsg),
    QN.
%% Spawn and start the second outer tp_query_node ("1-5a-1") for hcet_q05.
hcet_tp5qn3a(Pid) ->
    NodeId = "1",
    QId = "5a",
    SId = "1",
    ProcId = list_to_atom(SId ++ "-" ++ QId ++ "-" ++ NodeId),
    QN = tp_query_node:spawn_process(ProcId, node()),
    Pattern = {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
    VarPos = #{"?i1" => 1, "?x" => 4},
    StartMsg = {start, NodeId, QId, SId, QN, Pattern,
                none, none, Pid, VarPos, outer},
    gen_server:call(QN, StartMsg),
    QN.
hcet_tp5qn4(Pid) ->
QueryNodeId = "2",
QueryId = "5",
SessionId = "1",
Id = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
TPQN2 = tp_query_node:spawn_process(Id, node()),
TriplePattern = {"?i2", "?y", eI("livesIn"), "?x"},
SelectPred = none,
ProjectList = none,
ParentPid = Pid,
VarsPositions = #{"?i2" => 1, "?y" => 2, "?x" => 4},
M = {start, QueryNodeId, QueryId, SessionId, TPQN2, TriplePattern,
SelectPred, ProjectList, ParentPid, VarsPositions, inner},
gen_server:call(TPQN2, M),
TPQN2.
hcet_tp5qn4a(Pid) ->
QueryNodeId = "2",
QueryId = "5a",
SessionId = "1",
Id = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
TPQN2a = tp_query_node:spawn_process(Id, node()),
TriplePattern = {"?i2", "?y", eI("livesIn"), "?x"},
SelectPred = none,
ProjectList = none,
ParentPid = Pid,
VarsPositions = #{"?i2" => 1, "?y" => 2, "?x" => 4},
M = {start, QueryNodeId, QueryId, SessionId, TPQN2a, TriplePattern,
SelectPred, ProjectList, ParentPid, VarsPositions, inner},
gen_server:call(TPQN2a, M),
TPQN2a.
hcet_tp5qn5(Pid) ->
QueryNodeId = "4",
QueryId = "5",
SessionId = "1",
Id = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
TPQN4 = tp_query_node:spawn_process(Id, node()),
TriplePattern = {"?i4", "?y", eI("worksAt"), eI("ijs")},
SelectPred = none,
ProjectList = none,
ParentPid = Pid,
VarsPositions = #{"?i4" => 1, "?y" => 2},
M = {start, QueryNodeId, QueryId, SessionId, TPQN4, TriplePattern,
SelectPred, ProjectList, ParentPid, VarsPositions, inner},
gen_server:call(TPQN4, M),
TPQN4.
hcet_tp5qn5a(Pid) ->
QueryNodeId = "4",
QueryId = "5a",
SessionId = "1",
Id = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
TPQN4a = tp_query_node:spawn_process(Id, node()),
TriplePattern = {"?i4", "?y", eI("worksAt"), eI("ijs")},
SelectPred = none,
ProjectList = none,
ParentPid = Pid,
VarsPositions = #{"?i4" => 1, "?y" => 2},
M = {start, QueryNodeId, QueryId, SessionId, TPQN4a, TriplePattern,
SelectPred, ProjectList, ParentPid, VarsPositions, inner},
gen_server:call(TPQN4a, M),
TPQN4a.
%% @doc eunit fixture for query 6: each triple pattern is evaluated by
%% three tp_query_node children; JQN3 joins patterns "1" and "2" on ?x,
%% and JQN5 joins that result with pattern "4" on ?y.
%%
%% query: using three query nodes for each tp
%%
%% slovenia hasCapital ?x
%% ?y livesIn ?x
%% ?y worksAt ijs
%%
hcet_q06() ->
    info_msg(hcet_q06, [get(self)], start, 50),
    Tab = db_interface:dot_get_tn(),
    %% creating processes
    QueryNodeId3 = "3",
    QueryId = "6",
    SessionId = "1",
    Id3 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId3),
    JQN3 = mj_query_node:spawn_process(Id3, node()),
    QueryNodeId5 = "5",
    Id5 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId5),
    JQN5 = mj_query_node:spawn_process(Id5, node()),
    TPQN1 = hcet_tp6qn3(JQN3),
    TPQN1a = hcet_tp6qn3a(JQN3),
    TPQN1b = hcet_tp6qn3b(JQN3),
    TPQN2 = hcet_tp6qn4(JQN3),
    TPQN2a = hcet_tp6qn4a(JQN3),
    TPQN2b = hcet_tp6qn4b(JQN3),
    TPQN4 = hcet_tp6qn5(JQN5),
    TPQN4a = hcet_tp6qn5a(JQN5),
    TPQN4b = hcet_tp6qn5b(JQN5),
    %% first join
    GraphPattern3 = maps:from_list(
        [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
         {"2", {"?i2", "?y", eI("livesIn"), "?x"}}]),
    SelectPred = none,
    ProjectList = none,
    ParentPid3 = JQN5,
    OuterPids3 = [TPQN1,TPQN1a,TPQN1b],
    InnerPids3 = [TPQN2,TPQN2a,TPQN2b],
    VarsPositions3 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}]},
    JoinVars3 = ["?x"],
    %% second join
    GraphPattern5 = maps:from_list(
        [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
         {"2", {"?i2", "?y", eI("livesIn"), "?x"}},
         {"4", {"?i4", "?y", eI("worksAt"), eI("ijs")}}]),
    ParentPid5 = self(),
    OuterPids5 = [JQN3],
    InnerPids5 = [TPQN4,TPQN4a,TPQN4b],
    VarsPositions5 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}, {"4", 2}]},
    JoinVars5 = ["?y"],
    %% data to be returned
    T1 = eT({Tab, "id22", "slovenia", "hasCapital", "ljubljana"}),
    T2 = eT({Tab, "id38", "luka", "livesIn", "ljubljana"}),
    T3 = eT({Tab, "id47", "luka", "worksAt", "ijs"}),
    T4 = eT({Tab, "id40", "nika", "livesIn", "ljubljana"}),
    T5 = eT({Tab, "id49", "nika", "worksAt", "ijs"}),
    T6 = eT({Tab, "id41", "marko", "livesIn", "ljubljana"}),
    T7 = eT({Tab, "id50", "marko", "worksAt", "ijs"}),
    %% messages for JQN3 and JQN5
    DFO = data_outer,
    EOS = end_of_stream,
    S3 = {start, QueryNodeId3, QueryId, SessionId, JQN3, GraphPattern3,
          SelectPred, ProjectList, ParentPid3, OuterPids3, InnerPids3,
          VarsPositions3, JoinVars3},
    S5 = {start, QueryNodeId5, QueryId, SessionId, JQN5, GraphPattern5,
          SelectPred, ProjectList, ParentPid5, OuterPids5, InnerPids5,
          VarsPositions5, JoinVars5},
    E5 = {eval, []},
    %% tuples to be returned
    R1Map = maps:put("4", T3, maps:put("2", T2, maps:put("1", T1, maps:new()))),
    R2Map = maps:put("4", T5, maps:put("2", T4, maps:put("1", T1, maps:new()))),
    R3Map = maps:put("4", T7, maps:put("2", T6, maps:put("1", T1, maps:new()))),
    R1 = {DFO, JQN5, [R2Map,R2Map,R2Map,R3Map,R3Map]},
    R2 = {DFO, JQN5, [R3Map,R1Map,R1Map,R1Map,R2Map]},
    R3 = {DFO, JQN5, [R2Map,R2Map,R3Map,R3Map,R3Map]},
    R4 = {DFO, JQN5, [R1Map,R1Map,R1Map,R2Map,R2Map]},
    R5 = {DFO, JQN5, [R2Map,R3Map,R3Map,R3Map,R1Map]},
    R6 = {DFO, JQN5, [R1Map,R1Map,R2Map,R2Map,R2Map]},
    R7 = {DFO, JQN5, [R3Map,R3Map,R3Map,R1Map,R1Map]},
    R8 = {DFO, JQN5, [R1Map,R2Map,R2Map,R2Map,R3Map]},
    R9 = {DFO, JQN5, [R3Map,R3Map,R1Map,R1Map,R1Map]},
    RE = {DFO, JQN5, [R1Map,EOS]},
    info_msg(hcet_q06, [get(self), R1, RE], before_tests, 50),
    {inorder,
     [
      ?_assertMatch(ok, mnesia:start()),
      ?_assertMatch(ok, timer:sleep(1000)),
      ?_assertMatch(ok, gen_server:call(JQN3, S3)),
      ?_assertMatch(ok, gen_server:call(JQN5, S5)),
      %% check state of qn-s
      ?_assertMatch(35, length(hcet_get_PD(TPQN1))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN2))),
      ?_assertMatch(43, length(hcet_get_PD(JQN3))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN4))),
      ?_assertMatch(43, length(hcet_get_PD(JQN5))),
      %% start evaluation
      ?_assertMatch(ok, gen_server:call(JQN5, E5)),
      %% send empty messages to JQN5
      ?_assertMatch({'$gen_cast', R1}, hcet_send_empty(JQN5, R1)),
      ?_assertMatch({'$gen_cast', R2}, hcet_send_empty(JQN5, R2)),
      ?_assertMatch({'$gen_cast', R3}, hcet_send_empty(JQN5, R3)),
      ?_assertMatch({'$gen_cast', R4}, hcet_send_empty(JQN5, R4)),
      ?_assertMatch({'$gen_cast', R5}, hcet_send_empty(JQN5, R5)),
      ?_assertMatch({'$gen_cast', R6}, hcet_send_empty(JQN5, R6)),
      ?_assertMatch({'$gen_cast', R7}, hcet_send_empty(JQN5, R7)),
      ?_assertMatch({'$gen_cast', R8}, hcet_send_empty(JQN5, R8)),
      ?_assertMatch({'$gen_cast', R9}, hcet_send_empty(JQN5, R9)),
      ?_assertMatch({'$gen_cast', R1}, hcet_send_empty(JQN5, R1)),
      ?_assertMatch({'$gen_cast', R2}, hcet_send_empty(JQN5, R2)),
      ?_assertMatch({'$gen_cast', R3}, hcet_send_empty(JQN5, R3)),
      ?_assertMatch({'$gen_cast', R4}, hcet_send_empty(JQN5, R4)),
      ?_assertMatch({'$gen_cast', R5}, hcet_send_empty(JQN5, R5)),
      ?_assertMatch({'$gen_cast', R6}, hcet_send_empty(JQN5, R6)),
      ?_assertMatch({'$gen_cast', R7}, hcet_send_empty(JQN5, R7)),
      ?_assertMatch({'$gen_cast', RE}, hcet_send_empty(JQN5, RE))
     ]}.
%% @doc Spawn and initialize one tp_query_node child for the query-6
%% test fixtures.  All hcet_tp6qn* helpers share session id "1" and
%% differ only in the query node id, query id, triple pattern,
%% variable-position map and the side (outer | inner) handed to the
%% child in its start message.  Returns the spawned node reference.
hcet_tp6_start(QueryNodeId, QueryId, TriplePattern, VarsPositions, Side, Pid) ->
    SessionId = "1",
    Id = list_to_atom(SessionId ++ "-" ++ QueryId ++ "-" ++ QueryNodeId),
    QN = tp_query_node:spawn_process(Id, node()),
    %% these fixtures use no selection predicate and no projection list
    SelectPred = none,
    ProjectList = none,
    M = {start, QueryNodeId, QueryId, SessionId, QN, TriplePattern,
         SelectPred, ProjectList, Pid, VarsPositions, Side},
    gen_server:call(QN, M),
    QN.

%% @doc Outer child (query node "1") of query "6".
hcet_tp6qn3(Pid) ->
    hcet_tp6_start("1", "6",
                   {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
                   #{"?i1" => 1, "?x" => 4}, outer, Pid).

%% @doc Outer child (query node "1") of query "6a".
hcet_tp6qn3a(Pid) ->
    hcet_tp6_start("1", "6a",
                   {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
                   #{"?i1" => 1, "?x" => 4}, outer, Pid).

%% @doc Outer child (query node "1") of query "6b".
hcet_tp6qn3b(Pid) ->
    hcet_tp6_start("1", "6b",
                   {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
                   #{"?i1" => 1, "?x" => 4}, outer, Pid).

%% @doc Inner child (query node "2") of query "6".
hcet_tp6qn4(Pid) ->
    hcet_tp6_start("2", "6",
                   {"?i2", "?y", eI("livesIn"), "?x"},
                   #{"?i2" => 1, "?y" => 2, "?x" => 4}, inner, Pid).

%% @doc Inner child (query node "2") of query "6a".
hcet_tp6qn4a(Pid) ->
    hcet_tp6_start("2", "6a",
                   {"?i2", "?y", eI("livesIn"), "?x"},
                   #{"?i2" => 1, "?y" => 2, "?x" => 4}, inner, Pid).

%% @doc Inner child (query node "2") of query "6b".
hcet_tp6qn4b(Pid) ->
    hcet_tp6_start("2", "6b",
                   {"?i2", "?y", eI("livesIn"), "?x"},
                   #{"?i2" => 1, "?y" => 2, "?x" => 4}, inner, Pid).

%% @doc Inner child (query node "4") of query "6".
hcet_tp6qn5(Pid) ->
    hcet_tp6_start("4", "6",
                   {"?i4", "?y", eI("worksAt"), eI("ijs")},
                   #{"?i4" => 1, "?y" => 2}, inner, Pid).

%% @doc Inner child (query node "4") of query "6a".
hcet_tp6qn5a(Pid) ->
    hcet_tp6_start("4", "6a",
                   {"?i4", "?y", eI("worksAt"), eI("ijs")},
                   #{"?i4" => 1, "?y" => 2}, inner, Pid).

%% @doc Inner child (query node "4") of query "6b".
hcet_tp6qn5b(Pid) ->
    hcet_tp6_start("4", "6b",
                   {"?i4", "?y", eI("worksAt"), eI("ijs")},
                   #{"?i4" => 1, "?y" => 2}, inner, Pid).
%% ====> END OF LINE <====
| null | https://raw.githubusercontent.com/yahoojapan/big3store/c4142395695a836fec60b7c202f9f12d46a8ed01/src/mj_query_node.erl | erlang |
Main-memory Join Query Node processes
@version 0.3
@doc Main-memory join query node is implemented as independent gen_process. Main-memory
join query node is a state-machine realizing main-memory join algorithm as protocol among
join query node and the outer and inner query nodes. It is expected that there is enough
room that all graphs from outer and inner query nodes are stored in main memory.
<table bgcolor="lemonchiffon">
<tr><th>Section Index</th></tr>
<tr><td>{@section main-memory join algorithm}</td></tr>
<tr><td>{@section property list}</td></tr>
<tr><td>{@section handle_call (synchronous) message API}</td></tr>
<tr><td>{@section handle_cast (asynchronous) message API}</td></tr>
</table>
== main-memory join algorithm ==
(LINK: {@section main-memory join algorithm})
Main-memory join query node is independent gen_server process that can have multiple
outer query nodes as well as multiple inner query nodes--each of them is implemented as
separate gen_server process.
Main-memory join algorithm loads results of inner query nodes into main memory creating
main memory hash-index. At the same time results of outer query nodes are pre-loaded into
the queue. After all inner results are loaded into the main-memory index, process starts to
compute join between outer graphs and inner triples as well as further loading of outer
graphs.
of all inner and outer query nodes, and, moves state
results from inner query nodes and creates main-memory index on join attributes.
All the resulted graphs from outer query nodes are in this pahase stored in the queue.
query nodes are joined with triples from inner query nodes. After end_of_stream is received
== property list ==
(LINK: {@section property list})
The gen_server process uses following properties holded by {@link
<tr><th>Name</th><th>Type</th><th>Description</th></tr>
process dictionary was created and used. false denotes that
completely new process.</td> </tr>
<tr> <td>id</td> <td>string()</td> <td>query node identifier</td> </tr>
<tr> <td>pid</td> <td>pid()</td> <td>process id</td> </tr>
<tr> <td>state</td> <td>atom()</td> <td>active | inactive | wait_next_outer |
</td> </tr>
variables to be projected</td> </tr>
<tr> <td>project_out</td> <td>[query_node::qn_id()]</td> <td>list of
query node id-s identifying triples to be projected out of resulting graph
</td> </tr>
triple_distributor:td_node_location()}</td> <td>location of query
node process</td> </tr>
<tr> <td>parent</td> <td>pid()</td> <td>process id of parent query
node</td> </tr>
<tr> <td>outer</td> <td>[pid()]</td> <td>process ids of outer
children query nodes</td> </tr>
<tr> <td>inner</td> <td>[pid()]</td> <td>process ids of inner
children query nodes</td> </tr>
query_node:qn_var()} to {@link jqn_var_position()}</td> </tr>
<tr> <td>wait</td> <td>boolean()</td> <td>indicate whether the
process is in wait state or not.</td> </tr>
<tr> <td>inner_outer</td> <td>inner | outer</td> <td> Position to
its parent query node.</td> </tr>
structure from outer child pid() to atom() (alive | eos).</td> </tr>
<tr> <td>empty_outer_sent</td> <td>boolean()</td> <td>N empty messages
are sent to each of outer processes when eval message of mj_query_node
is processed.</td> </tr>
structure from inner child pid() to atom() (alive | eos).</td> </tr>
<tr> <td>empty_inner_sent</td> <td>boolean()</td> <td>N empty messages
to them.
</td> </tr>
empty messages from parent when graph to be sent to parent is not
yet available.</td> </tr>
graphs (complete messages) to be sent to parent but there is no empty message
available.</td> </tr>
<tr> <td>pause</td> <td>boolean()</td> <td>query stops evaluating
if true and evaluates normally if false</td> </tr>
<tr> <td>start_date_time</td> <td>calendar:datetime()</td>
<td>started date and time of the process.</td> </tr>
<td>process id of b3s_state.</td> </tr>
node_state:ns_pid()}</td> <td>process id of executing benchmark
task.</td> </tr>
of records to be reported.</td> </tr>
</table>
== handle_call (synchronous) message API ==
(LINK: {@section handle_call (synchronous) message API})
Initialization of join query node process. All parameters are
saved to process dictionary.
This request is implemented by {@link hc_start/10}.
=== {eval, VarsValues} ===
Initiate evaluation of query node. The state of
Firstly, initiate evaluation in all children, and, then send N empty
messages to each child so that they can begin sending results.
Note than message passing for eval message is synchronous. This means that
complete query tree is locked while evaluation is initiated.
It includes variables and values to be set in graph pattern
of query node is not changed.
Message eval can be sent to query node multiple times. In each instance,
process dictionary is initialized to the initial state. After eval is executed
query node can expect empty messages from parent.
(LINK: {@section @{eval, VarsValues@}})
=== {get_property, Name} ===
Return the value of specified property name. Variable Name is an
atom(). This request is implemented by {@link
hc_get_property/2}.
== handle_cast (asynchronous) message API ==
(LINK: {@section handle_cast (asynchronous) message API})
Processing data message from outer child. In the case join query node is in state
later.
In the case join query node is in state wait_next_outer then data message from outer
children is set as current outer message. Inner query nodes are reset using join values
of common variables that are set in graph-pattern of outer query nodes.
Processing data message from inner children. Inner graph is joined with
current outer graph stored as outer_graph in process dictionary. Resulted graph
is sent to parent as outer data message. While inner graphs are comming from
is not empty. In this case function hc_data_outer is called (from hc_data_inner)
=== {empty, ParentPid} ===
Processing empty message from parent. If state of query node is
does not include any data message prepared for parent then empty
ParentPid is pid().
This request is implemented by {@link hc_empty/1}.
@type jqn_state() = maps:map(). Map
structure that manages properties for operating the gen_server
process.
positions of some variable in a triple pattern of given query. Pairs include
query node id of triple pattern, and, position of variable in triple pattern
======================================================================
gen_server behavior
init/1
@doc Initialize a mj_query_node process.
set main pd keys
init queues
handle_call/3
@doc Handle synchronous query requests.
default
handle_cast/2
@doc Handle asynchronous query requests.
insert into queue
process empty message
insert into queue
insert into queue
process outer block
erase complete PD
default
@doc Restore process dictionaries from state map structure.
@doc Save process all dictionary contents into state map structure.
handle_info/2
@doc Handle exceptional query requests.
terminate/2
@doc Process termination.
code_change/3
@doc Process code change action.
======================================================================
utility
@doc Report an error issue to the error_logger.
@spec error_msg(atom(), term(), term()) -> ok
@doc Report an information issue to the error_logger if current
debug level is greater than ThresholdDL.
======================================================================
api
@doc Return child spec for this process. It can be used in
supervisor:init/0 callback implementation.
@doc Spawn tp_query_node process with given local identifier at given node.
======================================================================
handle call/cast implementation
hc_start/10
@doc Initialize mj_query_node process.
node_state:ns_pid(), [node_state:ns_pid()], [node_state:ns_pid()],
benchmark stuff
store num-of-empty-msgs in PD
store block-size in PD
@doc Test function for hc_start.
{generator, fun()-> hcst_sne() end},
hc_eval/2
@doc Initiate evaluation of join query node.
@spec hc_eval(jqn_var_position(), atom()) -> ok
send eval to each pid in outer children and mark state of outer stream 'alive'.
send empty messages
send eval and empty messages to inner nodes and update state of inner streams
reset inner child
send empty messages (eval is done once?)
remember state of inner child
compute list of qn id-s to be projected out
update parameters
waiting for data from outer
hc_empty/2
@doc Co-routine for processing empty message from parent. It is expected that there is
as message, or, some other procedure has checked that the message is in the queue.
@spec hc_empty( State::atom() ) -> ok
leave empty message in queue from parent
leave empty message in queue from parent
check if there are messages to parent and send data message to parent
next actions to be done
state==wait_next_outer
if state is wait_next_outer and there are messages waiting from outer query nodes
than empty message wakes up processing of another outer graph
state==wait_next_inner
than empty message wakes up processing of inner graphs
hc_data_inner/3
@doc Co-routine for processing data message from inner children.
@spec hc_data_inner(atom()) -> ok
get inner graph from queue from_inner
get status for end of block
send empty if at the end of block
send empty message back to outer
next protocol actions
state = wait_next_outer?
inner graphs have been processed. start processing outer.
inner queue not empty yet, continue with next inner graph
mark end_of_stream of outer process
get position and tuple
@doc Co-routine for processing data block from outer children.
@spec hc_data_outer(State::atom()) -> ok|fail
data message left in queue from_outer to be processed later
retrieve outer graph from queue from_outer and save it in PD
get status for end of block
send empty if at the end of block
send empty message back to outer
outer loop actions for outer graph read from queue
we are at the end of some inner stream
join outer graph with inner and send it to parent
next protocol actions for outer loop
state = wait_next_outer?
continue outer loop if we stayed in wait_next_outer and
there is another outer graph to process and there is empty message
to be used
mark outer stream not alive
check if all outer streams are dead
there is empty message from parent
send parent last block of to_parent
empty queue from_parent, so leave message in queue to_parent.
retrieve outer graph from pd
get index entry for key
attempt to join outer Graph to index_inner
compute join
set current graph G and compute val of select predicate
skip graph G if SP==false
compute projection and put in queue to_parent
get empty message from queue from_parent
get block and create message
send it to parent
loop on list of graphs
hc_eval_test_/0
@doc Main test function of module.
finish
@doc Creation of triple-store used in examples.
country
cities
organizations
uni ljubljana
institute jozef stefan
tokyo uni
kyoto uni
persons
isLocatedIn
livesIn
worksAt
graduatedFrom
age
F1 = fun(QLC, List) ->
Result = tm:do(QLC),
SetL = sets:from_list(List),
LR = sets:to_list(sets:subtract(SetL, SetR)),
end,
{triple_store,"id3","koper","type","city"}],
{triple_store,"id40","nika","livesIn","ljubljana"},
{triple_store,"id37","sakura","livesIn","kyoto"},
{triple_store,"id36","yoshio","livesIn","tokyo"},
{triple_store,"id38","luka","livesIn","koper"},
{triple_store,"id39","jan","livesIn","koper"}],
{triple_store,"id43","yoshio","worksAt","yj"}],
rr("record.hrl").
?_assertMatch('b3ss01@shoo', node()),
?_assertMatch({ok, triple_store}, AGE(name_of_triple_table)),
query: using single tp query nodes
?y livesIn ?x
creating processes
first join
data to be returned
tuples to be returned
check state of qn-s
start evaluation
send empty messages to JQN5
?y livesIn ?x
creating processes
first join
data to be returned
tuples to be returned
check state of qn-s
start evaluation
send empty messages to JQN5
?y livesIn ?x
creating processes
first join
data to be returned
tuples to be returned
check state of qn-s
start evaluation
send empty messages to JQN5
====> END OF LINE <==== | 2014 - 2016 UP FAMNIT and Yahoo Japan Corporation
@since February , 2016
@author < >
@author < >
State - machine has the following states : inactive , active , wait_next_outer , wait_next_inner ,
and eos . Message start set state of protocol to active . Message eval starts the evaluation
to wait_next_inner , the first phase of algorithm . In this phase main - memory join reads
After reading all results from inner query node protocol enters second phase and state
is changed wait_next_outer . In the second phase of the algorithm graphs from outer
from all outer query nodes , state moves to eos .
jqn_state ( ) } .
< table " >
< tr > < td > > < td > boolean()</td > < td > true denotes that
wait_next_inner | eos</td > < /tr >
< tr > < td > gp</td > < td > maps : > < td > graph represented as
mapping from { @type query_node : qn_id ( ) } to { @type : qn_triple_pattern()}</td > < /tr >
< tr > < td > select_pred</td > < td > : qn_select_predicate()</td > < td > selection
predicate in the form of abstract syntax tree of type { @type : qn_select_predicate ( ) }
< tr > < td > project_list</td > < td > : > < td > list of
< tr > < td > > < td>{@link
< tr > < td > join_vars</td > < td>[{@link : qn_var()}]</td > < td > List of
variables used for joining.</td > < /tr >
< tr > < td > vars_pos</td > < td > maps : > < td > mapping from { @link
< tr > < td > vars_values</td > < td > maps : > < td > mapping from
{ @link : qn_var ( ) } to string ( ) ( not used)</td > < /tr >
< tr > < td > inner_graph</td > < td>{@link : qn_graph()}</td > < td > current
graph data from inner child</td > < /tr >
< tr > < td > outer_graph</td > < td>{@link : qn_graph()}</td > < td > current
graph data from outer child</td > < /tr >
< tr > < td > state_of_outer_streams</td > < td > maps : > < td > Map
< tr > < td > state_of_inner_streams</td > < td > maps : > < td > Map
are sent to each of inner processes after first eval message is sent
< tr > < td > queue_from_outer</td > < td > queue : > < td > Queue storing
graphs from outer child query node while processing one of previous
outer > < /tr >
< tr > < td > queue_from_parpent</td > < td > queue : > < td > Queue storing
< tr > < td > queue_to_parent</td > < td > queue : > < td > Queue storing
< tr > < td > b3s_state_pid</td > < td>{@type node_state : ns_pid()}</td >
< tr > < td > benchmark_task_pid</td > < td>{@type
< tr > < td > result_record_max</td > < td > integer()</td > < td > number
= = = { start , QueryNodeId , QueryId , SessionId , Self , GraphPattern , SelectPred , ProjectList , ParentPid , OuterPids , InnerPids , VarsPositions , JoinVars } = = =
( LINK : { @section @{start , QueryNodeId , QueryId , SessionId , Self , GraphPattern , SelectPred , ProjectList , ParentPid , OuterPids , InnerPids , VarsPositions , JoinVars@ } } )
QueryNodeId is { @link : qn_id ( ) } , QueryId is string ( ) , SessionId is string ( ) ,
Self is { @link node_state : ns_pid ( ) } , GraphPattern is { @link : ( ) } ,
SelectPred is { @link : qn_select_predicate ( ) } ,
ProjectList is { @link : qn_project_list ( ) } , ParentPid is pid ( ) ,
OuterPids is [ pid ( ) ] , InnerPids is [ pid ( ) ] , VarsPositions is { @link
jqn_var_position ( ) } , JoinVars is [ { @link : qn_var ( ) } ] .
query node must be either active or eos so that eval message is executed .
VarsValues is : qn_var_val_map ( ) .
of query node . In the case value of VarsValues is [ ] then graph pattern
VarsValues is { @link : qn_var_val_map ( ) } . This request is implemented by { @link hc_eval/1 } .
= = = { data_outer , ParentPid , Graph } = = =
wait_next_inner , complete data message is stored in to be processed
When all outer query nodes are in state eos ( end of stream ) then end_of_stream is sent
to parent and state of this query node is set to eos .
( LINK : { @section @{data_outer , Pid , Graph@ } } )
Pid is pid ( ) and is : qn_graph ( ) . This request is implemented by { @link hc_data_outer/1 } .
= = = { data_inner , Pid , Graph } = = =
inner children , query node is in state wait_next_inner .
More graphs from outer child may be stored in queue_from_outer . State may change to
wait_next_outer in the case all inner streams are terminated and
for outer graph from queue . ( LINK : { @section @{data_inner , Pid , Graph@ } } )
Pid is pid ( ) , is : qn_graph ( ) . This request is implemented by { @link hc_data_inner/3 } .
( LINK : { @section @{empty , ParentPid@ } } ) .
eos or inactive then simply ignore empty message . If
message is stored in queue_from_parent and used later . Finally , if
there is a message in then send it to parent .
@type jqn_var_position ( ) = maps : map ( ) . Mapping from { @link : qn_var ( ) }
to [ { { @link : qn_id ( ) } , integer ( ) } ] . List of pairs represent
( 1 : i d , 2 : sub , 3 : prd , 4 : obj ) .
-module(mj_query_node).
-behavior(gen_server).
-export(
[
child_spec/1, spawn_process/2, receive_empty/0, hcst_sne/0,
init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3
]).
-include_lib("stdlib/include/qlc.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("record.hrl").
@spec init ( [ ] ) - > { ok , jqn_state ( ) }
init([]) ->
process_flag(trap_exit, true),
put(wait, true),
put(pid, self()),
put(start_date_time, calendar:local_time()),
put(mq_debug, gen_server:call(node_state, {get, mq_debug})),
query_node:queue_init(from_parent, plain, empty),
query_node:queue_init(to_parent, output, data_outer),
query_node:queue_init(from_inner, input, data_inner),
query_node:queue_init(from_outer, input, data_outer),
info_msg(init, [{state,hc_save_pd()}], done, -1),
{ok, hc_save_pd()}.
handle_call(term ( ) , { pid ( ) , term ( ) } , jqn_state ( ) ) - > { reply , term ( ) , jqn_state ( ) }
handle_call({start, QueryNodeId, QueryId, SessionId, Self, GraphPattern, SelectPred, ProjectList, ParentPid, OuterPid, InnerPid,
VarsPositions, JoinVars}, _, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_call, [Self, {message,start}, {all,get()}, get(state)], message_received, 10),
hc_start(QueryNodeId, QueryId, SessionId, Self, GraphPattern, SelectPred, ProjectList, ParentPid, OuterPid, InnerPid, VarsPositions, JoinVars),
{reply, ok, hc_save_pd()};
handle_call({get_property, all}, _, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_call, [get(self), {message,get_property}, {name,all}, {value,get()}, get(state)], message_received, 10),
{reply, get(), hc_save_pd()};
handle_call({get_property, Name}, _, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_call, [get(self), {message,get_property}, {name,Name}, {value,get(Name)}, get(state)], message_received, 10),
{reply, get(Name), hc_save_pd()};
handle_call({get, Name}, _, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_call, [get(self), {message,get}, {name,Name}, {value,get(Name)}, get(state)], message_received, 10),
{reply, get(Name), hc_save_pd()};
handle_call({eval, VarsValues}, _, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_call, [get(self), {message,eval}, {vars_values,VarsValues}, {all,get()}, get(state)], message_received, 10),
hc_eval(VarsValues, get(state)),
{reply, ok, hc_save_pd()};
handle_call(Request, From, State) ->
R = {unknown_request, Request},
error_msg(handle_call, [get(self), Request, From, get()], R),
{reply, R, State}.
@spec handle_cast(term ( ) , jqn_state ( ) ) - > { noreply , jqn_state ( ) }
handle_cast({empty, From}, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
query_node:queue_write(from_parent, {empty, From}),
info_msg(handle_cast, [get(self), {message,empty}, {from,From}, {queue_from_parent,get(queue_from_parent)}, get(state)], message_received, 30),
hc_empty(get(state)),
{noreply, hc_save_pd()};
handle_cast({data_inner, From, Block}, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_cast, [get(self), {message,data_inner}, {from,From}, {block,Block}, get(state)], message_received, 30),
query_node:queue_write(from_inner, {data_inner, From, Block}),
hc_data_inner(get(state)),
{noreply, hc_save_pd()};
handle_cast({data_outer, From, Block}, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_cast, [get(self), {message,data_outer}, {from,From}, {block,Block}, get(state)], message_received, 30),
query_node:queue_write(from_outer, {data_outer, From, Block}),
hc_data_outer(get(state)),
{noreply, hc_save_pd()};
handle_cast({stop, From}, State) ->
b3s_state:hc_monitor_mq(erlang:get(mq_debug)),
hc_restore_pd(get(created), State),
info_msg(handle_cast, [get(self), {message,stop}, {from,From}, get(state)], message_received, 10),
erase(),
{noreply, hc_save_pd()};
handle_cast(Request, State) ->
R = {unknown_request, Request},
error_msg(handle_cast, [get(self), {request,Request}, {state,State}, get()], R),
{noreply, hc_save_pd()}.
%% @spec hc_restore_pd([{atom(), term()}] | undefined, jqn_state()) -> ok
%% Restore the process dictionary from the saved state map. Callers
%% pass get(created) as the first argument, so the restore only runs
%% while 'created' is still undefined (a freshly (re)spawned process);
%% otherwise the dictionary is already authoritative.
hc_restore_pd(undefined, State) ->
    lists:foreach(fun ({Key, Value}) -> put(Key, Value) end,
                  maps:to_list(State));
hc_restore_pd(_, _) ->
    ok.
%% Helper: write every {Key, Value} pair into the process dictionary.
%% Always returns ok.
hc_restore_pd_1(Pairs) ->
    lists:foreach(fun ({Key, Value}) -> put(Key, Value) end, Pairs).
%% @spec hc_save_pd() -> jqn_state()
%% Snapshot the whole process dictionary into a map; this map is what
%% gen_server keeps as our state between callbacks.
hc_save_pd() ->
    lists:foldl(fun ({Key, Value}, Acc) -> maps:put(Key, Value, Acc) end,
                maps:new(), get()).
%% @spec handle_info(term(), jqn_state()) -> {noreply, jqn_state()}
%% @doc gen_server info callback: out-of-band messages are ignored.
handle_info(_Info, State) ->
{noreply, State}.
%% @spec terminate(term(), jqn_state()) -> none()
%% @doc gen_server terminate callback: log the reason and final state.
%% NOTE(review): the -1 threshold presumably forces the log entry to be
%% emitted unconditionally - confirm in node_state:info_msg/5.
terminate(Reason, State) ->
P = pid_to_list(self()),
info_msg(terminate, [get(self), {reason,Reason}, {state,State}, {pid,P}, get(state)], done, -1),
ok.
%% @spec code_change(term(), jqn_state(), term()) -> {ok, jqn_state()}
%% @doc gen_server code_change callback: no state conversion needed.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%% @doc Log an error via node_state, tagged with this module's name.
error_msg(FunName, Argument, Result) ->
node_state:error_msg(?MODULE, FunName, Argument, Result).
%% @spec info_msg(atom(), term(), term(), integer()) -> ok
%% @doc Log an informational message via node_state; ThresholdDL is the
%% debug-level threshold controlling whether the entry is emitted.
info_msg(FunName, Argument, Result, ThresholdDL) ->
node_state:info_msg(?MODULE, FunName, Argument, Result, ThresholdDL).
%% @spec child_spec(Id::atom()) -> supervisor:child_spec()
%% @doc Build a supervisor child specification for starting an
%% mj_query_node gen_server registered locally under Id.
%% Fix: the local variable was misspelled 'Shutdwon' in the original.
child_spec(Id) ->
    GSOpt = [{local, Id}, mj_query_node, [], []],
    StartFunc = {gen_server, start_link, GSOpt},
    Restart = permanent,
    Shutdown = 1000,
    Type = worker,
    Modules = [mj_query_node],
    {Id, StartFunc, Restart, Shutdown, Type, Modules}.
%% @spec spawn_process(Id::atom(), Node::node()) -> node_state:ns_pid()
%% Start an mj_query_node child under the b3s supervisor on Node and
%% return its {Id, Node} process identifier.
spawn_process(Id, Node) ->
    supervisor:start_child({b3s, Node}, mj_query_node:child_spec(Id)),
    {Id, Node}.
%% @spec hc_start(query_node:qn_id(), string(), string(), node_state:ns_pid(),
%%       [query_node:qn_triple_pattern()], query_node:qn_select_predicate(),
%%       query_node:qn_project_list(), node_state:ns_pid(), [node_state:ns_pid()],
%%       [node_state:ns_pid()], jqn_var_position(), [query_node:qn_var()]) -> ok
%% NOTE(review): reconstructed from a garbled comment - confirm exact types.
%% @doc Initialize this join query node's process dictionary from the
%% 'start' message parameters and cached configuration values.
hc_start(QueryNodeId, QueryId, SessionId, Self, GraphPattern, SelectPred, ProjectList, ParentPid, OuterPids, InnerPids, VarsPositions, JoinVars) ->
%% identity and static query parameters
put(created, true),
put(qnode, join),
put(node_id, QueryNodeId),
put(query_id, QueryId),
put(session_id, SessionId),
put(self, Self),
put(state, active),
put(gp, GraphPattern),
put(select_pred, SelectPred),
put(project_list, ProjectList),
put(parent, ParentPid),
put(outer, OuterPids),
put(inner, InnerPids),
put(vars_pos, VarsPositions),
put(join_vars, JoinVars),
%% dynamic evaluation state
put(empty_outer_sent, false),
put(empty_inner_sent, false),
put(index_inner, maps:new()),
put(wait, false),
put(pause, false),
%% clear string-id / db-cursor leftovers from any previous run
erase(sid_table_name),
erase(sid_max_id),
erase(di_cursor__),
erase(di_ets__),
%% cache global configuration: b3s_state pid, benchmark task pid,
%% number of empty tokens per stream, and result block size
BSP = b3s_state_pid,
BMT = benchmark_task,
BTP = benchmark_task_pid,
put(BSP, gen_server:call(node_state, {get, BSP})),
{_, FSN} = get(BSP),
put(BTP, {gen_server:call(get(BSP), {get, BMT}), FSN}),
{ok, N} = application:get_env(b3s, num_of_empty_msgs),
put(num_of_empty_msgs, N),
BSZ = block_size,
put(BSZ, gen_server:call(get(BSP), {get, BSZ})).
%% @doc Send N empty messages to Pid. N is stored in config.
%% @doc Send num_of_empty_msgs flow-control tokens to Pid (see
%% hc_start/12 for where the count is cached from application config).
send_N_empty(Pid) ->
N = get(num_of_empty_msgs),
send_N_empty_1(Pid, N),
info_msg(send_N_empty, [get(self), {send_to, Pid}, {num, N}], done, 50).
%% Helper for send_N_empty/1: cast {empty, our-id} to Pid exactly
%% Remaining times, logging each send.
send_N_empty_1(_Pid, 0) ->
    ok;
send_N_empty_1(Pid, Remaining) ->
    gen_server:cast(Pid, {empty, get(self)}),
    info_msg(send_cast, [get(self), {message,empty}, {to,Pid}, {invoker,send_N_empty}, get(state)], message_sent, 30),
    send_N_empty_1(Pid, Remaining - 1).
%% @doc Block until any 2-tuple message arrives and return its second
%% element (used by tests to pick up {empty, Pid} replies).
receive_empty() ->
receive
{_, M} -> M
end,
info_msg(receive_empty, [get(self), {message, M}], done, 50),
M.
%% @doc EUnit entry for the start-message tests: restart b3s,
%% bootstrap, run the hcst_q01 generator, then stop.
%% Fix: a stripped comment line inside the list was restored as a
%% comment (it was a disabled assertion and broke the syntax as bare
%% text).
hc_start_test_() ->
    b3s:start(),
    b3s:stop(),
    b3s:start(),
    b3s:bootstrap(),
    {inorder,
     [
      %% ?_assertMatch(ok, b3s:start()),
      {generator, fun()-> hcst_q01() end},
      ?_assertMatch(ok, b3s:stop())
     ]}.
%% @doc EUnit generator: check that send_N_empty/1 delivers (at least
%% two) {empty, Self} messages back to the calling test process.
hcst_sne() ->
info_msg(hcst_sne, [get(self)], start, 50),
S = self(),
{inorder,
[
?_assertMatch(ok, send_N_empty(S)),
?_assertMatch({empty, S}, receive_empty()),
?_assertMatch({empty, S}, receive_empty())
]}.
%% @doc EUnit generator: spawn a join query node, send it a start
%% message for a two-pattern query joined on ?prd, and verify that all
%% properties from the start message landed in its process dictionary.
hcst_q01() ->
info_msg(hcst_q01, [get(self)], start, 50),
QueryNodeId = "3",
QueryId = "1",
SessionId = "1",
Id = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
JQN1 = mj_query_node:spawn_process(Id, node()),
QNState = active,
GraphPattern = #{"1" => {"?id1", "<Japanese>", "?prd", "?obj1"},
"2" => {"?id2", "<Slovenian>", "?prd", "?obj2"}},
SelectPred = none,
ProjectList = none,
ParentPid = self(),
OuterPids = [self()],
InnerPids = [self()],
VarsPositions = #{"?id1" => [{"1", 1}],
"?id2" => [{"2", 1}],
"?prd" => [{"1", 3}, {"2", 3}],
"?obj1" => [{"1", 4}],
"?obj2" => [{"2", 4}]},
JoinVars = ["?prd"],
M1 = {start, QueryNodeId, QueryId, SessionId, JQN1,
GraphPattern, SelectPred, ProjectList,
ParentPid, OuterPids, InnerPids, VarsPositions, JoinVars},
GP = get_property,
{inorder,
[
%% 'wait' is true before start, false afterwards
?_assertMatch(true, gen_server:call(JQN1, {GP, wait})),
?_assertMatch(ok, gen_server:call(JQN1, M1)),
?_assertMatch(QueryNodeId, gen_server:call(JQN1, {GP, node_id})),
?_assertMatch(GraphPattern, gen_server:call(JQN1, {GP, gp})),
?_assertMatch(QNState, gen_server:call(JQN1, {GP, state})),
?_assertMatch(ParentPid, gen_server:call(JQN1, {GP, parent})),
?_assertMatch(OuterPids, gen_server:call(JQN1, {GP, outer})),
?_assertMatch(InnerPids, gen_server:call(JQN1, {GP, inner})),
?_assertMatch(VarsPositions, gen_server:call(JQN1, {GP, vars_pos})),
?_assertMatch(JoinVars, gen_server:call(JQN1, {GP, join_vars})),
?_assertMatch(false, gen_server:call(JQN1, {GP, wait})),
?_assertMatch(undefined, gen_server:call(JQN1, {GP, inner_outer}))
]}.
%% Ignoring VarsValues since we currently have only left-deep trees. [TODO]
%% @doc Start evaluating the join: forward 'eval' to every outer and
%% inner child stream, grant each of them the initial empty tokens
%% (only once per node lifetime), prepare the projection, refresh the
%% block size, and move to state wait_next_inner.
hc_eval(_, State)
when (State =:= eos) or (State =:= active) ->
%% mark every outer stream alive and send it 'eval'
put(state_of_outer_streams, #{}),
F1 = fun (Pid) ->
gen_server:call(Pid, {eval, []}),
info_msg(send_call, [get(self), {message,eval}, {invoker,hc_eval}, {to,Pid}, {gp,get(gp)},
{vars_values,[]}, {invoker,hc_eval}, get(state)], message_sent, 30),
M = get(state_of_outer_streams),
put(state_of_outer_streams, maps:put(Pid, alive, M))
end,
lists:map(F1, get(outer)),
%% grant initial empty tokens to outer streams only once
case get(empty_outer_sent) of
false -> lists:map(fun send_N_empty/1, get(outer));
true -> ok
end,
put(empty_outer_sent, true),
%% same for the inner streams
put(state_of_inner_streams, #{}),
F2 = fun (Pid) ->
gen_server:call(Pid, {eval, []}),
info_msg(send_call, [get(self), {message,eval}, {invoker,hc_eval}, {to,Pid}, {gp,get(gp)},
{join_var_values,[]}, get(state)], message_sent, 30),
case get(empty_inner_sent) of
false -> send_N_empty(Pid);
true -> ok
end,
M = get(state_of_inner_streams),
put(state_of_inner_streams, maps:put(Pid, alive, M))
end,
lists:map(F2, get(inner)),
put(empty_inner_sent, true),
query_node:project_prepare(get(project_list)),
%% refresh the cached block size from b3s_state
BSP = b3s_state_pid,
BSZ = block_size,
put(BSZ, gen_server:call(get(BSP), {get, BSZ})),
put(state, wait_next_inner);
%% 'eval' in any other state is an error
hc_eval(_, State) ->
error_msg(hc_eval, [get(self), {all,get()}, State], wrong_state),
ok.
%% Precondition: at least one empty message is in queue from_parent,
%% either the one that has just arrived or one received earlier.
%% @doc Handle an 'empty' flow-control token from the parent: when a
%% result block is ready in queue to_parent, consume one empty token
%% from queue from_parent and send the block to the parent; then drain
%% pending outer/inner data and repeat while both queues stay ready.
%% Fix: four stripped comment lines inside the body were left as bare
%% text (syntax errors) and have been restored as comments.
hc_empty(undefined) ->
    info_msg(hc_empty, [get(self), {from,get(parent)}, get(state)], empty_before_start, 50);
hc_empty(active) ->
    info_msg(hc_empty, [get(self), {from,get(parent)}, get(state)], empty_before_eval, 50);
hc_empty(State)
  when (State =:= wait_next_outer) or (State =:= wait_next_inner) or
       (State =:= eos) ->
    case query_node:queue_prepared(to_parent) of
        true ->
            %% read empty message from queue (there must be at least one msg)
            {empty, _} = query_node:queue_read(from_parent),
            %% get data message from queue to_parent and send it
            Msg = query_node:queue_read(to_parent),
            gen_server:cast(get(parent), Msg),
            info_msg(send_cast, [get(self), {message,Msg}, {to,get(parent)}, {invoker,hc_empty}, get(state)], message_sent, 30);
        false ->
            info_msg(hc_empty, [get(self), {to,get(parent)}, get(state)], no_messages_to_parent, 50)
    end,
    QEFO = query_node:queue_empty(from_outer),
    case {get(state), QEFO} of
        {wait_next_outer, false} ->
            hc_data_outer(get(state));
        _ -> ok
    end,
    QEFI = query_node:queue_empty(from_inner),
    case {get(state), QEFI} of
        %% if state is wait_next_inner and there are messages waiting
        %% from inner query nodes
        {wait_next_inner, false} ->
            hc_data_inner(get(state));
        _ -> ok
    end,
    %% if there is another pair of empty-data messages run hc_empty again
    case {query_node:queue_prepared(from_parent), query_node:queue_prepared(to_parent)} of
        {true,true} -> hc_empty(get(state));
        _ -> ok
    end;
hc_empty(State) ->
    error_msg(hc_empty, [get(self), {all,get()}, State], wrong_state).
%% @doc Process one message from queue from_inner. Valid only in state
%% wait_next_inner; the message is either end_of_stream (the inner
%% stream From finished) or a graph to be indexed for the join.
%% Fix: the 'true ->' pattern of the block-end case was lost in the
%% source and has been restored - confirm against upstream.
hc_data_inner(State) when State =/= wait_next_inner ->
    %% something is wrong; state should be wait_next_inner
    error_msg(hc_data_inner, [get(self), {all,get()}, get(state)], inner_msg_in_wrong_state);
hc_data_inner(State) when State == wait_next_inner ->
    {From, Graph} = query_node:queue_get(from_inner),
    BE = query_node:queue_block_end(from_inner),
    case BE of
        true ->
            %% block finished: return a flow-control token to the sender
            EMsg = {empty, get(self)},
            gen_server:cast(From, EMsg),
            info_msg(send_cast, [get(self), {message,EMsg}, {to,From}, {invoker,hc_data_inner}, get(state)], message_sent, 30);
        _ -> ok
    end,
    %% inner loop actions for read from inner queue
    case Graph of
        end_of_stream ->
            hcdi_process_eos(From),
            info_msg(hc_data_inner, [get(self), {from,From}, {graph,Graph}, get(state)], inner_eos_processed, 50);
        _ -> hcdi_process_graph(Graph),
             info_msg(hc_data_inner, [get(self), {from,From}, {graph,Graph}, get(state)], inner_graph_processed, 50)
    end,
    QEFO = query_node:queue_empty(from_outer),
    case {get(state), QEFO} of
        {wait_next_outer, false} ->
            %% state changed from wait_next_inner to wait_next_outer
            hc_data_outer(get(state)),
            info_msg(hc_data_inner, [get(self), get(state)], call_outer_loop, 50);
        _ -> ok
    end,
    %% still wait_next_inner with more inner data queued?
    QEFI = query_node:queue_empty(from_inner),
    case {get(state), QEFI} of
        {wait_next_inner, false} ->
            hc_data_inner(get(state)),
            info_msg(hc_data_inner, [get(self), get(state)], call_inner_loop, 50);
        _ -> ok
    end;
hc_data_inner(State) ->
    %% NOTE(review): unreachable - the two guarded clauses above cover
    %% every State value; kept for symmetry with the original code.
    error_msg(hc_data_inner, [get(self), {all,get()}, {state,State}, get(state)], wrong_state ).
%% @doc Mark inner stream From as finished; once every inner stream has
%% reached eos, switch to state wait_next_outer.
%% Fix: the '0 ->' case pattern was lost in the source and has been
%% restored - confirm against upstream (the stripped comment next to it
%% said "set state eos" although the code sets wait_next_outer).
hcdi_process_eos(From) ->
    M = get(state_of_inner_streams),
    put(state_of_inner_streams, maps:put(From, eos, M)),
    %% count streams that are still alive
    F3 = fun (alive) -> true;
             (eos) -> false
         end,
    NumAlive = length(lists:filter(F3, maps:values(get(state_of_inner_streams)))),
    case NumAlive of
        0 ->
            put(state, wait_next_outer),
            info_msg(hc_data_inner, [get(self), get(state)], query_evaluation_inner_completed, 50);
        _ -> ok
    end.
%% @doc Insert Graph into the inner join index, keyed by the list of
%% this node's join-variable values.
%% Fix: restored stripped comments; a garbled debug info_msg line that
%% referenced an unbound variable was dropped (it could not compile).
hcdi_process_graph(Graph) ->
    %% make query_node:qn_var_val_map(): value of each join var in Graph
    F1 = fun (V) ->
                 hce_get_var_value(V, Graph)
         end,
    JV = lists:map(F1, get(join_vars)),
    %% insert Graph with key JV to index
    II = get(index_inner),
    case maps:is_key(JV, II) of
        true -> L = maps:get(JV, II),
                put(index_inner, maps:put(JV, [Graph|L], II));
        false -> put(index_inner, maps:put(JV, [Graph], II))
    end.
%% @doc Look up the value of Variable in Graph via the vars_pos map.
%% Fix: two stripped comment lines were left as bare text (syntax
%% errors) and have been restored as comments.
hce_get_var_value(Variable, Graph) ->
    VP = get(vars_pos),
    %% LVP is [{NodeId, Pos}]
    LVP = maps:get(Variable, VP),
    {NodeId,Pos} = hce_get_node_id(LVP, Graph),
    Tuple = maps:get(NodeId, Graph),
    %% Pos+1 since first component is table-name
    query_node:eval_attribute(element(Pos+1, Tuple)).
%% Return the first {NodeId, Pos} pair whose NodeId is bound in Graph;
%% logs an error when no candidate node id is present.
hce_get_node_id([{NodeId, Pos} | Remaining], Graph) ->
    case maps:is_key(NodeId, Graph) of
        true -> {NodeId, Pos};
        false -> hce_get_node_id(Remaining, Graph)
    end;
hce_get_node_id([], Graph) ->
    error_msg(hce_get_node_id, [get(self), {graph,Graph}, {all,get()}, get(state)], cant_find_var_val).
%% hc_data_outer/1
%% @doc Process one message from queue from_outer while in state
%% wait_next_outer: either record an outer end_of_stream or join the
%% outer graph against the inner index.
%% Fix: the 'true ->' and '_ ->' case patterns were lost in the source
%% and have been restored - confirm against upstream; two garbled debug
%% info_msg lines were dropped (they could not compile).
hc_data_outer(State) when State =/= wait_next_outer ->
    info_msg(hc_data_outer, [get(self), get(state)], message_left_in_queue_from_outer, 50);
hc_data_outer(State) when State =:= wait_next_outer ->
    {From, Graph} = query_node:queue_get(from_outer),
    put(outer_graph, Graph),
    BE = query_node:queue_block_end(from_outer),
    case BE of
        true ->
            %% block finished: return a flow-control token to the sender
            EMsg = {empty, get(self)},
            gen_server:cast(From, EMsg),
            info_msg(send_cast, [get(self), {message,EMsg}, {to,From}, {invoker,hc_data_outer}, get(state)], message_sent, 30);
        _ -> ok
    end,
    case Graph of
        end_of_stream ->
            hcdo_process_eos(From);
        _ ->
            hcdo_match_inner_graphs()
    end,
    %% more outer data queued and an empty token available? loop.
    QPFP = query_node:queue_prepared(from_parent),
    QEFO = query_node:queue_empty(from_outer),
    case {get(state), QPFP, QEFO} of
        {wait_next_outer, true, false} ->
            hc_data_outer(get(state)),
            info_msg(hc_data_outer, [get(self), get(state)], call_outer_loop, 50);
        _ -> ok
    end.
%% @doc Mark outer stream From as eos; once every outer stream is done,
%% propagate end_of_stream to the parent.
%% Fix: one stripped comment line was restored as a comment.
hcdo_process_eos(From) ->
    M = get(state_of_outer_streams),
    put(state_of_outer_streams, maps:put(From, eos, M)),
    F1 = fun (alive) -> true;
             (eos) -> false
         end,
    NumAlive = length(lists:filter(F1, maps:values(get(state_of_outer_streams)))),
    %% all outer streams dead? move to eos.
    if NumAlive == 0 -> hcdo_send_parent_eos();
       true -> ok
    end.
%% info_msg(hc_data_outer, [get(self), {from,From}, {graph,Graph}, {numAlive,NumAlive}, get(state)], outer_eos_processed, 50).
%% @doc Queue end_of_stream for the parent, flush, and - if the parent
%% has already granted an empty token - deliver immediately; then enter
%% state eos.
%% Fix: the 'true ->' and 'false ->' case patterns were lost in the
%% source and have been restored - confirm against upstream.
hcdo_send_parent_eos() ->
    %% store eos in queue to_parent and flush it
    query_node:queue_put(to_parent, end_of_stream),
    query_node:queue_flush(to_parent),
    %% check if queue from_parent includes empty messages
    case query_node:queue_prepared(from_parent) of
        true ->
            {empty, _} = query_node:queue_read(from_parent),
            Msg = query_node:queue_read(to_parent),
            gen_server:cast(get(parent), Msg),
            info_msg(send_cast, [get(self), {message,Msg}, {to,get(parent)}, {invoker,hcdo_send_parent_eos}, get(state)], message_sent, 30);
        false ->
            %% msg will be processed when the first empty message comes
            %% from_parent.
            ok
    end,
    %% move state to eos
    put(state, eos).
%% @doc Join the current outer graph against the inner index: compute
%% its join-variable values and, on a hit, merge it with every matching
%% inner graph.
%% Fix: two garbled debug info_msg lines were dropped (they referenced
%% malformed terms and could not compile).
hcdo_match_inner_graphs() ->
    Graph = get(outer_graph),
    %% first, make query_node:qn_var_val_map()
    F1 = fun (V) ->
                 hce_get_var_value(V, Graph)
         end,
    JV = lists:map(F1, get(join_vars)),
    II = get(index_inner),
    case maps:is_key(JV, II) of
        true -> hcdo_process_graph(maps:get(JV, II));
        false -> ok
    end.
%% @doc For each matching inner graph: merge it with the outer graph,
%% apply the select predicate and projection, queue the result for the
%% parent, and forward it when an empty token is available.
%% Fix: several garbled debug info_msg lines were dropped (they could
%% not compile); the surviving comment's typos were corrected.
hcdo_process_graph([IG|GList]) ->
    OG = get(outer_graph),
    G = maps:merge(OG, IG),
    put(gp_val, G),
    SP = query_node:eval_select(get(select_pred)),
    case SP of
        true ->
            query_node:eval_project(get(project_list)),
            G1 = get(gp_val),
            query_node:queue_put(to_parent, G1),
            %% send block to parent if everything is prepared
            case query_node:queue_prepared(from_parent) and query_node:queue_prepared(to_parent) of
                true ->
                    {empty, _} = query_node:queue_read(from_parent),
                    Msg = query_node:queue_read(to_parent),
                    gen_server:cast(get(parent), Msg),
                    info_msg(send_cast, [get(self), {message,Msg}, {to,get(parent)}, {invoker,hcdo_process_graph}, get(state)], message_sent, 30);
                false-> ok
            end;
        false -> ok
    end,
    hcdo_process_graph(GList);
hcdo_process_graph([]) -> ok.
%% @doc EUnit entry for the evaluation tests: dispatch on the
%% configured test mode.
hc_eval_test_() ->
hcet_site(b3s_state:get(test_mode)).
%% @doc EUnit fixture for local single-node testing; other test modes
%% run nothing.
%% Fix: the binding of the expected 'propagate' result (R01) was lost
%% in the source, so that assertion now matches loosely; the garbled
%% trailing lines were restored as commented-out assertions.
hcet_site(local1) ->
    Attrs = {attributes, record_info(fields, triple_store)},
    TabDef = [Attrs, {disc_copies, [node()]}],
    info_msg(hcet_load_db, [get(self), TabDef], display_table, 50),
    NDS = node(),
    BSS = {b3s_state, NDS},
    CRC = clm_row_conf,
    RMS = #{1 => NDS},
    CM1 = #{1 => RMS, 2 => RMS},
    put(self, {'1-1-1', node()}),
    {inorder,
     [
      ?_assertMatch(ok, b3s:start()),
      ?_assertMatch(ok, b3s:bootstrap()),
      ?_assertMatch(ok, gen_server:call(BSS, {put, CRC, CM1})),
      %% NOTE(review): the expected value of 'propagate' was lost in the
      %% source; reconstruct it and tighten this match when known.
      ?_assertMatch(_, gen_server:call(BSS, propagate)),
      {generator, fun()-> tp_query_node:hcet_load_db() end},
      {generator, fun()-> hcet_q02() end},
      ?_assertMatch(ok, b3s:stop()),
      ?_assertMatch(ok, b3s:start()),
      ?_assertMatch(ok, b3s:bootstrap()),
      {generator, fun()-> hcet_load_db() end},
      {generator, fun()-> hcet_q03() end},
      {generator, fun()-> hcet_q05() end},
      {generator, fun()-> hcet_q06() end},
      ?_assertMatch(ok, timer:sleep(1000)),
      ?_assertMatch(stopped, mnesia:stop()),
      ?_assertMatch(ok, b3s:stop())
      %% ?_assertMatch(ok, mnesia:start()),
      %% ?_assertMatch({atomic, ok}, mnesia:create_table(triple_store, TabDef)),
      %% ?_assertMatch({atomic, ok}, mnesia:delete_table(triple_store)),
      %% ?_assertMatch(stopped, mnesia:stop())
     ]};
hcet_site(local_two) ->
    [];
hcet_site(_) ->
    [].
%% @doc EUnit generator: one join node over two tp_query_node children
%% (outer "<Japanese>", inner "<Slovenian>") joined on ?prd; evaluates
%% the query and checks the single joined result block plus eos.
hcet_q02() ->
info_msg(hcet_q02, [get(self)], start, 50),
BS = gen_server:call(node_state, {get, b3s_state_pid}),
Tab = gen_server:call(BS, {get, name_of_triple_table}),
QueryNodeId = "3",
QueryId = "2",
SessionId = "1",
Id3 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
JQN3 = mj_query_node:spawn_process(Id3, node()),
TPQN1 = hcet_tpqn1(JQN3),
TPQN2 = hcet_tpqn2(JQN3),
GraphPattern = maps:from_list(
[{"1", {"?id1", eI("<Japanese>"), "?prd", "?obj1"}},
{"2", {"?id2", eI("<Slovenian>"), "?prd", "?obj2"}}]),
SelectPred = none,
ProjectList = none,
ParentPid = self(),
OuterPids = [TPQN1],
InnerPids = [TPQN2],
VarsPositions = #{"?id1" => [{"1", 1}],
"?id2" => [{"2", 1}],
"?prd" => [{"1", 3}, {"2", 3}],
"?obj1" => [{"1", 4}],
"?obj2" => [{"2", 4}]},
JoinVars = ["?prd"],
GP = get_property,
DFO = data_outer,
EOS = end_of_stream,
%% expected joined rows (both triples share predicate "<eat>")
T1 = eT({Tab,"<triple_id_0002>","<Japanese>","<eat>","<fishes>"}),
T2 = eT({Tab,"<triple_id_0003>","<Slovenian>","<eat>","<potatoes>"}),
TP1 = {"?id2", eI("<Slovenian>"), "?prd", "?obj2"},
M1 = {start, QueryNodeId, QueryId, SessionId, JQN3, GraphPattern,
SelectPred, ProjectList, ParentPid, OuterPids, InnerPids,
VarsPositions, JoinVars},
M2 = {eval, []},
R1Map = maps:put("1", T1, maps:new()),
R2Map = maps:put("2", T2, R1Map),
R = {DFO, JQN3, [R2Map,EOS]},
{inorder,
[
?_assertMatch(true, gen_server:call(JQN3, {GP, wait})),
?_assertMatch(ok, gen_server:call(JQN3, M1)),
?_assertMatch(outer, gen_server:call(TPQN1, {GP, inner_outer})),
?_assertMatch(inner, gen_server:call(TPQN2, {GP, inner_outer})),
?_assertMatch(false, gen_server:call(JQN3, {GP, wait})),
?_assertMatch(undefined, gen_server:call(JQN3, {GP, inner_outer})),
?_assertMatch(ok, gen_server:call(JQN3, M2)),
?_assertMatch({_, R}, hcet_send_empty(JQN3, R)),
?_assertMatch(TP1, gen_server:call(TPQN2, {GP, tp})),
?_assertMatch(OuterPids, gen_server:call(JQN3, {GP, outer})),
?_assertMatch(InnerPids, gen_server:call(JQN3, {GP, inner})),
?_assertMatch(GraphPattern, gen_server:call(JQN3, {GP, gp})),
?_assertMatch(ok, gen_server:cast(TPQN1, {stop, self()})),
?_assertMatch(ok, gen_server:cast(TPQN2, {stop, self()})),
?_assertMatch(ok, gen_server:cast(JQN3, {stop, self()}))
]}.
%% Spawn and start the outer tp_query_node ("1-2-1") used by hcet_q02;
%% Pid becomes its parent.
hcet_tpqn1(Pid) ->
    NodeId = "1",
    QueryId = "2",
    SessionId = "1",
    ProcId = list_to_atom(SessionId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode = tp_query_node:spawn_process(ProcId, node()),
    Pattern = {"?id1", eI("<Japanese>"), "?prd", "?obj1"},
    Positions = #{"?id1" => 1, "?prd" => 3, "?obj1" => 4},
    StartMsg = {start, NodeId, QueryId, SessionId, QNode,
                Pattern, none, none,
                Pid, Positions, outer},
    gen_server:call(QNode, StartMsg),
    QNode.
%% Spawn and start the inner tp_query_node ("1-2-2") used by hcet_q02;
%% Pid becomes its parent.
hcet_tpqn2(Pid) ->
    NodeId = "2",
    QueryId = "2",
    SessionId = "1",
    ProcId = list_to_atom(SessionId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode = tp_query_node:spawn_process(ProcId, node()),
    Pattern = {"?id2", eI("<Slovenian>"), "?prd", "?obj2"},
    Positions = #{"?id2" => 1, "?prd" => 3, "?obj2" => 4},
    StartMsg = {start, NodeId, QueryId, SessionId, QNode,
                Pattern, none, none,
                Pid, Positions, inner},
    gen_server:call(QNode, StartMsg),
    QNode.
%% @doc Example triple data used by the load/query tests: a list of
%% {triple_store, Id, Subject, Predicate, Object} rows.
%% Fix: the opening list bracket and the organization rows id8-id14
%% were lost in the source. They have been reconstructed from the
%% isLocatedIn rows below (up, ul, ijs, yj, ku, ou, tu) and from the
%% test expectation that "id11" is {.., "yj", "type", "corporation"} -
%% confirm against upstream.
example_table() ->
    [
     %% countries and cities
     {triple_store, "id1", "japan", "type", "country"},
     {triple_store, "id2", "slovenia", "type", "country"},
     {triple_store, "id3", "koper", "type", "city"},
     {triple_store, "id4", "ljubljana","type", "city"},
     {triple_store, "id5", "tokyo", "type", "city"},
     {triple_store, "id6", "kyoto", "type", "city"},
     {triple_store, "id7", "osaka", "type", "city"},
     %% organizations (NOTE(review): reconstructed rows)
     {triple_store, "id8", "up", "type", "university"},   % uni primorska
     {triple_store, "id9", "ul", "type", "university"},   % uni ljubljana
     {triple_store, "id10", "ijs", "type", "institute"},
     {triple_store, "id11", "yj", "type", "corporation"}, % yahoo! japan
     {triple_store, "id12", "ku", "type", "university"},  % kyoto uni
     {triple_store, "id13", "ou", "type", "university"},  % osaka uni
     {triple_store, "id14", "tu", "type", "university"},  % tokyo uni
     %% persons
     {triple_store, "id15", "shou", "type", "person"},
     {triple_store, "id16", "yoshio", "type", "person"},
     {triple_store, "id17", "sakura", "type", "person"},
     {triple_store, "id18", "luka", "type", "person"},
     {triple_store, "id19", "jan", "type", "person"},
     {triple_store, "id20", "nika", "type", "person"},
     {triple_store, "id57", "marko", "type", "person"},
     %% hasCapital / isLocatedIn / livesIn / worksAt / graduatedFrom / age
     {triple_store, "id21", "japan", "hasCapital", "tokyo"},
     {triple_store, "id22", "slovenia", "hasCapital", "ljubljana"},
     {triple_store, "id23", "tokyo", "isLocatedIn", "japan"},
     {triple_store, "id24", "kyoto", "isLocatedIn", "japan"},
     {triple_store, "id25", "osaka", "isLocatedIn", "japan"},
     {triple_store, "id26", "koper", "isLocatedIn", "slovenia"},
     {triple_store, "id27", "ljubljana","isLocatedIn", "slovenia"},
     {triple_store, "id28", "up", "isLocatedIn", "koper"},
     {triple_store, "id29", "ul", "isLocatedIn", "ljubljana"},
     {triple_store, "id30", "ijs", "isLocatedIn", "ljubljana"},
     {triple_store, "id31", "yj", "isLocatedIn", "tokyo"},
     {triple_store, "id32", "ku", "isLocatedIn", "kyoto"},
     {triple_store, "id33", "ou", "isLocatedIn", "osaka"},
     {triple_store, "id34", "tu", "isLocatedIn", "tokyo"},
     {triple_store, "id35", "shou", "livesIn", "tokyo"},
     {triple_store, "id36", "yoshio", "livesIn", "tokyo"},
     {triple_store, "id37", "sakura", "livesIn", "kyoto"},
     {triple_store, "id38", "luka", "livesIn", "ljubljana"},
     {triple_store, "id39", "jan", "livesIn", "koper"},
     {triple_store, "id40", "nika", "livesIn", "ljubljana"},
     {triple_store, "id41", "marko", "livesIn", "ljubljana"},
     {triple_store, "id42", "shou", "worksAt", "yj"},
     {triple_store, "id43", "shou", "worksAt", "ku"},
     {triple_store, "id44", "yoshio", "worksAt", "yj"},
     {triple_store, "id45", "sakura", "worksAt", "ku"},
     {triple_store, "id46", "luka", "worksAt", "up"},
     {triple_store, "id47", "luka", "worksAt", "ijs"},
     {triple_store, "id48", "jan", "worksAt", "up"},
     {triple_store, "id49", "nika", "worksAt", "ijs"},
     {triple_store, "id50", "marko", "worksAt", "ijs"},
     {triple_store, "id51", "shou", "graduatedFrom", "ou"},
     {triple_store, "id52", "yoshio", "graduatedFrom", "tu"},
     {triple_store, "id53", "sakura", "graduatedFrom", "ku"},
     {triple_store, "id54", "luka", "graduatedFrom", "ul"},
     {triple_store, "id55", "jan", "graduatedFrom", "up"},
     {triple_store, "id56", "nika", "graduatedFrom", "ul"},
     {triple_store, "id58", "shou", "age", "25"},
     {triple_store, "id59", "yoshio", "age", "36"},
     {triple_store, "id60", "sakura", "age", "27"},
     {triple_store, "id61", "luka", "age", "38"},
     {triple_store, "id62", "jan", "age", "45"},
     {triple_store, "id63", "nika", "age", "22"},
     {triple_store, "id64", "marko", "age", "30"}].
%% @doc Load the example data set into the configured back end. The
%% hard-coded 2 selects postgres; change it to 1 (bdbnif) or anything
%% else (mnesia/qlc) to exercise the other loaders.
hcet_load_db() ->
case 2 of
2 -> hcet_load_db_postgres();
1 -> hcet_load_db_bdbnif();
_ -> hcet_load_db_mnesia_qlc()
end.
%% @doc Load example_table/0 into the postgres back end via
%% db_interface, rebuilding the string-id table first, then verify a
%% few known rows with triple-pattern lookups.
%% Fix: five stripped comment lines (disabled assertions) were left as
%% bare text inside the test list and have been restored as comments.
hcet_load_db_postgres() ->
    info_msg(hcet_load_db_postgres, [get(self)], start, 50),
    BS = gen_server:call(node_state, {get, b3s_state_pid}),
    Tab = db_interface:dot_get_tn(),
    F1 = fun (X) ->
                 {_, Tid, Sbj, Prd, Obj} = X,
                 D = eT({Tab, Tid, Sbj, Prd, Obj}),
                 db_interface:db_write(D)
         end,
    %% rebuild the string-id table and its index
    SI = string_id,
    SIT = gen_server:call(BS, {get, name_of_string_id_table}),
    gen_server:call(SI, {put, sid_table_name, SIT}),
    gen_server:call(SI, {put, di_cursor__, undefined}),
    gen_server:call(SI, delete_table),
    gen_server:call(SI, {create_table, SIT}),
    gen_server:call(SI, make_index),
    erase(sid_table_name),
    erase(sid_max_id),
    %% (re)initialize the triple table and load the rows
    ok = db_interface:db_close(),
    ok = db_interface:db_init(),
    ok = db_interface:db_close(),
    ok = lists:foreach(F1, example_table()),
    ok = db_interface:db_add_index(),
    ok = db_interface:db_close(),
    TP01 = eTP({"id1", "?s", "?p", "?o"}),
    TP02 = eTP({"id11", "?s", "?p", "?o"}),
    TP03 = eTP({"id56", "?s", "?p", "?o"}),
    R01 = eT({Tab, "id1", "japan", "type", "country"}),
    R02 = eT({Tab, "id11", "yj", "type", "corporation"}),
    R03 = eT({Tab, "id56", "nika", "graduatedFrom", "ul"}),
    EOS = end_of_stream,
    {inorder,
     [
      %% ?_assertMatch(ok, db_interface:db_close()),
      %% ?_assertMatch(ok, db_interface:db_init()),
      %% ?_assertMatch(ok, lists:foreach(F1, example_table())),
      %% ?_assertMatch(ok, db_interface:db_add_index()),
      %% ?_assertMatch(ok, db_interface:db_close()),
      ?_assertMatch(ok, db_interface:db_open_tp(TP01)),
      ?_assertMatch(R01, db_interface:db_next()),
      ?_assertMatch(EOS, db_interface:db_next()),
      ?_assertMatch(ok, db_interface:db_open_tp(TP02)),
      ?_assertMatch(R02, db_interface:db_next()),
      ?_assertMatch(EOS, db_interface:db_next()),
      ?_assertMatch(ok, db_interface:db_open_tp(TP03)),
      ?_assertMatch(R03, db_interface:db_next()),
      ?_assertMatch(EOS, db_interface:db_next()),
      ?_assertMatch(ok, db_interface:db_close())
     ]}.
%% @doc Load example_table/0 into the Berkeley-DB (NIF) back end and
%% verify a few known rows with triple-pattern lookups. Unlike the
%% postgres loader, rows are written unencoded.
hcet_load_db_bdbnif() ->
info_msg(hcet_load_db_bdbnif, [get(self)], start, 50),
Tab = db_interface:dot_get_tn(),
Fsto = fun (X) ->
{_, Tid, Sbj, Prd, Obj} = X,
D = {Tab, Tid, Sbj, Prd, Obj},
db_interface:db_write(D)
end,
ok = db_interface:db_init(),
ok = db_interface:db_add_index(),
lists:foreach(Fsto, example_table()),
ok = db_interface:db_close(),
TP01 = {"id1", "?s", "?p", "?o"},
TP02 = {"id11", "?s", "?p", "?o"},
TP03 = {"id56", "?s", "?p", "?o"},
R01 = {Tab, "id1", "japan", "type", "country"},
R02 = {Tab, "id11", "yj", "type", "corporation"},
R03 = {Tab, "id56", "nika", "graduatedFrom", "ul"},
EOS = end_of_stream,
{inorder,
[
?_assertMatch(ok, db_interface:db_open_tp(TP01)),
?_assertMatch(R01, db_interface:db_next()),
?_assertMatch(EOS, db_interface:db_next()),
?_assertMatch(ok, db_interface:db_open_tp(TP02)),
?_assertMatch(R02, db_interface:db_next()),
?_assertMatch(EOS, db_interface:db_next()),
?_assertMatch(ok, db_interface:db_open_tp(TP03)),
?_assertMatch(R03, db_interface:db_next()),
?_assertMatch(EOS, db_interface:db_next()),
?_assertMatch(ok, db_interface:db_close())
]}.
%% @doc Load example_table/0 into mnesia and run a few qlc row-count
%% queries against it.
%% Fix: many stripped comment lines (scratch code and disabled
%% assertions) were left as bare text and have been restored as
%% comments. NOTE(review): the expected count 57 for Q1 predates the
%% reconstructed rows id8-id14 in example_table/0 - revisit if that
%% reconstruction is confirmed (64 rows would then be loaded).
hcet_load_db_mnesia_qlc() ->
    info_msg(hcet_load_db, [get(self)], start, 50),
    Attrs = {attributes, record_info(fields, triple_store)},
    TabDef = [Attrs, {disc_copies, [node()]}],
    info_msg(hcet_load_db, [get(self), TabDef], display_table, 50),
    ET = example_table(),
    %% AGE = fun(X) -> application:get_env(b3s, X) end,
    F = fun() ->
                lists:foreach(fun mnesia:write/1, ET)
        end,
    MW = fun() -> mnesia:transaction(F) end,
    %% SetR = sets:from_list(...),
    %% RL = sets:to_list(sets:subtract(SetR, SetL)),
    %% {RL, LR}
    Q1 = qlc:q([X||X<-mnesia:table(triple_store)]),
    Q2 = qlc:q([X||X<-mnesia:table(triple_store), X#triple_store.s=="koper"]),
    Q3 = qlc:q([X||X<-mnesia:table(triple_store), X#triple_store.p=="livesIn"]),
    Q4 = qlc:q([X||X<-mnesia:table(triple_store),
                   X#triple_store.p=="worksAt",
                   X#triple_store.o=="yj"]),
    %% R2 = [{triple_store,"id26","koper","isLocatedIn","slovenia"}, ...]
    %% R3 = [{triple_store,"id35","shou","livesIn","tokyo"}, ...]
    %% R4 = [{triple_store,"id41","shou","worksAt","yj"}, ...]
    %% c("tm.erl"), tm:do(qlc:q([X||X<-mnesia:table(triple_store)])).
    info_msg(hcet_load_db, [get(self)], testing, 50),
    {inorder,
     [
      %% ?_assertMatch(stopped, mnesia:stop()),
      %% ?_assertMatch(ok, mnesia:create_schema([node()])),
      ?_assertMatch(ok, mnesia:start()),
      ?_assertMatch(ok, timer:sleep(1000)),
      ?_assertMatch({atomic, ok}, mnesia:delete_table(triple_store)),
      ?_assertMatch({atomic, ok}, mnesia:create_table(triple_store, TabDef)),
      %% ?_assertMatch(ok, db_interface:db_init()),
      ?_assertMatch(ok, db_interface:db_add_index()),
      ?_assertMatch({atomic, ok}, MW()),
      ?_assertMatch(57, length(tm:do(Q1))),
      ?_assertMatch(2, length(tm:do(Q2))),
      ?_assertMatch(7, length(tm:do(Q3))),
      ?_assertMatch(2, length(tm:do(Q4))),
      ?_assertMatch(stopped, mnesia:stop())
     ]}.
%% @doc Send an empty flow-control token to query node QN (posing as
%% its parent), block for the next message to this process, and return
%% it. R is only the expected value used for logging.
hcet_send_empty(QN, R) ->
gen_server:cast(QN, {empty, self()}),
receive
M -> M
end,
info_msg(hcet_send_empty, [get(self), {from,QN}, {received, M}, {expected,R}, get(state)], data_received, 30),
M.
%% @doc Fetch the full property list (process dictionary) of query
%% node QN; tests assert on its length.
hcet_get_PD(QN) ->
M = gen_server:call(QN, {get_property, all}),
info_msg(hcet_get_PD, [get(self), {pid,QN}, {all,M}, length(M)], response_property_all_received, 50),
M.
%% @doc Encode a string into its integer id via the string_id server.
eI(X) -> string_id:get_id(X).
%% Encode a 5-tuple triple-store row: keep the table name and run the
%% {Id, Subject, Predicate, Object} part through string_id encoding.
eT({Tab, Id, Sbj, Prd, Obj}) ->
    Encoded = string_id:encode_triple({Id, Sbj, Prd, Obj}),
    list_to_tuple([Tab | tuple_to_list(Encoded)]).
%% @doc Encode a triple pattern via the string_id server.
eTP(X) -> string_id:encode_triple_pattern(X).
%% @doc EUnit generator: a left-deep plan of two join nodes -
%% JQN3 joins {slovenia hasCapital ?x} with {?y livesIn ?x}, and JQN5
%% joins that result with {?y worksAt ijs}, filtering out ?y == luka
%% and projecting ?y. Expects the nika and marko bindings.
%% Fix: stripped comment lines were restored as comments and a garbled
%% info_msg line referencing an unbound variable was dropped.
hcet_q03() ->
    %% query:
    %%   slovenia hasCapital ?x . ?y livesIn ?x . ?y worksAt ijs
    info_msg(hcet_q03, [get(self)], start, 50),
    Tab = db_interface:dot_get_tn(),
    QueryNodeId3 = "3",
    QueryId = "3",
    SessionId = "1",
    Id3 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId3),
    JQN3 = mj_query_node:spawn_process(Id3, node()),
    QueryNodeId5 = "5",
    Id5 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId5),
    JQN5 = mj_query_node:spawn_process(Id5, node()),
    TPQN1 = hcet_tpqn3(JQN3),
    TPQN2 = hcet_tpqn4(JQN3),
    TPQN4 = hcet_tpqn5(JQN5),
    %% first join (JQN3) over ?x
    GraphPattern3 = maps:from_list(
                      [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
                       {"2", {"?i2", "?y", eI("livesIn"), "?x"}}]),
    SelectPred3 = none,
    ProjectList3 = none,
    ParentPid3 = JQN5,
    OuterPids3 = [TPQN1],
    InnerPids3 = [TPQN2],
    VarsPositions3 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}]},
    JoinVars3 = ["?x"],
    %% second join (JQN5) over ?y
    GraphPattern5 = maps:from_list(
                      [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
                       {"2", {"?i2", "?y", eI("livesIn"), "?x"}},
                       {"4", {"?i4", "?y", eI("worksAt"), eI("ijs")}}]),
    SelectPred5 = {lnot, {"?y", equal, eI("luka")}},
    ProjectList5 = ["?y"],
    ParentPid5 = self(),
    OuterPids5 = [JQN3],
    InnerPids5 = [TPQN4],
    VarsPositions5 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}, {"4", 2}]},
    JoinVars5 = ["?y"],
    T4 = eT({Tab, "id40", "nika", "livesIn", "ljubljana"}),
    T5 = eT({Tab, "id49", "nika", "worksAt", "ijs"}),
    T6 = eT({Tab, "id41", "marko", "livesIn", "ljubljana"}),
    T7 = eT({Tab, "id50", "marko", "worksAt", "ijs"}),
    %% messages for JQN3 and JQN5
    DFO = data_outer,
    EOS = end_of_stream,
    S3 = {start, QueryNodeId3, QueryId, SessionId, JQN3, GraphPattern3,
          SelectPred3, ProjectList3, ParentPid3, OuterPids3, InnerPids3,
          VarsPositions3, JoinVars3},
    S5 = {start, QueryNodeId5, QueryId, SessionId, JQN5, GraphPattern5,
          SelectPred5, ProjectList5, ParentPid5, OuterPids5, InnerPids5,
          VarsPositions5, JoinVars5},
    E5 = {eval, []},
    R2Map = maps:put("4", T5, maps:put("2", T4, maps:new())),
    R3Map = maps:put("4", T7, maps:put("2", T6, maps:new())),
    R1 = {DFO, JQN5, [R2Map,R3Map,EOS]},
    {inorder,
     [
      ?_assertMatch(ok, mnesia:start()),
      ?_assertMatch(ok, timer:sleep(1000)),
      ?_assertMatch(ok, gen_server:call(JQN3, S3)),
      ?_assertMatch(ok, gen_server:call(JQN5, S5)),
      ?_assertMatch(35, length(hcet_get_PD(TPQN1))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN2))),
      ?_assertMatch(43, length(hcet_get_PD(JQN3))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN4))),
      ?_assertMatch(43, length(hcet_get_PD(JQN5))),
      ?_assertMatch(ok, gen_server:call(JQN5, E5)),
      ?_assertMatch({'$gen_cast', R1}, hcet_send_empty(JQN5, R1))
     ]}.
%% @doc Spawn/start the outer tp_query_node ("1-3-1") for hcet_q03:
%% pattern {?i1, slovenia, hasCapital, ?x}, parent Pid.
hcet_tpqn3(Pid) ->
QueryNodeId = "1",
QueryId = "3",
SessionId = "1",
Id = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
TPQN1 = tp_query_node:spawn_process(Id, node()),
TriplePattern = {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
SelectPred = none,
ProjectList = none,
ParentPid = Pid,
VarsPositions = #{"?i1" => 1, "?x" => 4},
M = {start, QueryNodeId, QueryId, SessionId, TPQN1, TriplePattern,
SelectPred, ProjectList, ParentPid, VarsPositions, outer},
gen_server:call(TPQN1, M),
TPQN1.
%% @doc Spawn/start the inner tp_query_node ("1-3-2") for hcet_q03:
%% pattern {?i2, ?y, livesIn, ?x}, parent Pid.
hcet_tpqn4(Pid) ->
QueryNodeId = "2",
QueryId = "3",
SessionId = "1",
Id = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
TPQN2 = tp_query_node:spawn_process(Id, node()),
TriplePattern = {"?i2", "?y", eI("livesIn"), "?x"},
SelectPred = none,
ProjectList = none,
ParentPid = Pid,
VarsPositions = #{"?i2" => 1, "?y" => 2, "?x" => 4},
M = {start, QueryNodeId, QueryId, SessionId, TPQN2, TriplePattern,
SelectPred, ProjectList, ParentPid, VarsPositions, inner},
gen_server:call(TPQN2, M),
TPQN2.
%% @doc Spawn/start the inner tp_query_node ("1-3-4") for hcet_q03:
%% pattern {?i4, ?y, worksAt, ijs}, parent Pid.
hcet_tpqn5(Pid) ->
QueryNodeId = "4",
QueryId = "3",
SessionId = "1",
Id = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId),
TPQN4 = tp_query_node:spawn_process(Id, node()),
TriplePattern = {"?i4", "?y", eI("worksAt"), eI("ijs")},
SelectPred = none,
ProjectList = none,
ParentPid = Pid,
VarsPositions = #{"?i4" => 1, "?y" => 2},
M = {start, QueryNodeId, QueryId, SessionId, TPQN4, TriplePattern,
SelectPred, ProjectList, ParentPid, VarsPositions, inner},
gen_server:call(TPQN4, M),
TPQN4.
hcet_q05() ->
    %% EUnit fixture: evaluates query 5 with TWO tp_query_node processes per
    %% triple pattern, joined by two mj_query_node processes (JQN3, JQN5),
    %% and checks the result chunks streamed back to the test process.
    %% query: using two query nodes for each tp
    %% slovenia hasCapital ?x
    %% ?y worksAt
    info_msg(hcet_q05, [get(self)], start, 50),
    Tab = db_interface:dot_get_tn(),
    QueryNodeId3 = "3",
    QueryId = "5",
    SessionId = "1",
    Id3 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId3),
    JQN3 = mj_query_node:spawn_process(Id3, node()),
    QueryNodeId5 = "5",
    Id5 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId5),
    JQN5 = mj_query_node:spawn_process(Id5, node()),
    %% two tp query nodes for each of the three triple patterns
    TPQN1 = hcet_tp5qn3(JQN3),
    TPQN1a = hcet_tp5qn3a(JQN3),
    TPQN2 = hcet_tp5qn4(JQN3),
    TPQN2a = hcet_tp5qn4a(JQN3),
    TPQN4 = hcet_tp5qn5(JQN5),
    TPQN4a = hcet_tp5qn5a(JQN5),
    %% first join (JQN3): outer tp "1" x inner tp "2" on ?x
    GraphPattern3 = maps:from_list(
        [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
         {"2", {"?i2", "?y", eI("livesIn"), "?x"}}]),
    SelectPred = none,
    ProjectList = none,
    ParentPid3 = JQN5,
    OuterPids3 = [TPQN1,TPQN1a],
    InnerPids3 = [TPQN2,TPQN2a],
    VarsPositions3 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}]},
    JoinVars3 = ["?x"],
    %% second join
    GraphPattern5 = maps:from_list(
        [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
         {"2", {"?i2", "?y", eI("livesIn"), "?x"}},
         {"4", {"?i4", "?y", eI("worksAt"), eI("ijs")}}]),
    ParentPid5 = self(),
    OuterPids5 = [JQN3],
    InnerPids5 = [TPQN4,TPQN4a],
    VarsPositions5 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}, {"4", 2}]},
    JoinVars5 = ["?y"],
    %% expected triples taken from the test data set
    T1 = eT({Tab, "id22", "slovenia", "hasCapital", "ljubljana"}),
    T2 = eT({Tab, "id38", "luka", "livesIn", "ljubljana"}),
    T3 = eT({Tab, "id47", "luka", "worksAt", "ijs"}),
    T4 = eT({Tab, "id40", "nika", "livesIn", "ljubljana"}),
    T5 = eT({Tab, "id49", "nika", "worksAt", "ijs"}),
    T6 = eT({Tab, "id41", "marko", "livesIn", "ljubljana"}),
    T7 = eT({Tab, "id50", "marko", "worksAt", "ijs"}),
    %% messages for JQN3 and JQN5
    DFO = data_outer,
    EOS = end_of_stream,
    S3 = {start, QueryNodeId3, QueryId, SessionId, JQN3, GraphPattern3,
          SelectPred, ProjectList, ParentPid3, OuterPids3, InnerPids3,
          VarsPositions3, JoinVars3},
    S5 = {start, QueryNodeId5, QueryId, SessionId, JQN5, GraphPattern5,
          SelectPred, ProjectList, ParentPid5, OuterPids5, InnerPids5,
          VarsPositions5, JoinVars5},
    E5 = {eval, []},
    R1Map = maps:put("4", T3, maps:put("2", T2, maps:put("1", T1, maps:new()))),
    R2Map = maps:put("4", T5, maps:put("2", T4, maps:put("1", T1, maps:new()))),
    R3Map = maps:put("4", T7, maps:put("2", T6, maps:put("1", T1, maps:new()))),
    R1 = {DFO, JQN5, [R2Map,R2Map,R3Map,R3Map,R1Map]},
    R2 = {DFO, JQN5, [R1Map,R2Map,R2Map,R3Map,R3Map]},
    R3 = {DFO, JQN5, [R1Map,R1Map,R2Map,R2Map,R3Map]},
    R4 = {DFO, JQN5, [R3Map,R1Map,R1Map,R2Map,R2Map]},
    R5 = {DFO, JQN5, [R3Map,R3Map,R1Map,R1Map,EOS]},
    %% NOTE(review): the original logged an unbound variable RE here; R5 is
    %% the analogous final chunk (ends with end_of_stream) -- confirm.
    info_msg(hcet_q05, [get(self), R1, R2, R5], before_tests, 50),
    {inorder,
     [
      ?_assertMatch(ok, mnesia:start()),
      ?_assertMatch(ok, timer:sleep(1000)),
      ?_assertMatch(ok, gen_server:call(JQN3, S3)),
      ?_assertMatch(ok, gen_server:call(JQN5, S5)),
      ?_assertMatch(35, length(hcet_get_PD(TPQN1))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN2))),
      ?_assertMatch(43, length(hcet_get_PD(JQN3))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN4))),
      ?_assertMatch(43, length(hcet_get_PD(JQN5))),
      ?_assertMatch(ok, gen_server:call(JQN5, E5)),
      %% works only with block_size=5 !!! (iztok, 2016/01/31)
      ?_assertMatch({'$gen_cast', R1}, hcet_send_empty(JQN5, R1)),
      ?_assertMatch({'$gen_cast', R2}, hcet_send_empty(JQN5, R2)),
      ?_assertMatch({'$gen_cast', R3}, hcet_send_empty(JQN5, R3)),
      ?_assertMatch({'$gen_cast', R4}, hcet_send_empty(JQN5, R4)),
      ?_assertMatch({'$gen_cast', R5}, hcet_send_empty(JQN5, R5))
     ]}.
hcet_tp5qn3(Pid) ->
    %% Spawn tp_query_node "1" of query "5" (session "1") in the outer role,
    %% send it its start message with Pid as parent, and return its process.
    NodeId  = "1",
    QueryId = "5",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
               none, none, Pid,
               #{"?i1" => 1, "?x" => 4}, outer},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp5qn3a(Pid) ->
    %% Spawn the second tp_query_node "1" replica (query id "5a") in the
    %% outer role, start it with Pid as parent, and return its process.
    NodeId  = "1",
    QueryId = "5a",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
               none, none, Pid,
               #{"?i1" => 1, "?x" => 4}, outer},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp5qn4(Pid) ->
    %% Spawn tp_query_node "2" of query "5" (session "1") in the inner role,
    %% send it its start message with Pid as parent, and return its process.
    NodeId  = "2",
    QueryId = "5",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i2", "?y", eI("livesIn"), "?x"},
               none, none, Pid,
               #{"?i2" => 1, "?y" => 2, "?x" => 4}, inner},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp5qn4a(Pid) ->
    %% Spawn the second tp_query_node "2" replica (query id "5a") in the
    %% inner role, start it with Pid as parent, and return its process.
    NodeId  = "2",
    QueryId = "5a",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i2", "?y", eI("livesIn"), "?x"},
               none, none, Pid,
               #{"?i2" => 1, "?y" => 2, "?x" => 4}, inner},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp5qn5(Pid) ->
    %% Spawn tp_query_node "4" of query "5" (session "1") in the inner role,
    %% send it its start message with Pid as parent, and return its process.
    NodeId  = "4",
    QueryId = "5",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i4", "?y", eI("worksAt"), eI("ijs")},
               none, none, Pid,
               #{"?i4" => 1, "?y" => 2}, inner},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp5qn5a(Pid) ->
    %% Spawn the second tp_query_node "4" replica (query id "5a") in the
    %% inner role, start it with Pid as parent, and return its process.
    NodeId  = "4",
    QueryId = "5a",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i4", "?y", eI("worksAt"), eI("ijs")},
               none, none, Pid,
               #{"?i4" => 1, "?y" => 2}, inner},
    gen_server:call(QNode, Start),
    QNode.
hcet_q06() ->
    %% EUnit fixture: evaluates query 6 with THREE tp_query_node processes per
    %% triple pattern, joined by two mj_query_node processes (JQN3, JQN5),
    %% and checks the result chunks streamed back to the test process.
    %% query: using three query nodes for each tp
    %% slovenia hasCapital ?x
    %% ?y worksAt
    info_msg(hcet_q06, [get(self)], start, 50),
    Tab = db_interface:dot_get_tn(),
    QueryNodeId3 = "3",
    QueryId = "6",
    SessionId = "1",
    Id3 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId3),
    JQN3 = mj_query_node:spawn_process(Id3, node()),
    QueryNodeId5 = "5",
    Id5 = list_to_atom(SessionId++"-"++QueryId++"-"++QueryNodeId5),
    JQN5 = mj_query_node:spawn_process(Id5, node()),
    %% three tp query nodes for each of the three triple patterns
    TPQN1 = hcet_tp6qn3(JQN3),
    TPQN1a = hcet_tp6qn3a(JQN3),
    TPQN1b = hcet_tp6qn3b(JQN3),
    TPQN2 = hcet_tp6qn4(JQN3),
    TPQN2a = hcet_tp6qn4a(JQN3),
    TPQN2b = hcet_tp6qn4b(JQN3),
    TPQN4 = hcet_tp6qn5(JQN5),
    TPQN4a = hcet_tp6qn5a(JQN5),
    TPQN4b = hcet_tp6qn5b(JQN5),
    %% first join (JQN3): outer tp "1" x inner tp "2" on ?x
    GraphPattern3 = maps:from_list(
        [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
         {"2", {"?i2", "?y", eI("livesIn"), "?x"}}]),
    SelectPred = none,
    ProjectList = none,
    ParentPid3 = JQN5,
    OuterPids3 = [TPQN1,TPQN1a,TPQN1b],
    InnerPids3 = [TPQN2,TPQN2a,TPQN2b],
    VarsPositions3 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}]},
    JoinVars3 = ["?x"],
    %% second join
    GraphPattern5 = maps:from_list(
        [{"1", {"?i1", eI("slovenia"), eI("hasCapital"), "?x"}},
         {"2", {"?i2", "?y", eI("livesIn"), "?x"}},
         {"4", {"?i4", "?y", eI("worksAt"), eI("ijs")}}]),
    ParentPid5 = self(),
    OuterPids5 = [JQN3],
    InnerPids5 = [TPQN4,TPQN4a,TPQN4b],
    VarsPositions5 = #{"?i1" => [{"1", 1}],
                       "?i2" => [{"2", 1}],
                       "?i4" => [{"4", 1}],
                       "?x" => [{"1", 4}, {"2", 4}],
                       "?y" => [{"2", 2}, {"4", 2}]},
    JoinVars5 = ["?y"],
    %% expected triples taken from the test data set
    T1 = eT({Tab, "id22", "slovenia", "hasCapital", "ljubljana"}),
    T2 = eT({Tab, "id38", "luka", "livesIn", "ljubljana"}),
    T3 = eT({Tab, "id47", "luka", "worksAt", "ijs"}),
    T4 = eT({Tab, "id40", "nika", "livesIn", "ljubljana"}),
    T5 = eT({Tab, "id49", "nika", "worksAt", "ijs"}),
    T6 = eT({Tab, "id41", "marko", "livesIn", "ljubljana"}),
    T7 = eT({Tab, "id50", "marko", "worksAt", "ijs"}),
    %% messages for JQN3 and JQN5
    DFO = data_outer,
    EOS = end_of_stream,
    S3 = {start, QueryNodeId3, QueryId, SessionId, JQN3, GraphPattern3,
          SelectPred, ProjectList, ParentPid3, OuterPids3, InnerPids3,
          VarsPositions3, JoinVars3},
    S5 = {start, QueryNodeId5, QueryId, SessionId, JQN5, GraphPattern5,
          SelectPred, ProjectList, ParentPid5, OuterPids5, InnerPids5,
          VarsPositions5, JoinVars5},
    E5 = {eval, []},
    R1Map = maps:put("4", T3, maps:put("2", T2, maps:put("1", T1, maps:new()))),
    R2Map = maps:put("4", T5, maps:put("2", T4, maps:put("1", T1, maps:new()))),
    R3Map = maps:put("4", T7, maps:put("2", T6, maps:put("1", T1, maps:new()))),
    R1 = {DFO, JQN5, [R2Map,R2Map,R2Map,R3Map,R3Map]},
    R2 = {DFO, JQN5, [R3Map,R1Map,R1Map,R1Map,R2Map]},
    R3 = {DFO, JQN5, [R2Map,R2Map,R3Map,R3Map,R3Map]},
    R4 = {DFO, JQN5, [R1Map,R1Map,R1Map,R2Map,R2Map]},
    R5 = {DFO, JQN5, [R2Map,R3Map,R3Map,R3Map,R1Map]},
    R6 = {DFO, JQN5, [R1Map,R1Map,R2Map,R2Map,R2Map]},
    R7 = {DFO, JQN5, [R3Map,R3Map,R3Map,R1Map,R1Map]},
    R8 = {DFO, JQN5, [R1Map,R2Map,R2Map,R2Map,R3Map]},
    R9 = {DFO, JQN5, [R3Map,R3Map,R1Map,R1Map,R1Map]},
    RE = {DFO, JQN5, [R1Map,EOS]},
    info_msg(hcet_q06, [get(self), R1, RE], before_tests, 50),
    {inorder,
     [
      ?_assertMatch(ok, mnesia:start()),
      ?_assertMatch(ok, timer:sleep(1000)),
      ?_assertMatch(ok, gen_server:call(JQN3, S3)),
      ?_assertMatch(ok, gen_server:call(JQN5, S5)),
      ?_assertMatch(35, length(hcet_get_PD(TPQN1))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN2))),
      ?_assertMatch(43, length(hcet_get_PD(JQN3))),
      ?_assertMatch(35, length(hcet_get_PD(TPQN4))),
      ?_assertMatch(43, length(hcet_get_PD(JQN5))),
      ?_assertMatch(ok, gen_server:call(JQN5, E5)),
      ?_assertMatch({'$gen_cast', R1}, hcet_send_empty(JQN5, R1)),
      ?_assertMatch({'$gen_cast', R2}, hcet_send_empty(JQN5, R2)),
      ?_assertMatch({'$gen_cast', R3}, hcet_send_empty(JQN5, R3)),
      ?_assertMatch({'$gen_cast', R4}, hcet_send_empty(JQN5, R4)),
      ?_assertMatch({'$gen_cast', R5}, hcet_send_empty(JQN5, R5)),
      ?_assertMatch({'$gen_cast', R6}, hcet_send_empty(JQN5, R6)),
      ?_assertMatch({'$gen_cast', R7}, hcet_send_empty(JQN5, R7)),
      ?_assertMatch({'$gen_cast', R8}, hcet_send_empty(JQN5, R8)),
      ?_assertMatch({'$gen_cast', R9}, hcet_send_empty(JQN5, R9)),
      ?_assertMatch({'$gen_cast', R1}, hcet_send_empty(JQN5, R1)),
      ?_assertMatch({'$gen_cast', R2}, hcet_send_empty(JQN5, R2)),
      ?_assertMatch({'$gen_cast', R3}, hcet_send_empty(JQN5, R3)),
      ?_assertMatch({'$gen_cast', R4}, hcet_send_empty(JQN5, R4)),
      ?_assertMatch({'$gen_cast', R5}, hcet_send_empty(JQN5, R5)),
      ?_assertMatch({'$gen_cast', R6}, hcet_send_empty(JQN5, R6)),
      ?_assertMatch({'$gen_cast', R7}, hcet_send_empty(JQN5, R7)),
      ?_assertMatch({'$gen_cast', RE}, hcet_send_empty(JQN5, RE))
     ]}.
hcet_tp6qn3(Pid) ->
    %% Spawn tp_query_node "1" of query "6" (session "1") in the outer role,
    %% send it its start message with Pid as parent, and return its process.
    NodeId  = "1",
    QueryId = "6",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
               none, none, Pid,
               #{"?i1" => 1, "?x" => 4}, outer},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp6qn3a(Pid) ->
    %% Spawn the second tp_query_node "1" replica (query id "6a") in the
    %% outer role, start it with Pid as parent, and return its process.
    NodeId  = "1",
    QueryId = "6a",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
               none, none, Pid,
               #{"?i1" => 1, "?x" => 4}, outer},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp6qn3b(Pid) ->
    %% Spawn the third tp_query_node "1" replica (query id "6b") in the
    %% outer role, start it with Pid as parent, and return its process.
    NodeId  = "1",
    QueryId = "6b",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i1", eI("slovenia"), eI("hasCapital"), "?x"},
               none, none, Pid,
               #{"?i1" => 1, "?x" => 4}, outer},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp6qn4(Pid) ->
    %% Spawn tp_query_node "2" of query "6" (session "1") in the inner role,
    %% send it its start message with Pid as parent, and return its process.
    NodeId  = "2",
    QueryId = "6",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i2", "?y", eI("livesIn"), "?x"},
               none, none, Pid,
               #{"?i2" => 1, "?y" => 2, "?x" => 4}, inner},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp6qn4a(Pid) ->
    %% Spawn the second tp_query_node "2" replica (query id "6a") in the
    %% inner role, start it with Pid as parent, and return its process.
    NodeId  = "2",
    QueryId = "6a",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i2", "?y", eI("livesIn"), "?x"},
               none, none, Pid,
               #{"?i2" => 1, "?y" => 2, "?x" => 4}, inner},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp6qn4b(Pid) ->
    %% Spawn the third tp_query_node "2" replica (query id "6b") in the
    %% inner role, start it with Pid as parent, and return its process.
    NodeId  = "2",
    QueryId = "6b",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i2", "?y", eI("livesIn"), "?x"},
               none, none, Pid,
               #{"?i2" => 1, "?y" => 2, "?x" => 4}, inner},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp6qn5(Pid) ->
    %% Spawn tp_query_node "4" of query "6" (session "1") in the inner role,
    %% send it its start message with Pid as parent, and return its process.
    NodeId  = "4",
    QueryId = "6",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i4", "?y", eI("worksAt"), eI("ijs")},
               none, none, Pid,
               #{"?i4" => 1, "?y" => 2}, inner},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp6qn5a(Pid) ->
    %% Spawn the second tp_query_node "4" replica (query id "6a") in the
    %% inner role, start it with Pid as parent, and return its process.
    NodeId  = "4",
    QueryId = "6a",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i4", "?y", eI("worksAt"), eI("ijs")},
               none, none, Pid,
               #{"?i4" => 1, "?y" => 2}, inner},
    gen_server:call(QNode, Start),
    QNode.
hcet_tp6qn5b(Pid) ->
    %% Spawn the third tp_query_node "4" replica (query id "6b") in the
    %% inner role, start it with Pid as parent, and return its process.
    NodeId  = "4",
    QueryId = "6b",
    SessId  = "1",
    ProcId  = list_to_atom(SessId ++ "-" ++ QueryId ++ "-" ++ NodeId),
    QNode   = tp_query_node:spawn_process(ProcId, node()),
    Start   = {start, NodeId, QueryId, SessId, QNode,
               {"?i4", "?y", eI("worksAt"), eI("ijs")},
               none, none, Pid,
               #{"?i4" => 1, "?y" => 2}, inner},
    gen_server:call(QNode, Start),
    QNode.
|
608871cffbd55df551c67f9c11bb4ae97723cef890eb1adf0fd88590e953dda1 | athos/syntactic-closure | close.clj | (ns close
(:use [syntactic-closure :only [defsyntax qq]]))
(defn foo
  "Returns its argument unchanged; referenced by the expansion of `bar`."
  [x]
  x)
;; Hygienic macro built with syntactic-closure's `defsyntax`/`qq` (see the ns
;; :use clause): expands to a call of this namespace's `foo` on x.
;; NOTE(review): the ^:? metadata on the unquoted x appears to mark it as a
;; free/unclosed symbol for the syntactic-closure quasiquoter -- confirm
;; against the library's documentation.
(defsyntax bar [x]
(qq (foo ~^:? x)))
(comment
(ns baz
(:use [close :only [bar]]))
(defn foo [x]
(* x x))
;; => (close/foo (baz/foo 4))
)
| null | https://raw.githubusercontent.com/athos/syntactic-closure/e251b03a199507df4bbc35788230d434d6506634/examples/close.clj | clojure | (ns close
(:use [syntactic-closure :only [defsyntax qq]]))
(defn foo [x]
x)
(defsyntax bar [x]
(qq (foo ~^:? x)))
(comment
(ns baz
(:use [close :only [bar]]))
(defn foo [x]
(* x x))
= > ( close / foo ( baz / foo 4 ) )
)
| |
42ef9094a4ff493274f065e4dd56462fc166511164ea5f1c75139e1a89c020e2 | wireapp/wire-server | Env.hs | # LANGUAGE TemplateHaskell #
-- This file is part of the Wire Server implementation.
--
Copyright ( C ) 2022 Wire Swiss GmbH < >
--
-- This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
-- later version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
-- details.
--
You should have received a copy of the GNU Affero General Public License along
-- with this program. If not, see </>.
module Gundeck.Env where
import Bilge
import Cassandra (ClientState, Keyspace (..))
import qualified Cassandra as C
import qualified Cassandra.Settings as C
import Control.AutoUpdate
import Control.Lens (makeLenses, (^.))
import Control.Retry (capDelay, exponentialBackoff)
import Data.Default (def)
import qualified Data.List.NonEmpty as NE
import Data.Metrics.Middleware (Metrics)
import Data.Misc (Milliseconds (..))
import Data.Text (unpack)
import Data.Time.Clock
import Data.Time.Clock.POSIX
import qualified Database.Redis as Redis
import qualified Gundeck.Aws as Aws
import Gundeck.Options as Opt
import qualified Gundeck.Redis as Redis
import qualified Gundeck.Redis.HedisExtensions as Redis
import Gundeck.ThreadBudget
import Imports
import Network.HTTP.Client (responseTimeoutMicro)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import qualified System.Logger as Log
import qualified System.Logger.Extended as Logger
import Util.Options
data Env = Env
{ _reqId :: !RequestId,
_monitor :: !Metrics,
_options :: !Opts,
_applog :: !Logger.Logger,
_manager :: !Manager,
_cstate :: !ClientState,
_rstate :: !Redis.RobustConnection,
_rstateAdditionalWrite :: !(Maybe Redis.RobustConnection),
_awsEnv :: !Aws.Env,
_time :: !(IO Milliseconds),
_threadBudgetState :: !(Maybe ThreadBudgetState)
}
makeLenses ''Env
schemaVersion :: Int32
schemaVersion = 7
createEnv :: Metrics -> Opts -> IO Env
createEnv m o = do
l <- Logger.mkLogger (o ^. optLogLevel) (o ^. optLogNetStrings) (o ^. optLogFormat)
c <-
maybe
(C.initialContactsPlain (o ^. optCassandra . casEndpoint . epHost))
(C.initialContactsDisco "cassandra_gundeck" . unpack)
(o ^. optDiscoUrl)
n <-
newManager
tlsManagerSettings
{ managerConnCount = o ^. optSettings . setHttpPoolSize,
managerIdleConnectionCount = 3 * (o ^. optSettings . setHttpPoolSize),
managerResponseTimeout = responseTimeoutMicro 5000000
}
r <- createRedisPool l (o ^. optRedis) "main-redis"
rAdditional <- case o ^. optRedisAdditionalWrite of
Nothing -> pure Nothing
Just additionalRedis -> do
rAdd <- createRedisPool l additionalRedis "additional-write-redis"
pure $ Just rAdd
p <-
C.init
$ C.setLogger (C.mkLogger (Logger.clone (Just "cassandra.gundeck") l))
. C.setContacts (NE.head c) (NE.tail c)
. C.setPortNumber (fromIntegral $ o ^. optCassandra . casEndpoint . epPort)
. C.setKeyspace (Keyspace (o ^. optCassandra . casKeyspace))
. C.setMaxConnections 4
. C.setMaxStreams 128
. C.setPoolStripes 4
. C.setSendTimeout 3
. C.setResponseTimeout 10
. C.setProtocolVersion C.V4
. C.setPolicy (C.dcFilterPolicyIfConfigured l (o ^. optCassandra . casFilterNodesByDatacentre))
$ C.defSettings
a <- Aws.mkEnv l o n
io <-
mkAutoUpdate
defaultUpdateSettings
{ updateAction = Ms . round . (* 1000) <$> getPOSIXTime
}
mtbs <- mkThreadBudgetState `mapM` (o ^. optSettings . setMaxConcurrentNativePushes)
pure $! Env def m o l n p r rAdditional a io mtbs
reqIdMsg :: RequestId -> Logger.Msg -> Logger.Msg
reqIdMsg = ("request" Logger..=) . unRequestId
# INLINE reqIdMsg #
createRedisPool :: Logger.Logger -> RedisEndpoint -> ByteString -> IO Redis.RobustConnection
createRedisPool l endpoint identifier = do
let redisConnInfo =
Redis.defaultConnectInfo
{ Redis.connectHost = unpack $ endpoint ^. rHost,
Redis.connectPort = Redis.PortNumber (fromIntegral $ endpoint ^. rPort),
Redis.connectTimeout = Just (secondsToNominalDiffTime 5),
Redis.connectMaxConnections = 100
}
Log.info l $
Log.msg (Log.val $ "starting connection to " <> identifier <> "...")
. Log.field "connectionMode" (show $ endpoint ^. rConnectionMode)
. Log.field "connInfo" (show redisConnInfo)
let connectWithRetry = Redis.connectRobust l (capDelay 1000000 (exponentialBackoff 50000))
r <- case endpoint ^. rConnectionMode of
Master -> connectWithRetry $ Redis.checkedConnect redisConnInfo
Cluster -> connectWithRetry $ Redis.checkedConnectCluster redisConnInfo
Log.info l $ Log.msg (Log.val $ "Established connection to " <> identifier <> ".")
pure r
| null | https://raw.githubusercontent.com/wireapp/wire-server/e03f7219210019ae5be50739f594dc667e669168/services/gundeck/src/Gundeck/Env.hs | haskell | This file is part of the Wire Server implementation.
This program is free software: you can redistribute it and/or modify it under
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
with this program. If not, see </>. | # LANGUAGE TemplateHaskell #
Copyright ( C ) 2022 Wire Swiss GmbH < >
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
You should have received a copy of the GNU Affero General Public License along
module Gundeck.Env where
import Bilge
import Cassandra (ClientState, Keyspace (..))
import qualified Cassandra as C
import qualified Cassandra.Settings as C
import Control.AutoUpdate
import Control.Lens (makeLenses, (^.))
import Control.Retry (capDelay, exponentialBackoff)
import Data.Default (def)
import qualified Data.List.NonEmpty as NE
import Data.Metrics.Middleware (Metrics)
import Data.Misc (Milliseconds (..))
import Data.Text (unpack)
import Data.Time.Clock
import Data.Time.Clock.POSIX
import qualified Database.Redis as Redis
import qualified Gundeck.Aws as Aws
import Gundeck.Options as Opt
import qualified Gundeck.Redis as Redis
import qualified Gundeck.Redis.HedisExtensions as Redis
import Gundeck.ThreadBudget
import Imports
import Network.HTTP.Client (responseTimeoutMicro)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import qualified System.Logger as Log
import qualified System.Logger.Extended as Logger
import Util.Options
data Env = Env
{ _reqId :: !RequestId,
_monitor :: !Metrics,
_options :: !Opts,
_applog :: !Logger.Logger,
_manager :: !Manager,
_cstate :: !ClientState,
_rstate :: !Redis.RobustConnection,
_rstateAdditionalWrite :: !(Maybe Redis.RobustConnection),
_awsEnv :: !Aws.Env,
_time :: !(IO Milliseconds),
_threadBudgetState :: !(Maybe ThreadBudgetState)
}
makeLenses ''Env
schemaVersion :: Int32
schemaVersion = 7
createEnv :: Metrics -> Opts -> IO Env
createEnv m o = do
l <- Logger.mkLogger (o ^. optLogLevel) (o ^. optLogNetStrings) (o ^. optLogFormat)
c <-
maybe
(C.initialContactsPlain (o ^. optCassandra . casEndpoint . epHost))
(C.initialContactsDisco "cassandra_gundeck" . unpack)
(o ^. optDiscoUrl)
n <-
newManager
tlsManagerSettings
{ managerConnCount = o ^. optSettings . setHttpPoolSize,
managerIdleConnectionCount = 3 * (o ^. optSettings . setHttpPoolSize),
managerResponseTimeout = responseTimeoutMicro 5000000
}
r <- createRedisPool l (o ^. optRedis) "main-redis"
rAdditional <- case o ^. optRedisAdditionalWrite of
Nothing -> pure Nothing
Just additionalRedis -> do
rAdd <- createRedisPool l additionalRedis "additional-write-redis"
pure $ Just rAdd
p <-
C.init
$ C.setLogger (C.mkLogger (Logger.clone (Just "cassandra.gundeck") l))
. C.setContacts (NE.head c) (NE.tail c)
. C.setPortNumber (fromIntegral $ o ^. optCassandra . casEndpoint . epPort)
. C.setKeyspace (Keyspace (o ^. optCassandra . casKeyspace))
. C.setMaxConnections 4
. C.setMaxStreams 128
. C.setPoolStripes 4
. C.setSendTimeout 3
. C.setResponseTimeout 10
. C.setProtocolVersion C.V4
. C.setPolicy (C.dcFilterPolicyIfConfigured l (o ^. optCassandra . casFilterNodesByDatacentre))
$ C.defSettings
a <- Aws.mkEnv l o n
io <-
mkAutoUpdate
defaultUpdateSettings
{ updateAction = Ms . round . (* 1000) <$> getPOSIXTime
}
mtbs <- mkThreadBudgetState `mapM` (o ^. optSettings . setMaxConcurrentNativePushes)
pure $! Env def m o l n p r rAdditional a io mtbs
reqIdMsg :: RequestId -> Logger.Msg -> Logger.Msg
reqIdMsg = ("request" Logger..=) . unRequestId
# INLINE reqIdMsg #
createRedisPool :: Logger.Logger -> RedisEndpoint -> ByteString -> IO Redis.RobustConnection
createRedisPool l endpoint identifier = do
let redisConnInfo =
Redis.defaultConnectInfo
{ Redis.connectHost = unpack $ endpoint ^. rHost,
Redis.connectPort = Redis.PortNumber (fromIntegral $ endpoint ^. rPort),
Redis.connectTimeout = Just (secondsToNominalDiffTime 5),
Redis.connectMaxConnections = 100
}
Log.info l $
Log.msg (Log.val $ "starting connection to " <> identifier <> "...")
. Log.field "connectionMode" (show $ endpoint ^. rConnectionMode)
. Log.field "connInfo" (show redisConnInfo)
let connectWithRetry = Redis.connectRobust l (capDelay 1000000 (exponentialBackoff 50000))
r <- case endpoint ^. rConnectionMode of
Master -> connectWithRetry $ Redis.checkedConnect redisConnInfo
Cluster -> connectWithRetry $ Redis.checkedConnectCluster redisConnInfo
Log.info l $ Log.msg (Log.val $ "Established connection to " <> identifier <> ".")
pure r
|
5120feb3a241dce84bfd41f3d8377699a90056feecb6668b4654acfa5161ba3f | Ekdohibs/camlboot | let_open.ml | let () = print_endline "Let open:"
module M = struct
let x = 42
let f x = x + x
end
let () =
show_int M.x;
M.(show_int x);
let open M in
show_int (f 21)
module N = struct
let f ~x ?(y=2) p = p (x * y)
end
let () =
let open N in
f ~x:21 show_int
module R = struct
type r = { a : int; b : int }
type o = A of int | B of int
end
let () =
let mk a b = R.{ a; b } in
let unmk R.{ a; b } = (a, b) in
let r = mk 42 21 in
let u = snd (unmk r) in
let w = match R.A 12 with R.(A x) -> x + 30 | R.B x -> x in
show_int r.R.a;
show_int (u + u);
show_int w
let () = print_newline ()
| null | https://raw.githubusercontent.com/Ekdohibs/camlboot/506280c6e0813e0e794988151a8e46be55373ebc/miniml/compiler/test/let_open.ml | ocaml | let () = print_endline "Let open:"
module M = struct
let x = 42
let f x = x + x
end
let () =
show_int M.x;
M.(show_int x);
let open M in
show_int (f 21)
module N = struct
let f ~x ?(y=2) p = p (x * y)
end
let () =
let open N in
f ~x:21 show_int
module R = struct
type r = { a : int; b : int }
type o = A of int | B of int
end
let () =
let mk a b = R.{ a; b } in
let unmk R.{ a; b } = (a, b) in
let r = mk 42 21 in
let u = snd (unmk r) in
let w = match R.A 12 with R.(A x) -> x + 30 | R.B x -> x in
show_int r.R.a;
show_int (u + u);
show_int w
let () = print_newline ()
| |
efec9192b439beabb1f96b2a556f5abbc7a751b537454ebc9997fef40acb4042 | ocaml-ppx/ppx | main.ml | open Migrate_parsetree
open Ast_403
let mapper =
let super = Ast_mapper.default_mapper in
let expr self (e : Parsetree.expression) =
match e.pexp_desc with
| Pexp_extension ({ txt = "omp_test"; _ }, _) ->
{ e with pexp_desc = Pexp_constant (Pconst_integer ("42", None)) }
| _ ->
super.expr self e
in
{ super with expr }
let () =
Driver.register ~name:"omp_test"
(module OCaml_403)
(fun _ _ -> mapper)
| null | https://raw.githubusercontent.com/ocaml-ppx/ppx/40e5a35a4386d969effaf428078c900bd03b78ec/test/driver/omp-integration/omp-ppx/main.ml | ocaml | open Migrate_parsetree
open Ast_403
let mapper =
let super = Ast_mapper.default_mapper in
let expr self (e : Parsetree.expression) =
match e.pexp_desc with
| Pexp_extension ({ txt = "omp_test"; _ }, _) ->
{ e with pexp_desc = Pexp_constant (Pconst_integer ("42", None)) }
| _ ->
super.expr self e
in
{ super with expr }
let () =
Driver.register ~name:"omp_test"
(module OCaml_403)
(fun _ _ -> mapper)
| |
c271f2e8d901e7c4340a7bfc3ab5b0bf2bf2db6ba1ebea1841b54801f497d83a | ZHaskell/z-data | Base64.hs | |
Module : Z.Data . Vector . Base64
Description : Base64 codec for bytes .
Copyright : ( c ) , 2017 - 2018
License : BSD
Maintainer :
Stability : experimental
Portability : non - portable
This module provides base64 encoding & decoding tools , as well as ' Base64Bytes ' newtype with base64 textual instances .
Module : Z.Data.Vector.Base64
Description : Base64 codec for bytes.
Copyright : (c) Dong Han, 2017-2018
License : BSD
Maintainer :
Stability : experimental
Portability : non-portable
This module provides base64 encoding & decoding tools, as well as 'Base64Bytes' newtype with base64 textual instances.
-}
module Z.Data.Vector.Base64
(-- * Encoding & Decoding functions
base64Encode
, base64EncodeLength
, base64EncodeText
, base64EncodeBuilder
, base64Decode
, base64Decode'
, base64DecodeLength
, Base64DecodeException(..)
-- * Internal C FFIs
, hs_base64_encode, hs_base64_decode
) where
import Control.Exception
import Data.Word
import Data.Bits (unsafeShiftL, unsafeShiftR, (.&.))
import GHC.Stack
import System.IO.Unsafe
import qualified Z.Data.Vector.Base as V
import qualified Z.Data.Builder.Base as B
import qualified Z.Data.Text.Base as T
import Z.Foreign
-- | Encode 'V.Bytes' using base64 encoding.
base64Encode :: V.Bytes -> V.Bytes
# INLINABLE base64Encode #
base64Encode (V.PrimVector arr s l) = fst . unsafeDupablePerformIO $ do
allocPrimVectorUnsafe (base64EncodeLength l) $ \ buf# ->
withPrimArrayUnsafe arr $ \ parr _ ->
hs_base64_encode buf# 0 parr s l
| Return the encoded length of a given input length , always a multipler of 4 .
base64EncodeLength :: Int -> Int
# INLINE base64EncodeLength #
base64EncodeLength n = ((n+2) `quot` 3) `unsafeShiftL` 2
-- | 'B.Builder' version of 'base64Encode'.
base64EncodeBuilder :: V.Bytes -> B.Builder ()
# INLINE base64EncodeBuilder #
base64EncodeBuilder (V.PrimVector arr s l) =
B.writeN (base64EncodeLength l) (\ (MutablePrimArray mba#) i -> do
withPrimArrayUnsafe arr $ \ parr _ ->
hs_base64_encode mba# i parr s l)
-- | Text version of 'base64Encode'.
base64EncodeText :: V.Bytes -> T.Text
# INLINABLE base64EncodeText #
base64EncodeText = T.Text . base64Encode
-- | Decode a base64 encoding string, return Nothing on illegal bytes or incomplete input.
base64Decode :: V.Bytes -> Maybe V.Bytes
# INLINABLE base64Decode #
base64Decode ba
| inputLen == 0 = Just V.empty
| decodeLen == -1 = Nothing
| otherwise = unsafeDupablePerformIO $ do
(arr, r) <- withPrimVectorUnsafe ba $ \ ba# s l ->
allocPrimArrayUnsafe decodeLen $ \ buf# ->
hs_base64_decode buf# ba# s l
if r == 0
then return Nothing
else return (Just (V.PrimVector arr 0 r))
where
inputLen = V.length ba
decodeLen = base64DecodeLength inputLen
-- | Exception during base64 decoding.
data Base64DecodeException = IllegalBase64Bytes V.Bytes CallStack
| IncompleteBase64Bytes V.Bytes CallStack
deriving Show
instance Exception Base64DecodeException
-- | Decode a base64 encoding string, throw 'Base64DecodeException' on error.
base64Decode' :: HasCallStack => V.Bytes -> V.Bytes
# INLINABLE base64Decode ' #
base64Decode' ba = case base64Decode ba of
Just r -> r
_ -> throw (IllegalBase64Bytes ba callStack)
-- | Return the upper bound of decoded length of a given input length
, return -1 if illegal(not a multipler of 4 ) .
base64DecodeLength :: Int -> Int
# INLINE base64DecodeLength #
base64DecodeLength n | n .&. 3 == 1 = -1
| otherwise = (n `unsafeShiftR` 2) * 3 + 2
--------------------------------------------------------------------------------
foreign import ccall unsafe hs_base64_encode :: MBA# Word8 -> Int -> BA# Word8 -> Int -> Int -> IO ()
foreign import ccall unsafe hs_base64_decode :: MBA# Word8 -> BA# Word8 -> Int -> Int -> IO Int
| null | https://raw.githubusercontent.com/ZHaskell/z-data/82e31fea77ab9384000a21cb3fedcfd3eee6cc4a/Z/Data/Vector/Base64.hs | haskell | * Encoding & Decoding functions
* Internal C FFIs
| Encode 'V.Bytes' using base64 encoding.
| 'B.Builder' version of 'base64Encode'.
| Text version of 'base64Encode'.
| Decode a base64 encoding string, return Nothing on illegal bytes or incomplete input.
| Exception during base64 decoding.
| Decode a base64 encoding string, throw 'Base64DecodeException' on error.
| Return the upper bound of decoded length of a given input length
------------------------------------------------------------------------------ | |
Module : Z.Data . Vector . Base64
Description : Base64 codec for bytes .
Copyright : ( c ) , 2017 - 2018
License : BSD
Maintainer :
Stability : experimental
Portability : non - portable
This module provides base64 encoding & decoding tools , as well as ' Base64Bytes ' newtype with base64 textual instances .
Module : Z.Data.Vector.Base64
Description : Base64 codec for bytes.
Copyright : (c) Dong Han, 2017-2018
License : BSD
Maintainer :
Stability : experimental
Portability : non-portable
This module provides base64 encoding & decoding tools, as well as 'Base64Bytes' newtype with base64 textual instances.
-}
module Z.Data.Vector.Base64
base64Encode
, base64EncodeLength
, base64EncodeText
, base64EncodeBuilder
, base64Decode
, base64Decode'
, base64DecodeLength
, Base64DecodeException(..)
, hs_base64_encode, hs_base64_decode
) where
import Control.Exception
import Data.Word
import Data.Bits (unsafeShiftL, unsafeShiftR, (.&.))
import GHC.Stack
import System.IO.Unsafe
import qualified Z.Data.Vector.Base as V
import qualified Z.Data.Builder.Base as B
import qualified Z.Data.Text.Base as T
import Z.Foreign
base64Encode :: V.Bytes -> V.Bytes
# INLINABLE base64Encode #
base64Encode (V.PrimVector arr s l) = fst . unsafeDupablePerformIO $ do
allocPrimVectorUnsafe (base64EncodeLength l) $ \ buf# ->
withPrimArrayUnsafe arr $ \ parr _ ->
hs_base64_encode buf# 0 parr s l
| Return the encoded length of a given input length , always a multipler of 4 .
base64EncodeLength :: Int -> Int
# INLINE base64EncodeLength #
base64EncodeLength n = ((n+2) `quot` 3) `unsafeShiftL` 2
base64EncodeBuilder :: V.Bytes -> B.Builder ()
# INLINE base64EncodeBuilder #
base64EncodeBuilder (V.PrimVector arr s l) =
B.writeN (base64EncodeLength l) (\ (MutablePrimArray mba#) i -> do
withPrimArrayUnsafe arr $ \ parr _ ->
hs_base64_encode mba# i parr s l)
base64EncodeText :: V.Bytes -> T.Text
# INLINABLE base64EncodeText #
base64EncodeText = T.Text . base64Encode
base64Decode :: V.Bytes -> Maybe V.Bytes
# INLINABLE base64Decode #
base64Decode ba
| inputLen == 0 = Just V.empty
| decodeLen == -1 = Nothing
| otherwise = unsafeDupablePerformIO $ do
(arr, r) <- withPrimVectorUnsafe ba $ \ ba# s l ->
allocPrimArrayUnsafe decodeLen $ \ buf# ->
hs_base64_decode buf# ba# s l
if r == 0
then return Nothing
else return (Just (V.PrimVector arr 0 r))
where
inputLen = V.length ba
decodeLen = base64DecodeLength inputLen
data Base64DecodeException = IllegalBase64Bytes V.Bytes CallStack
| IncompleteBase64Bytes V.Bytes CallStack
deriving Show
instance Exception Base64DecodeException
base64Decode' :: HasCallStack => V.Bytes -> V.Bytes
# INLINABLE base64Decode ' #
base64Decode' ba = case base64Decode ba of
Just r -> r
_ -> throw (IllegalBase64Bytes ba callStack)
, return -1 if illegal(not a multipler of 4 ) .
base64DecodeLength :: Int -> Int
# INLINE base64DecodeLength #
base64DecodeLength n | n .&. 3 == 1 = -1
| otherwise = (n `unsafeShiftR` 2) * 3 + 2
foreign import ccall unsafe hs_base64_encode :: MBA# Word8 -> Int -> BA# Word8 -> Int -> Int -> IO ()
foreign import ccall unsafe hs_base64_decode :: MBA# Word8 -> BA# Word8 -> Int -> Int -> IO Int
|
c6cf9f1b22927adf3d8b7ebdeddd4dc9b5574c32e97bb35b99ce52577e99fe82 | rain-1/single_cream | 3.scm | #t #f
| null | https://raw.githubusercontent.com/rain-1/single_cream/f8989fde4bfcffe0af7f6ed5916885446bf40124/t/trivial/3.scm | scheme | #t #f
| |
e66d571c63e48ad11569798e467628836467f5527a2ca705dc1a8902cfdf9aa5 | shortishly/pgmp | pgmp_rep_sup.erl | Copyright ( c ) 2022 < >
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(pgmp_rep_sup).
-behaviour(supervisor).
-export([init/1]).
-export([start_child/1]).
-export([start_link/1]).
-export([terminate_child/1]).
-import(pgmp_sup, [supervisor/1]).
start_link(#{} = Arg) ->
supervisor:start_link({local, ?MODULE}, ?MODULE, [Arg]).
start_child(Pub) ->
Arg = #{config := Config} = pgmp_sup:config(),
supervisor:start_child(
?MODULE,
supervisor(#{id => Pub,
m => pgmp_rep_log_sup,
args => [Arg#{config := Config#{publication => Pub}}]})).
terminate_child(Pub) ->
supervisor:terminate_child(?MODULE, Pub).
init([Arg]) ->
{ok,
configuration(
case pgmp_config:enabled(pgmp_replication) of
true ->
children(Arg);
false ->
[]
end)}.
configuration(Children) ->
{pgmp_config:sup_flags(?MODULE), Children}.
children(#{config := Config} = Arg) ->
lists:map(
fun
(Pub) ->
supervisor(
#{id => Pub,
m => pgmp_rep_log_sup,
args => [Arg#{config := Config#{publication => Pub}}]})
end,
pgmp_config:replication(logical, publication_names)).
| null | https://raw.githubusercontent.com/shortishly/pgmp/99eef1e6c6c9f8dcd85450348fa2042a61b8a240/src/pgmp_rep_sup.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright ( c ) 2022 < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(pgmp_rep_sup).
-behaviour(supervisor).
-export([init/1]).
-export([start_child/1]).
-export([start_link/1]).
-export([terminate_child/1]).
-import(pgmp_sup, [supervisor/1]).
start_link(#{} = Arg) ->
supervisor:start_link({local, ?MODULE}, ?MODULE, [Arg]).
start_child(Pub) ->
Arg = #{config := Config} = pgmp_sup:config(),
supervisor:start_child(
?MODULE,
supervisor(#{id => Pub,
m => pgmp_rep_log_sup,
args => [Arg#{config := Config#{publication => Pub}}]})).
terminate_child(Pub) ->
supervisor:terminate_child(?MODULE, Pub).
init([Arg]) ->
{ok,
configuration(
case pgmp_config:enabled(pgmp_replication) of
true ->
children(Arg);
false ->
[]
end)}.
configuration(Children) ->
{pgmp_config:sup_flags(?MODULE), Children}.
children(#{config := Config} = Arg) ->
lists:map(
fun
(Pub) ->
supervisor(
#{id => Pub,
m => pgmp_rep_log_sup,
args => [Arg#{config := Config#{publication => Pub}}]})
end,
pgmp_config:replication(logical, publication_names)).
|
83526c255c992867220149431bd7917aa69d0329fbd66b24fb7e1782a2049015 | phadej/vec | Internal.hs | {-# LANGUAGE CPP #-}
# LANGUAGE DeriveFoldable #
# LANGUAGE DeriveFunctor #
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE InstanceSigs #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE Safe #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Data.RAList.Internal (
RAList (..),
-- * Showing
explicitShow,
explicitShowsPrec,
-- * Construction
empty,
singleton,
cons,
-- * Indexing
(!),
(!?),
length,
null,
-- * Conversions
toList,
fromList,
-- * Folding
ifoldMap,
-- * Mapping
adjust,
map,
imap,
itraverse,
) where
import Prelude
(Bool (..), Eq, Functor (..), Int, Maybe (..), Ord (..), Show (..),
ShowS, String, showParen, showString, ($), (.))
import Control.Applicative (Applicative (..), (<$>))
import Control.DeepSeq (NFData (..))
import Control.Exception (ArrayException (IndexOutOfBounds), throw)
import Data.Hashable (Hashable (..))
import Data.List.NonEmpty (NonEmpty (..))
import Data.Monoid (Monoid (..))
import Data.Semigroup (Semigroup (..))
import qualified Data.Foldable as I (Foldable (..))
import qualified Data.Traversable as I (Traversable (..))
import qualified Test.QuickCheck as QC
import qualified Data.Foldable.WithIndex as WI (FoldableWithIndex (..))
import qualified Data.Functor.WithIndex as WI (FunctorWithIndex (..))
import qualified Data.Traversable.WithIndex as WI (TraversableWithIndex (..))
import qualified Data.RAList.NonEmpty.Internal as NE
-- $setup
> > > import Data . ( toUpper )
-------------------------------------------------------------------------------
-- Type
-------------------------------------------------------------------------------
-- | Random access list.
data RAList a
= Empty
| NonEmpty (NE.NERAList a)
deriving (Eq, Ord, Functor, I.Traversable)
-------------------------------------------------------------------------------
-- Instances
-------------------------------------------------------------------------------
-- |
--
-- >>> I.length $ fromList $ ['a' .. 'z']
26
--
instance I.Foldable RAList where
foldMap _ Empty = mempty
foldMap f (NonEmpty xs) = I.foldMap f xs
#if MIN_VERSION_base(4,8,0)
length = length
null = null
#endif
instance NFData a => NFData (RAList a) where
rnf Empty = ()
rnf (NonEmpty xs) = rnf xs
instance Hashable a => Hashable (RAList a) where
hashWithSalt salt Empty = hashWithSalt salt (0 :: Int)
hashWithSalt salt (NonEmpty r) = hashWithSalt salt r
-- |
--
> > > fromList " abc " < > fromList " xyz "
-- fromList "abcxyz"
--
instance Semigroup (RAList a) where
Empty <> ys = ys
xs <> Empty = xs
NonEmpty xs <> NonEmpty ys = NonEmpty (xs <> ys)
instance Monoid (RAList a) where
mempty = Empty
mappend = (<>)
TODO : Applicative , Monad
#ifdef MIN_VERSION_semigroupoids
-- Apply, Bind
#endif
| @since 0.2
instance WI.FunctorWithIndex Int RAList where
imap = imap
| @since 0.2
instance WI.FoldableWithIndex Int RAList where
ifoldMap = ifoldMap
ifoldr = ifoldr -- TODO , PR welcome !
| @since 0.2
instance WI.TraversableWithIndex Int RAList where
itraverse = itraverse
-------------------------------------------------------------------------------
-- Showing
-------------------------------------------------------------------------------
instance Show a => Show (RAList a) where
showsPrec d xs = showParen (d > 10) $ showString "fromList " . showsPrec 11 (toList xs)
explicitShow :: Show a => RAList a -> String
explicitShow xs = explicitShowsPrec 0 xs ""
explicitShowsPrec :: Show a => Int -> RAList a -> ShowS
explicitShowsPrec _ Empty = showString "Empty"
explicitShowsPrec d (NonEmpty xs) = showParen (d > 10) $ showString "NonEmpty " . NE.explicitShowsPrec 11 xs
-------------------------------------------------------------------------------
-- Construction
-------------------------------------------------------------------------------
-- | Empty 'RAList'.
--
-- >>> empty :: RAList Int
-- fromList []
--
empty :: RAList a
empty = Empty
-- | Single element 'RAList'.
singleton :: a -> RAList a
singleton = NonEmpty . NE.singleton
-- | 'cons' for non-empty rals.
cons :: a -> RAList a -> RAList a
cons x Empty = singleton x
cons x (NonEmpty xs) = NonEmpty (NE.cons x xs)
toList :: RAList a -> [a]
toList Empty = []
toList (NonEmpty xs) = I.foldr (:) [] xs
-- |
--
-- >>> fromList ['a' .. 'f']
-- fromList "abcdef"
--
-- >>> explicitShow $ fromList ['a' .. 'f']
" NonEmpty ( NE ( Cons0 ( Cons1 ( Nd ( Lf ' a ' ) ( Lf ' b ' ) ) ( Last ( Nd ( Nd ( Lf ' c ' ) ( Lf 'd ' ) ) ( Nd ( Lf ' e ' ) ( Lf ' f ' ) ) ) ) ) ) ) "
--
fromList :: [a] -> RAList a
fromList [] = Empty
fromList (x:xs) = NonEmpty (NE.fromNonEmpty (x :| xs))
-------------------------------------------------------------------------------
-- Indexing
-------------------------------------------------------------------------------
-- | List index.
--
--- >>> fromList ['a'..'f'] ! 0
-- 'a'
--
> > > fromList [ ' a' .. 'f ' ] ! 5
-- 'f'
--
> > > fromList [ ' a' .. 'f ' ] ! 6
-- *** Exception: array index out of range: RAList
-- ...
--
(!) :: RAList a -> Int -> a
(!) Empty _ = throw $ IndexOutOfBounds "RAList"
(!) (NonEmpty xs) i = xs NE.! i
-- | safe list index.
--
-- >>> fromList ['a'..'f'] !? 0
-- Just 'a'
--
> > > fromList [ ' a' .. 'f ' ] ! ? 5
-- Just 'f'
--
> > > fromList [ ' a' .. 'f ' ] ! ? 6
-- Nothing
--
(!?) :: RAList a -> Int -> Maybe a
Empty !? _ = Nothing
NonEmpty xs !? i = xs NE.!? i
length :: RAList a -> Int
length Empty = 0
length (NonEmpty xs) = NE.length xs
null :: RAList a -> Bool
null Empty = True
null (NonEmpty _) = False
-------------------------------------------------------------------------------
-- Folds
-------------------------------------------------------------------------------
ifoldMap :: Monoid m => (Int -> a -> m) -> RAList a -> m
ifoldMap _ Empty = mempty
ifoldMap f (NonEmpty r) = NE.ifoldMap f r
-------------------------------------------------------------------------------
-- Mapping
-------------------------------------------------------------------------------
-- |
-- >>> map toUpper (fromList ['a'..'f'])
fromList " ABCDEF "
--
map :: (a -> b) -> RAList a -> RAList b
map = fmap
-- |
--
-- >>> imap (,) $ fromList ['a' .. 'f']
-- fromList [(0,'a'),(1,'b'),(2,'c'),(3,'d'),(4,'e'),(5,'f')]
imap :: (Int -> a -> b) -> RAList a -> RAList b
imap f xs = unI (itraverse (\i x -> I (f i x)) xs)
itraverse :: forall f a b. Applicative f => (Int -> a -> f b) -> RAList a -> f (RAList b)
itraverse _ Empty = pure Empty
itraverse f (NonEmpty xs) = NonEmpty <$> NE.itraverse f xs
-- | Adjust a value in the list.
--
> > > adjust 3 toUpper $ fromList " "
-- fromList "bcdEf"
--
-- If index is out of bounds, the list is returned unmodified.
--
> > > adjust 10 toUpper $ fromList " "
fromList " "
--
> > > adjust ( -1 ) toUpper $ fromList " "
fromList " "
--
adjust :: forall a. Int -> (a -> a) -> RAList a -> RAList a
adjust _ _ Empty = Empty
adjust i f (NonEmpty xs) = NonEmpty (NE.adjust i f xs)
-------------------------------------------------------------------------------
-- QuickCheck
-------------------------------------------------------------------------------
instance QC.Arbitrary1 RAList where
liftArbitrary = fmap fromList . QC.liftArbitrary
liftShrink shr = fmap fromList . QC.liftShrink shr . toList
instance QC.Arbitrary a => QC.Arbitrary (RAList a) where
arbitrary = QC.arbitrary1
shrink = QC.shrink1
instance QC.CoArbitrary a => QC.CoArbitrary (RAList a) where
coarbitrary = QC.coarbitrary . toList
instance QC.Function a => QC.Function (RAList a) where
function = QC.functionMap toList fromList
-------------------------------------------------------------------------------
Utilities
-------------------------------------------------------------------------------
newtype I a = I a
unI :: I a -> a
unI (I a) = a
instance Functor I where
fmap f (I x) = I (f x)
instance Applicative I where
pure = I
I f <*> I x = I (f x)
_ *> x = x
x <* _ = x
#if MIN_VERSION_base(4,10,0)
liftA2 f (I x) (I y) = I (f x y)
#endif
| null | https://raw.githubusercontent.com/phadej/vec/a895ab79e054987938295d5a149758eb79d85683/ral/src/Data/RAList/Internal.hs | haskell | # LANGUAGE CPP #
# LANGUAGE DeriveTraversable #
# LANGUAGE FlexibleContexts #
# LANGUAGE Safe #
# LANGUAGE ScopedTypeVariables #
* Showing
* Construction
* Indexing
* Conversions
* Folding
* Mapping
$setup
-----------------------------------------------------------------------------
Type
-----------------------------------------------------------------------------
| Random access list.
-----------------------------------------------------------------------------
Instances
-----------------------------------------------------------------------------
|
>>> I.length $ fromList $ ['a' .. 'z']
|
fromList "abcxyz"
Apply, Bind
TODO , PR welcome !
-----------------------------------------------------------------------------
Showing
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Construction
-----------------------------------------------------------------------------
| Empty 'RAList'.
>>> empty :: RAList Int
fromList []
| Single element 'RAList'.
| 'cons' for non-empty rals.
|
>>> fromList ['a' .. 'f']
fromList "abcdef"
>>> explicitShow $ fromList ['a' .. 'f']
-----------------------------------------------------------------------------
Indexing
-----------------------------------------------------------------------------
| List index.
- >>> fromList ['a'..'f'] ! 0
'a'
'f'
*** Exception: array index out of range: RAList
...
| safe list index.
>>> fromList ['a'..'f'] !? 0
Just 'a'
Just 'f'
Nothing
-----------------------------------------------------------------------------
Folds
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Mapping
-----------------------------------------------------------------------------
|
>>> map toUpper (fromList ['a'..'f'])
|
>>> imap (,) $ fromList ['a' .. 'f']
fromList [(0,'a'),(1,'b'),(2,'c'),(3,'d'),(4,'e'),(5,'f')]
| Adjust a value in the list.
fromList "bcdEf"
If index is out of bounds, the list is returned unmodified.
-----------------------------------------------------------------------------
QuickCheck
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
----------------------------------------------------------------------------- | # LANGUAGE DeriveFoldable #
# LANGUAGE DeriveFunctor #
# LANGUAGE InstanceSigs #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
module Data.RAList.Internal (
RAList (..),
explicitShow,
explicitShowsPrec,
empty,
singleton,
cons,
(!),
(!?),
length,
null,
toList,
fromList,
ifoldMap,
adjust,
map,
imap,
itraverse,
) where
import Prelude
(Bool (..), Eq, Functor (..), Int, Maybe (..), Ord (..), Show (..),
ShowS, String, showParen, showString, ($), (.))
import Control.Applicative (Applicative (..), (<$>))
import Control.DeepSeq (NFData (..))
import Control.Exception (ArrayException (IndexOutOfBounds), throw)
import Data.Hashable (Hashable (..))
import Data.List.NonEmpty (NonEmpty (..))
import Data.Monoid (Monoid (..))
import Data.Semigroup (Semigroup (..))
import qualified Data.Foldable as I (Foldable (..))
import qualified Data.Traversable as I (Traversable (..))
import qualified Test.QuickCheck as QC
import qualified Data.Foldable.WithIndex as WI (FoldableWithIndex (..))
import qualified Data.Functor.WithIndex as WI (FunctorWithIndex (..))
import qualified Data.Traversable.WithIndex as WI (TraversableWithIndex (..))
import qualified Data.RAList.NonEmpty.Internal as NE
> > > import Data . ( toUpper )
data RAList a
= Empty
| NonEmpty (NE.NERAList a)
deriving (Eq, Ord, Functor, I.Traversable)
26
instance I.Foldable RAList where
foldMap _ Empty = mempty
foldMap f (NonEmpty xs) = I.foldMap f xs
#if MIN_VERSION_base(4,8,0)
length = length
null = null
#endif
instance NFData a => NFData (RAList a) where
rnf Empty = ()
rnf (NonEmpty xs) = rnf xs
instance Hashable a => Hashable (RAList a) where
hashWithSalt salt Empty = hashWithSalt salt (0 :: Int)
hashWithSalt salt (NonEmpty r) = hashWithSalt salt r
> > > fromList " abc " < > fromList " xyz "
instance Semigroup (RAList a) where
Empty <> ys = ys
xs <> Empty = xs
NonEmpty xs <> NonEmpty ys = NonEmpty (xs <> ys)
instance Monoid (RAList a) where
mempty = Empty
mappend = (<>)
TODO : Applicative , Monad
#ifdef MIN_VERSION_semigroupoids
#endif
| @since 0.2
instance WI.FunctorWithIndex Int RAList where
imap = imap
| @since 0.2
instance WI.FoldableWithIndex Int RAList where
ifoldMap = ifoldMap
| @since 0.2
instance WI.TraversableWithIndex Int RAList where
itraverse = itraverse
instance Show a => Show (RAList a) where
showsPrec d xs = showParen (d > 10) $ showString "fromList " . showsPrec 11 (toList xs)
explicitShow :: Show a => RAList a -> String
explicitShow xs = explicitShowsPrec 0 xs ""
explicitShowsPrec :: Show a => Int -> RAList a -> ShowS
explicitShowsPrec _ Empty = showString "Empty"
explicitShowsPrec d (NonEmpty xs) = showParen (d > 10) $ showString "NonEmpty " . NE.explicitShowsPrec 11 xs
empty :: RAList a
empty = Empty
singleton :: a -> RAList a
singleton = NonEmpty . NE.singleton
cons :: a -> RAList a -> RAList a
cons x Empty = singleton x
cons x (NonEmpty xs) = NonEmpty (NE.cons x xs)
toList :: RAList a -> [a]
toList Empty = []
toList (NonEmpty xs) = I.foldr (:) [] xs
" NonEmpty ( NE ( Cons0 ( Cons1 ( Nd ( Lf ' a ' ) ( Lf ' b ' ) ) ( Last ( Nd ( Nd ( Lf ' c ' ) ( Lf 'd ' ) ) ( Nd ( Lf ' e ' ) ( Lf ' f ' ) ) ) ) ) ) ) "
fromList :: [a] -> RAList a
fromList [] = Empty
fromList (x:xs) = NonEmpty (NE.fromNonEmpty (x :| xs))
> > > fromList [ ' a' .. 'f ' ] ! 5
> > > fromList [ ' a' .. 'f ' ] ! 6
(!) :: RAList a -> Int -> a
(!) Empty _ = throw $ IndexOutOfBounds "RAList"
(!) (NonEmpty xs) i = xs NE.! i
> > > fromList [ ' a' .. 'f ' ] ! ? 5
> > > fromList [ ' a' .. 'f ' ] ! ? 6
(!?) :: RAList a -> Int -> Maybe a
Empty !? _ = Nothing
NonEmpty xs !? i = xs NE.!? i
length :: RAList a -> Int
length Empty = 0
length (NonEmpty xs) = NE.length xs
null :: RAList a -> Bool
null Empty = True
null (NonEmpty _) = False
ifoldMap :: Monoid m => (Int -> a -> m) -> RAList a -> m
ifoldMap _ Empty = mempty
ifoldMap f (NonEmpty r) = NE.ifoldMap f r
fromList " ABCDEF "
map :: (a -> b) -> RAList a -> RAList b
map = fmap
imap :: (Int -> a -> b) -> RAList a -> RAList b
imap f xs = unI (itraverse (\i x -> I (f i x)) xs)
itraverse :: forall f a b. Applicative f => (Int -> a -> f b) -> RAList a -> f (RAList b)
itraverse _ Empty = pure Empty
itraverse f (NonEmpty xs) = NonEmpty <$> NE.itraverse f xs
> > > adjust 3 toUpper $ fromList " "
> > > adjust 10 toUpper $ fromList " "
fromList " "
> > > adjust ( -1 ) toUpper $ fromList " "
fromList " "
adjust :: forall a. Int -> (a -> a) -> RAList a -> RAList a
adjust _ _ Empty = Empty
adjust i f (NonEmpty xs) = NonEmpty (NE.adjust i f xs)
instance QC.Arbitrary1 RAList where
liftArbitrary = fmap fromList . QC.liftArbitrary
liftShrink shr = fmap fromList . QC.liftShrink shr . toList
instance QC.Arbitrary a => QC.Arbitrary (RAList a) where
arbitrary = QC.arbitrary1
shrink = QC.shrink1
instance QC.CoArbitrary a => QC.CoArbitrary (RAList a) where
coarbitrary = QC.coarbitrary . toList
instance QC.Function a => QC.Function (RAList a) where
function = QC.functionMap toList fromList
Utilities
newtype I a = I a
unI :: I a -> a
unI (I a) = a
instance Functor I where
fmap f (I x) = I (f x)
instance Applicative I where
pure = I
I f <*> I x = I (f x)
_ *> x = x
x <* _ = x
#if MIN_VERSION_base(4,10,0)
liftA2 f (I x) (I y) = I (f x y)
#endif
|
b0348476f6d32619df134ed8b53bb718a4195a654240b474853239423ef9c67f | nasa/ogma | Main.hs | Copyright 2020 United States Government as represented by the Administrator
of the National Aeronautics and Space Administration . All Rights Reserved .
--
-- Disclaimers
--
No Warranty : THE SUBJECT SOFTWARE IS PROVIDED " AS IS " WITHOUT ANY WARRANTY
OF ANY KIND , EITHER EXPRESSED , IMPLIED , OR STATUTORY , INCLUDING , BUT NOT
LIMITED TO , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL CONFORM TO
-- SPECIFICATIONS, ANY IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-- PARTICULAR PURPOSE, OR FREEDOM FROM INFRINGEMENT, ANY WARRANTY THAT THE
-- SUBJECT SOFTWARE WILL BE ERROR FREE, OR ANY WARRANTY THAT DOCUMENTATION, IF
PROVIDED , WILL CONFORM TO THE SUBJECT SOFTWARE . THIS AGREEMENT DOES NOT , IN
-- ANY MANNER, CONSTITUTE AN ENDORSEMENT BY GOVERNMENT AGENCY OR ANY PRIOR
RECIPIENT OF ANY RESULTS , RESULTING DESIGNS , HARDWARE , SOFTWARE PRODUCTS OR
ANY OTHER APPLICATIONS RESULTING FROM USE OF THE SUBJECT SOFTWARE . ,
-- GOVERNMENT AGENCY DISCLAIMS ALL WARRANTIES AND LIABILITIES REGARDING
THIRD - PARTY SOFTWARE , IF PRESENT IN THE ORIGINAL SOFTWARE , AND DISTRIBUTES
-- IT "AS IS."
--
-- Waiver and Indemnity: RECIPIENT AGREES TO WAIVE ANY AND ALL CLAIMS AGAINST
THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS
-- ANY PRIOR RECIPIENT. IF RECIPIENT'S USE OF THE SUBJECT SOFTWARE RESULTS IN
ANY LIABILITIES , DEMANDS , DAMAGES , EXPENSES OR LOSSES ARISING FROM SUCH USE ,
-- INCLUDING ANY DAMAGES FROM PRODUCTS BASED ON, OR RESULTING FROM, RECIPIENT'S
-- USE OF THE SUBJECT SOFTWARE, RECIPIENT SHALL INDEMNIFY AND HOLD HARMLESS THE
UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY
PRIOR RECIPIENT , TO THE EXTENT PERMITTED BY LAW . RECIPIENT 'S
FOR ANY SUCH MATTER SHALL BE THE IMMEDIATE , UNILATERAL TERMINATION OF THIS
-- AGREEMENT.
--
-- | Test CoCoSpec language library.
module Main where
-- External imports
import Data.Either ( isLeft, isRight )
import Test.Framework ( Test, defaultMainWithOpts )
import Test.Framework.Providers.QuickCheck2 ( testProperty )
import Test.QuickCheck ( Property )
import Test.QuickCheck.Monadic ( assert, monadicIO, run )
-- Internal imports
import qualified Language.CoCoSpec.ParCoCoSpec as CoCoSpec ( myLexer,
pBoolSpec )
| Run all unit tests for the parser .
main :: IO ()
main =
defaultMainWithOpts tests mempty
| All unit tests for the parser .
tests :: [Test.Framework.Test]
tests =
[ testProperty "Parse CoCoSpec (correct case)" propParseCoCoSpecOk
, testProperty "Parse CoCoSpec (incorrect case)" propParseCoCoSpecFail
]
-- | Test the CoCoSpec parser on a well-formed boolean specification.
propParseCoCoSpecOk :: Property
propParseCoCoSpecOk = monadicIO $ do
content <- run $ readFile "tests/cocospec_good"
let program = CoCoSpec.pBoolSpec $ CoCoSpec.myLexer content
assert (isRight program)
-- | Test the CoCoSpec parser on an incorrect boolean specification.
propParseCoCoSpecFail :: Property
propParseCoCoSpecFail = monadicIO $ do
content <- run $ readFile "tests/cocospec_bad"
let program = CoCoSpec.pBoolSpec $ CoCoSpec.myLexer content
assert (isLeft program)
| null | https://raw.githubusercontent.com/nasa/ogma/825129e4133ac6c2c506676a3f197a14a35dc3e2/ogma-language-cocospec/tests/Main.hs | haskell |
Disclaimers
SPECIFICATIONS, ANY IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE, OR FREEDOM FROM INFRINGEMENT, ANY WARRANTY THAT THE
SUBJECT SOFTWARE WILL BE ERROR FREE, OR ANY WARRANTY THAT DOCUMENTATION, IF
ANY MANNER, CONSTITUTE AN ENDORSEMENT BY GOVERNMENT AGENCY OR ANY PRIOR
GOVERNMENT AGENCY DISCLAIMS ALL WARRANTIES AND LIABILITIES REGARDING
IT "AS IS."
Waiver and Indemnity: RECIPIENT AGREES TO WAIVE ANY AND ALL CLAIMS AGAINST
ANY PRIOR RECIPIENT. IF RECIPIENT'S USE OF THE SUBJECT SOFTWARE RESULTS IN
INCLUDING ANY DAMAGES FROM PRODUCTS BASED ON, OR RESULTING FROM, RECIPIENT'S
USE OF THE SUBJECT SOFTWARE, RECIPIENT SHALL INDEMNIFY AND HOLD HARMLESS THE
AGREEMENT.
| Test CoCoSpec language library.
External imports
Internal imports
| Test the CoCoSpec parser on a well-formed boolean specification.
| Test the CoCoSpec parser on an incorrect boolean specification. | Copyright 2020 United States Government as represented by the Administrator
of the National Aeronautics and Space Administration . All Rights Reserved .
No Warranty : THE SUBJECT SOFTWARE IS PROVIDED " AS IS " WITHOUT ANY WARRANTY
OF ANY KIND , EITHER EXPRESSED , IMPLIED , OR STATUTORY , INCLUDING , BUT NOT
LIMITED TO , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL CONFORM TO
PROVIDED , WILL CONFORM TO THE SUBJECT SOFTWARE . THIS AGREEMENT DOES NOT , IN
RECIPIENT OF ANY RESULTS , RESULTING DESIGNS , HARDWARE , SOFTWARE PRODUCTS OR
ANY OTHER APPLICATIONS RESULTING FROM USE OF THE SUBJECT SOFTWARE . ,
THIRD - PARTY SOFTWARE , IF PRESENT IN THE ORIGINAL SOFTWARE , AND DISTRIBUTES
THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS
ANY LIABILITIES , DEMANDS , DAMAGES , EXPENSES OR LOSSES ARISING FROM SUCH USE ,
UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY
PRIOR RECIPIENT , TO THE EXTENT PERMITTED BY LAW . RECIPIENT 'S
FOR ANY SUCH MATTER SHALL BE THE IMMEDIATE , UNILATERAL TERMINATION OF THIS
module Main where
import Data.Either ( isLeft, isRight )
import Test.Framework ( Test, defaultMainWithOpts )
import Test.Framework.Providers.QuickCheck2 ( testProperty )
import Test.QuickCheck ( Property )
import Test.QuickCheck.Monadic ( assert, monadicIO, run )
import qualified Language.CoCoSpec.ParCoCoSpec as CoCoSpec ( myLexer,
pBoolSpec )
| Run all unit tests for the parser .
main :: IO ()
main =
defaultMainWithOpts tests mempty
| All unit tests for the parser .
tests :: [Test.Framework.Test]
tests =
[ testProperty "Parse CoCoSpec (correct case)" propParseCoCoSpecOk
, testProperty "Parse CoCoSpec (incorrect case)" propParseCoCoSpecFail
]
propParseCoCoSpecOk :: Property
propParseCoCoSpecOk = monadicIO $ do
content <- run $ readFile "tests/cocospec_good"
let program = CoCoSpec.pBoolSpec $ CoCoSpec.myLexer content
assert (isRight program)
propParseCoCoSpecFail :: Property
propParseCoCoSpecFail = monadicIO $ do
content <- run $ readFile "tests/cocospec_bad"
let program = CoCoSpec.pBoolSpec $ CoCoSpec.myLexer content
assert (isLeft program)
|
408e31304a5dd259f7ddd075801e565410144af9f657f0c6594e9423d6a43362 | fluree/ledger | sparql.clj | (ns fluree.db.ledger.docs.query.sparql
(:require [clojure.test :refer :all]
[fluree.db.test-helpers :as test]
[fluree.db.ledger.docs.getting-started.basic-schema :as basic]
[fluree.db.api :as fdb]
[clojure.core.async :as async]))
(use-fixtures :once test/test-system-deprecated)
(deftest basic-sparql
(testing "SPARQL query with two-triple WHERE clause")
(let [sparql-query "SELECT ?person ?fullName \nWHERE {\n ?person fd:person/handle \"jdoe\".\n ?person fd:person/fullName ?fullName.\n}"
db (basic/get-db test/ledger-chat)
res (first (async/<!! (fdb/sparql-async db sparql-query)))]
(is (= (first res) 351843720888320))
(is (= (last res) "Jane Doe"))))
(deftest sparql-max-function-in-select
(testing "SPARQL query MAX function as selector")
(let [sparql-query "SELECT ?fullName (MAX(?favNums) AS ?max)\nWHERE {\n ?person fd:person/favNums ?favNums.\n ?person fd:person/fullName ?fullName\n}\n"
db (basic/get-db test/ledger-chat)
res (async/<!! (fdb/sparql-async db sparql-query))]
(is (every? #(and (string? (first %)) (number? (last %))) res))))
(deftest sparql-multi-clause-with-semicolon
(testing "SPARQL query with where clauses separated by semicolon")
(let [sparql-query "SELECT ?person ?fullName ?favNums\nWHERE {\n ?person fd:person/handle \"jdoe\";\n fd:person/fullName ?fullName;\n fd:person/favNums ?favNums.\n}"
db (basic/get-db test/ledger-chat)
res (async/<!! (fdb/sparql-async db sparql-query))]
(is (every? #(= (count %) 3) res))
(is (every? #(and (= 351843720888320 (first %)) (= "Jane Doe" (second %)) (number? (last %))) res))))
(deftest sparql-clause-with-comma
(testing "SPARQL query with same subject/predicate, using commas to separate different objects")
(let [sparql-query "SELECT ?person\nWHERE {\n ?person fd:person/handle \"jdoe\", \"zsmith\".\n}\n"
db (basic/get-db test/ledger-chat)
res (async/<!! (fdb/sparql-async db sparql-query))]
This test is very stupid because we do n't have a great example for a ? person with two same - subject - predicate objects
(is (empty? res))))
(deftest sparql-groupBy-having
(testing "SPARQL query with GROUP BY and HAVING"
(let [sparql-query "SELECT (SUM(?favNums) AS ?sumNums)\n WHERE {\n ?e fdb:person/favNums ?favNums. \n } \n GROUP BY ?e \n HAVING(SUM(?favNums) > 1000)"
db (basic/get-db test/ledger-chat)
res (async/<!! (fdb/sparql-async db sparql-query))
summed-vals (-> res vals flatten)]
(is (every? #(> % 1000) summed-vals)))))
(deftest sparql-test
(basic-sparql)
(sparql-max-function-in-select)
(sparql-multi-clause-with-semicolon)
(sparql-clause-with-comma)
(sparql-groupBy-having))
(deftest tests-independent
(basic/add-collections*)
(basic/add-predicates)
(basic/add-sample-data)
(basic/graphql-txn)
(sparql-test))
| null | https://raw.githubusercontent.com/fluree/ledger/26a0745e4edc3c00ff46533b62276fb7a3ea9da4/test/fluree/db/ledger/docs/query/sparql.clj | clojure | (ns fluree.db.ledger.docs.query.sparql
(:require [clojure.test :refer :all]
[fluree.db.test-helpers :as test]
[fluree.db.ledger.docs.getting-started.basic-schema :as basic]
[fluree.db.api :as fdb]
[clojure.core.async :as async]))
(use-fixtures :once test/test-system-deprecated)
(deftest basic-sparql
(testing "SPARQL query with two-triple WHERE clause")
(let [sparql-query "SELECT ?person ?fullName \nWHERE {\n ?person fd:person/handle \"jdoe\".\n ?person fd:person/fullName ?fullName.\n}"
db (basic/get-db test/ledger-chat)
res (first (async/<!! (fdb/sparql-async db sparql-query)))]
(is (= (first res) 351843720888320))
(is (= (last res) "Jane Doe"))))
(deftest sparql-max-function-in-select
(testing "SPARQL query MAX function as selector")
(let [sparql-query "SELECT ?fullName (MAX(?favNums) AS ?max)\nWHERE {\n ?person fd:person/favNums ?favNums.\n ?person fd:person/fullName ?fullName\n}\n"
db (basic/get-db test/ledger-chat)
res (async/<!! (fdb/sparql-async db sparql-query))]
(is (every? #(and (string? (first %)) (number? (last %))) res))))
(deftest sparql-multi-clause-with-semicolon
(testing "SPARQL query with where clauses separated by semicolon")
(let [sparql-query "SELECT ?person ?fullName ?favNums\nWHERE {\n ?person fd:person/handle \"jdoe\";\n fd:person/fullName ?fullName;\n fd:person/favNums ?favNums.\n}"
db (basic/get-db test/ledger-chat)
res (async/<!! (fdb/sparql-async db sparql-query))]
(is (every? #(= (count %) 3) res))
(is (every? #(and (= 351843720888320 (first %)) (= "Jane Doe" (second %)) (number? (last %))) res))))
(deftest sparql-clause-with-comma
(testing "SPARQL query with same subject/predicate, using commas to separate different objects")
(let [sparql-query "SELECT ?person\nWHERE {\n ?person fd:person/handle \"jdoe\", \"zsmith\".\n}\n"
db (basic/get-db test/ledger-chat)
res (async/<!! (fdb/sparql-async db sparql-query))]
This test is very stupid because we do n't have a great example for a ? person with two same - subject - predicate objects
(is (empty? res))))
(deftest sparql-groupBy-having
(testing "SPARQL query with GROUP BY and HAVING"
(let [sparql-query "SELECT (SUM(?favNums) AS ?sumNums)\n WHERE {\n ?e fdb:person/favNums ?favNums. \n } \n GROUP BY ?e \n HAVING(SUM(?favNums) > 1000)"
db (basic/get-db test/ledger-chat)
res (async/<!! (fdb/sparql-async db sparql-query))
summed-vals (-> res vals flatten)]
(is (every? #(> % 1000) summed-vals)))))
(deftest sparql-test
(basic-sparql)
(sparql-max-function-in-select)
(sparql-multi-clause-with-semicolon)
(sparql-clause-with-comma)
(sparql-groupBy-having))
(deftest tests-independent
(basic/add-collections*)
(basic/add-predicates)
(basic/add-sample-data)
(basic/graphql-txn)
(sparql-test))
| |
8230f769c90a12f3fa84930cf6e7ea673a7894691354f64a2377b22a665ed337 | uim/uim | dict-socket.scm | ;;; dict.scm: rfc2229 (a dictionary server protocol) for uim.
;;;
Copyright ( c ) 2010 - 2013 uim Project
;;;
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
1 . Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
2 . Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
3 . Neither the name of authors nor the names of its contributors
;;; may be used to endorse or promote products derived from this software
;;; without specific prior written permission.
;;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ` ` AS IS '' AND
;;; ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
;;; OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
;;; LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
;;; OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
;;; SUCH DAMAGE.
;;;;
(require-extension (srfi 1 8))
(require "util.scm")
(require "i18n.scm")
(require "socket.scm")
(require "fileio.scm")
(require "lolevel.scm")
(require "input-parse.scm")
(define $DICT-DEFAULT-PORT 2628)
(define (dict-server-error-responce? responce)
(define dict-server-typical-errors '(500 501 502 503 420 421))
(let ((errno (string->number responce)))
(and (not errno)
(find (lambda (n) (= errno n)) dict-server-typical-errors))))
(define (dict-server-build-message command . messages)
(string-append
(string-join (append (list command) messages) " ")
"\r\n"))
(define (dict-server-parse-responce line)
(define numbers '(#\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9))
(call-with-input-string
line
(lambda (port)
(let* ((responce (next-token-of numbers port))
(skip (skip-while '(#\space) port))
(message (next-token '(#\space) '(#\return *eof*) (N_ "dict: Invalid message") port)))
(values responce message)))))
(define (dict-server-get-1yz port)
(let loop ((line (file-read-line port))
(rest ""))
(if (string=? line ".\r")
(begin
(append (list rest) (dict-server-get-message port)))
(loop (file-read-line port) (string-append rest line)))))
(define (dict-server-get-message port)
(let* ((line (file-read-line port)))
(receive (responce message)
(dict-server-parse-responce line)
(cond ((dict-server-error-responce? responce)
(uim-notify-fatal (format "dict (~a): ~a" (_ "Error Response") message)))
((string=? "151" responce)
(dict-server-get-1yz port))
((string=? "150" responce)
(let* ((responce-line (file-read-line port)))
(receive (responce message)
(dict-server-parse-responce responce-line)
(if (string=? "151" (substring responce-line 0 3))
(dict-server-get-1yz port)
(uim-notify-fatal (format "dict (~a): ~a" (_ "Error Response") message))))))
((string=? "2" (substring responce 0 1))
'())
((string=? "4" (substring responce 0 1))
'())
((string=? "5" (substring responce 0 1))
'())
(else
(uim-notify-fatal (format "~a ~a" (_ "dict: Protocol error") message)))))))
(define (dict-server-parse-banner port)
get 1yz type message , maybe
(define (dict-server-open hostname . args)
(let-optionals* args ((servname $DICT-DEFAULT-PORT))
(let ((fd (tcp-connect hostname servname)))
(if (not fd)
(uim-notify-fatal (N_ "dict: cannot connect server")))
(let ((port (open-file-port fd)))
(dict-server-parse-banner port)
port))))
(define (dict-server-get-dictionary-list port)
(file-display (dict-server-build-message "SHOW" "DB") port)
(dict-server-get-message port))
(define (dict-server-get-define port database word)
(file-display (dict-server-build-message "DEFINE" database word) port)
(dict-server-get-message port))
(define (dict-server-close port)
(file-display (dict-server-build-message "Q") port)
(dict-server-get-message port) ;; bye
(close-file-port port))
| null | https://raw.githubusercontent.com/uim/uim/d1ac9d9315ff8c57c713b502544fef9b3a83b3e5/scm/dict-socket.scm | scheme | dict.scm: rfc2229 (a dictionary server protocol) for uim.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
may be used to endorse or promote products derived from this software
without specific prior written permission.
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
bye | Copyright ( c ) 2010 - 2013 uim Project
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . Neither the name of authors nor the names of its contributors
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ` ` AS IS '' AND
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
(require-extension (srfi 1 8))
(require "util.scm")
(require "i18n.scm")
(require "socket.scm")
(require "fileio.scm")
(require "lolevel.scm")
(require "input-parse.scm")
(define $DICT-DEFAULT-PORT 2628)
(define (dict-server-error-responce? responce)
(define dict-server-typical-errors '(500 501 502 503 420 421))
(let ((errno (string->number responce)))
(and (not errno)
(find (lambda (n) (= errno n)) dict-server-typical-errors))))
(define (dict-server-build-message command . messages)
(string-append
(string-join (append (list command) messages) " ")
"\r\n"))
(define (dict-server-parse-responce line)
(define numbers '(#\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9))
(call-with-input-string
line
(lambda (port)
(let* ((responce (next-token-of numbers port))
(skip (skip-while '(#\space) port))
(message (next-token '(#\space) '(#\return *eof*) (N_ "dict: Invalid message") port)))
(values responce message)))))
(define (dict-server-get-1yz port)
(let loop ((line (file-read-line port))
(rest ""))
(if (string=? line ".\r")
(begin
(append (list rest) (dict-server-get-message port)))
(loop (file-read-line port) (string-append rest line)))))
(define (dict-server-get-message port)
(let* ((line (file-read-line port)))
(receive (responce message)
(dict-server-parse-responce line)
(cond ((dict-server-error-responce? responce)
(uim-notify-fatal (format "dict (~a): ~a" (_ "Error Response") message)))
((string=? "151" responce)
(dict-server-get-1yz port))
((string=? "150" responce)
(let* ((responce-line (file-read-line port)))
(receive (responce message)
(dict-server-parse-responce responce-line)
(if (string=? "151" (substring responce-line 0 3))
(dict-server-get-1yz port)
(uim-notify-fatal (format "dict (~a): ~a" (_ "Error Response") message))))))
((string=? "2" (substring responce 0 1))
'())
((string=? "4" (substring responce 0 1))
'())
((string=? "5" (substring responce 0 1))
'())
(else
(uim-notify-fatal (format "~a ~a" (_ "dict: Protocol error") message)))))))
(define (dict-server-parse-banner port)
get 1yz type message , maybe
(define (dict-server-open hostname . args)
(let-optionals* args ((servname $DICT-DEFAULT-PORT))
(let ((fd (tcp-connect hostname servname)))
(if (not fd)
(uim-notify-fatal (N_ "dict: cannot connect server")))
(let ((port (open-file-port fd)))
(dict-server-parse-banner port)
port))))
(define (dict-server-get-dictionary-list port)
(file-display (dict-server-build-message "SHOW" "DB") port)
(dict-server-get-message port))
(define (dict-server-get-define port database word)
(file-display (dict-server-build-message "DEFINE" database word) port)
(dict-server-get-message port))
(define (dict-server-close port)
(file-display (dict-server-build-message "Q") port)
(close-file-port port))
|
8d889ae83c007b4e6eeb068ab88a349a596c2947643fc4133f1eee6d2e66d60b | emc2/clash-riscv | FP.hs | Copyright ( c ) 2017 . All rights reserved .
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
1 . Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
2 . Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
3 . Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ` ` AS IS ''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -Wall -Werror #-}
# LANGUAGE DataKinds , TypeFamilies #
module RISCV.ISA.Registers.FP(
FPReg(..)
) where
import Prelude
import CLaSH.Class.BitPack
-- | RISC-V floating-point registers.
data FPReg =
FT0
| FT1
| FT2
| FT3
| FT4
| FT5
| FT6
| FT7
| FS0
| FS1
| FA0
| FA1
| FA2
| FA3
| FA4
| FA5
| FA6
| FA7
| FS2
| FS3
| FS4
| FS5
| FS6
| FS7
| FS8
| FS9
| FS10
| FS11
| FT8
| FT9
| FT10
| FT11
deriving (Eq, Ord, Show)
instance Enum FPReg where
fromEnum FT0 = 0x00
fromEnum FT1 = 0x01
fromEnum FT2 = 0x02
fromEnum FT3 = 0x03
fromEnum FT4 = 0x04
fromEnum FT5 = 0x05
fromEnum FT6 = 0x06
fromEnum FT7 = 0x07
fromEnum FS0 = 0x08
fromEnum FS1 = 0x09
fromEnum FA0 = 0x0a
fromEnum FA1 = 0x0b
fromEnum FA2 = 0x0c
fromEnum FA3 = 0x0d
fromEnum FA4 = 0x0e
fromEnum FA5 = 0x0f
fromEnum FA6 = 0x10
fromEnum FA7 = 0x11
fromEnum FS2 = 0x12
fromEnum FS3 = 0x13
fromEnum FS4 = 0x14
fromEnum FS5 = 0x15
fromEnum FS6 = 0x16
fromEnum FS7 = 0x17
fromEnum FS8 = 0x18
fromEnum FS9 = 0x19
fromEnum FS10 = 0x1a
fromEnum FS11 = 0x1b
fromEnum FT8 = 0x1c
fromEnum FT9 = 0x1d
fromEnum FT10 = 0x1e
fromEnum FT11 = 0x1f
toEnum 0x00 = FT0
toEnum 0x01 = FT1
toEnum 0x02 = FT2
toEnum 0x03 = FT3
toEnum 0x04 = FT4
toEnum 0x05 = FT5
toEnum 0x06 = FT6
toEnum 0x07 = FT7
toEnum 0x08 = FS0
toEnum 0x09 = FS1
toEnum 0x0a = FA0
toEnum 0x0b = FA1
toEnum 0x0c = FA2
toEnum 0x0d = FA3
toEnum 0x0e = FA4
toEnum 0x0f = FA5
toEnum 0x10 = FA6
toEnum 0x11 = FA7
toEnum 0x12 = FS2
toEnum 0x13 = FS3
toEnum 0x14 = FS4
toEnum 0x15 = FS5
toEnum 0x16 = FS6
toEnum 0x17 = FS7
toEnum 0x18 = FS8
toEnum 0x19 = FS9
toEnum 0x1a = FS10
toEnum 0x1b = FS11
toEnum 0x1c = FT8
toEnum 0x1d = FT9
toEnum 0x1e = FT10
toEnum 0x1f = FT11
toEnum _ = error "Invalid register ID"
instance BitPack FPReg where
type BitSize FPReg = 5
pack = toEnum . fromEnum
unpack = toEnum . fromEnum
| null | https://raw.githubusercontent.com/emc2/clash-riscv/e7404e5f9ff6b1eb22274bf7daa34bbd2768f2a3/src/RISCV/ISA/Registers/FP.hs | haskell |
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
may be used to endorse or promote products derived from this software
without specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
# OPTIONS_GHC -Wall -Werror #
| RISC-V floating-point registers. | Copyright ( c ) 2017 . All rights reserved .
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . Neither the name of the author nor the names of any contributors
THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ` ` AS IS ''
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
# LANGUAGE DataKinds , TypeFamilies #
module RISCV.ISA.Registers.FP(
FPReg(..)
) where
import Prelude
import CLaSH.Class.BitPack
data FPReg =
FT0
| FT1
| FT2
| FT3
| FT4
| FT5
| FT6
| FT7
| FS0
| FS1
| FA0
| FA1
| FA2
| FA3
| FA4
| FA5
| FA6
| FA7
| FS2
| FS3
| FS4
| FS5
| FS6
| FS7
| FS8
| FS9
| FS10
| FS11
| FT8
| FT9
| FT10
| FT11
deriving (Eq, Ord, Show)
instance Enum FPReg where
fromEnum FT0 = 0x00
fromEnum FT1 = 0x01
fromEnum FT2 = 0x02
fromEnum FT3 = 0x03
fromEnum FT4 = 0x04
fromEnum FT5 = 0x05
fromEnum FT6 = 0x06
fromEnum FT7 = 0x07
fromEnum FS0 = 0x08
fromEnum FS1 = 0x09
fromEnum FA0 = 0x0a
fromEnum FA1 = 0x0b
fromEnum FA2 = 0x0c
fromEnum FA3 = 0x0d
fromEnum FA4 = 0x0e
fromEnum FA5 = 0x0f
fromEnum FA6 = 0x10
fromEnum FA7 = 0x11
fromEnum FS2 = 0x12
fromEnum FS3 = 0x13
fromEnum FS4 = 0x14
fromEnum FS5 = 0x15
fromEnum FS6 = 0x16
fromEnum FS7 = 0x17
fromEnum FS8 = 0x18
fromEnum FS9 = 0x19
fromEnum FS10 = 0x1a
fromEnum FS11 = 0x1b
fromEnum FT8 = 0x1c
fromEnum FT9 = 0x1d
fromEnum FT10 = 0x1e
fromEnum FT11 = 0x1f
toEnum 0x00 = FT0
toEnum 0x01 = FT1
toEnum 0x02 = FT2
toEnum 0x03 = FT3
toEnum 0x04 = FT4
toEnum 0x05 = FT5
toEnum 0x06 = FT6
toEnum 0x07 = FT7
toEnum 0x08 = FS0
toEnum 0x09 = FS1
toEnum 0x0a = FA0
toEnum 0x0b = FA1
toEnum 0x0c = FA2
toEnum 0x0d = FA3
toEnum 0x0e = FA4
toEnum 0x0f = FA5
toEnum 0x10 = FA6
toEnum 0x11 = FA7
toEnum 0x12 = FS2
toEnum 0x13 = FS3
toEnum 0x14 = FS4
toEnum 0x15 = FS5
toEnum 0x16 = FS6
toEnum 0x17 = FS7
toEnum 0x18 = FS8
toEnum 0x19 = FS9
toEnum 0x1a = FS10
toEnum 0x1b = FS11
toEnum 0x1c = FT8
toEnum 0x1d = FT9
toEnum 0x1e = FT10
toEnum 0x1f = FT11
toEnum _ = error "Invalid register ID"
instance BitPack FPReg where
type BitSize FPReg = 5
pack = toEnum . fromEnum
unpack = toEnum . fromEnum
|
eb2a9116dd20bae809b72a15021b9c3554398ef12f62320c55663d2a198a8f63 | mauricioszabo/repl-tooling | connection.cljs | (ns repl-tooling.editor-integration.connection
(:require [reagent.core :as r]
[promesa.core :as p]
[clojure.string :as str]
[repl-tooling.editor-helpers :as helpers]
[repl-tooling.eval :as eval]
[repl-tooling.repl-client.clojure :as clj-repl]
[repl-tooling.editor-integration.evaluation :as e-eval]
[repl-tooling.editor-integration.autocomplete :as autocomplete]
[repl-tooling.integrations.repls :as repls]
[repl-tooling.editor-integration.renderer :as renderer]
[repl-tooling.editor-integration.schemas :as schemas]
[repl-tooling.repl-client.nrepl :as nrepl]
[repl-tooling.commands-to-repl.all-cmds :as cmds]
[repl-tooling.commands-to-repl.pathom :as pathom]
[repl-tooling.editor-integration.commands :as commands]
[schema.core :as s]
[repl-tooling.editor-integration.definition :as definition]
[repl-tooling.editor-integration.configs :as configs]
["fs" :refer [exists readFile existsSync]]
["path" :refer [join]]))
; FIXME: This only here because of tests
(defn disconnect!
"Disconnect all REPLs. Indempotent."
[]
(repls/disconnect! :clj-eval)
(repls/disconnect! :clj-aux)
(repls/disconnect! :cljs-aux)
(repls/disconnect! :cljs-eval))
(defn- features-for [state {:keys [editor-data] :as opts} _repl-kind]
{:autocomplete #(p/let [data (editor-data)]
(autocomplete/command state opts data))
; TODO: Deprecate this
:eval-and-render (fn eval-and-render
([code range] (eval-and-render code range nil))
([code range pass]
(p/let [data (editor-data)]
(cmds/eval-range state
data
(assoc opts :pass pass)
(constantly [range code])))))
:evaluate-and-render (fn [options]
(p/let [data (editor-data)
{:keys [text range]} options]
(cmds/eval-range state
data
(dissoc options :text :range)
(constantly [range text]))))
:eval (fn [options]
(let [code (:text options)
[[row col]] (:range options)
eval-opts (cond-> (dissoc options :text)
row (assoc :row row)
col (assoc :col col))]
(e-eval/eval-with-promise state code eval-opts)))
:result-for-renderer #(renderer/parse-result (:result %) (:repl %) state)
:go-to-var-definition #(definition/goto-definition state %)
:get-full-var-name #(cmds/fqn-for-var state)
:get-code #(e-eval/get-code state %)
:repl-for #(e-eval/repl-for state %1 %2)
:eql (pathom/eql-from-state state)})
(defn- file-exists? [file]
(js/Promise. (fn [resolve] (exists file resolve))))
(defn- read-file [editor-state file]
(let [run-callback (:run-callback @editor-state)
existing-file (->> (run-callback :get-config)
:project-paths
(cons ".")
(map #(join % file))
(filter #(existsSync %))
first)]
(js/Promise. (fn [resolve]
(if existing-file
(readFile existing-file (fn [error not-error]
(if error
(resolve nil)
(resolve (str not-error)))))
(resolve nil))))))
(def ^:private default-opts
{:on-start-eval identity
:file-exists file-exists?
:config-file-path nil
:register-commands identity
:open-editor identity
:get-rendered-results (constantly [])
:on-copy identity
:on-eval identity
:on-result identity
:on-stdout identity
:on-stderr identity
:editor-data identity
:notify identity
:get-config (constantly {:project-paths [], :eval-mode :prefer-clj})
:prompt (fn [ & _] (js/Promise. (fn [])))})
(defn- swap-state! [state options kind]
(p/let [cmds (cmds/all state options kind)
feats (features-for state options kind)]
(swap! state assoc
:editor/features feats
:run-callback (partial commands/run-callback! state)
:run-feature (partial commands/run-feature! state))
(swap! state update-in [:editor/callbacks :read-file]
#(or % (partial read-file state)))
(configs/prepare-commands state cmds)))
(defn connect-evaluator!
""
[evaluators opts]
(js/Promise.
(fn [resolve]
(let [state (atom evaluators)
options (merge default-opts opts)]
; TODO: Check this last parameter
(swap-state! state options :clj)
(resolve state)))))
(defn- tr-kind [kind]
(let [kinds {:clj "Clojure"
:cljs "ClojureScript"
:cljr "ClojureCLR"
:clje "Clojerl"
:bb "Babaska"}]
(kinds kind (-> kind name str/capitalize))))
(defn- prepare-cljs [primary host port state options]
(swap! state merge {:cljs/repl primary
:repl/info {:host host :port port :kind :cljs :kind-name (tr-kind :cljs)}})
(eval/eval primary "(set! lumo.repl/*pprint-results* false)" {:ignore true})
(swap-state! state options :cljs))
(defn- prepare-joker [primary host port state options]
(swap! state merge {:clj/repl primary
:clj/aux primary
:repl/info {:host host :port port
:kind :joker :kind-name (tr-kind :joker)}})
(swap-state! state options :joker))
(defn- prepare-generic [primary aux host port state options kind]
(when (= :clj kind)
( clj - repl / disable - limits ! primary )
(eval/evaluate aux ":aux-connected" {:ignore true} #(clj-repl/disable-limits! aux)))
(swap! state merge {:clj/aux aux
:clj/repl primary
:repl/info {:host host :port port :kind kind :kind-name (tr-kind kind)}})
(swap-state! state options kind))
(defn- connection-error! [error notify]
(disconnect!)
(if (= "ECONNREFUSED" error)
(notify {:type :error
:title "REPL not connected"
:message (str "Connection refused. Ensure that you have a "
"Socket REPL started on this host/port")})
(do
(notify {:type :error
:title "REPL not connected"
:message (str "Unknown error while connecting to the REPL: "
error)})
(.error js/console error)))
nil)
(defn- callback-fn [state output]
(let [{:keys [on-stdout on-stderr on-result on-disconnect on-patch]}
(:editor/callbacks @state)]
(when (and (nil? output) on-disconnect)
(cmds/handle-disconnect! state)
(on-disconnect))
(when-let [out (:out output)] (and on-stdout (on-stdout out)))
(when-let [out (:err output)] (and on-stderr (on-stderr out)))
(when (and on-result (or (contains? output :result)
(contains? output :error)))
(on-result (helpers/parse-result output)))
(when-let [patch (:patch output)]
(on-patch (update patch :result helpers/parse-result)))))
(defn- find-patch [id maybe-coll]
(let [elem (if (instance? reagent.ratom/RAtom maybe-coll)
(dissoc @maybe-coll :editor-state :repl)
maybe-coll)]
(if (and (instance? renderer/Patchable elem)
(= id (:id elem)))
maybe-coll
(when (coll? elem)
(->> elem
(map #(find-patch id %))
flatten
(filter identity))))))
(defn- prepare-patch [{:keys [on-patch get-rendered-results] :as callbacks}]
(if on-patch
callbacks
(assoc callbacks
:on-patch (fn [{:keys [id result]}]
(doseq [patchable (find-patch id (get-rendered-results))]
(swap! patchable assoc :value
(renderer/parse-result result
(:repl @patchable)
(:editor-state @patchable))))))))
(defn- callback-aux [original-callback]
(fn [msg]
(if (or (:out msg) (:err msg))
(when helpers/*out-on-aux* (original-callback msg))
(original-callback msg))))
; Config Options:
; {:project-paths [...]
; :eval-mode (enum :clj :cljs :prefer-clj :prefer-cljs)}
(s/defn connect!
"Connects to a clojure-like REPL that supports the socket REPL protocol.
Expects host, port, and some callbacks:
* on-start-eval -> a function that'll be called when an evaluation starts
* on-eval -> a function that'll be called when an evaluation ends
* editor-data -> a function that'll be called when a command needs editor's data.
Editor's data is a map (or a promise that resolves to a map) with the arguments:
:contents - the editor's contents.
:filename - the current file's name. Can be nil if file was not saved yet.
:range - a vector containing [[start-row start-col] [end-row end-col]], representing
the current selection
* open-editor -> asks the editor to open an editor. Expects a map with `:filename`,
`:line` and maybe `:contents`. If there's `:contents` key, it defines a \"virtual
file\" so it's better to open up an read-only editor
* notify -> when something needs to be notified, this function will be called with a map
containing :type (one of :info, :warning, or :error), :title and :message
* get-config -> when some function needs the configuration from the editor, this fn
is called without arguments. Need to return a map with the config options.
* get-rendered-results -> gets all results that are rendered on the editor. This is
used so that the REPL can 'patch' these results when new data appears (think
of resolving promises in JS)
* on-patch -> patches the result. Optional, if you send a :get-rendered-results
callback, one will be generated for you
* prompt -> when some function needs an answer from the editor, it'll call this
callback passing :title, :message, and :arguments (a vector that is composed by
:key and :value). The callback needs to return a `Promise` with one of the
:key from the :arguments, or nil if nothing was selected.
* on-copy -> a function that receives a string and copies its contents to clipboard
* on-stdout -> a function that receives a string when some code prints to stdout
* on-stderr -> a function that receives a string when some code prints to stderr
* on-result -> returns a clojure EDN with the result of code
* on-disconnect -> called with no arguments, will disconnect REPLs. Can be called more
than once
Returns a promise that will resolve to a map with two repls: :clj/aux will be used
to autocomplete/etc, :clj/repl will be used to evaluate code."
[host :- s/Str
port :- s/Int
{:keys [notify] :as opts} :- s/Any]
(p/catch
(p/let [options (-> default-opts (merge opts) prepare-patch)
state (atom {:editor/callbacks options})
callback (partial callback-fn state)
[kind primary] (repls/connect-repl! :clj-eval host port callback)
_ (eval/eval primary "1234")
_ (case kind
:cljs (prepare-cljs primary host port state options)
:joker (prepare-joker primary host port state options)
(p/let [[_ aux] (repls/connect-repl! :clj-aux host port (callback-aux callback))]
(prepare-generic primary aux host port state options kind)))
nrepl? (instance? nrepl/Evaluator primary)]
(do
(notify {:type :info :title (str (tr-kind kind)
(if nrepl? " nREPL" " socket REPL")
" Connected")})
state))
#(connection-error! % notify)))
(defn connect-callbacks!
"Connects callbacks only, for commands that can work without a REPL."
[callbacks]
(let [options (merge default-opts callbacks)
state-ish (atom {:editor/callbacks options})
callback-cmds (commands/->Callbacks state-ish)]
(swap! state-ish assoc
:editor/features {:result-for-renderer
#(renderer/parse-result (:result %)
(:repl %)
state-ish)
;; FIXME: Re-add pathom without REPL
:eql (constantly nil)} ;(partial pathom/eql {:callbacks options})}
:run-callback (partial commands/run-callback! callback-cmds)
:run-feature (partial commands/run-feature! callback-cmds))
((:register-commands options) (cmds/static-commands state-ish))
state-ish))
| null | https://raw.githubusercontent.com/mauricioszabo/repl-tooling/1cea9b411cc118d71266cb8e035e146325baf410/src/repl_tooling/editor_integration/connection.cljs | clojure | FIXME: This only here because of tests
TODO: Deprecate this
TODO: Check this last parameter
Config Options:
{:project-paths [...]
:eval-mode (enum :clj :cljs :prefer-clj :prefer-cljs)}
FIXME: Re-add pathom without REPL
(partial pathom/eql {:callbacks options})} | (ns repl-tooling.editor-integration.connection
(:require [reagent.core :as r]
[promesa.core :as p]
[clojure.string :as str]
[repl-tooling.editor-helpers :as helpers]
[repl-tooling.eval :as eval]
[repl-tooling.repl-client.clojure :as clj-repl]
[repl-tooling.editor-integration.evaluation :as e-eval]
[repl-tooling.editor-integration.autocomplete :as autocomplete]
[repl-tooling.integrations.repls :as repls]
[repl-tooling.editor-integration.renderer :as renderer]
[repl-tooling.editor-integration.schemas :as schemas]
[repl-tooling.repl-client.nrepl :as nrepl]
[repl-tooling.commands-to-repl.all-cmds :as cmds]
[repl-tooling.commands-to-repl.pathom :as pathom]
[repl-tooling.editor-integration.commands :as commands]
[schema.core :as s]
[repl-tooling.editor-integration.definition :as definition]
[repl-tooling.editor-integration.configs :as configs]
["fs" :refer [exists readFile existsSync]]
["path" :refer [join]]))
(defn disconnect!
"Disconnect all REPLs. Indempotent."
[]
(repls/disconnect! :clj-eval)
(repls/disconnect! :clj-aux)
(repls/disconnect! :cljs-aux)
(repls/disconnect! :cljs-eval))
(defn- features-for [state {:keys [editor-data] :as opts} _repl-kind]
{:autocomplete #(p/let [data (editor-data)]
(autocomplete/command state opts data))
:eval-and-render (fn eval-and-render
([code range] (eval-and-render code range nil))
([code range pass]
(p/let [data (editor-data)]
(cmds/eval-range state
data
(assoc opts :pass pass)
(constantly [range code])))))
:evaluate-and-render (fn [options]
(p/let [data (editor-data)
{:keys [text range]} options]
(cmds/eval-range state
data
(dissoc options :text :range)
(constantly [range text]))))
:eval (fn [options]
(let [code (:text options)
[[row col]] (:range options)
eval-opts (cond-> (dissoc options :text)
row (assoc :row row)
col (assoc :col col))]
(e-eval/eval-with-promise state code eval-opts)))
:result-for-renderer #(renderer/parse-result (:result %) (:repl %) state)
:go-to-var-definition #(definition/goto-definition state %)
:get-full-var-name #(cmds/fqn-for-var state)
:get-code #(e-eval/get-code state %)
:repl-for #(e-eval/repl-for state %1 %2)
:eql (pathom/eql-from-state state)})
(defn- file-exists? [file]
(js/Promise. (fn [resolve] (exists file resolve))))
(defn- read-file [editor-state file]
(let [run-callback (:run-callback @editor-state)
existing-file (->> (run-callback :get-config)
:project-paths
(cons ".")
(map #(join % file))
(filter #(existsSync %))
first)]
(js/Promise. (fn [resolve]
(if existing-file
(readFile existing-file (fn [error not-error]
(if error
(resolve nil)
(resolve (str not-error)))))
(resolve nil))))))
(def ^:private default-opts
{:on-start-eval identity
:file-exists file-exists?
:config-file-path nil
:register-commands identity
:open-editor identity
:get-rendered-results (constantly [])
:on-copy identity
:on-eval identity
:on-result identity
:on-stdout identity
:on-stderr identity
:editor-data identity
:notify identity
:get-config (constantly {:project-paths [], :eval-mode :prefer-clj})
:prompt (fn [ & _] (js/Promise. (fn [])))})
(defn- swap-state! [state options kind]
(p/let [cmds (cmds/all state options kind)
feats (features-for state options kind)]
(swap! state assoc
:editor/features feats
:run-callback (partial commands/run-callback! state)
:run-feature (partial commands/run-feature! state))
(swap! state update-in [:editor/callbacks :read-file]
#(or % (partial read-file state)))
(configs/prepare-commands state cmds)))
(defn connect-evaluator!
""
[evaluators opts]
(js/Promise.
(fn [resolve]
(let [state (atom evaluators)
options (merge default-opts opts)]
(swap-state! state options :clj)
(resolve state)))))
(defn- tr-kind [kind]
(let [kinds {:clj "Clojure"
:cljs "ClojureScript"
:cljr "ClojureCLR"
:clje "Clojerl"
:bb "Babaska"}]
(kinds kind (-> kind name str/capitalize))))
(defn- prepare-cljs [primary host port state options]
(swap! state merge {:cljs/repl primary
:repl/info {:host host :port port :kind :cljs :kind-name (tr-kind :cljs)}})
(eval/eval primary "(set! lumo.repl/*pprint-results* false)" {:ignore true})
(swap-state! state options :cljs))
(defn- prepare-joker [primary host port state options]
(swap! state merge {:clj/repl primary
:clj/aux primary
:repl/info {:host host :port port
:kind :joker :kind-name (tr-kind :joker)}})
(swap-state! state options :joker))
(defn- prepare-generic [primary aux host port state options kind]
(when (= :clj kind)
( clj - repl / disable - limits ! primary )
(eval/evaluate aux ":aux-connected" {:ignore true} #(clj-repl/disable-limits! aux)))
(swap! state merge {:clj/aux aux
:clj/repl primary
:repl/info {:host host :port port :kind kind :kind-name (tr-kind kind)}})
(swap-state! state options kind))
(defn- connection-error! [error notify]
(disconnect!)
(if (= "ECONNREFUSED" error)
(notify {:type :error
:title "REPL not connected"
:message (str "Connection refused. Ensure that you have a "
"Socket REPL started on this host/port")})
(do
(notify {:type :error
:title "REPL not connected"
:message (str "Unknown error while connecting to the REPL: "
error)})
(.error js/console error)))
nil)
(defn- callback-fn [state output]
(let [{:keys [on-stdout on-stderr on-result on-disconnect on-patch]}
(:editor/callbacks @state)]
(when (and (nil? output) on-disconnect)
(cmds/handle-disconnect! state)
(on-disconnect))
(when-let [out (:out output)] (and on-stdout (on-stdout out)))
(when-let [out (:err output)] (and on-stderr (on-stderr out)))
(when (and on-result (or (contains? output :result)
(contains? output :error)))
(on-result (helpers/parse-result output)))
(when-let [patch (:patch output)]
(on-patch (update patch :result helpers/parse-result)))))
(defn- find-patch [id maybe-coll]
(let [elem (if (instance? reagent.ratom/RAtom maybe-coll)
(dissoc @maybe-coll :editor-state :repl)
maybe-coll)]
(if (and (instance? renderer/Patchable elem)
(= id (:id elem)))
maybe-coll
(when (coll? elem)
(->> elem
(map #(find-patch id %))
flatten
(filter identity))))))
(defn- prepare-patch [{:keys [on-patch get-rendered-results] :as callbacks}]
(if on-patch
callbacks
(assoc callbacks
:on-patch (fn [{:keys [id result]}]
(doseq [patchable (find-patch id (get-rendered-results))]
(swap! patchable assoc :value
(renderer/parse-result result
(:repl @patchable)
(:editor-state @patchable))))))))
(defn- callback-aux [original-callback]
(fn [msg]
(if (or (:out msg) (:err msg))
(when helpers/*out-on-aux* (original-callback msg))
(original-callback msg))))
(s/defn connect!
"Connects to a clojure-like REPL that supports the socket REPL protocol.
Expects host, port, and some callbacks:
* on-start-eval -> a function that'll be called when an evaluation starts
* on-eval -> a function that'll be called when an evaluation ends
* editor-data -> a function that'll be called when a command needs editor's data.
Editor's data is a map (or a promise that resolves to a map) with the arguments:
:contents - the editor's contents.
:filename - the current file's name. Can be nil if file was not saved yet.
:range - a vector containing [[start-row start-col] [end-row end-col]], representing
the current selection
* open-editor -> asks the editor to open an editor. Expects a map with `:filename`,
`:line` and maybe `:contents`. If there's `:contents` key, it defines a \"virtual
file\" so it's better to open up an read-only editor
* notify -> when something needs to be notified, this function will be called with a map
containing :type (one of :info, :warning, or :error), :title and :message
* get-config -> when some function needs the configuration from the editor, this fn
is called without arguments. Need to return a map with the config options.
* get-rendered-results -> gets all results that are rendered on the editor. This is
used so that the REPL can 'patch' these results when new data appears (think
of resolving promises in JS)
* on-patch -> patches the result. Optional, if you send a :get-rendered-results
callback, one will be generated for you
* prompt -> when some function needs an answer from the editor, it'll call this
callback passing :title, :message, and :arguments (a vector that is composed by
:key and :value). The callback needs to return a `Promise` with one of the
:key from the :arguments, or nil if nothing was selected.
* on-copy -> a function that receives a string and copies its contents to clipboard
* on-stdout -> a function that receives a string when some code prints to stdout
* on-stderr -> a function that receives a string when some code prints to stderr
* on-result -> returns a clojure EDN with the result of code
* on-disconnect -> called with no arguments, will disconnect REPLs. Can be called more
than once
Returns a promise that will resolve to a map with two repls: :clj/aux will be used
to autocomplete/etc, :clj/repl will be used to evaluate code."
[host :- s/Str
port :- s/Int
{:keys [notify] :as opts} :- s/Any]
(p/catch
(p/let [options (-> default-opts (merge opts) prepare-patch)
state (atom {:editor/callbacks options})
callback (partial callback-fn state)
[kind primary] (repls/connect-repl! :clj-eval host port callback)
_ (eval/eval primary "1234")
_ (case kind
:cljs (prepare-cljs primary host port state options)
:joker (prepare-joker primary host port state options)
(p/let [[_ aux] (repls/connect-repl! :clj-aux host port (callback-aux callback))]
(prepare-generic primary aux host port state options kind)))
nrepl? (instance? nrepl/Evaluator primary)]
(do
(notify {:type :info :title (str (tr-kind kind)
(if nrepl? " nREPL" " socket REPL")
" Connected")})
state))
#(connection-error! % notify)))
(defn connect-callbacks!
"Connects callbacks only, for commands that can work without a REPL."
[callbacks]
(let [options (merge default-opts callbacks)
state-ish (atom {:editor/callbacks options})
callback-cmds (commands/->Callbacks state-ish)]
(swap! state-ish assoc
:editor/features {:result-for-renderer
#(renderer/parse-result (:result %)
(:repl %)
state-ish)
:run-callback (partial commands/run-callback! callback-cmds)
:run-feature (partial commands/run-feature! callback-cmds))
((:register-commands options) (cmds/static-commands state-ish))
state-ish))
|
206c5e5f76366f84431b80d7526325fbe57b916d3f7825b2b4fba5b91c9e8cab | camlspotter/ocaml-zippy-tutorial-in-japanese | w8.ml | < = Warning 8
| Some v -> v
(* Warning 8: this pattern-matching is not exhaustive.
Here is an example of a value that is not matched:
None
*)
| null | https://raw.githubusercontent.com/camlspotter/ocaml-zippy-tutorial-in-japanese/c6aeabc08b6e2289a0e66c5b94a89c6723d88a6a/t/warnings/w8.ml | ocaml | Warning 8: this pattern-matching is not exhaustive.
Here is an example of a value that is not matched:
None
| < = Warning 8
| Some v -> v
|
023f895a240d61e1ae1267d7b35ca7888487bdab5bd6364ea25de1a8aaf4583d | Cipherwraith/hblog | CSS.hs | module CSS where
import Data.List
import System.Directory
import System.FilePath
import Config
writeCSS :: IO ()
writeCSS = do
cssFiles <- getCSSFiles
let c = length cssFiles
mapM_ writeCSS' cssFiles
writeCSS' (path, content) = writeFile outPath content
where
outPath = replaceDirectory path htmlDirectory
getCSSFiles :: IO [(String, String)]
getCSSFiles = do
dirContents <- getDirectoryContents cssDirectory
let files = map (cssDirectory </>) . delete "." . delete ".." $ dirContents
fileContent <- mapM readFile files
return $ zip files fileContent
| null | https://raw.githubusercontent.com/Cipherwraith/hblog/f92d96a60e301302c8a0e46fd68ee1e1810eb031/CSS.hs | haskell | module CSS where
import Data.List
import System.Directory
import System.FilePath
import Config
writeCSS :: IO ()
writeCSS = do
cssFiles <- getCSSFiles
let c = length cssFiles
mapM_ writeCSS' cssFiles
writeCSS' (path, content) = writeFile outPath content
where
outPath = replaceDirectory path htmlDirectory
getCSSFiles :: IO [(String, String)]
getCSSFiles = do
dirContents <- getDirectoryContents cssDirectory
let files = map (cssDirectory </>) . delete "." . delete ".." $ dirContents
fileContent <- mapM readFile files
return $ zip files fileContent
| |
5888df92ada85b50819ac2b359dd62adcffda364bafcd5326274576ca1186909 | vyorkin/tiger | type.ml | module U = Unique
module S = Symbol
type t =
| Int
| String
| Record of (S.t * t) list * U.t
| Array of t * U.t
| Nil
| Unit
| Name of S.t * t option ref
[@@deriving show]
(* we cannot use [@@deriving eq] here, so
let's just implement the comparsion ourselves *)
let compare x y =
match x, y with
| Record (_, u1), Record (_, u2) ->
compare u1 u2
| Record _, Nil -> 0
| Nil, Record _ -> 0
| Array (_, u1), Array (_, u2) ->
compare u1 u2
| Name (sx, _), Name (sy, _) ->
S.(compare sx sy)
| x, y ->
compare x y
(** Recursively lookups the underlying type *)
let rec actual = function
| Name (sym, { contents = None }) ->
Err.type_error (Location.dummy sym) @@
Printf.sprintf "type %s is undefined" sym.name
| Name (_, { contents = Some t }) ->
actual t
| t -> t
let (=) x y = compare x y = 0
let (<>) a b = not (a = b)
let (~!) x = actual x
let assignable x y =
match ~!x, ~!y with
(* "nil" is legal value for records *)
| Record _, Nil -> true
| a, b -> a = b
let (@==) x y = assignable x y
let (@<>) x y = not (assignable x y)
let rec to_string x =
let open Core_kernel in
match x with
| Int -> "int"
| String -> "string"
| Nil -> "nil"
| Unit -> "()"
| Name (s, _) -> s.name
| Array (t, u) -> sprintf "[%s]<#%s>" (to_string t) (U.to_string u)
| Record (_, u) -> sprintf "record<#%s>" (U.to_string u)
| null | https://raw.githubusercontent.com/vyorkin/tiger/54dd179c1cd291df42f7894abce3ee9064e18def/chapter6/lib/type.ml | ocaml | we cannot use [@@deriving eq] here, so
let's just implement the comparsion ourselves
* Recursively lookups the underlying type
"nil" is legal value for records | module U = Unique
module S = Symbol
type t =
| Int
| String
| Record of (S.t * t) list * U.t
| Array of t * U.t
| Nil
| Unit
| Name of S.t * t option ref
[@@deriving show]
let compare x y =
match x, y with
| Record (_, u1), Record (_, u2) ->
compare u1 u2
| Record _, Nil -> 0
| Nil, Record _ -> 0
| Array (_, u1), Array (_, u2) ->
compare u1 u2
| Name (sx, _), Name (sy, _) ->
S.(compare sx sy)
| x, y ->
compare x y
let rec actual = function
| Name (sym, { contents = None }) ->
Err.type_error (Location.dummy sym) @@
Printf.sprintf "type %s is undefined" sym.name
| Name (_, { contents = Some t }) ->
actual t
| t -> t
let (=) x y = compare x y = 0
let (<>) a b = not (a = b)
let (~!) x = actual x
let assignable x y =
match ~!x, ~!y with
| Record _, Nil -> true
| a, b -> a = b
let (@==) x y = assignable x y
let (@<>) x y = not (assignable x y)
let rec to_string x =
let open Core_kernel in
match x with
| Int -> "int"
| String -> "string"
| Nil -> "nil"
| Unit -> "()"
| Name (s, _) -> s.name
| Array (t, u) -> sprintf "[%s]<#%s>" (to_string t) (U.to_string u)
| Record (_, u) -> sprintf "record<#%s>" (U.to_string u)
|
228119e86acd19d8468741ea24a748c9728d2781f95c34a8397f020c4ffa36d0 | binghe/fm-plugin-tools | prepare.lisp | -*- Mode : LISP ; Syntax : COMMON - LISP ; Package : PREPARE - FM - PLUGIN - TOOLS ; Base : 10 -*-
Copyright ( c ) 2006 - 2010 , Dr. . All rights reserved .
Copyright ( c ) 2021 - 2022 , ( binghe ) . All rights reserved .
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials
;;; provided with the distribution.
;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package :prepare-fm-plugin-tools)
(defun handle-typedef (line)
"Accepts a string which is supposed to be a simple C typedef.
Stores a corresponding entry in *TYPEDEFS*."
(when (scan "typedef(?!.*[A-Z_])" line)
(register-groups-bind (existing-type defined-type)
("typedef\\s+(.*)(?<!\\s)\\s+(\\w+);" line)
(pushnew (cons (make-fli-type defined-type)
(make-fli-type existing-type))
*typedefs*
:key #'car))))
(defun read-enum-value (string)
"Reads the optional value part of a C enum and returns a
corresponding Lisp value - either a number or a LOGIOR
expression."
;; convert hex marker for Lisp reader
(setq string (regex-replace-all "0x" string "#x"))
(if (scan "\\|" string)
contains a pipe symbol , so make LOGIOR of previously defined
;; constants
(let (result)
(do-matches-as-strings (value "[#\\w]+" string)
Piped constants and numbers appeared in SDK version 17 .
-- ( binghe ) , 1 sep 2018 .
(cond ((eq (elt value 0) #\#)
(push (read-from-string value) result))
(t
(push (mangle-name value t) result))))
(cons 'logior (nreverse result)))
;; just read value as a number
(read-from-string string)))
(defun write-function-definition (lisp-name c-name result-type args)
"Accepts values which suffice to create a foreign function
defintion and writes it to the output stream."
;; we use DEFINE-FMXCPT-FUNCTION as defined in FM-PLUGIN-TOOLS
(pprint `(fm-plugin-tools::define-fmxcpt-function (,lisp-name ,c-name)
,(loop for (type name nil) in (butlast args)
collect `(,name ,(if (and #-:win32 nil
(string= c-name "FM_Text_AssignUnicode")
(string-equal name "s"))
special case for this one function ( only on Windows ) -
;; pass Lisp string directly as an argument
'(:reference-pass (:ef-wc-string :external-format :unicode))
type)))
:result-type ,result-type)))
(defun handle-function (line)
"Accepts one line of C code and checks if it's a function prototype.
If it is one, we write a corresponding function definition to the
output stream."
;; all `interesting' prototypes use the FMX_API macro - we just have
;; to throw away the lines where this macro is defined
(when (and (scan "FMX_API.*&_x" line)
(not (scan "#define" line)))
(setq line (simplify line))
;; the part between the parens are the arguments - that's
;; simple... :)
(register-groups-bind (head args)
("(.*)\\((.*)\\);" line)
(destructuring-bind (result-type lisp-name c-name)
(type-and-name head)
(write-function-definition lisp-name c-name result-type
;; args are separated by commas
(loop for arg in (split "," args)
collect (type-and-name arg t)))))))
(defun handle-enum (body)
"Handles the part between `enum {' and `}'. Loops through all
lines, writes one DEFCONSTANT per item and the corresponding
EXPORT statement."
(let ((counter 0))
(do-register-groups (name value)
("(?m)^\\s*(\\w+)\\s*(?:=\\s*([^,/]*\\z|.*?)\\s*)?(?:,|(?:/.*)?$)" body)
;; use value if provided in enum, COUNTER otherwise
(setq value (if value (read-enum-value value) counter))
(let ((lisp-name (mangle-name name t)))
(pprint `(eval-when (:compile-toplevel :load-toplevel :execute)
(defconstant ,lisp-name ,value
"This constant was generated automatically.
See FileMaker header files for details.")))
(print `(export ',lisp-name :fm-plugin-tools)))
;; increment counter or continue with successor of value
(setq counter (1+ (if (numberp value) value counter))))))
(defun handle-struct (struct-name body pack)
"Handles the part between `struct {' and `}' - writes a
corresponding FLI:DEFINE-C-STRUCT definition. If PACK is non-NIL (a number),
byte-packing will be used."
(let (slots)
(do-register-groups (prefix type name)
for some reason FMX_PACK is n't used in 8.5 anymore
("(?m)^\\s*(fmx::)?(\\w+)\\s+(\\w+)(?:\\s*FMX_PACK)?\\s*;(?:\\s*//.*)?\\s*?$" body)
e.g. " fmx::unusedid " in 19
(push (list (cond ((scan "FMX_" type)
;; default types which start with `FMX_' to :VOID
(find-type (make-fli-type (regex-replace "FMX_" type ""))
'(:pointer :void)))
(t
(find-type (make-fli-type type))))
(mangle-name name)
pack)
slots))
(pprint `(fli:define-c-struct ,(mangle-name struct-name)
,@(loop for first = t then nil
for (slot-type slot-name pack) in (nreverse slots)
when (and pack (not first))
collect `(:byte-packing ,pack)
collect `(,slot-name ,(if (and (string= struct-name "FMX_ExternCallStruct")
(string-equal slot-name "which-call"))
special for this one slot
'(:unsigned :char)
slot-type)))))))
NOTE : something has changed after we changed to prepare on FMXExtern.hhh .
In the original FMXExtern.h , for GCC there 's the following definition
;;
;; #define FMX_PACK_ON
;;
;; Note that there's no whitespaces after the word "FMX_PACK_ON", and it turns
;; out that "gcc -E" will not replace it at all, leaving "#pragma FMX_PACK_ON"
UNCHANGED in the * .hhh files . On the other hand , for Windows it 's defined as
;;
# define FMX_PACK_ON pack ( push , 1 )
;;
;; Now, very funny, "cl /E" will replace all "#pragma FMX_PACK_ON" with
" # pragma pack ( push , 1 ) " , causing the pack flag wrongly recognized in the
;; following code with the original pattern:
;;
;; "(?sm)(#pragma\\s+FMX_PACK_ON\\s+)?^\\s*struct (\\w+)$(\\s*){(.*?)\\3}
;;
The new pattern below solves the plugin loading issue on FMP Windows ( 64 - bit ) .
;;
;; Note also that, on macOS, even "#pragma FMX_PACK_ON" is there and FMX_PACK_ON
;; is undefined, it should be understood as still packing, otherwise the plugin
does't load at all . -- , 18/7/2022
(defparameter *if-regex1*
(create-scanner "#ifdef\\s+(.*)"))
(defparameter *if-regex2*
(create-scanner "#(el)?if\\s+(!)?([\\w\\s\\|\\(\\)<>!=_&]+)(?<!\\s)\\s*$"))
(defun parse-header-files ()
"Loops through all C header files in *HEADER-FILE-NAMES*,
checks for enums, structs or function prototypes and writes the
corresponding C code to *STANDARD-OUTPUT*."
(dolist (name *header-file-names*)
(let ((header-file (make-pathname :name name :type "h"
:defaults *fmx-extern-location*))
(file-string (make-array '(0)
:element-type 'simple-char
:fill-pointer 0
:adjustable t))
(*line-number* 0))
(format t "~%;; #include <~A.h>" name)
(format *error-output* "Processing ~A.h...~%" name)
(with-open-file (in header-file)
(with-output-to-string (out file-string)
(loop with contexts = '(:error) ; the polarity of the current #if context
with pos-contexts = '(:error) ; the current #if context when polarity is T
with neg-contexts = '(:error) ; the current #if context when polarity is NIL
for line = (read-line in nil nil)
while line do
(incf *line-number*)
usually only first line of a header
(push :enable contexts))
;; ifdef ...
((scan *if-regex1* line)
(register-groups-bind (context) (*if-regex1* line)
(setq context (regex-replace-all "\\s+" context " "))
(if (member context *negative-macros* :test 'equal)
(push :disable contexts)
(push :enable contexts))))
;; (el)if [!]...
((scan *if-regex2* line)
(register-groups-bind (elif-p neg-p context) (*if-regex2* line)
# elif = # endif + # if ( not # else + # if ! ! ! )
(let ((context (pop contexts)))
(ecase context
((:enable :disable)
t)
(:positive
(pop pos-contexts))
(:negative
(pop neg-contexts)))))
(setq context (regex-replace-all "\\s+" context " "))
(if (or (member context *positive-macros* :test 'equal)
(member context *negative-macros* :test 'equal))
(cond (neg-p
(push context neg-contexts)
(push :negative contexts))
(t
(push context pos-contexts)
(push :positive contexts)))
an irrelevant condition , we choose the first branch
(push :enable contexts))))
((scan "#(el)?if\\s+.*" line) ; the fallback case of #if
(register-groups-bind (elif-p)
("#(el)?if\\s+.*" line)
# elif = # else + # if
(let ((context (pop contexts)))
(ecase context
((:enable :disable)
t)
(:positive
(pop pos-contexts))
(:negative
(pop neg-contexts)))))
;; :enable by default for dont-care #if
(push :enable contexts)))
;; turn over the context if we met #else
((scan "#else" line)
(let ((context (pop contexts)))
(ecase context
(:enable (push :disable contexts))
(:disable (push :enable contexts))
(:positive
(push (pop pos-contexts) neg-contexts)
(push :negative contexts))
(:negative
(push (pop neg-contexts) pos-contexts)
(push :positive contexts)))))
;; pop the current context
((scan "#endif" line)
(let ((context (pop contexts)))
(ecase context
((:enable :disable)
t)
(:positive
(pop pos-contexts))
(:negative
(pop neg-contexts)))))
(t
(cond ((not (null (intersection *negative-macros* pos-contexts :test 'equal)))
;; (format *error-output* "ignored this line~%")
nil) ; ignore this line
((not (null (intersection *positive-macros* neg-contexts :test 'equal)))
;; (format *error-output* "ignored this line~%")
nil) ; ignore this line
((member :disable contexts)
;; (format *error-output* "ignored this line~%")
nil)
(t
;; single-line processing
(handle-typedef line)
(handle-function line)
;; multi-line processing (preparation)
(format out "~A~%" line)))))
;; still inside the loop
#+ignore
(format *error-output* "L~D contexts: ~A, pos-contexts: ~A, neg-contexts: ~A~%"
*line-number* contexts pos-contexts neg-contexts)
)))
(do-register-groups (enum-body)
("(?s)enum(?:\\s+\\w+)?\\s*\\{\\s*(.*?)\\s*,?\\s*\\}" file-string)
(handle-enum enum-body))
(do-register-groups (pack name whitespace struct-body)
" pack ( push , 1 ) " is a macro which translates to (: BYTE - PACKING 1 )
("(?sm)(#pragma.*)?^\\s*struct (\\w+)$(\\s*){(.*?)\\3}"
file-string)
(declare (ignore whitespace))
(handle-struct name struct-body
;; NEW: now we decide if packing is needed in a backward compatible way
(cond ((null pack)
nil) ; no #pragma at all, no packing
((scan "FMX_PACK_ON" pack)
#+win32 nil ; with #pragma but FMX_PACK_ON is undefined
always pack on macOS ( or it does n't load )
((scan "pack \\(push, 1\\)" pack)
with # pragma and FMX_PACK_ON is " pack ( push , 1 ) "
(t
(error "new, unknown #pragma occurs now!")))))
(terpri))))
(defun prepare ()
"Creates the missing file `fli.lisp' for FM-PLUGIN-TOOLS from
the C header files of FileMaker Pro Advanced."
;; find out where to look for headers
(unless *fmx-extern-location*
(set-fmx-extern-location))
;; redirect *STANDARD-OUTPUT* to `fli.lisp'
(with-open-file (*standard-output* *fli-file*
:direction :output
:if-exists :supersede)
;; use correct package for output and refrain from writing
;; everything in uppercase
(with-standard-io-syntax
(let ((*package* (find-package :fm-plugin-tools))
(*print-case* :downcase))
(format t ";;; This file was generated automatically from FileMaker Pro's SDK headers.")
(terpri)
(print '(in-package :fm-plugin-tools))
(terpri)
;; let this function do all the work
(parse-header-files))))
:done)
| null | https://raw.githubusercontent.com/binghe/fm-plugin-tools/7234ca4a0d6a5ab0f5fd22a69b4ecdd798f8a283/prepare-fm-plugin-tools/prepare.lisp | lisp | Syntax : COMMON - LISP ; Package : PREPARE - FM - PLUGIN - TOOLS ; Base : 10 -*-
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
convert hex marker for Lisp reader
constants
just read value as a number
we use DEFINE-FMXCPT-FUNCTION as defined in FM-PLUGIN-TOOLS
pass Lisp string directly as an argument
all `interesting' prototypes use the FMX_API macro - we just have
to throw away the lines where this macro is defined
the part between the parens are the arguments - that's
simple... :)
args are separated by commas
use value if provided in enum, COUNTER otherwise
increment counter or continue with successor of value
default types which start with `FMX_' to :VOID
#define FMX_PACK_ON
Note that there's no whitespaces after the word "FMX_PACK_ON", and it turns
out that "gcc -E" will not replace it at all, leaving "#pragma FMX_PACK_ON"
Now, very funny, "cl /E" will replace all "#pragma FMX_PACK_ON" with
following code with the original pattern:
"(?sm)(#pragma\\s+FMX_PACK_ON\\s+)?^\\s*struct (\\w+)$(\\s*){(.*?)\\3}
Note also that, on macOS, even "#pragma FMX_PACK_ON" is there and FMX_PACK_ON
is undefined, it should be understood as still packing, otherwise the plugin
the polarity of the current #if context
the current #if context when polarity is T
the current #if context when polarity is NIL
ifdef ...
(el)if [!]...
the fallback case of #if
:enable by default for dont-care #if
turn over the context if we met #else
pop the current context
(format *error-output* "ignored this line~%")
ignore this line
(format *error-output* "ignored this line~%")
ignore this line
(format *error-output* "ignored this line~%")
single-line processing
multi-line processing (preparation)
still inside the loop
NEW: now we decide if packing is needed in a backward compatible way
no #pragma at all, no packing
with #pragma but FMX_PACK_ON is undefined
find out where to look for headers
redirect *STANDARD-OUTPUT* to `fli.lisp'
use correct package for output and refrain from writing
everything in uppercase
let this function do all the work |
Copyright ( c ) 2006 - 2010 , Dr. . All rights reserved .
Copyright ( c ) 2021 - 2022 , ( binghe ) . All rights reserved .
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
(in-package :prepare-fm-plugin-tools)
(defun handle-typedef (line)
  "Checks whether LINE is a simple C typedef and, if so, records a
\(new-type . existing-type) pair of FLI types in *TYPEDEFS*."
  (when (scan "typedef(?!.*[A-Z_])" line)
    (register-groups-bind (existing-type defined-type)
        ("typedef\\s+(.*)(?<!\\s)\\s+(\\w+);" line)
      ;; keyed on the newly defined type, so a repeated typedef for the
      ;; same name is only registered once
      (let ((entry (cons (make-fli-type defined-type)
                         (make-fli-type existing-type))))
        (pushnew entry *typedefs* :key #'car)))))
(defun read-enum-value (string)
  "Reads the optional value part of a C enum and returns a
corresponding Lisp value - either a number or a LOGIOR
expression."
  ;; convert hex marker for Lisp reader
  (setq string (regex-replace-all "0x" string "#x"))
  (if (scan "\\|" string)
      ;; contains a pipe symbol, so make LOGIOR of previously defined
      ;; constants
      (let (result)
        (do-matches-as-strings (value "[#\\w]+" string)
          ;; Piped constants and numbers appeared in SDK version 17.
          ;; -- (binghe), 1 sep 2018.
          ;; EQL instead of EQ: per the spec, EQ on characters is
          ;; implementation-dependent; EQL is the reliable comparison.
          (cond ((eql (elt value 0) #\#)
                 ;; just read value as a number
                 (push (read-from-string value) result))
                (t
                 (push (mangle-name value t) result))))
        (cons 'logior (nreverse result)))
      ;; no pipe - the whole string is a single number
      (read-from-string string)))
(defun write-function-definition (lisp-name c-name result-type args)
  "Accepts values which suffice to create a foreign function
definition and writes it to the output stream."
  ;; we use DEFINE-FMXCPT-FUNCTION as defined in FM-PLUGIN-TOOLS
  (pprint `(fm-plugin-tools::define-fmxcpt-function (,lisp-name ,c-name)
             ;; NOTE(review): the last prototype argument is dropped via
             ;; BUTLAST - presumably supplied by DEFINE-FMXCPT-FUNCTION
             ;; itself; confirm against fm-plugin-tools.
             ,(loop for (type name nil) in (butlast args)
                    ;; the #-:win32 NIL makes the AND always fail on
                    ;; non-Windows platforms, so the special case below
                    ;; only ever applies on Windows
                    collect `(,name ,(if (and #-:win32 nil
                                              (string= c-name "FM_Text_AssignUnicode")
                                              (string-equal name "s"))
                                         ;; special case for this one function
                                         ;; (only on Windows) - pass Lisp
                                         ;; string directly as an argument
                                         '(:reference-pass (:ef-wc-string :external-format :unicode))
                                         type)))
             :result-type ,result-type)))
(defun handle-function (line)
  "Accepts one line of C code and checks if it's a function prototype.
If it is one, we write a corresponding function definition to the
output stream."
  ;; NOTE(review): the "FMX_API.*&_x" pattern looks garbled - the trailing
  ;; "&_x" matches no plausible C token sequence here; verify against the
  ;; upstream sources (the intent is to select FMX_API prototypes only).
  (when (and (scan "FMX_API.*&_x" line)
             ;; all `interesting' prototypes use the FMX_API macro - we just
             ;; have to throw away the lines where this macro is defined
             (not (scan "#define" line)))
    (setq line (simplify line))
    ;; the part between the parens are the arguments - that's
    ;; simple... :)
    (register-groups-bind (head args)
        ("(.*)\\((.*)\\);" line)
      (destructuring-bind (result-type lisp-name c-name)
          (type-and-name head)
        (write-function-definition lisp-name c-name result-type
                                   ;; args are separated by commas
                                   (loop for arg in (split "," args)
                                         collect (type-and-name arg t)))))))
(defun handle-enum (body)
  "Handles the part between `enum {' and `}'. Loops through all
lines, writes one DEFCONSTANT per item and the corresponding
EXPORT statement."
  (let ((counter 0))
    ;; per line: capture the item name and (optionally) everything after
    ;; `=' up to a comma or a trailing // comment
    (do-register-groups (name value)
        ("(?m)^\\s*(\\w+)\\s*(?:=\\s*([^,/]*\\z|.*?)\\s*)?(?:,|(?:/.*)?$)" body)
      ;; use value if provided in enum, COUNTER otherwise
      (setq value (if value (read-enum-value value) counter))
      (let ((lisp-name (mangle-name name t)))
        ;; EVAL-WHEN so the constant also exists at compile time
        (pprint `(eval-when (:compile-toplevel :load-toplevel :execute)
                   (defconstant ,lisp-name ,value
                     "This constant was generated automatically.
See FileMaker header files for details.")))
        (print `(export ',lisp-name :fm-plugin-tools)))
      ;; increment counter or continue with successor of value
      ;; (VALUE may be a LOGIOR form, hence the NUMBERP check)
      (setq counter (1+ (if (numberp value) value counter))))))
(defun handle-struct (struct-name body pack)
  "Handles the part between `struct {' and `}' - writes a
corresponding FLI:DEFINE-C-STRUCT definition. If PACK is non-NIL (a number),
byte-packing will be used."
  (let (slots)
    (do-register-groups (prefix type name)
        ;; for some reason FMX_PACK isn't used in 8.5 anymore
        ;; e.g. "fmx::unusedid" in 19
        ("(?m)^\\s*(fmx::)?(\\w+)\\s+(\\w+)(?:\\s*FMX_PACK)?\\s*;(?:\\s*//.*)?\\s*?$" body)
      (push (list (cond ((scan "FMX_" type)
                         ;; default types which start with `FMX_' to :VOID
                         (find-type (make-fli-type (regex-replace "FMX_" type ""))
                                    '(:pointer :void)))
                        (t
                         (find-type (make-fli-type type))))
                  (mangle-name name)
                  pack)
            slots))
    (pprint `(fli:define-c-struct ,(mangle-name struct-name)
               ;; emit :BYTE-PACKING before every slot but the first
               ,@(loop for first = t then nil
                       for (slot-type slot-name pack) in (nreverse slots)
                       when (and pack (not first))
                       collect `(:byte-packing ,pack)
                       collect `(,slot-name ,(if (and (string= struct-name "FMX_ExternCallStruct")
                                                      (string-equal slot-name "which-call"))
                                                 ;; special for this one slot
                                                 '(:unsigned :char)
                                                 slot-type)))))))
;; NOTE: something has changed after we changed to prepare on FMXExtern.hhh.
;; In the original FMXExtern.h, for GCC there's the following definition
;; UNCHANGED in the *.hhh files.  On the other hand, for Windows it's defined as
;;   #define FMX_PACK_ON pack (push, 1)
;; "#pragma pack (push, 1)", causing the pack flag wrongly recognized in the
;; The new pattern below solves the plugin loading issue on FMP Windows (64-bit).
;; doesn't load at all. -- 18/7/2022
;; matches `#ifdef FOO' and captures the macro name
(defparameter *if-regex1*
  (create-scanner "#ifdef\\s+(.*)"))
;; matches `#if COND' / `#elif COND'; captures the `el' prefix (if any),
;; an optional leading `!', and the condition expression itself
(defparameter *if-regex2*
  (create-scanner "#(el)?if\\s+(!)?([\\w\\s\\|\\(\\)<>!=_&]+)(?<!\\s)\\s*$"))
;; NOTE(review): this function arrived damaged by extraction.  Visible
;; damage, flagged but NOT reconstructed here (re-extract from upstream):
;;   - the LOOP header around `for line = ...' lost its opening `(loop',
;;     and the bindings of CONTEXTS / POS-CONTEXTS / NEG-CONTEXTS are gone;
;;   - several COND clause heads and bodies around the :enable pushes and
;;     the *negative-macros*/*positive-macros* intersections are missing;
;;   - the #pragma COND at the bottom lost its clause consequents.
;; Only the stripped `;;' comment markers have been restored below.
(defun parse-header-files ()
  "Loops through all C header files in *HEADER-FILE-NAMES*,
checks for enums, structs or function prototypes and writes the
corresponding C code to *STANDARD-OUTPUT*."
  (dolist (name *header-file-names*)
    (let ((header-file (make-pathname :name name :type "h"
                                      :defaults *fmx-extern-location*))
          (file-string (make-array '(0)
                                   :element-type 'simple-char
                                   :fill-pointer 0
                                   :adjustable t))
          (*line-number* 0))
      (format t "~%;; #include <~A.h>" name)
      (format *error-output* "Processing ~A.h...~%" name)
      (with-open-file (in header-file)
        (with-output-to-string (out file-string)
          for line = (read-line in nil nil)
          while line do
          (incf *line-number*)
          ;; usually only first line of a header
          (push :enable contexts))
          ((scan *if-regex1* line)
           (register-groups-bind (context) (*if-regex1* line)
             (setq context (regex-replace-all "\\s+" context " "))
             (if (member context *negative-macros* :test 'equal)
                 (push :disable contexts)
                 (push :enable contexts))))
          ((scan *if-regex2* line)
           (register-groups-bind (elif-p neg-p context) (*if-regex2* line)
             ;; #elif = #endif + #if (not #else + #if !!!)
             (let ((context (pop contexts)))
               (ecase context
                 ((:enable :disable)
                  t)
                 (:positive
                  (pop pos-contexts))
                 (:negative
                  (pop neg-contexts)))))
             (setq context (regex-replace-all "\\s+" context " "))
             (if (or (member context *positive-macros* :test 'equal)
                     (member context *negative-macros* :test 'equal))
                 (cond (neg-p
                        (push context neg-contexts)
                        (push :negative contexts))
                       (t
                        (push context pos-contexts)
                        (push :positive contexts)))
                 ;; an irrelevant condition, we choose the first branch
                 (push :enable contexts))))
          (register-groups-bind (elif-p)
              ("#(el)?if\\s+.*" line)
            ;; #elif = #else + #if
            (let ((context (pop contexts)))
              (ecase context
                ((:enable :disable)
                 t)
                (:positive
                 (pop pos-contexts))
                (:negative
                 (pop neg-contexts)))))
            (push :enable contexts)))
          ((scan "#else" line)
           ;; turn over the context if we met #else
           (let ((context (pop contexts)))
             (ecase context
               (:enable (push :disable contexts))
               (:disable (push :enable contexts))
               (:positive
                (push (pop pos-contexts) neg-contexts)
                (push :negative contexts))
               (:negative
                (push (pop neg-contexts) pos-contexts)
                (push :positive contexts)))))
          ((scan "#endif" line)
           ;; pop the current context
           (let ((context (pop contexts)))
             (ecase context
               ((:enable :disable)
                t)
               (:positive
                (pop pos-contexts))
               (:negative
                (pop neg-contexts)))))
          (t
           (cond ((not (null (intersection *negative-macros* pos-contexts :test 'equal)))
                  ((not (null (intersection *positive-macros* neg-contexts :test 'equal)))
                  ((member :disable contexts)
                   nil)
                  (t
                   (handle-typedef line)
                   (handle-function line)
                   (format out "~A~%" line)))))
          #+ignore
          (format *error-output* "L~D contexts: ~A, pos-contexts: ~A, neg-contexts: ~A~%"
                  *line-number* contexts pos-contexts neg-contexts)
          )))
      (do-register-groups (enum-body)
          ("(?s)enum(?:\\s+\\w+)?\\s*\\{\\s*(.*?)\\s*,?\\s*\\}" file-string)
        (handle-enum enum-body))
      (do-register-groups (pack name whitespace struct-body)
          ;; "pack (push, 1)" is a macro which translates to (:BYTE-PACKING 1)
          ("(?sm)(#pragma.*)?^\\s*struct (\\w+)$(\\s*){(.*?)\\3}"
           file-string)
        (declare (ignore whitespace))
        ;; NEW: now we decide if packing is needed in a backward compatible way
        (handle-struct name struct-body
                       (cond ((null pack)
                              ((scan "FMX_PACK_ON" pack)
                               ;; always pack on macOS (or it doesn't load)
                              ((scan "pack \\(push, 1\\)" pack)
                               ;; with #pragma and FMX_PACK_ON is "pack (push, 1)"
                              (t
                               (error "new, unknown #pragma occurs now!")))))
      (terpri))))
(defun prepare ()
  "Creates the missing file `fli.lisp' for FM-PLUGIN-TOOLS from
the C header files of FileMaker Pro Advanced."
  ;; find out where to look for headers
  (unless *fmx-extern-location*
    (set-fmx-extern-location))
  ;; redirect *STANDARD-OUTPUT* to `fli.lisp'
  (with-open-file (*standard-output* *fli-file*
                                     :direction :output
                                     :if-exists :supersede)
    (with-standard-io-syntax
      ;; use correct package for output and refrain from writing
      ;; everything in uppercase
      (let ((*package* (find-package :fm-plugin-tools))
            (*print-case* :downcase))
        (format t ";;; This file was generated automatically from FileMaker Pro's SDK headers.")
        (terpri)
        (print '(in-package :fm-plugin-tools))
        (terpri)
        ;; let this function do all the work
        (parse-header-files))))
  :done)
|
985985a276ecda33d19ea0db55bcff65765d8f96317227ee87cf7d229e56fb72 | lixiangqi/medic | fact-iter.rkt | #lang racket
;; Tail-recursive factorial: folds each value of n into the
;; accumulator on the way down, so nothing is deferred on the stack.
(define (fact n acc)
  (cond
    [(zero? n) acc]
    [else (fact (sub1 n) (* n acc))]))

(fact 3 1)
| null | https://raw.githubusercontent.com/lixiangqi/medic/0920090d3c77d6873b8481841622a5f2d13a732c/demos/aggregate/fact-iter.rkt | racket | #lang racket
(define (fact x a)
(if (zero? x)
a
(fact (sub1 x) (* x a))))
(fact 3 1)
| |
8af6c9b536fb5d22f783b5bff01ea84268eca8b57ff4e25d3a9ee7c3f1b53ffc | technion/erlvulnscan | erlvulnscan_SUITE.erl | -module(erlvulnscan_SUITE).
-include_lib("common_test/include/ct.hrl").
-compile(export_all).
-define(TESTPORT, 8085).
-define(R, "{\"network\":\"127.0.0.0\",\"recaptcha\":\"03AO\"}").
all() -> [invalid_request, valid_request, valid_json].
%% Boots the application under test bound to ?TESTPORT and starts inets
%% so the test cases can issue httpc requests.
init_per_suite(Config) ->
    % Run tests on a non-default port, so they can coexist with a
    % running environment.
    ok = application:load(erlvulnscan),
    ok = application:set_env(erlvulnscan, bind_port, ?TESTPORT),
    {ok, _Started} = application:ensure_all_started(erlvulnscan),
    inets:start(),
    Config.
%% A GET to /netscan/ must be rejected with 400: the endpoint only
%% accepts POST.
invalid_request(_Config) ->
    % Although we are testing a "failure", the fact it connects at all
    % shows most of the app is running and handles GET with the right
    % error
    % NOTE(review): the scheme/host prefix appears to have been stripped
    % from this URL - httpc needs an absolute URL such as
    % "http://localhost:8085/..."; restore before running.
    URL = ":" ++ integer_to_list(?TESTPORT) ++ "/netscan/",
    {ok, {{_Version, 400, "Bad Request"}, _Headers, _Body}} =
        httpc:request(get, {URL, []}, [], []).
%% A well-formed POST of ?R must be accepted with 200.
valid_request(_Config) ->
    % NOTE(review): scheme/host prefix appears stripped here too -
    % restore the absolute URL before running.
    URL = ":" ++ integer_to_list(?TESTPORT) ++ "/netscan/",
    {ok, {{_Version, 200, "OK"}, _Headers, _Body}} =
        httpc:request(post, {URL, [], [], ?R}, [], []).
%% Same POST as valid_request, but decodes the response body and checks
%% the scan returns one entry per host (254 for the scanned network).
valid_json(_Config) ->
    %Same test as valid_request, but tests the JSON
    % NOTE(review): scheme/host prefix appears stripped here too -
    % restore the absolute URL before running.
    URL = ":" ++ integer_to_list(?TESTPORT) ++ "/netscan/",
    {ok, {{_Version, 200, "OK"}, _Headers, Body}} =
        httpc:request(post, {URL, [], [], ?R}, [], []),
    JSON = jiffy:decode(Body),
    254 = length(JSON).
%% Stops inets and the application under test once the suite finishes.
end_per_suite(_Config) ->
    inets:stop(),
    application:stop(erlvulnscan).
| null | https://raw.githubusercontent.com/technion/erlvulnscan/8d109c49c8ecf331a4a859296e9fafd57aa458cd/test/erlvulnscan_SUITE.erl | erlang | Run tests on a non-default port, so they can coexist with a
running environment.
Although we are testing a "failure", the fact it connects at all
shows most of the app is running and handles GET with the right
error
Same test as valid_request, but tests the JSON | -module(erlvulnscan_SUITE).
-include_lib("common_test/include/ct.hrl").
-compile(export_all).
-define(TESTPORT, 8085).
-define(R, "{\"network\":\"127.0.0.0\",\"recaptcha\":\"03AO\"}").
all() -> [invalid_request, valid_request, valid_json].
init_per_suite(Config) ->
ok = application:load(erlvulnscan),
ok = application:set_env(erlvulnscan, bind_port, ?TESTPORT),
{ok, _Started} = application:ensure_all_started(erlvulnscan),
inets:start(),
Config.
invalid_request(_Config) ->
URL = ":" ++ integer_to_list(?TESTPORT) ++ "/netscan/",
{ok, {{_Version, 400, "Bad Request"}, _Headers, _Body}} =
httpc:request(get, {URL, []}, [], []).
valid_request(_Config) ->
URL = ":" ++ integer_to_list(?TESTPORT) ++ "/netscan/",
{ok, {{_Version, 200, "OK"}, _Headers, _Body}} =
httpc:request(post, {URL, [], [], ?R}, [], []).
valid_json(_Config) ->
URL = ":" ++ integer_to_list(?TESTPORT) ++ "/netscan/",
{ok, {{_Version, 200, "OK"}, _Headers, Body}} =
httpc:request(post, {URL, [], [], ?R}, [], []),
JSON = jiffy:decode(Body),
254 = length(JSON).
end_per_suite(_Config) ->
inets:stop(),
application:stop(erlvulnscan).
|
aaac646558e52832e3f472063a0b56fc9ced34b6b585b978e4028fdfdc810909 | huangz1990/real-world-haskell-cn | LineChunks.hs | -- file: ch24/LineChunks.hs
module LineChunks
(
chunkedReadWith
) where
import Control.Exception (bracket, finally)
import Control.Monad (forM, liftM)
import Control.Parallel.Strategies (NFData, rnf)
import Data.Int (Int64)
import qualified Data.ByteString.Lazy.Char8 as LB
import GHC.Conc (numCapabilities)
import System.IO
-- | Byte range describing one chunk of the input file.  Strict fields
-- keep the offsets evaluated as the specs are built.
data ChunkSpec = CS {
      chunkOffset :: !Int64   -- ^ byte offset of the chunk's first byte
    , chunkLength :: !Int64   -- ^ number of bytes in the chunk
    } deriving (Eq, Show)
-- | Splits the file at @path@ into chunks using @chunkFunc@, applies
-- @process@ to the lazily read chunks, and forces the result to normal
-- form (@rnf r `seq`@) *before* the @finally@ closes the handles - the
-- forcing is what guarantees every lazy chunk is consumed while its
-- backing handle is still open.
withChunks :: (NFData a) =>
              (FilePath -> IO [ChunkSpec])
           -> ([LB.ByteString] -> a)
           -> FilePath
           -> IO a
withChunks chunkFunc process path = do
  (chunks, handles) <- chunkedRead chunkFunc path
  let r = process chunks
  (rnf r `seq` return r) `finally` mapM_ hClose handles
-- | Convenience wrapper around 'withChunks': chop the file into
-- line-aligned chunks, four per hardware capability, and reduce them
-- with the supplied function.
chunkedReadWith :: (NFData a) =>
                   ([LB.ByteString] -> a) -> FilePath -> IO a
chunkedReadWith = withChunks (lineChunks (numCapabilities * 4))
-- | Opens one handle per chunk, seeks it to the chunk's offset, and
-- lazily reads exactly 'chunkLength' bytes from it.  Returns the chunks
-- together with the still-open handles; the caller ('withChunks') is
-- responsible for closing them after the chunks have been consumed.
chunkedRead :: (FilePath -> IO [ChunkSpec])
            -> FilePath
            -> IO ([LB.ByteString], [Handle])
chunkedRead chunkFunc path = do
  chunks <- chunkFunc path
  liftM unzip . forM chunks $ \spec -> do
    h <- openFile path ReadMode
    hSeek h AbsoluteSeek (fromIntegral (chunkOffset spec))
    chunk <- LB.take (chunkLength spec) `liftM` LB.hGetContents h
    return (chunk, h)
-- | Computes (up to) @numChunks@ chunk specs for the file at @path@,
-- sliding each tentative boundary forward to just past the next
-- newline, so no line is ever split across two chunks.
lineChunks :: Int -> FilePath -> IO [ChunkSpec]
lineChunks numChunks path = do
  bracket (openFile path ReadMode) hClose $ \h -> do
    totalSize <- fromIntegral `liftM` hFileSize h
    let chunkSize = totalSize `div` fromIntegral numChunks
        -- one spec per recursion step; OFFSET is this chunk's start
        findChunks offset = do
          let newOffset = offset + chunkSize
          hSeek h AbsoluteSeek (fromIntegral newOffset)
          -- scan forward from the tentative boundary for a newline
          let findNewline off = do
                eof <- hIsEOF h
                if eof
                  -- no newline before EOF: last chunk runs to the end
                  then return [CS offset (totalSize - offset)]
                  else do
                    bytes <- LB.hGet h 4096
                    case LB.elemIndex '\n' bytes of
                      Just n -> do
                        -- next chunk starts just after the newline; this
                        -- chunk's length is derived from its offset
                        chunks@(c:_) <- findChunks (off + n + 1)
                        let coff = chunkOffset c
                        return (CS offset (coff - offset):chunks)
                      Nothing -> findNewline (off + LB.length bytes)
          findNewline newOffset
    findChunks 0
(
chunkedReadWith
) where
import Control.Exception (bracket, finally)
import Control.Monad (forM, liftM)
import Control.Parallel.Strategies (NFData, rnf)
import Data.Int (Int64)
import qualified Data.ByteString.Lazy.Char8 as LB
import GHC.Conc (numCapabilities)
import System.IO
data ChunkSpec = CS {
chunkOffset :: !Int64
, chunkLength :: !Int64
} deriving (Eq, Show)
withChunks :: (NFData a) =>
(FilePath -> IO [ChunkSpec])
-> ([LB.ByteString] -> a)
-> FilePath
-> IO a
withChunks chunkFunc process path = do
(chunks, handles) <- chunkedRead chunkFunc path
let r = process chunks
(rnf r `seq` return r) `finally` mapM_ hClose handles
chunkedReadWith :: (NFData a) =>
([LB.ByteString] -> a) -> FilePath -> IO a
chunkedReadWith func path =
withChunks (lineChunks (numCapabilities * 4)) func path
chunkedRead :: (FilePath -> IO [ChunkSpec])
-> FilePath
-> IO ([LB.ByteString], [Handle])
chunkedRead chunkFunc path = do
chunks <- chunkFunc path
liftM unzip . forM chunks $ \spec -> do
h <- openFile path ReadMode
hSeek h AbsoluteSeek (fromIntegral (chunkOffset spec))
chunk <- LB.take (chunkLength spec) `liftM` LB.hGetContents h
return (chunk, h)
lineChunks :: Int -> FilePath -> IO [ChunkSpec]
lineChunks numChunks path = do
bracket (openFile path ReadMode) hClose $ \h -> do
totalSize <- fromIntegral `liftM` hFileSize h
let chunkSize = totalSize `div` fromIntegral numChunks
findChunks offset = do
let newOffset = offset + chunkSize
hSeek h AbsoluteSeek (fromIntegral newOffset)
let findNewline off = do
eof <- hIsEOF h
if eof
then return [CS offset (totalSize - offset)]
else do
bytes <- LB.hGet h 4096
case LB.elemIndex '\n' bytes of
Just n -> do
chunks@(c:_) <- findChunks (off + n + 1)
let coff = chunkOffset c
return (CS offset (coff - offset):chunks)
Nothing -> findNewline (off + LB.length bytes)
findNewline newOffset
findChunks 0 |
8a7e72fd60e47d226440e972b2c2424b2cd2c3c9c813503047bf9ff72267322e | brabster/dynamodb-expressions | integration_test.clj | (ns dynamodb-expression.integration-test
(:require [dynamodb-expression.core :as dx]
[clojure.test :refer :all]
[docker.fixture :as docker]
[amazonica.aws.dynamodbv2 :as ddb]
[amazonica.core :refer [defcredential]]))
;; Randomised host port for the DynamoDB Local container, so parallel
;; runs don't collide.
(def port (docker/rand-port))
;; Address of the Docker host, as reported by the docker fixture helper.
(def host (docker/host))
;; Obviously-fake credentials pointed at the local container endpoint.
(def creds {:access-key "foo" :secret-key "bar" :endpoint (str "http://" host ":" port)})
;; Name of the table the whole suite works against.
(def table-name "foo")
;; Fixture that runs DynamoDB Local in Docker, in-memory, publishing the
;; randomised port; the short sleep gives the container time to start
;; accepting connections before the tests run.
(def dynamodb-container
  (docker/new-fixture
   {:cmd ["docker" "run" "-d" "-p" (str port ":" port) "tray/dynamodb-local"
          "-inMemory" "-port" port]
    :sleep 500}))
(defn table
  "Returns a test fixture that creates DynamoDB table tbl-name with the
  given hash key (a map with :name and :type) before running f, and
  deletes the table afterwards."
  ;; renamed from [name hash]: both shadowed clojure.core fns
  [tbl-name hash-key]
  (fn [f]
    (println "Creating table" tbl-name)
    (ddb/create-table creds
                      :table-name tbl-name
                      :key-schema [{:attribute-name (:name hash-key) :key-type "HASH"}]
                      :attribute-definitions [{:attribute-name (:name hash-key) :attribute-type (:type hash-key)}]
                      :provisioned-throughput {:read-capacity-units 1
                                               :write-capacity-units 1})
    (f)
    (println "Deleting table" tbl-name)
    (ddb/delete-table creds :table-name tbl-name)))
;; Start DynamoDB Local once for this namespace and wrap the run with
;; the "foo" table (string hash key "id").
(use-fixtures :once dynamodb-container (table table-name {:name "id" :type "S"}))
;; End-to-end checks of the expression builder against a live DynamoDB
;; Local: each case builds an update expression, applies it with
;; update-item, then reads the item back.
(deftest basic-ops-test
  ;; ADD on a fresh attribute plus SET in the same expression
  (testing "set and add work"
    (is (= {:item {:id "1" :bar 0 :foo 1}}
           (do
             (ddb/update-item creds
                              (->
                               (dx/update-expr {})
                               (dx/add :foo 1)
                               (dx/set :bar 0)
                               (dx/expr)
                               (assoc :table-name table-name)
                               (assoc :key {:id "1"})))
             (ddb/get-item creds
                           :table-name table-name
                           :key {:id "1"})))))
  ;; SET an attribute, then REMOVE it again in a second update
  (testing "remove works"
    (is (= {:item {:id "2"}}
           (do
             (ddb/update-item creds
                              (->
                               (dx/update-expr {})
                               (dx/set :bar 0)
                               (dx/expr)
                               (assoc :table-name table-name)
                               (assoc :key {:id "2"})))
             (ddb/update-item creds
                              (-> (dx/update-expr {})
                                  (dx/remove :bar)
                                  (dx/expr)
                                  (assoc :table-name table-name)
                                  (assoc :key {:id "2"})))
             (ddb/get-item creds
                           :table-name table-name
                           :key {:id "2"})))))
  ;; disabled via #_ - DELETE-from-set case, kept for reference
  #_(testing "delete works"
      (let [delete-expr (->
                         (dx/update-expr {})
                         (dx/set :bar #{"foo"})
                         (dx/expr)
                         (assoc :table-name table-name)
                         (assoc :key {:id "3"}))]
        (prn delete-expr)
        (is (= {:item {:id "3" :things []}}
               (do
                 (ddb/update-item creds delete-expr)
                 (ddb/update-item creds
                                  (-> (dx/update-expr {})
                                      (dx/delete :bar "foo")
                                      (dx/expr)
                                      (assoc :table-name table-name)
                                      (assoc :key {:id "3"})))
                 (ddb/get-item creds
                               :table-name table-name
                               :key {:id "3"})))))))
| null | https://raw.githubusercontent.com/brabster/dynamodb-expressions/8ca5ca758485fa272cec6e836e71279cd08b0104/test/dynamodb_expression/integration_test.clj | clojure | (ns dynamodb-expression.integration-test
(:require [dynamodb-expression.core :as dx]
[clojure.test :refer :all]
[docker.fixture :as docker]
[amazonica.aws.dynamodbv2 :as ddb]
[amazonica.core :refer [defcredential]]))
(def port (docker/rand-port))
(def host (docker/host))
(def creds {:access-key "foo" :secret-key "bar" :endpoint (str "http://" host ":" port)})
(def table-name "foo")
(def dynamodb-container
(docker/new-fixture
{:cmd ["docker" "run" "-d" "-p" (str port ":" port) "tray/dynamodb-local"
"-inMemory" "-port" port]
:sleep 500}))
(defn table [name hash]
(fn [f]
(println "Creating table" name)
(ddb/create-table creds
:table-name name
:key-schema [{:attribute-name (:name hash) :key-type "HASH"}]
:attribute-definitions [{:attribute-name (:name hash) :attribute-type (:type hash)}]
:provisioned-throughput {:read-capacity-units 1
:write-capacity-units 1})
(f)
(println "Deleting table" name)
(ddb/delete-table creds :table-name name)))
(use-fixtures :once dynamodb-container (table table-name {:name "id" :type "S"}))
(deftest basic-ops-test
(testing "set and add work"
(is (= {:item {:id "1" :bar 0 :foo 1}}
(do
(ddb/update-item creds
(->
(dx/update-expr {})
(dx/add :foo 1)
(dx/set :bar 0)
(dx/expr)
(assoc :table-name table-name)
(assoc :key {:id "1"})))
(ddb/get-item creds
:table-name table-name
:key {:id "1"})))))
(testing "remove works"
(is (= {:item {:id "2"}}
(do
(ddb/update-item creds
(->
(dx/update-expr {})
(dx/set :bar 0)
(dx/expr)
(assoc :table-name table-name)
(assoc :key {:id "2"})))
(ddb/update-item creds
(-> (dx/update-expr {})
(dx/remove :bar)
(dx/expr)
(assoc :table-name table-name)
(assoc :key {:id "2"})))
(ddb/get-item creds
:table-name table-name
:key {:id "2"})))))
#_(testing "delete works"
(let [delete-expr (->
(dx/update-expr {})
(dx/set :bar #{"foo"})
(dx/expr)
(assoc :table-name table-name)
(assoc :key {:id "3"}))]
(prn delete-expr)
(is (= {:item {:id "3" :things []}}
(do
(ddb/update-item creds delete-expr)
(ddb/update-item creds
(-> (dx/update-expr {})
(dx/delete :bar "foo")
(dx/expr)
(assoc :table-name table-name)
(assoc :key {:id "3"})))
(ddb/get-item creds
:table-name table-name
:key {:id "3"})))))))
| |
cc7368e08be7eb462e6b33b1965d28eabd106ec46da1afca8008233a10f9049a | osener/markup.rocks | Main.hs | {-# LANGUAGE CPP #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE ForeignFunctionInterface #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE JavaScriptFFI #-}
# LANGUAGE TemplateHaskell #
{-# LANGUAGE RecursiveDo #-}
module Main where
import Control.Concurrent
import Control.Concurrent.MVar
import Control.DeepSeq
import Control.Monad
import Control.Monad.IO.Class
import Data.Default
import Data.Either
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import GHCJS.DOM.HTMLElement
import GHCJS.Foreign
import GHCJS.Types
import Reflex
import Reflex.Dom
import Reflex.Dom.Class
import Reflex.Dynamic.TH
import Text.Pandoc
import Editor
import Formats
import Example
import LocalStorage (getPref, setPref)
import Widgets.Menu
import Widgets.CodeMirror
import Widgets.Misc (icon, iconLinkClass, lastDoc, lastExt)
import Widgets.Setting
#ifdef __GHCJS__
#define JS(name, js, type) foreign import javascript unsafe js name :: type
#else
#define JS(name, js, type) name :: type ; name = undefined
#endif
-- Current wall-clock time from the browser, in milliseconds
-- (JS Date.getTime).
JS(getTime,"(new Date())['getTime']()", IO Double)
-- Re-runs the page's syntax highlighter over the rendered preview.
JS(highlightCode,"highlightCode()", IO ())
main :: IO ()
main =
mainWidget $
do postGui <- askPostGui
divClass "ui two column padded grid" $
do (readerD,t,exts) <-
divClass "left column" $
divClass "ui segment" editor
divClass "right column" $
divClass "ui segment" $
do writerD <-
divClass "ui top left attached label" $
selection def {_selectionConfig_label = "Preview"
,_selectionConfig_initialValue = "1preview"
,_selectionConfig_options = constDyn resultFormats}
parsed <-
$(qDyn [|convertDoc $(unqDyn [|_selection_value readerD|])
$(unqDyn [|_selection_value writerD|])
$(unqDyn [|exts|])
$(unqDyn [|value t|])|])
result <-
forceLossy (updated parsed)
ext <- liftIO lastExt
doc <- liftIO lastDoc
resCM <-
divClass "ui top right attached label" $
do let output =
attachDyn (_selection_value writerD) result
makeSaveMenu "Save"
output
(ext,doc)
elAttr "div"
("class" =: "ui left dropdown compact icon button") $
elAttr "a"
("target" =: "_blank" <> "href" =:
"")
(icon "github")
(menu,resCM) <-
elAttr' "div"
("class" =: "ui left dropdown compact icon button") $
do icon "settings"
divClass "menu" $
do divClass "header" (text "Result Settings")
divClass "item" (setting "CodeMirror Display" True)
liftIO $
enableMenu (_el_element menu)
(toJSString "nothing")
return resCM
let initial =
convertDoc ext "1preview" githubMarkdownExtensions doc
resultDyn <-
holdDyn initial result
cmEnabled <-
liftIO $
getPref "CodeMirror Display" True
cmAttrs <-
mapDyn (\w ->
case w of
"1preview" ->
("style" =: "display: none;" <> "class" =:
"outputCM")
otherwise ->
("class" =: "outputCM"))
(_selection_value writerD)
elDynAttr "div" cmAttrs $
codeMirror
def {_codeMirrorConfig_initialValue = initial
,_codeMirrorConfig_enabled = cmEnabled
,_codeMirrorConfig_enableCodeMirror =
updated (_setting_value resCM)
,_codeMirrorConfig_changeLang =
updated (_selection_value writerD)
,_codeMirrorConfig_setValue = result}
htmlAttrs <-
mapDyn (\w ->
case w of
"1preview" ->
("class" =: "output")
otherwise ->
("style" =: "display: none;" <> "class" =:
"output"))
(_selection_value writerD)
elDynAttr "div" htmlAttrs $
elDynHtmlAttr' "div"
("class" =: "preview")
resultDyn
performEvent_ $
fmap (const . liftIO . void . forkIO $ highlightCode) result
performEvent_ $
fmap (liftIO .
void .
forkIO .
setPref "Last Document" .
show)
(updated $ value t)
-- | Debounces an event and deep-forces each occurrence on a worker
-- thread.  A newer occurrence kills the worker of the previous one
-- (mvar holds the in-flight ThreadId).  diffsMVar keeps a short window
-- of recent conversion durations (ms); when the running average exceeds
-- 300ms, delivery is delayed by that average, capped at one second.
forceLossy :: (MonadWidget t m,NFData a)
           => Event t a -> m (Event t a)
forceLossy e =
  do mvar <- liftIO newEmptyMVar
     diffsMVar <- liftIO (newMVar [])
     performEventAsync (fmap (callAtNextInterval mvar diffsMVar) e)
  where callAtNextInterval mvar diffsMVar e cb =
          void . liftIO $
          do maybeThreadId <- tryTakeMVar mvar
             -- cancel the still-running worker of the previous occurrence
             case maybeThreadId of
               Just threadId -> killThread threadId
               Nothing -> return ()
             threadId <-
               forkIO $
               do start <- getTime
                  diffs <- readMVar diffsMVar
                  let avg =
                        round $ sum diffs / genericLength diffs :: Int
                  when (avg > 300)
                       (threadDelay (min (avg * 1000) 1000000))
                  cb $!! e
                  end <- getTime
                  let diff = end - start
                  -- record the new duration plus at most two previous
                  -- samples; (diff :) reuses the binding above instead of
                  -- recomputing it via `subtract start end`
                  let appendTime =
                        return .
                        (diff :) .
                        take 2 .
                        reverse
                  modifyMVar_ diffsMVar appendTime
             putMVar mvar threadId
-- | Runs the reader selected by @readerStr@ over the input text and,
-- when parsing succeeds, renders the document with the writer selected
-- by @writerStr@; a parse failure yields the empty string.
convertDoc :: String -> String -> Set Extension -> String -> String
convertDoc readerStr writerStr extensions input =
  case reader input of
    Left _    -> ""
    Right doc -> writer doc
  where
    writer = stringToWriter writerStr def
    reader = stringToReader readerStr
                            def {readerApplyMacros = False
                                ,readerExtensions = extensions}
-- | Maps a writer key from the UI to the corresponding Pandoc writer.
-- "1preview" (the live HTML preview) shares writeHtmlString with
-- "html"; any unrecognised key falls back to Markdown.
stringToWriter :: String -> WriterOptions -> Pandoc -> String
stringToWriter s =
  case s of
    "1preview" -> writeHtmlString
    "ascii" -> writeAsciiDoc
    "html" -> writeHtmlString
    "latex" -> writeLaTeX
    "man" -> writeMan
    "mw" -> writeMediaWiki
    "dw" -> writeDokuWiki
    "dbk" -> writeDocbook
    "odt" -> writeOpenDocument
    "opml" -> writeOPML
    "icml" -> writeICML
    "org" -> writeOrg
    "plain" -> writePlain
    "rst" -> writeRST
    "texinfo" -> writeTexinfo
    "textile" -> writeTextile
    -- `_` instead of the original `otherwise`: in a case alternative,
    -- `otherwise` is just a fresh variable that shadows
    -- Prelude.otherwise while matching everything; the wildcard says
    -- that explicitly.
    _ -> writeMarkdown
| null | https://raw.githubusercontent.com/osener/markup.rocks/02bb050df6b1e4c3a84f9a6aeaad8217215a2b64/src/Main.hs | haskell | # LANGUAGE CPP #
# LANGUAGE GADTs #
# LANGUAGE JavaScriptFFI #
# LANGUAGE RecursiveDo # | # LANGUAGE FlexibleContexts #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE TemplateHaskell #
module Main where
import Control.Concurrent
import Control.Concurrent.MVar
import Control.DeepSeq
import Control.Monad
import Control.Monad.IO.Class
import Data.Default
import Data.Either
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import GHCJS.DOM.HTMLElement
import GHCJS.Foreign
import GHCJS.Types
import Reflex
import Reflex.Dom
import Reflex.Dom.Class
import Reflex.Dynamic.TH
import Text.Pandoc
import Editor
import Formats
import Example
import LocalStorage (getPref, setPref)
import Widgets.Menu
import Widgets.CodeMirror
import Widgets.Misc (icon, iconLinkClass, lastDoc, lastExt)
import Widgets.Setting
#ifdef __GHCJS__
#define JS(name, js, type) foreign import javascript unsafe js name :: type
#else
#define JS(name, js, type) name :: type ; name = undefined
#endif
JS(getTime,"(new Date())['getTime']()", IO Double)
JS(highlightCode,"highlightCode()", IO ())
main :: IO ()
main =
mainWidget $
do postGui <- askPostGui
divClass "ui two column padded grid" $
do (readerD,t,exts) <-
divClass "left column" $
divClass "ui segment" editor
divClass "right column" $
divClass "ui segment" $
do writerD <-
divClass "ui top left attached label" $
selection def {_selectionConfig_label = "Preview"
,_selectionConfig_initialValue = "1preview"
,_selectionConfig_options = constDyn resultFormats}
parsed <-
$(qDyn [|convertDoc $(unqDyn [|_selection_value readerD|])
$(unqDyn [|_selection_value writerD|])
$(unqDyn [|exts|])
$(unqDyn [|value t|])|])
result <-
forceLossy (updated parsed)
ext <- liftIO lastExt
doc <- liftIO lastDoc
resCM <-
divClass "ui top right attached label" $
do let output =
attachDyn (_selection_value writerD) result
makeSaveMenu "Save"
output
(ext,doc)
elAttr "div"
("class" =: "ui left dropdown compact icon button") $
elAttr "a"
("target" =: "_blank" <> "href" =:
"")
(icon "github")
(menu,resCM) <-
elAttr' "div"
("class" =: "ui left dropdown compact icon button") $
do icon "settings"
divClass "menu" $
do divClass "header" (text "Result Settings")
divClass "item" (setting "CodeMirror Display" True)
liftIO $
enableMenu (_el_element menu)
(toJSString "nothing")
return resCM
let initial =
convertDoc ext "1preview" githubMarkdownExtensions doc
resultDyn <-
holdDyn initial result
cmEnabled <-
liftIO $
getPref "CodeMirror Display" True
cmAttrs <-
mapDyn (\w ->
case w of
"1preview" ->
("style" =: "display: none;" <> "class" =:
"outputCM")
otherwise ->
("class" =: "outputCM"))
(_selection_value writerD)
elDynAttr "div" cmAttrs $
codeMirror
def {_codeMirrorConfig_initialValue = initial
,_codeMirrorConfig_enabled = cmEnabled
,_codeMirrorConfig_enableCodeMirror =
updated (_setting_value resCM)
,_codeMirrorConfig_changeLang =
updated (_selection_value writerD)
,_codeMirrorConfig_setValue = result}
htmlAttrs <-
mapDyn (\w ->
case w of
"1preview" ->
("class" =: "output")
otherwise ->
("style" =: "display: none;" <> "class" =:
"output"))
(_selection_value writerD)
elDynAttr "div" htmlAttrs $
elDynHtmlAttr' "div"
("class" =: "preview")
resultDyn
performEvent_ $
fmap (const . liftIO . void . forkIO $ highlightCode) result
performEvent_ $
fmap (liftIO .
void .
forkIO .
setPref "Last Document" .
show)
(updated $ value t)
forceLossy :: (MonadWidget t m,NFData a)
=> Event t a -> m (Event t a)
forceLossy e =
do mvar <- liftIO newEmptyMVar
diffsMVar <- liftIO (newMVar [])
performEventAsync (fmap (callAtNextInterval mvar diffsMVar) e)
where callAtNextInterval mvar diffsMVar e cb =
void . liftIO $
do maybeThreadId <- tryTakeMVar mvar
case maybeThreadId of
Just threadId -> killThread threadId
Nothing -> return ()
threadId <-
forkIO $
do start <- getTime
diffs <- readMVar diffsMVar
let avg =
round $ sum diffs / genericLength diffs :: Int
when (avg > 300)
(threadDelay (min (avg * 1000) 1000000))
cb $!! e
end <- getTime
let diff = end - start
let appendTime =
return .
(subtract start end :) .
take 2 .
reverse
modifyMVar_ diffsMVar appendTime
putMVar mvar threadId
convertDoc :: String -> String -> Set Extension -> String -> String
convertDoc readerStr writerStr extensions t =
either (const "") writer parsed
where parsed = reader t
writer = stringToWriter writerStr def
reader =
stringToReader
readerStr
def {readerApplyMacros = False
,readerExtensions = extensions}
stringToWriter :: String -> WriterOptions -> Pandoc -> String
stringToWriter s =
case s of
"1preview" -> writeHtmlString
"ascii" -> writeAsciiDoc
"html" -> writeHtmlString
"latex" -> writeLaTeX
"man" -> writeMan
"mw" -> writeMediaWiki
"dw" -> writeDokuWiki
"dbk" -> writeDocbook
"odt" -> writeOpenDocument
"opml" -> writeOPML
"icml" -> writeICML
"org" -> writeOrg
"plain" -> writePlain
"rst" -> writeRST
"texinfo" -> writeTexinfo
"textile" -> writeTextile
otherwise -> writeMarkdown
|
a812e12089f86be46d037db2ae2df079620e25c17f524f2e0365854397e70387 | fluree/db | cljs_shim.clj | (ns fluree.db.util.cljs-shim
(:require [clojure.java.io :as io]))
(set! *warn-on-reflection* true)
(defmacro inline-resource
"Macro allowing ClojureScript to inline a SMALL bundle of resource file(s) (< 1mb)
at compile time. If inline content grows, need to consider publishing to
and downloading from a cdn."
[resource-path]
(slurp (io/resource resource-path)))
| null | https://raw.githubusercontent.com/fluree/db/354dbd02de3dec6013f2a7fe992ac1d2674be1aa/src/fluree/db/util/cljs_shim.clj | clojure | (ns fluree.db.util.cljs-shim
(:require [clojure.java.io :as io]))
(set! *warn-on-reflection* true)
(defmacro inline-resource
"Macro allowing ClojureScript to inline a SMALL bundle of resource file(s) (< 1mb)
at compile time. If inline content grows, need to consider publishing to
and downloading from a cdn."
[resource-path]
(slurp (io/resource resource-path)))
| |
ff02b3e0690e90fdfc2ef79f8aeccab1cb9821beae59a0ea4ca328c7d0df1234 | heechul/crest-z3 | dataslicing.mli |
*
* Copyright ( c ) 2001 - 2002 ,
* < >
* < >
* All rights reserved .
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are
* met :
*
* 1 . Redistributions of source code must retain the above copyright
* notice , this list of conditions and the following disclaimer .
*
* 2 . Redistributions in binary form must reproduce the above copyright
* notice , this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution .
*
* 3 . The names of the contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission .
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS
* IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED
* TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL ,
* EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR
* PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
* NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
*
*
* Copyright (c) 2001-2002,
* Jeremy Condit <>
* George C. Necula <>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The names of the contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*)
(* This feature implements data slicing. The user annotates base types
* and function types with region(i) annotations, and this transformation
* will separate the fields into parallel data structures accordingly. *)
val feature: Cil.featureDescr
| null | https://raw.githubusercontent.com/heechul/crest-z3/cfcebadddb5e9d69e9956644fc37b46f6c2a21a0/cil/src/ext/dataslicing.mli | ocaml | This feature implements data slicing. The user annotates base types
* and function types with region(i) annotations, and this transformation
* will separate the fields into parallel data structures accordingly. |
*
* Copyright ( c ) 2001 - 2002 ,
* < >
* < >
* All rights reserved .
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are
* met :
*
* 1 . Redistributions of source code must retain the above copyright
* notice , this list of conditions and the following disclaimer .
*
* 2 . Redistributions in binary form must reproduce the above copyright
* notice , this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution .
*
* 3 . The names of the contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission .
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS
* IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED
* TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL ,
* EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR
* PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
* NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
*
*
* Copyright (c) 2001-2002,
* Jeremy Condit <>
* George C. Necula <>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The names of the contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*)
val feature: Cil.featureDescr
|
0b7dca3e5cb342adf9be3908d580d0e3e54e650fe0adaff0d5f3d1e9d243b7da | chrovis/cljam | writer.clj | (ns cljam.io.bam-index.writer
(:require [com.climate.claypoole :as cp]
[cljam.common :refer [get-exec-n-threads]]
[cljam.io.util.bgzf :as bgzf]
[cljam.io.util.lsb :as lsb]
[cljam.io.util.bin :as util-bin]
[cljam.io.bam-index.common :refer [linear-index-shift
linear-index-depth
max-bins
bai-magic]]
[cljam.io.util.chunk :as chunk]
[cljam.io.bam.decoder :as bam-decoder])
(:import [java.io DataOutputStream Closeable]
[cljam.io.bam.decoder BAMPointerBlock]
[cljam.io.util.chunk Chunk]))
BAIWriter
;; ---------
(deftype BAIWriter [^DataOutputStream writer refs url]
Closeable
(close [this]
(.close writer)))
# # # Intermediate data definitions
;;
;; Use record for performance.
;; Record is faster than map for retrieving elements.
(defrecord MetaData [^long first-offset ^long last-offset ^long aligned-alns ^long unaligned-alns])
(defrecord IndexStatus [^MetaData meta-data bin-index linear-index])
# # # Initializing
(defn- init-index-status
"Returns initialized index status. This data structure is intermediate. Must
be passed `finalize-index` in the final stage."
[]
(IndexStatus. (MetaData. -1 0 0 0)
Intermediate bin - index - > { bin1 , bin2 chunks2 , ... }
e.g. { 4681 [ { : beg 97 , : end 555 } ... ] , 37450 [ ... ] ... }
{}
Intermediate linear - index - > { pos1 , pos2 value2 , ... }
e.g. { 5415 4474732776 , 14827 5955073327 , ... }
{}))
# # # Updating
(defn- update-meta-data
[^MetaData meta-data ^BAMPointerBlock aln]
(let [first-offset (.first-offset meta-data)
last-offset (.last-offset meta-data)
aligned-alns (.aligned-alns meta-data)
unaligned-alns (.unaligned-alns meta-data)
beg (.pointer-beg aln)
end (.pointer-end aln)
aligned? (zero? (bit-and (.flag aln) 4))]
(MetaData.
first - offset
(if (or (< (bgzf/compare beg first-offset) 1)
(= first-offset -1))
beg first-offset)
;; last-offset
(if (< (bgzf/compare last-offset end) 1)
end last-offset)
;; aligned-alns
(if aligned? (inc aligned-alns) aligned-alns)
;; unaligned-alns
(if-not aligned? (inc unaligned-alns) unaligned-alns))))
(defn- update-bin-index
[bin-index ^BAMPointerBlock aln]
(let [bin (util-bin/reg->bin
(.pos aln) (.end aln) linear-index-shift linear-index-depth)
beg (.pointer-beg aln)
end (.pointer-end aln)]
(assoc bin-index bin
(if-let [chunks (get bin-index bin)]
(let [last-chunk ^Chunk (peek chunks)]
(if (bgzf/same-or-adjacent-blocks? (.end last-chunk) beg)
(conj (pop chunks) (Chunk. (.beg last-chunk) end))
(conj chunks (Chunk. beg end))))
[(Chunk. beg end)]))))
(defn- update-linear-index
[linear-index ^BAMPointerBlock aln]
(let [beg (.pointer-beg aln)
aln-beg (.pos aln)
aln-end (.end aln)
win-beg (if (zero? aln-end)
(util-bin/pos->lidx-offset (dec aln-beg) linear-index-shift)
(util-bin/pos->lidx-offset aln-beg linear-index-shift))
win-end (if (zero? aln-end)
win-beg
(util-bin/pos->lidx-offset aln-end linear-index-shift))
min* (fn [x]
(if x (min x beg) beg))]
(loop [i win-beg, ret linear-index]
(if (<= i win-end)
(recur (inc i) (assoc ret i (min* (get ret i))))
ret))))
(defn- update-index-status
[^IndexStatus index-status aln]
(IndexStatus. (update-meta-data (.meta-data index-status) aln)
(update-bin-index (.bin-index index-status) aln)
(update-linear-index (.linear-index index-status) aln)))
;; Merging indices
;; -------------
(defn- merge-meta-data
[^MetaData meta1 ^MetaData meta2]
(MetaData. (let [f1 (.first-offset meta1)
f2 (.first-offset meta2)]
(cond
(= f1 -1) f2
(= f2 -1) f1
:else (min f1 f2)))
(max (.last-offset meta1) (.last-offset meta2))
(+ (.aligned-alns meta1) (.aligned-alns meta2))
(+ (.unaligned-alns meta1) (.unaligned-alns meta2))))
(defn- merge-chunks
[chunks1 chunks2]
(loop [[^Chunk f & r] (sort chunk/compare (concat chunks1 chunks2))
chunks' []]
(if f
(if-let [last-chunk ^Chunk (peek chunks')]
(if (bgzf/same-or-adjacent-blocks? (.end last-chunk) (.beg f))
(let [l (assoc last-chunk :end (.end f))]
(recur r (assoc chunks' (dec (count chunks')) l)))
(recur r (conj chunks' f)))
(recur r (conj chunks' f)))
chunks')))
(defn- merge-bin-index
[bin-map1 bin-map2]
(merge-with merge-chunks bin-map1 bin-map2))
(defn- merge-linear-index
[lidx1 lidx2]
(merge-with min lidx1 lidx2))
;; Finalizing index
;; ----------------
(defn- finalize-bin-index
[bin-index]
(->> bin-index
(seq)
(sort-by first)
(map (partial zipmap [:bin :chunks]))))
(defn- complement-linear-index
"Complements a linear index.
e.g. ([1 10] [3 30]) -> ([0 0] [1 10] [2 10] [3 30])"
[linear-index]
(loop [[f & r] (if (zero? (ffirst linear-index))
linear-index
(conj linear-index [0 0]))
ret []]
(if (seq r)
(recur r (apply conj ret (map #(conj (vector %) (second f)) (range (first f) (ffirst r)))))
(conj ret f))))
(defn- finalize-linear-index
[linear-index]
(->> linear-index
(seq)
(sort-by first)
(complement-linear-index)
(map second)))
;; Writing index
;; -----------
(defn- write-bin
[w ^long bin chunks]
(lsb/write-int w bin)
;; chunks
(lsb/write-int w (count chunks))
(doseq [^Chunk chunk chunks]
(lsb/write-long w (.beg chunk))
(lsb/write-long w (.end chunk))))
(defn- write-meta-data
[w meta-data]
(lsb/write-int w max-bins)
(lsb/write-int w 2)
(lsb/write-long w (:first-offset meta-data))
(lsb/write-long w (:last-offset meta-data))
(lsb/write-long w (:aligned-alns meta-data))
(lsb/write-long w (:unaligned-alns meta-data)))
;; Public
;; ------
(def ^:dynamic *alignments-partition-size*
"The number of alignments that is loaded each indexing process. This has an
effect on performance of concurrent indexing. The default value is 10,000."
10000)
;; Merging indices
;; -------------
(defn merge-index
"Merges two intermediate indices, returning the merged intermediate index."
[idx1 idx2]
(let [no-coordinate-alns (+ (:no-coordinate-alns idx1) (:no-coordinate-alns idx2))
idx1 (dissoc idx1 :no-coordinate-alns)
idx2 (dissoc idx2 :no-coordinate-alns)]
(-> (merge-with
(fn [^IndexStatus v1 ^IndexStatus v2]
(IndexStatus. (merge-meta-data (.meta-data v1) (.meta-data v2))
(merge-bin-index (.bin-index v1) (.bin-index v2))
(merge-linear-index (.linear-index v1) (.linear-index v2))))
idx1 idx2)
(assoc :no-coordinate-alns no-coordinate-alns))))
;; Making index
;; -----------
(defn finalize-index
"Converts intermediate BAM index data structure into final one. Must be called
in the final stage."
[^long nrefs index]
(loop [i 0
index index]
(if (< i nrefs)
(if (get index i)
(recur (inc i) (-> index
(update-in [i :bin-index] finalize-bin-index)
(update-in [i :linear-index] finalize-linear-index)))
(recur (inc i) index))
index)))
(defn make-index*
"Calculates index from the references and alignments, returning it as a map.
Returned index is still intermediate. It must be passed to finalize function
in the final stage."
[alns]
(loop [[^BAMPointerBlock aln & rest] alns
rid (.ref-id aln)
idx-status (init-index-status)
no-coordinate-alns 0
indices {}]
(if aln
(let [rid' (.ref-id aln)
new-ref? (not= rid' rid)
idx-status' (update-index-status
(if new-ref? (init-index-status) idx-status) aln)
no-coordinate-alns' (if (zero? (.pos aln))
(inc no-coordinate-alns)
no-coordinate-alns)
indices' (if new-ref?
(assoc indices rid idx-status)
indices)]
(recur rest rid' idx-status' no-coordinate-alns' indices'))
(assoc indices rid idx-status
:no-coordinate-alns no-coordinate-alns))))
(defn make-index-from-blocks
"Calculates a BAM index from provided references and alignment blocks.
Optionally, you can do this process concurrently."
[^long nrefs blocks]
(let [n-threads (get-exec-n-threads)
make-index-fn (fn [blocks]
(if (= n-threads 1)
(->> blocks
(eduction (map bam-decoder/decode-pointer-block))
make-index*)
(cp/with-shutdown! [pool (cp/threadpool (dec n-threads))]
(->> blocks
(eduction (partition-all *alignments-partition-size*))
(cp/upmap pool (fn [sub-blocks]
(->> sub-blocks
(eduction (map bam-decoder/decode-pointer-block))
make-index*)))
(reduce merge-index {:no-coordinate-alns 0})))))]
(->> blocks
make-index-fn
(finalize-index nrefs))))
(defn update-last-pointer
"Update the last pointer of the index to the given value."
[index eof-ptr]
(if (or (= (keys index) [:no-coordinate-alns])
(pos? (get index :no-coordinate-alns 0)))
index
(let [last-ref (apply max (keys (dissoc index :no-coordinate-alns)))
last-key (->> (for [[bin chunks] (get-in index [last-ref :bin-index])
[i {:keys [end]}] (map-indexed vector chunks)]
[end [last-ref :bin-index bin i :end]])
(apply max-key first)
last)]
(-> index
(assoc-in [last-ref :meta-data :last-offset] eof-ptr)
(assoc-in last-key eof-ptr)))))
;; Writing index
;; -----------
(defn write-index*!
"Write the index to a file."
[wtr ^long nrefs indices]
;; magic
(lsb/write-bytes wtr (.getBytes ^String bai-magic))
;; n_ref
(lsb/write-int wtr nrefs)
(dotimes [i nrefs]
(let [index (get indices i)
n-bin (count (:bin-index index))]
;; bins
(if (zero? n-bin)
(lsb/write-int wtr 0)
(do
;; # of bins
(lsb/write-int wtr (inc n-bin))
(doseq [bin (:bin-index index)]
(write-bin wtr (:bin bin) (:chunks bin)))
;; meta data
(write-meta-data wtr (:meta-data index))))
;; linear index
(lsb/write-int wtr (count (:linear-index index)))
(doseq [l (:linear-index index)]
(lsb/write-long wtr l))))
;; no coordinate alignments
(lsb/write-long wtr (:no-coordinate-alns indices)))
;; ------
(defn write-index!
"Calculates a BAM index from alns, writing the index to a file."
[^BAIWriter wtr alns]
(let [nrefs (count (.refs wtr))
indices (make-index-from-blocks nrefs alns)]
(write-index*! (.writer wtr) nrefs indices)))
| null | https://raw.githubusercontent.com/chrovis/cljam/2b8e7386765be8efdbbbb4f18dbc52447f4a08af/src/cljam/io/bam_index/writer.clj | clojure | ---------
Use record for performance.
Record is faster than map for retrieving elements.
last-offset
aligned-alns
unaligned-alns
Merging indices
-------------
Finalizing index
----------------
Writing index
-----------
chunks
Public
------
Merging indices
-------------
Making index
-----------
Writing index
-----------
magic
n_ref
bins
# of bins
meta data
linear index
no coordinate alignments
------ | (ns cljam.io.bam-index.writer
(:require [com.climate.claypoole :as cp]
[cljam.common :refer [get-exec-n-threads]]
[cljam.io.util.bgzf :as bgzf]
[cljam.io.util.lsb :as lsb]
[cljam.io.util.bin :as util-bin]
[cljam.io.bam-index.common :refer [linear-index-shift
linear-index-depth
max-bins
bai-magic]]
[cljam.io.util.chunk :as chunk]
[cljam.io.bam.decoder :as bam-decoder])
(:import [java.io DataOutputStream Closeable]
[cljam.io.bam.decoder BAMPointerBlock]
[cljam.io.util.chunk Chunk]))
BAIWriter
(deftype BAIWriter [^DataOutputStream writer refs url]
Closeable
(close [this]
(.close writer)))
# # # Intermediate data definitions
(defrecord MetaData [^long first-offset ^long last-offset ^long aligned-alns ^long unaligned-alns])
(defrecord IndexStatus [^MetaData meta-data bin-index linear-index])
# # # Initializing
(defn- init-index-status
"Returns initialized index status. This data structure is intermediate. Must
be passed `finalize-index` in the final stage."
[]
(IndexStatus. (MetaData. -1 0 0 0)
Intermediate bin - index - > { bin1 , bin2 chunks2 , ... }
e.g. { 4681 [ { : beg 97 , : end 555 } ... ] , 37450 [ ... ] ... }
{}
Intermediate linear - index - > { pos1 , pos2 value2 , ... }
e.g. { 5415 4474732776 , 14827 5955073327 , ... }
{}))
# # # Updating
(defn- update-meta-data
[^MetaData meta-data ^BAMPointerBlock aln]
(let [first-offset (.first-offset meta-data)
last-offset (.last-offset meta-data)
aligned-alns (.aligned-alns meta-data)
unaligned-alns (.unaligned-alns meta-data)
beg (.pointer-beg aln)
end (.pointer-end aln)
aligned? (zero? (bit-and (.flag aln) 4))]
(MetaData.
first - offset
(if (or (< (bgzf/compare beg first-offset) 1)
(= first-offset -1))
beg first-offset)
(if (< (bgzf/compare last-offset end) 1)
end last-offset)
(if aligned? (inc aligned-alns) aligned-alns)
(if-not aligned? (inc unaligned-alns) unaligned-alns))))
(defn- update-bin-index
[bin-index ^BAMPointerBlock aln]
(let [bin (util-bin/reg->bin
(.pos aln) (.end aln) linear-index-shift linear-index-depth)
beg (.pointer-beg aln)
end (.pointer-end aln)]
(assoc bin-index bin
(if-let [chunks (get bin-index bin)]
(let [last-chunk ^Chunk (peek chunks)]
(if (bgzf/same-or-adjacent-blocks? (.end last-chunk) beg)
(conj (pop chunks) (Chunk. (.beg last-chunk) end))
(conj chunks (Chunk. beg end))))
[(Chunk. beg end)]))))
(defn- update-linear-index
[linear-index ^BAMPointerBlock aln]
(let [beg (.pointer-beg aln)
aln-beg (.pos aln)
aln-end (.end aln)
win-beg (if (zero? aln-end)
(util-bin/pos->lidx-offset (dec aln-beg) linear-index-shift)
(util-bin/pos->lidx-offset aln-beg linear-index-shift))
win-end (if (zero? aln-end)
win-beg
(util-bin/pos->lidx-offset aln-end linear-index-shift))
min* (fn [x]
(if x (min x beg) beg))]
(loop [i win-beg, ret linear-index]
(if (<= i win-end)
(recur (inc i) (assoc ret i (min* (get ret i))))
ret))))
(defn- update-index-status
[^IndexStatus index-status aln]
(IndexStatus. (update-meta-data (.meta-data index-status) aln)
(update-bin-index (.bin-index index-status) aln)
(update-linear-index (.linear-index index-status) aln)))
(defn- merge-meta-data
[^MetaData meta1 ^MetaData meta2]
(MetaData. (let [f1 (.first-offset meta1)
f2 (.first-offset meta2)]
(cond
(= f1 -1) f2
(= f2 -1) f1
:else (min f1 f2)))
(max (.last-offset meta1) (.last-offset meta2))
(+ (.aligned-alns meta1) (.aligned-alns meta2))
(+ (.unaligned-alns meta1) (.unaligned-alns meta2))))
(defn- merge-chunks
[chunks1 chunks2]
(loop [[^Chunk f & r] (sort chunk/compare (concat chunks1 chunks2))
chunks' []]
(if f
(if-let [last-chunk ^Chunk (peek chunks')]
(if (bgzf/same-or-adjacent-blocks? (.end last-chunk) (.beg f))
(let [l (assoc last-chunk :end (.end f))]
(recur r (assoc chunks' (dec (count chunks')) l)))
(recur r (conj chunks' f)))
(recur r (conj chunks' f)))
chunks')))
(defn- merge-bin-index
[bin-map1 bin-map2]
(merge-with merge-chunks bin-map1 bin-map2))
(defn- merge-linear-index
[lidx1 lidx2]
(merge-with min lidx1 lidx2))
(defn- finalize-bin-index
[bin-index]
(->> bin-index
(seq)
(sort-by first)
(map (partial zipmap [:bin :chunks]))))
(defn- complement-linear-index
"Complements a linear index.
e.g. ([1 10] [3 30]) -> ([0 0] [1 10] [2 10] [3 30])"
[linear-index]
(loop [[f & r] (if (zero? (ffirst linear-index))
linear-index
(conj linear-index [0 0]))
ret []]
(if (seq r)
(recur r (apply conj ret (map #(conj (vector %) (second f)) (range (first f) (ffirst r)))))
(conj ret f))))
(defn- finalize-linear-index
[linear-index]
(->> linear-index
(seq)
(sort-by first)
(complement-linear-index)
(map second)))
(defn- write-bin
[w ^long bin chunks]
(lsb/write-int w bin)
(lsb/write-int w (count chunks))
(doseq [^Chunk chunk chunks]
(lsb/write-long w (.beg chunk))
(lsb/write-long w (.end chunk))))
(defn- write-meta-data
[w meta-data]
(lsb/write-int w max-bins)
(lsb/write-int w 2)
(lsb/write-long w (:first-offset meta-data))
(lsb/write-long w (:last-offset meta-data))
(lsb/write-long w (:aligned-alns meta-data))
(lsb/write-long w (:unaligned-alns meta-data)))
(def ^:dynamic *alignments-partition-size*
"The number of alignments that is loaded each indexing process. This has an
effect on performance of concurrent indexing. The default value is 10,000."
10000)
(defn merge-index
"Merges two intermediate indices, returning the merged intermediate index."
[idx1 idx2]
(let [no-coordinate-alns (+ (:no-coordinate-alns idx1) (:no-coordinate-alns idx2))
idx1 (dissoc idx1 :no-coordinate-alns)
idx2 (dissoc idx2 :no-coordinate-alns)]
(-> (merge-with
(fn [^IndexStatus v1 ^IndexStatus v2]
(IndexStatus. (merge-meta-data (.meta-data v1) (.meta-data v2))
(merge-bin-index (.bin-index v1) (.bin-index v2))
(merge-linear-index (.linear-index v1) (.linear-index v2))))
idx1 idx2)
(assoc :no-coordinate-alns no-coordinate-alns))))
(defn finalize-index
"Converts intermediate BAM index data structure into final one. Must be called
in the final stage."
[^long nrefs index]
(loop [i 0
index index]
(if (< i nrefs)
(if (get index i)
(recur (inc i) (-> index
(update-in [i :bin-index] finalize-bin-index)
(update-in [i :linear-index] finalize-linear-index)))
(recur (inc i) index))
index)))
(defn make-index*
"Calculates index from the references and alignments, returning it as a map.
Returned index is still intermediate. It must be passed to finalize function
in the final stage."
[alns]
(loop [[^BAMPointerBlock aln & rest] alns
rid (.ref-id aln)
idx-status (init-index-status)
no-coordinate-alns 0
indices {}]
(if aln
(let [rid' (.ref-id aln)
new-ref? (not= rid' rid)
idx-status' (update-index-status
(if new-ref? (init-index-status) idx-status) aln)
no-coordinate-alns' (if (zero? (.pos aln))
(inc no-coordinate-alns)
no-coordinate-alns)
indices' (if new-ref?
(assoc indices rid idx-status)
indices)]
(recur rest rid' idx-status' no-coordinate-alns' indices'))
(assoc indices rid idx-status
:no-coordinate-alns no-coordinate-alns))))
(defn make-index-from-blocks
"Calculates a BAM index from provided references and alignment blocks.
Optionally, you can do this process concurrently."
[^long nrefs blocks]
(let [n-threads (get-exec-n-threads)
make-index-fn (fn [blocks]
(if (= n-threads 1)
(->> blocks
(eduction (map bam-decoder/decode-pointer-block))
make-index*)
(cp/with-shutdown! [pool (cp/threadpool (dec n-threads))]
(->> blocks
(eduction (partition-all *alignments-partition-size*))
(cp/upmap pool (fn [sub-blocks]
(->> sub-blocks
(eduction (map bam-decoder/decode-pointer-block))
make-index*)))
(reduce merge-index {:no-coordinate-alns 0})))))]
(->> blocks
make-index-fn
(finalize-index nrefs))))
(defn update-last-pointer
"Update the last pointer of the index to the given value."
[index eof-ptr]
(if (or (= (keys index) [:no-coordinate-alns])
(pos? (get index :no-coordinate-alns 0)))
index
(let [last-ref (apply max (keys (dissoc index :no-coordinate-alns)))
last-key (->> (for [[bin chunks] (get-in index [last-ref :bin-index])
[i {:keys [end]}] (map-indexed vector chunks)]
[end [last-ref :bin-index bin i :end]])
(apply max-key first)
last)]
(-> index
(assoc-in [last-ref :meta-data :last-offset] eof-ptr)
(assoc-in last-key eof-ptr)))))
(defn write-index*!
"Write the index to a file."
[wtr ^long nrefs indices]
(lsb/write-bytes wtr (.getBytes ^String bai-magic))
(lsb/write-int wtr nrefs)
(dotimes [i nrefs]
(let [index (get indices i)
n-bin (count (:bin-index index))]
(if (zero? n-bin)
(lsb/write-int wtr 0)
(do
(lsb/write-int wtr (inc n-bin))
(doseq [bin (:bin-index index)]
(write-bin wtr (:bin bin) (:chunks bin)))
(write-meta-data wtr (:meta-data index))))
(lsb/write-int wtr (count (:linear-index index)))
(doseq [l (:linear-index index)]
(lsb/write-long wtr l))))
(lsb/write-long wtr (:no-coordinate-alns indices)))
(defn write-index!
"Calculates a BAM index from alns, writing the index to a file."
[^BAIWriter wtr alns]
(let [nrefs (count (.refs wtr))
indices (make-index-from-blocks nrefs alns)]
(write-index*! (.writer wtr) nrefs indices)))
|
2fc96e8276bc7c2641adbe1bb6827c6e46e5bf1cf03054880bf137fcfd140710 | glondu/belenios | tool_events.mli | (**************************************************************************)
(* BELENIOS *)
(* *)
Copyright © 2012 - 2022
(* *)
(* This program is free software: you can redistribute it and/or modify *)
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation , either version 3 of the
(* License, or (at your option) any later version, with the additional *)
exemption that compiling , linking , and/or using OpenSSL is allowed .
(* *)
(* This program is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *)
(* Affero General Public License for more details. *)
(* *)
You should have received a copy of the GNU Affero General Public
(* License along with this program. If not, see *)
(* </>. *)
(**************************************************************************)
open Belenios_core.Signatures
open Belenios_core.Serializable_t
open Belenios_core.Archive
type index
val get_index : file:string -> index
val get_data : index -> hash -> string option
val get_event : index -> hash -> event option
val get_roots : index -> roots
val fold_on_event_payload_hashes :
index -> event_type -> hash -> (hash -> 'a -> 'a) -> 'a -> 'a
val fold_on_event_payloads :
index -> event_type -> hash -> (string -> 'a -> 'a) -> 'a -> 'a
val fsck : index -> unit
val starts_with : prefix:index -> index -> bool
type append_operation =
| Data of string
| Event of event_type * hash option
val append : index -> append_operation list -> unit
val init : file:string -> election:string -> trustees:string -> public_creds:string -> index
module DirectMonad : MONAD with type 'a t = 'a
module Writer : ARCHIVE_WRITER with type 'a m := 'a and type archive = out_channel
| null | https://raw.githubusercontent.com/glondu/belenios/64e71651d245ac58a0e909ae0f3c290356f780ee/src/tool/tool_events.mli | ocaml | ************************************************************************
BELENIOS
This program is free software: you can redistribute it and/or modify
License, or (at your option) any later version, with the additional
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Affero General Public License for more details.
License along with this program. If not, see
</>.
************************************************************************ | Copyright © 2012 - 2022
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation , either version 3 of the
exemption that compiling , linking , and/or using OpenSSL is allowed .
You should have received a copy of the GNU Affero General Public
open Belenios_core.Signatures
open Belenios_core.Serializable_t
open Belenios_core.Archive
type index
val get_index : file:string -> index
val get_data : index -> hash -> string option
val get_event : index -> hash -> event option
val get_roots : index -> roots
val fold_on_event_payload_hashes :
index -> event_type -> hash -> (hash -> 'a -> 'a) -> 'a -> 'a
val fold_on_event_payloads :
index -> event_type -> hash -> (string -> 'a -> 'a) -> 'a -> 'a
val fsck : index -> unit
val starts_with : prefix:index -> index -> bool
type append_operation =
| Data of string
| Event of event_type * hash option
val append : index -> append_operation list -> unit
val init : file:string -> election:string -> trustees:string -> public_creds:string -> index
module DirectMonad : MONAD with type 'a t = 'a
module Writer : ARCHIVE_WRITER with type 'a m := 'a and type archive = out_channel
|
e8611092aeec847d7628ff93b24d846e2f17d7d7712de5d4900d388c44e0d329 | camllight/camllight | hashtblc.mli | (* Hash tables and hash functions *)
(* Hash tables are hashed association tables, with in-place modification. *)
type ('a, 'b) t;;
(* The type of hash tables from type ['a] to type ['b]. *)
value new : int -> ('a,'b) t
(* [new n] creates a new, empty hash table, with initial size [n].
The table grows as needed, so [n] is just an initial guess.
Better results are said to be achieved when [n] is a prime
number. *)
and clear : ('a, 'b) t -> unit
(* Empty a hash table. *)
and add : ('a, 'b) t -> 'a -> 'b -> unit
[ add tbl x y ] adds a binding of [ x ] to [ y ] in table [ tbl ] .
Previous bindings for [ x ] are not removed , but simply
hidden . That is , after performing [ remove x ] , the previous
binding for [ x ] , if any , is restored .
( This is the semantics of association lists . )
Previous bindings for [x] are not removed, but simply
hidden. That is, after performing [remove tbl x], the previous
binding for [x], if any, is restored.
(This is the semantics of association lists.) *)
and find : ('a, 'b) t -> 'a -> 'b
[ find x ] returns the current binding of [ x ] in [ tbl ] ,
or raises [ Not_found ] if no such binding exists .
or raises [Not_found] if no such binding exists. *)
and find_all : ('a, 'b) t -> 'a -> 'b list
[ find_all tbl x ] returns the list of all data associated with [ x ]
in [ tbl ] . The current binding is returned first , then the previous
bindings , in reverse order of introduction in the table .
in [tbl]. The current binding is returned first, then the previous
bindings, in reverse order of introduction in the table. *)
and remove : ('a, 'b) t -> 'a -> unit
[ remove x ] removes the current binding of [ x ] in [ tbl ] ,
restoring the previous binding if it exists .
It does nothing if [ x ] is not bound in [ tbl ] .
restoring the previous binding if it exists.
It does nothing if [x] is not bound in [tbl]. *)
and do_table : ('a -> 'b -> 'c) -> ('a, 'b) t -> unit
[ do_table f tbl ] applies [ f ] to all bindings in table [ tbl ] ,
discarding all the results .
[ f ] receives the key as first argument , and the associated value
as second argument .
Each binding is presented exactly once to [ f ] .
The order in which the bindings are passed to
[ f ] is unpredictable , except that successive bindings for the same
key are presented in reverse chronological order
( most recent first ) .
discarding all the results.
[f] receives the key as first argument, and the associated value
as second argument.
Each binding is presented exactly once to [f].
The order in which the bindings are passed to
[f] is unpredictable, except that successive bindings for the same
key are presented in reverse chronological order
(most recent first). *)
and do_table_rev : ('a -> 'b -> 'c) -> ('a, 'b) t -> unit
Same as [ do_table ] , except that successive bindings for the same
key are presented in chronological order ( oldest first ) .
key are presented in chronological order (oldest first). *)
;;
(*** The polymorphic hash primitive *)
value hash : 'a -> int
(* [hash x] associates a positive integer to any value of
any type. It is guaranteed that
if [x = y], then [hash x = hash y].
Moreover, [hash] always terminates, even on cyclic
structures. *)
;;
value hash_param : int -> int -> 'a -> int = 3 "hash_univ_param"
[ hash_param n m x ] computes a hash value for [ x ] , with the
same properties as for [ hash ] . The two extra parameters [ n ] and
[ m ] give more precise control over hashing . Hashing performs a
depth - first , right - to - left traversal of the structure [ x ] , stopping
after [ n ] meaningful nodes were encountered , or [ m ] nodes ,
meaningful or not , were encountered . Meaningful nodes are : integers ;
floating - point numbers ; strings ; characters ; booleans ; and constant
constructors . Larger values of [ m ] and [ n ] means that more
nodes are taken into account to compute the final hash
value , and therefore collisions are less likely to happen .
However , hashing takes longer . The parameters [ m ] and [ n ]
govern the tradeoff between accuracy and speed .
same properties as for [hash]. The two extra parameters [n] and
[m] give more precise control over hashing. Hashing performs a
depth-first, right-to-left traversal of the structure [x], stopping
after [n] meaningful nodes were encountered, or [m] nodes,
meaningful or not, were encountered. Meaningful nodes are: integers;
floating-point numbers; strings; characters; booleans; and constant
constructors. Larger values of [m] and [n] means that more
nodes are taken into account to compute the final hash
value, and therefore collisions are less likely to happen.
However, hashing takes longer. The parameters [m] and [n]
govern the tradeoff between accuracy and speed. *)
;;
| null | https://raw.githubusercontent.com/camllight/camllight/0cc537de0846393322058dbb26449427bfc76786/sources/contrib/camltk4/libsupport/hashtblc.mli | ocaml | Hash tables and hash functions
Hash tables are hashed association tables, with in-place modification.
The type of hash tables from type ['a] to type ['b].
[new n] creates a new, empty hash table, with initial size [n].
The table grows as needed, so [n] is just an initial guess.
Better results are said to be achieved when [n] is a prime
number.
Empty a hash table.
** The polymorphic hash primitive
[hash x] associates a positive integer to any value of
any type. It is guaranteed that
if [x = y], then [hash x = hash y].
Moreover, [hash] always terminates, even on cyclic
structures. |
type ('a, 'b) t;;
value new : int -> ('a,'b) t
and clear : ('a, 'b) t -> unit
and add : ('a, 'b) t -> 'a -> 'b -> unit
[ add tbl x y ] adds a binding of [ x ] to [ y ] in table [ tbl ] .
Previous bindings for [ x ] are not removed , but simply
hidden . That is , after performing [ remove x ] , the previous
binding for [ x ] , if any , is restored .
( This is the semantics of association lists . )
Previous bindings for [x] are not removed, but simply
hidden. That is, after performing [remove tbl x], the previous
binding for [x], if any, is restored.
(This is the semantics of association lists.) *)
and find : ('a, 'b) t -> 'a -> 'b
[ find x ] returns the current binding of [ x ] in [ tbl ] ,
or raises [ Not_found ] if no such binding exists .
or raises [Not_found] if no such binding exists. *)
and find_all : ('a, 'b) t -> 'a -> 'b list
[ find_all tbl x ] returns the list of all data associated with [ x ]
in [ tbl ] . The current binding is returned first , then the previous
bindings , in reverse order of introduction in the table .
in [tbl]. The current binding is returned first, then the previous
bindings, in reverse order of introduction in the table. *)
and remove : ('a, 'b) t -> 'a -> unit
[ remove x ] removes the current binding of [ x ] in [ tbl ] ,
restoring the previous binding if it exists .
It does nothing if [ x ] is not bound in [ tbl ] .
restoring the previous binding if it exists.
It does nothing if [x] is not bound in [tbl]. *)
and do_table : ('a -> 'b -> 'c) -> ('a, 'b) t -> unit
[ do_table f tbl ] applies [ f ] to all bindings in table [ tbl ] ,
discarding all the results .
[ f ] receives the key as first argument , and the associated value
as second argument .
Each binding is presented exactly once to [ f ] .
The order in which the bindings are passed to
[ f ] is unpredictable , except that successive bindings for the same
key are presented in reverse chronological order
( most recent first ) .
discarding all the results.
[f] receives the key as first argument, and the associated value
as second argument.
Each binding is presented exactly once to [f].
The order in which the bindings are passed to
[f] is unpredictable, except that successive bindings for the same
key are presented in reverse chronological order
(most recent first). *)
and do_table_rev : ('a -> 'b -> 'c) -> ('a, 'b) t -> unit
Same as [ do_table ] , except that successive bindings for the same
key are presented in chronological order ( oldest first ) .
key are presented in chronological order (oldest first). *)
;;
value hash : 'a -> int
;;
value hash_param : int -> int -> 'a -> int = 3 "hash_univ_param"
[ hash_param n m x ] computes a hash value for [ x ] , with the
same properties as for [ hash ] . The two extra parameters [ n ] and
[ m ] give more precise control over hashing . Hashing performs a
depth - first , right - to - left traversal of the structure [ x ] , stopping
after [ n ] meaningful nodes were encountered , or [ m ] nodes ,
meaningful or not , were encountered . Meaningful nodes are : integers ;
floating - point numbers ; strings ; characters ; booleans ; and constant
constructors . Larger values of [ m ] and [ n ] means that more
nodes are taken into account to compute the final hash
value , and therefore collisions are less likely to happen .
However , hashing takes longer . The parameters [ m ] and [ n ]
govern the tradeoff between accuracy and speed .
same properties as for [hash]. The two extra parameters [n] and
[m] give more precise control over hashing. Hashing performs a
depth-first, right-to-left traversal of the structure [x], stopping
after [n] meaningful nodes were encountered, or [m] nodes,
meaningful or not, were encountered. Meaningful nodes are: integers;
floating-point numbers; strings; characters; booleans; and constant
constructors. Larger values of [m] and [n] means that more
nodes are taken into account to compute the final hash
value, and therefore collisions are less likely to happen.
However, hashing takes longer. The parameters [m] and [n]
govern the tradeoff between accuracy and speed. *)
;;
|
383eb814fe75e7689385898382fbdec16cf0a0a39b8a81ff36d9cab273de8f2c | rjray/advent-2020-clojure | day12_test.clj | (ns advent-of-code.day12-test
(:require [clojure.test :refer [deftest testing is]]
[advent-of-code.day12 :refer [part-1 part-2]]
[clojure.java.io :refer [resource]]))
(deftest part1
(let [expected 25]
(is (= expected (part-1 (slurp (resource "day12-example.txt")))))))
(deftest part2
(let [expected 286]
(is (= expected (part-2 (slurp (resource "day12-example.txt")))))))
| null | https://raw.githubusercontent.com/rjray/advent-2020-clojure/631b36545ae1efdebd11ca3dd4dca032346e8601/test/advent_of_code/day12_test.clj | clojure | (ns advent-of-code.day12-test
(:require [clojure.test :refer [deftest testing is]]
[advent-of-code.day12 :refer [part-1 part-2]]
[clojure.java.io :refer [resource]]))
(deftest part1
(let [expected 25]
(is (= expected (part-1 (slurp (resource "day12-example.txt")))))))
(deftest part2
(let [expected 286]
(is (= expected (part-2 (slurp (resource "day12-example.txt")))))))
| |
99faabd8580bcdee8a35fd86d5dc082a61bdab27bd233be63d5abcf1e36145e0 | jkarni/ZipperFS | ZFS.hs | {-# LANGUAGE RankNTypes #-}
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE KindSignatures #
# LANGUAGE ScopedTypeVariables #
Zipper - based File / Operating system
with threading and exceptions all realized via delimited continuations .
There are no unsafe operations , no GHC ( let alone ) Unix threads ,
no concurrency problems . Our threads ca n't even do IO and ca n't
mutate any global state -- and the type system sees to it .
Please see /~oleg/ftp/papers/zfs-talk.pdf
for the demo and explanations .
-- $ I d : ZFS.hs , v 1.8 2005/10/14 23:00:41 oleg Exp $
NOTE : the above demo and explanation can be viewed at the following url :
- -talk.pdf
Zipper-based File/Operating system
with threading and exceptions all realized via delimited continuations.
There are no unsafe operations, no GHC (let alone) Unix threads,
no concurrency problems. Our threads can't even do IO and can't
mutate any global state -- and the type system sees to it.
Please see /~oleg/ftp/papers/zfs-talk.pdf
for the demo and explanations.
-- $Id: ZFS.hs,v 1.8 2005/10/14 23:00:41 oleg Exp $
NOTE: the above demo and explanation can be viewed at the following url:
- -talk.pdf
-}
module ZFS where
import ZipperM
import Control.Exception (try, bracket)
import Control.Monad.Trans (liftIO, MonadIO())
import qualified Data.List as List
import qualified Data.Map as Map
import Foreign -- needed for select hacks:
import Foreign.C -- Unix select is not available in
GHC
import Network.Socket
import System.IO
import qualified System.IO.Error as IO
import System.Posix (closeFd)
import System.Posix.Types(Fd(..))
import CC_FrameT ( runCC ) -- have to import runCC manually , even though the import of
-- ZipperM should pull it in.
-- Port to serve clients from
newClientPort :: PortNumber
newClientPort = 1503
select_timeout = 100000 -- microseconds
-- Initial content of the file system
-- Certainly, structurally richer filesystems are equally possible
-- (where content is annotated with attributes, e.g.)
-- A lambda-term can be made a filesystem too
fs1 :: Term
fs1 = Folder $ Map.fromList [("d1",d1), ("d2",Folder $ Map.empty),
("fl1", File "File1"),
("fl2", File "File2")]
where d1 = Folder $ Map.fromList [("fl13",File "File 3"),
("d11", d11)]
d11 = Folder $ Map.fromList [("d111", Folder $ Map.empty)]
-- Another file system -- this time, it is cyclic!
fs2 :: Term
fs2 = Folder $ Map.fromList [("d1",fs2), ("fl1", File "File1")]
-- Operating system requests: from a ``process'' to the ``OS''
type FSZipper r m = DZipper r m Term Path
-- Note: the base monad type `m' is left polymorphic.
-- A Process doesn't do any IO (it asks the ``OS'').
-- So, the significant part of the OS, the process itself, is overtly
outside the IO monad !
-- Note: using different prompts, the requests can be modularized.
Unlike OS ( with its only one syscall handler ) , we can have as
-- many syscall handlers as we wish.
data OSReq r m = OSRDone
| OSRRead (ReadK r m)
| OSRWrite String (UnitK r m)
| OSRTrace String (UnitK r m) -- so a process can syslog
| OSRCommit Term (UnitK r m)
| OSRefresh (CCT r m (FSZipper r m) -> CCT r m (OSReq r m))
type UnitK r m = CCT r m () -> CCT r m (OSReq r m)
type ReadK r m = CCT r m String -> CCT r m (OSReq r m)
data ProcessCTX = ProcessCTX { psocket :: Socket -- process' socket
}
-- A process can only be blocked on reading. For simplicity we assume
-- that writing into the client socket never blocks
data JobQueueT r = JQBlockedOnRead ProcessCTX (ReadK r IO)
| JQRunnable ProcessCTX (UnitK r IO)
| JQNewClient Socket -- accept new clients from
data World r = World { mountedFS :: Term
, jobQueue :: [JobQueueT r]
, osPrompt :: Prompt r (OSReq r IO)
}
main' :: Term -> IO a
main' fs = bracket (serverSocket newClientPort) sClose $
\s ->
do
-- The following doesn't help: accept blocks anyway...
-- setFdOption (Fd (fdSocket s)) NonBlockingRead True
runCCT $ do
p <- newPrompt
syslog ["Entering the osloop",show s]
osloop $ World{
mountedFS = fs,
jobQueue = [JQNewClient s],
osPrompt = p}
where
serverSocket port = do
s <- socket AF_INET Stream 0
setSocketOption s ReuseAddr 1
localhost <- inet_addr "127.0.0.1"
bindSocket s (SockAddrInet port localhost)
listen s 5
return s
-- In OS parlance, the following is the interrupt handler.
-- It `waits' for interrupts that is, if any input socket has something
-- to read from.
-- It doesn't actually return, so the answer type is just any
-- osloop :: World r -> CCT r IO any
osloop world =
maybe (wait'for'intr world) (uncurry try'to'run) (find'runnable world)
>>= osloop
where
Try to find the first runnable job
find'runnable world = case break is'runnable (jobQueue world) of
(_,[]) -> Nothing
(jq1,(runnable:jq2)) -> Just (runnable, world{jobQueue=jq1++jq2})
where is'runnable (JQRunnable _ _) = True
is'runnable _ = False
wait'for'intr world@World{jobQueue=jq} =
do readyfd <- liftIO $ select'read'pending mfd
case break (\e -> maybe False (`elem` readyfd) (toFD e)) jq of
(_,[]) -> return world -- nothing found
(jq1,(now'runnable:jq2)) ->
try'to'run now'runnable world{jobQueue=jq1++jq2}
where
-- compile the list of file descriptors we are waiting at
mfd = foldr (\e a -> maybe [] (:a) (toFD e)) [] jq
toFD (JQNewClient s) = Just $ fdSocket s
toFD (JQBlockedOnRead ProcessCTX{psocket=s} _) = Just $ fdSocket s
toFD _ = Nothing
-- Add to the end of the job queue
enqueue el world = world{jobQueue = jobQueue world ++ [el]}
-- ifnM action onf ont = liftIO action >>= \b -> if b then ont else onf
-- New client is trying to connect
try'to'run qe@(JQNewClient s) world =
do
syslog ["accepting from",show s]
(clientS,addr) <- liftIO $ accept s
liftIO $ setSocketOption clientS NoDelay 1
syslog ["accepted new client connection from ", show addr]
let newCtx = ProcessCTX clientS
run'process (fsProcess (dzip'term (mountedFS world)))(osPrompt world)
>>= interpret'req (enqueue qe world) newCtx
try'to'run (JQRunnable ctx k) world =
k (return ()) >>= interpret'req world ctx
-- A client socket may have something to read
try'to'run (JQBlockedOnRead ctx@ProcessCTX{psocket=s} k) world =
do
syslog ["reading from",show s]
syslog ["osloop: queue size: ", show $ length $ jobQueue world]
dat <- liftIO $ (
do r <- try (recv s (1024 * 8))
case r of
Left err -> if IO.isEOFError err then return ""
else ioError err
Right msg -> return msg)
k (return dat) >>= interpret'req world ctx
-- The system logger
syslog :: (Control.Monad.Trans.MonadIO m) => [String] -> m ()
syslog s = liftIO $ putStrLn (concat s)
-- The interpreter of OS requests -- the syscall handler, in OS parlance
-- It handles simple requests by itself. When the request involves
-- rescheduling or change in the global OS state, it returns to
-- the scheduler/interrupt-handler/osloop.
-- The process is finished
interpret'req :: World r -> ProcessCTX -> OSReq r IO -> CCT r IO (World r)
interpret'req world ctx OSRDone = (liftIO $ sClose $ psocket ctx)
>> return world
-- The request for read may block. So, we do the context switch and go
-- to the main loop, to check if the process socket has something to read
-- from
interpret'req world ctx (OSRRead k) =
return world{jobQueue = (jobQueue world) ++ [JQBlockedOnRead ctx k]}
-- We assume that writing to a socket never blocks
interpret'req world ctx (OSRWrite datum k) =
do
send' (psocket ctx) datum
k (return ()) >>= interpret'req world ctx
where
send' _ "" = return ()
send' s msg = do c <- liftIO $ send s msg
send' s (drop c msg)
interpret'req world ctx (OSRTrace datum k) =
do
syslog ["Trace from",show $ psocket ctx,": ",datum]
k (return ()) >>= interpret'req world ctx
interpret'req world ctx (OSRCommit term k) =
return world{jobQueue = (jobQueue world) ++ [JQRunnable ctx k],
mountedFS = term}
interpret'req world ctx (OSRefresh k) =
k (dzip'term $ mountedFS world) >>= interpret'req world ctx
-- We have the functionality of threads -- although our whole program
is simply threaded , both at the OS level and at the GHC runtime level .
-- Our process functions don't even have the IO type!
-- Note, the function to run the process has forall m. That means, a process
function ca n't do any IO and ca n't have any reference cells .
-- Processes can't mutate the global state -- and the type system checks that!
-- Because processes can't interfere with each other and with the OS, there
-- is no need for any thread synchronization, locking, etc. We get
-- the transactional semantics for free.
-- Of course, as different processes manipulate their own (copy-on-write)
-- terms (file systems), when the processes commit, there may be conflicts.
-- So, one has to implement some conflict resolution -- be it versioning,
-- patching, asking for permission for update, etc. But
-- these policies are implemented at the higher-level; the programmer can
-- implement any set of policies. Because processes always ask the supervisor
-- for anything, and the supervisor has the view of the global state,
-- the resolution policies are easier to implement in this execution model.
run'process :: (forall m. Monad m =>
(Prompt r (OSReq r m)) -> CCT r m (OSReq r m))
-> Prompt r (OSReq r IO) -> CCT r IO (OSReq r IO)
run'process body p = pushPrompt p (body p)
-- Processes. No IO action is possible in here
fsProcess :: Monad m =>
CCT r m (FSZipper r m) -> Prompt r (OSReq r m)
-> CCT r m (OSReq r m)
fsProcess zipper'action svcp =
do
z <- zipper'action
svc svcp $ OSRTrace "Begin process"
fsloop z svcp ""
fsloop :: forall r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (OSReq r m)
-> String
-> CCT r m (OSReq r m)
fsloop z svcp line'acc
= do
send_shell_prompt z svcp
(line,rest) <- read'line line'acc
let (cmd,arg) = breakspan is'whitespace line
svc svcp $ OSRTrace $ "received command: " ++ cmd
maybe (svc svcp (OSRWrite $ "bad command: " ++ cmd) >>
fsloop z svcp rest)
(\h -> h z svcp cmd arg rest)
(List.lookup cmd fsCommands)
where
-- Read until we get newline
read'line acc = case break is'nl acc of
(_,"") -> do
b <- svc svcp OSRRead
svc svcp $ OSRTrace $ "Read str: " ++ b
(l,rest) <- read'line b
return (acc ++ l, rest)
(l,rest) -> return (l,snd $ span is'nl rest)
send_shell_prompt z svcp =
svc svcp $ OSRWrite $ ("\n" ++ show_path (dz_path z) ++ "> ")
show_path :: [Path] -> String
show_path path = concatMap (\pc -> case pc of
Down -> "/"
DownTo s -> s ++ "/")
(reverse path)
fsCommands :: Monad m => [(String,FSZipper r m -> Prompt r (OSReq r m) ->
String -> String -> String ->
CCT r m (OSReq r m))]
fsCommands =
[
("quit", \_ svcp _ _ _ -> svc svcp $ const OSRDone),
("cd", fsWrapper
(\z shp _ path -> cd'zipper z shp path >>= return . FSCZ)),
("ls", fsWrapper cmd'ls),
("cat", fsWrapper cmd'ls),
("next", fsWrapper cmd'next),
("mkdir", fsWrapper (cmd'mknode (Folder Map.empty))),
("touch", fsWrapper (cmd'mknode (File ""))),
("echo", fsWrapper cmd'echo),
("rm", fsWrapper cmd'rm),
("mv", fsWrapper cmd'mv),
("cp", fsWrapper cmd'cp),
("help", fsWrapper cmd'help),
("commit", fcmd'commit),
("refresh", \_ svcp _ _ rest -> svc svcp OSRefresh >>=
\z -> fsloop z svcp rest)
-- could have a command ``down N'' -- positional descend
-- Note: next is really cool!
-- Note, we can cd inside a file! So, cat is just `ls' inside a file
]
fcmd'commit :: forall t t1 r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (OSReq r m)
-> t
-> t1
-> String
-> CCT r m (OSReq r m)
fcmd'commit z svcp _ _ rest = aux z
where
aux (DZipDone term) = (svc svcp $ OSRCommit term) >>
fsloop z svcp rest
aux DZipper{dz_k = k} = k (return (Nothing,Up)) >>= aux
data FSCmdResp r m = FSCS String | FSCZ (FSZipper r m)
-- We use delimited continuations rather than an Error monad
-- A delimited continuation suffices!
fsWrapper :: forall t t1 r (m :: * -> *).
(Monad m) =>
(FSZipper r m
-> Prompt r (FSCmdResp r m)
-> t
-> t1
-> CCT r m (FSCmdResp r m))
-> FSZipper r m
-> Prompt r (OSReq r m)
-> t
-> t1
-> String
-> CCT r m (OSReq r m)
fsWrapper cmd z svcp cmd'name cmd'arg rest =
do
shp <- newPrompt
resp <- pushPrompt shp (cmd z shp cmd'name cmd'arg)
z' <- case resp of
FSCS str -> (svc svcp $ OSRWrite str) >> return z
FSCZ z -> return z
fsloop z' svcp rest
cmd'help :: forall t
t1
t2
(m :: * -> *)
r
(m1 :: * -> *)
r1
(m2 :: * -> *).
(Monad m, Monad m1) =>
FSZipper r m -> t -> t1 -> t2 -> m1 (FSCmdResp r1 m2)
cmd'help z _ _ _ = return $ FSCS $ "Commands: " ++
(concat $ List.intersperse ", " $ List.map fst cmds)
where
cmds :: [(String, FSZipper r2 m
-> Prompt r2 (OSReq r2 m)
-> String
-> String
-> String
-> CCT r2 m (OSReq r2 m))]
cmds = fsCommands
cmd'ls :: forall t
r
(m :: * -> *)
r1
(m1 :: * -> *).
(Monad m) =>
FSZipper r m
-> Prompt r (FSCmdResp r m)
-> t
-> String
-> CCT r m (FSCmdResp r1 m1)
cmd'ls z shp _ slash'path = cd'zipper z shp slash'path
>>= return . FSCS . list_node
cmd'next :: forall t t1 t2 r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> t
-> t1
-> t2
-> CCT r m (FSCmdResp r m)
cmd'next z _ _ _ =
do z' <- dz_k z (return (Nothing,Next))
return $ FSCZ $ case z' of DZipDone _ -> z; _ -> z'
-- main navigation function
cd'zipper :: Monad m =>
FSZipper r m -> Prompt r (FSCmdResp r m) -> String
-> CCT r m (FSZipper r m)
cd'zipper z _ "" = return z
cd'zipper z shp ('/':path) = do z' <- ascend'to'root z; cd'zipper z' shp path
where
ascend'to'root z =
dz_k z (return (Nothing,Up)) >>= ascend'to'root' z
ascend'to'root' z (DZipDone _) = return z
ascend'to'root' _ z = ascend'to'root z
cd'zipper z shp ('.':'.':path) = aux z (snd $ span (=='/') path)
where
aux DZipper{dz_path = [Down]} _ = return z -- already at the top
aux DZipper{dz_k = k} path = k (return (Nothing,Up)) >>=
(\z -> cd'zipper z shp path)
aux (DZipDone _) _ = return z
cd'zipper DZipper{dz_term = File _} shp _ =
abort shp (return $ FSCS "cannot descend down the file")
cd'zipper DZipper{dz_term = Folder fld, dz_k = k} shp path
= let (pc,prest) = breakspan (== '/') path
in if Map.member pc fld then do
z' <- k (return (Nothing,DownTo pc))
cd'zipper z' shp prest
else abort shp (return $ FSCS $ "No such dir component " ++ pc)
-- List the current contents of the node pointed by the zipper
-- This function subsumes both `ls' and `cat'
-- For files, it sends the content of the file
list_node :: forall t (t1 :: * -> *) t2.
DZipper t t1 Term t2 -> String
list_node DZipper{dz_term = File str} = str
list_node DZipper{dz_term = Folder fld} =
Map.foldWithKey (\name el acc ->
"\n" ++ name ++ (case el of Folder _ -> "/"
_ -> "") ++ acc)
"" fld
list_node _ = ""
-- make a node (an empty directory or an empty file or a moved node)
-- named 'dirn' in the current directory
cmd'mknode :: forall t
r
r1
(m :: * -> *)
(m1 :: * -> *).
(Monad m1) =>
Term
-> DZipper r m1 Term Path
-> Prompt r (FSCmdResp r1 m)
-> t
-> String
-> CCT r m1 (FSCmdResp r m1)
cmd'mknode _ _ shp _ dirn | '/' `elem` dirn =
abort shp (return $ FSCS "the name of the new node can't contain slash")
cmd'mknode _ _ shp _ "" =
abort shp (return $ FSCS "the name of the new node is empty")
cmd'mknode _ DZipper{dz_term = File _} shp _ _ =
abort shp (return $ FSCS "cannot create anything in a file")
cmd'mknode _ DZipper{dz_term = Folder fld} shp _ dirn
| Map.member dirn fld =
abort shp (return $ FSCS $ "node " ++ dirn ++ " already exists")
cmd'mknode newnode DZipper{dz_term = Folder fld, dz_k = k, dz_dir = cn}
_ _ dirn =
let fld' = Folder $ Map.insert dirn newnode fld
in k (return (Just fld',Up)) >>= adj cn >>= return . FSCZ
where
-- go back to the current directory
adj _ (DZipDone term) = dzip'term term
adj cn z = dz_k z $ return (Nothing,cn)
-- echo string > path
cmd'echo :: forall t r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (FSCmdResp r m)
-> t
-> String
-> CCT r m (FSCmdResp r m)
cmd'echo z shp _ args = aux $ (reads::ReadS String) args
where
aux [(content,rest)] = aux1 content (snd $ span is'whitespace rest)
aux _ = abort shp (return $ FSCS $ "bad format, str, of the echo cmd")
aux1 content ('>':rest) =
cd'zipper z shp (snd $ span is'whitespace rest) >>= aux2 content rest
aux1 _ _ = abort shp (return $ FSCS $ "bad format, path, of the echo cmd")
aux2 content _t DZipper{dz_term = File _, dz_k = k} =
k (return (Just $ File content,Up)) >>= zip'back'to'place shp z
>>= return . FSCZ
aux2 _ rest _ = abort shp
(return $ FSCS $ rest ++ " does not point to a file")
|zip'back'to'place z z1| brings z1 to the same place as z
-- Right now we use a pathetic algorithm -- but it works...
zip'back'to'place :: forall r
(m :: * -> *)
r1
(m1 :: * -> *)
term.
(Monad m) =>
Prompt r (FSCmdResp r m)
-> DZipper r1 m1 term Path
-> DZipper r m Term Path
-> CCT r m (FSZipper r m)
zip'back'to'place shp z (DZipDone term) =
dzip'term term >>= zip'back'to'place shp z
zip'back'to'place shp z z1 = cd'zipper z1 shp (show_path (dz_path z))
-- Delete the node pointed to by path and return the
-- updated zipper (which points to the same location as z) and the
-- deleted node
del'zipper :: forall r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (FSCmdResp r m)
-> String
-> CCT r m (FSZipper r m, Term)
del'zipper z shp path = cd'zipper z shp path >>=
\z -> dz_k z (return (Nothing,Up)) >>= aux (dz_dir z)
where
aux _ (DZipDone _) =
abort shp (return $ FSCS $ "cannot remove the root folder")
aux (DownTo pc) DZipper{dz_term = Folder fld, dz_k = k} =
let (Just old'node, fld') = Map.updateLookupWithKey (\_ _ -> Nothing) pc fld
in k (return (Just $ Folder $ fld',Up))
>>= zip'back'to'place shp z >>= \z -> return (z,old'node)
-- insert a node as `path'
ins'zipper :: forall r (m :: * -> *).
(Monad m) =>
Term
-> FSZipper r m
-> Prompt r (FSCmdResp r m)
-> String
-> CCT r m (FSCmdResp r m)
ins'zipper node z0 shp path =
do
let (dirname,basename) = split'path path
z <- if dirname == "" then return z0 else cd'zipper z0 shp dirname
FSCZ z <- cmd'mknode node z shp "mv" basename
zip'back'to'place shp z0 z >>= return . FSCZ
-- rm path
-- works both on directories and files
-- One can even try to remove one's own parent -- and this is safe!
cmd'rm :: forall t r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (FSCmdResp r m)
-> t
-> String
-> CCT r m (FSCmdResp r m)
cmd'rm z shp _ path = del'zipper z shp path >>= return . FSCZ . fst
mv path_from path_to
cmd'mv :: forall t r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (FSCmdResp r m)
-> t
-> String
-> CCT r m (FSCmdResp r m)
cmd'mv z shp _ args = aux $ breakspan is'whitespace args
where
aux ("",_) = abort shp (return $ FSCS $ "mv: from-path is empty")
aux (_,"") = abort shp (return $ FSCS $ "mv: to-path is empty")
aux (pfrom,pto) = del'zipper z shp pfrom >>=
\ (z,node) -> ins'zipper node z shp pto
cp path_from path_to
-- We don't do any copying: we merely establish sharing:
-- so a node accessible via `from_path' becomes accessible via `to_path'
The copy - on - write semantics of ZFS does the rest .
So , in ZFS , we can copy arbitrary file systems trees in constant time !
cmd'cp :: forall t r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (FSCmdResp r m)
-> t
-> String
-> CCT r m (FSCmdResp r m)
cmd'cp z0 shp _ args = aux $ breakspan is'whitespace args
where
aux ("",_) = abort shp (return $ FSCS $ "cp: from-path is empty")
aux (_,"") = abort shp (return $ FSCS $ "cp: to-path is empty")
aux (pfrom,pto) = cd'zipper z0 shp pfrom >>=
\z -> dz_k z (return (Nothing,Up)) >>=
aux' (dz_dir z) pto
aux' _ pto (DZipDone term) =
dzip'term term >>= zip'back'to'place shp z0 >>=
\z -> ins'zipper term z shp pto
aux' (DownTo pc) pto z@DZipper{dz_term = Folder fld} =
zip'back'to'place shp z0 z >>=
\z -> ins'zipper ((Map.!) fld pc) z shp pto
-- Supervisor call
svc :: (Monad m) => Prompt r b -> ((CCT r m a -> CCT r m b) -> b) -> CCT r m a
svc p req = ZipperM.shift p (return . req)
is'nl, is'whitespace :: Char -> Bool
is'whitespace c = c == ' ' || c == '\t'
is'nl c = c == '\n' || c == '\r'
breakspan :: (a -> Bool) -> [a] -> ([a], [a])
breakspan pred l = let (p1,p2) = break pred l
in (p1,snd $ span pred p2)
-- break the path into (dirname,basename)
split'path :: String -> (String, String)
split'path path = let (p1,p2) = breakspan (=='/') (reverse path)
in (reverse p2, reverse p1)
------------------------------------------------------------------------
-- Some hacks to get around the lack of select
-- Darn! We don't have the real select over several descriptors!
-- We have to implement it ourselves
type FDSET = CUInt
Two longs
foreign import ccall "unistd.h select" c_select
:: CInt -> Ptr FDSET -> Ptr FDSET -> Ptr FDSET -> Ptr TIMEVAL -> IO CInt
Convert a file descriptor to an FDSet ( for use with select )
-- essentially encode a file descriptor in a big-endian notation
fd2fds :: CInt -> [FDSET]
fd2fds fd = (replicate nb 0) ++ [setBit 0 off]
where
(nb,off) = quotRem (fromIntegral fd) (bitSize (undefined::FDSET))
fds2mfd :: [FDSET] -> [CInt]
fds2mfd fds = [fromIntegral (j+i*bitsize) |
(afds,i) <- zip fds [0..], j <- [0..bitsize],
testBit afds j]
where bitsize = bitSize (undefined::FDSET)
test_fd_conv, test_fd_conv' :: Bool
test_fd_conv = and $ List.map (\e -> [e] == (fds2mfd $ fd2fds e)) lst
where
lst = [0,1,5,7,8,9,16,17,63,64,65]
test_fd_conv' = mfd == fds2mfd fds
where
mfd = [0,1,5,7,8,9,16,17,63,64,65]
fds :: [FDSET] = foldr ormax [] (List.map fd2fds mfd)
maximum $ List.map fromIntegral mfd
ormax [] x = x
ormax x [] = x
ormax (a:ar) (b:br) = (a .|. b) : ormax ar br
-- poll if file descriptors have something to read
-- Return the list of read-pending descriptors
select'read'pending :: [CInt] -> IO [CInt]
select'read'pending mfd =
withArray ([0,1]::[TIMEVAL]) ( -- holdover...
\_ ->
withArray fds (
\readfs ->
do
_ <- throwErrnoIfMinus1 "select"
(c_select (fdmax+1) readfs nullPtr nullPtr nullPtr)
-- because the wait was indefinite, rc must be positive!
peekArray (length fds) readfs))
>>= (return . fds2mfd)
where
fds :: [FDSET] = foldr ormax [] (List.map fd2fds mfd)
fdmax = maximum $ List.map fromIntegral mfd
ormax [] x = x
ormax x [] = x
ormax (a:ar) (b:br) = (a .|. b) : ormax ar br
foreign import ccall "fcntl.h fcntl" fcntl :: CInt -> CInt -> CInt -> IO CInt
-- use it as cleanup'fd [ 5 .. 6 ] to clean up the sockets left hanging ...
-- (the line above had lost its comment marker, which broke the parse)
cleanup'fd :: [CInt] -> IO ()
cleanup'fd = mapM_ (closeFd . Fd)
| null | https://raw.githubusercontent.com/jkarni/ZipperFS/9f714165d5085c52fab1006fa867909d18d019bd/src/ZFS.hs | haskell | # LANGUAGE RankNTypes #
and the type system sees to it .
$ I d : ZFS.hs , v 1.8 2005/10/14 23:00:41 oleg Exp $
and the type system sees to it.
$Id: ZFS.hs,v 1.8 2005/10/14 23:00:41 oleg Exp $
needed for select hacks:
Unix select is not available in
have to import runCC manually , even though the import of
ZipperM should pull it in.
Port to serve clients from
microseconds
Initial content of the file system
Certainly, structurally richer filesystems are equally possible
(where content is annotated with attributes, e.g.)
A lambda-term can be made a filesystem too
Another file system -- this time, it is cyclic!
Operating system requests: from a ``process'' to the ``OS''
Note: the base monad type `m' is left polymorphic.
A Process doesn't do any IO (it asks the ``OS'').
So, the significant part of the OS, the process itself, is overtly
Note: using different prompts, the requests can be modularized.
many syscall handlers as we wish.
so a process can syslog
process' socket
A process can only be blocked on reading. For simplicity we assume
that writing into the client socket never blocks
accept new clients from
The following doesn't help: accept blocks anyway...
setFdOption (Fd (fdSocket s)) NonBlockingRead True
In OS parlance, the following is the interrupt handler.
It `waits' for interrupts that is, if any input socket has something
to read from.
It doesn't actually return, so the answer type is just any
osloop :: World r -> CCT r IO any
nothing found
compile the list of file descriptors we are waiting at
Add to the end of the job queue
ifnM action onf ont = liftIO action >>= \b -> if b then ont else onf
New client is trying to connect
A client socket may have something to read
The system logger
The interpreter of OS requests -- the syscall handler, in OS parlance
It handles simple requests by itself. When the request involves
rescheduling or change in the global OS state, it returns to
the scheduler/interrupt-handler/osloop.
The process is finished
The request for read may block. So, we do the context switch and go
to the main loop, to check if the process socket has something to read
from
We assume that writing to a socket never blocks
We have the functionality of threads -- although our whole program
Our process functions don't even have the IO type!
Note, the function to run the process has forall m. That means, a process
Processes can't mutate the global state -- and the type system checks that!
Because processes can't interfere with each other and with the OS, there
is no need for any thread synchronization, locking, etc. We get
the transactional semantics for free.
Of course, as different processes manipulate their own (copy-on-write)
terms (file systems), when the processes commit, there may be conflicts.
So, one has to implement some conflict resolution -- be it versioning,
patching, asking for permission for update, etc. But
these policies are implemented at the higher-level; the programmer can
implement any set of policies. Because processes always ask the supervisor
for anything, and the supervisor has the view of the global state,
the resolution policies are easier to implement in this execution model.
Processes. No IO action is possible in here
Read until we get newline
could have a command ``down N'' -- positional descend
Note: next is really cool!
Note, we can cd inside a file! So, cat is just `ls' inside a file
We use delimited continuations rather than an Error monad
A delimited continuation suffices!
main navigation function
already at the top
List the current contents of the node pointed by the zipper
This function subsumes both `ls' and `cat'
For files, it sends the content of the file
make a node (an empty directory or an empty file or a moved node)
named 'dirn' in the current directory
go back to the current directory
echo string > path
Right now we use a pathetic algorithm -- but it works...
Delete the node pointed to by path and return the
updated zipper (which points to the same location as z) and the
deleted node
insert a node as `path'
rm path
works both on directories and files
One can even try to remove one's own parent -- and this is safe!
We don't do any copying: we merely establish sharing:
so a node accessible via `from_path' becomes accessible via `to_path'
Supervisor call
break the path into (dirname,basename)
----------------------------------------------------------------------
Some hacks to get around the lack of select
Darn! We don't have the real select over several descriptors!
We have to implement it ourselves
essentially encode a file descriptor in a big-endian notation
poll if file descriptors have something to read
Return the list of read-pending descriptors
holdover...
because the wait was indefinite, rc must be positive! | # LANGUAGE ForeignFunctionInterface #
# LANGUAGE KindSignatures #
# LANGUAGE ScopedTypeVariables #
Zipper - based File / Operating system
with threading and exceptions all realized via delimited continuations .
There are no unsafe operations , no GHC ( let alone ) Unix threads ,
no concurrency problems . Our threads ca n't even do IO and ca n't
Please see /~oleg/ftp/papers/zfs-talk.pdf
for the demo and explanations .
NOTE : the above demo and explanation can be viewed at the following url :
- -talk.pdf
Zipper-based File/Operating system
with threading and exceptions all realized via delimited continuations.
There are no unsafe operations, no GHC (let alone) Unix threads,
no concurrency problems. Our threads can't even do IO and can't
Please see /~oleg/ftp/papers/zfs-talk.pdf
for the demo and explanations.
NOTE: the above demo and explanation can be viewed at the following url:
- -talk.pdf
-}
module ZFS where
import ZipperM
import Control.Exception (try, bracket)
import Control.Monad.Trans (liftIO, MonadIO())
import qualified Data.List as List
import qualified Data.Map as Map
GHC
import Network.Socket
import System.IO
import qualified System.IO.Error as IO
import System.Posix (closeFd)
import System.Posix.Types(Fd(..))
newClientPort :: PortNumber
newClientPort = 1503
fs1 :: Term
fs1 = Folder $ Map.fromList [("d1",d1), ("d2",Folder $ Map.empty),
("fl1", File "File1"),
("fl2", File "File2")]
where d1 = Folder $ Map.fromList [("fl13",File "File 3"),
("d11", d11)]
d11 = Folder $ Map.fromList [("d111", Folder $ Map.empty)]
fs2 :: Term
fs2 = Folder $ Map.fromList [("d1",fs2), ("fl1", File "File1")]
type FSZipper r m = DZipper r m Term Path
outside the IO monad !
Unlike OS ( with its only one syscall handler ) , we can have as
data OSReq r m = OSRDone
| OSRRead (ReadK r m)
| OSRWrite String (UnitK r m)
| OSRCommit Term (UnitK r m)
| OSRefresh (CCT r m (FSZipper r m) -> CCT r m (OSReq r m))
type UnitK r m = CCT r m () -> CCT r m (OSReq r m)
type ReadK r m = CCT r m String -> CCT r m (OSReq r m)
}
data JobQueueT r = JQBlockedOnRead ProcessCTX (ReadK r IO)
| JQRunnable ProcessCTX (UnitK r IO)
data World r = World { mountedFS :: Term
, jobQueue :: [JobQueueT r]
, osPrompt :: Prompt r (OSReq r IO)
}
main' :: Term -> IO a
main' fs = bracket (serverSocket newClientPort) sClose $
\s ->
do
runCCT $ do
p <- newPrompt
syslog ["Entering the osloop",show s]
osloop $ World{
mountedFS = fs,
jobQueue = [JQNewClient s],
osPrompt = p}
where
serverSocket port = do
s <- socket AF_INET Stream 0
setSocketOption s ReuseAddr 1
localhost <- inet_addr "127.0.0.1"
bindSocket s (SockAddrInet port localhost)
listen s 5
return s
osloop world =
maybe (wait'for'intr world) (uncurry try'to'run) (find'runnable world)
>>= osloop
where
Try to find the first runnable job
find'runnable world = case break is'runnable (jobQueue world) of
(_,[]) -> Nothing
(jq1,(runnable:jq2)) -> Just (runnable, world{jobQueue=jq1++jq2})
where is'runnable (JQRunnable _ _) = True
is'runnable _ = False
wait'for'intr world@World{jobQueue=jq} =
do readyfd <- liftIO $ select'read'pending mfd
case break (\e -> maybe False (`elem` readyfd) (toFD e)) jq of
(jq1,(now'runnable:jq2)) ->
try'to'run now'runnable world{jobQueue=jq1++jq2}
where
mfd = foldr (\e a -> maybe [] (:a) (toFD e)) [] jq
toFD (JQNewClient s) = Just $ fdSocket s
toFD (JQBlockedOnRead ProcessCTX{psocket=s} _) = Just $ fdSocket s
toFD _ = Nothing
enqueue el world = world{jobQueue = jobQueue world ++ [el]}
try'to'run qe@(JQNewClient s) world =
do
syslog ["accepting from",show s]
(clientS,addr) <- liftIO $ accept s
liftIO $ setSocketOption clientS NoDelay 1
syslog ["accepted new client connection from ", show addr]
let newCtx = ProcessCTX clientS
run'process (fsProcess (dzip'term (mountedFS world)))(osPrompt world)
>>= interpret'req (enqueue qe world) newCtx
try'to'run (JQRunnable ctx k) world =
k (return ()) >>= interpret'req world ctx
try'to'run (JQBlockedOnRead ctx@ProcessCTX{psocket=s} k) world =
do
syslog ["reading from",show s]
syslog ["osloop: queue size: ", show $ length $ jobQueue world]
dat <- liftIO $ (
do r <- try (recv s (1024 * 8))
case r of
Left err -> if IO.isEOFError err then return ""
else ioError err
Right msg -> return msg)
k (return dat) >>= interpret'req world ctx
syslog :: (Control.Monad.Trans.MonadIO m) => [String] -> m ()
syslog s = liftIO $ putStrLn (concat s)
interpret'req :: World r -> ProcessCTX -> OSReq r IO -> CCT r IO (World r)
interpret'req world ctx OSRDone = (liftIO $ sClose $ psocket ctx)
>> return world
interpret'req world ctx (OSRRead k) =
return world{jobQueue = (jobQueue world) ++ [JQBlockedOnRead ctx k]}
interpret'req world ctx (OSRWrite datum k) =
do
send' (psocket ctx) datum
k (return ()) >>= interpret'req world ctx
where
send' _ "" = return ()
send' s msg = do c <- liftIO $ send s msg
send' s (drop c msg)
interpret'req world ctx (OSRTrace datum k) =
do
syslog ["Trace from",show $ psocket ctx,": ",datum]
k (return ()) >>= interpret'req world ctx
interpret'req world ctx (OSRCommit term k) =
return world{jobQueue = (jobQueue world) ++ [JQRunnable ctx k],
mountedFS = term}
interpret'req world ctx (OSRefresh k) =
k (dzip'term $ mountedFS world) >>= interpret'req world ctx
is simply threaded , both at the OS level and at the GHC runtime level .
function ca n't do any IO and ca n't have any reference cells .
run'process :: (forall m. Monad m =>
(Prompt r (OSReq r m)) -> CCT r m (OSReq r m))
-> Prompt r (OSReq r IO) -> CCT r IO (OSReq r IO)
run'process body p = pushPrompt p (body p)
fsProcess :: Monad m =>
CCT r m (FSZipper r m) -> Prompt r (OSReq r m)
-> CCT r m (OSReq r m)
fsProcess zipper'action svcp =
do
z <- zipper'action
svc svcp $ OSRTrace "Begin process"
fsloop z svcp ""
fsloop :: forall r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (OSReq r m)
-> String
-> CCT r m (OSReq r m)
fsloop z svcp line'acc
= do
send_shell_prompt z svcp
(line,rest) <- read'line line'acc
let (cmd,arg) = breakspan is'whitespace line
svc svcp $ OSRTrace $ "received command: " ++ cmd
maybe (svc svcp (OSRWrite $ "bad command: " ++ cmd) >>
fsloop z svcp rest)
(\h -> h z svcp cmd arg rest)
(List.lookup cmd fsCommands)
where
read'line acc = case break is'nl acc of
(_,"") -> do
b <- svc svcp OSRRead
svc svcp $ OSRTrace $ "Read str: " ++ b
(l,rest) <- read'line b
return (acc ++ l, rest)
(l,rest) -> return (l,snd $ span is'nl rest)
send_shell_prompt z svcp =
svc svcp $ OSRWrite $ ("\n" ++ show_path (dz_path z) ++ "> ")
show_path :: [Path] -> String
show_path path = concatMap (\pc -> case pc of
Down -> "/"
DownTo s -> s ++ "/")
(reverse path)
fsCommands :: Monad m => [(String,FSZipper r m -> Prompt r (OSReq r m) ->
String -> String -> String ->
CCT r m (OSReq r m))]
fsCommands =
[
("quit", \_ svcp _ _ _ -> svc svcp $ const OSRDone),
("cd", fsWrapper
(\z shp _ path -> cd'zipper z shp path >>= return . FSCZ)),
("ls", fsWrapper cmd'ls),
("cat", fsWrapper cmd'ls),
("next", fsWrapper cmd'next),
("mkdir", fsWrapper (cmd'mknode (Folder Map.empty))),
("touch", fsWrapper (cmd'mknode (File ""))),
("echo", fsWrapper cmd'echo),
("rm", fsWrapper cmd'rm),
("mv", fsWrapper cmd'mv),
("cp", fsWrapper cmd'cp),
("help", fsWrapper cmd'help),
("commit", fcmd'commit),
("refresh", \_ svcp _ _ rest -> svc svcp OSRefresh >>=
\z -> fsloop z svcp rest)
]
fcmd'commit :: forall t t1 r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (OSReq r m)
-> t
-> t1
-> String
-> CCT r m (OSReq r m)
fcmd'commit z svcp _ _ rest = aux z
where
aux (DZipDone term) = (svc svcp $ OSRCommit term) >>
fsloop z svcp rest
aux DZipper{dz_k = k} = k (return (Nothing,Up)) >>= aux
data FSCmdResp r m = FSCS String | FSCZ (FSZipper r m)
fsWrapper :: forall t t1 r (m :: * -> *).
(Monad m) =>
(FSZipper r m
-> Prompt r (FSCmdResp r m)
-> t
-> t1
-> CCT r m (FSCmdResp r m))
-> FSZipper r m
-> Prompt r (OSReq r m)
-> t
-> t1
-> String
-> CCT r m (OSReq r m)
fsWrapper cmd z svcp cmd'name cmd'arg rest =
do
shp <- newPrompt
resp <- pushPrompt shp (cmd z shp cmd'name cmd'arg)
z' <- case resp of
FSCS str -> (svc svcp $ OSRWrite str) >> return z
FSCZ z -> return z
fsloop z' svcp rest
cmd'help :: forall t
t1
t2
(m :: * -> *)
r
(m1 :: * -> *)
r1
(m2 :: * -> *).
(Monad m, Monad m1) =>
FSZipper r m -> t -> t1 -> t2 -> m1 (FSCmdResp r1 m2)
cmd'help z _ _ _ = return $ FSCS $ "Commands: " ++
(concat $ List.intersperse ", " $ List.map fst cmds)
where
cmds :: [(String, FSZipper r2 m
-> Prompt r2 (OSReq r2 m)
-> String
-> String
-> String
-> CCT r2 m (OSReq r2 m))]
cmds = fsCommands
cmd'ls :: forall t
r
(m :: * -> *)
r1
(m1 :: * -> *).
(Monad m) =>
FSZipper r m
-> Prompt r (FSCmdResp r m)
-> t
-> String
-> CCT r m (FSCmdResp r1 m1)
cmd'ls z shp _ slash'path = cd'zipper z shp slash'path
>>= return . FSCS . list_node
cmd'next :: forall t t1 t2 r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> t
-> t1
-> t2
-> CCT r m (FSCmdResp r m)
cmd'next z _ _ _ =
do z' <- dz_k z (return (Nothing,Next))
return $ FSCZ $ case z' of DZipDone _ -> z; _ -> z'
cd'zipper :: Monad m =>
FSZipper r m -> Prompt r (FSCmdResp r m) -> String
-> CCT r m (FSZipper r m)
cd'zipper z _ "" = return z
cd'zipper z shp ('/':path) = do z' <- ascend'to'root z; cd'zipper z' shp path
where
ascend'to'root z =
dz_k z (return (Nothing,Up)) >>= ascend'to'root' z
ascend'to'root' z (DZipDone _) = return z
ascend'to'root' _ z = ascend'to'root z
cd'zipper z shp ('.':'.':path) = aux z (snd $ span (=='/') path)
where
aux DZipper{dz_k = k} path = k (return (Nothing,Up)) >>=
(\z -> cd'zipper z shp path)
aux (DZipDone _) _ = return z
cd'zipper DZipper{dz_term = File _} shp _ =
abort shp (return $ FSCS "cannot descend down the file")
cd'zipper DZipper{dz_term = Folder fld, dz_k = k} shp path
= let (pc,prest) = breakspan (== '/') path
in if Map.member pc fld then do
z' <- k (return (Nothing,DownTo pc))
cd'zipper z' shp prest
else abort shp (return $ FSCS $ "No such dir component " ++ pc)
list_node :: forall t (t1 :: * -> *) t2.
DZipper t t1 Term t2 -> String
list_node DZipper{dz_term = File str} = str
list_node DZipper{dz_term = Folder fld} =
Map.foldWithKey (\name el acc ->
"\n" ++ name ++ (case el of Folder _ -> "/"
_ -> "") ++ acc)
"" fld
list_node _ = ""
cmd'mknode :: forall t
r
r1
(m :: * -> *)
(m1 :: * -> *).
(Monad m1) =>
Term
-> DZipper r m1 Term Path
-> Prompt r (FSCmdResp r1 m)
-> t
-> String
-> CCT r m1 (FSCmdResp r m1)
cmd'mknode _ _ shp _ dirn | '/' `elem` dirn =
abort shp (return $ FSCS "the name of the new node can't contain slash")
cmd'mknode _ _ shp _ "" =
abort shp (return $ FSCS "the name of the new node is empty")
cmd'mknode _ DZipper{dz_term = File _} shp _ _ =
abort shp (return $ FSCS "cannot create anything in a file")
cmd'mknode _ DZipper{dz_term = Folder fld} shp _ dirn
| Map.member dirn fld =
abort shp (return $ FSCS $ "node " ++ dirn ++ " already exists")
cmd'mknode newnode DZipper{dz_term = Folder fld, dz_k = k, dz_dir = cn}
_ _ dirn =
let fld' = Folder $ Map.insert dirn newnode fld
in k (return (Just fld',Up)) >>= adj cn >>= return . FSCZ
where
adj _ (DZipDone term) = dzip'term term
adj cn z = dz_k z $ return (Nothing,cn)
cmd'echo :: forall t r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (FSCmdResp r m)
-> t
-> String
-> CCT r m (FSCmdResp r m)
cmd'echo z shp _ args = aux $ (reads::ReadS String) args
where
aux [(content,rest)] = aux1 content (snd $ span is'whitespace rest)
aux _ = abort shp (return $ FSCS $ "bad format, str, of the echo cmd")
aux1 content ('>':rest) =
cd'zipper z shp (snd $ span is'whitespace rest) >>= aux2 content rest
aux1 _ _ = abort shp (return $ FSCS $ "bad format, path, of the echo cmd")
aux2 content _t DZipper{dz_term = File _, dz_k = k} =
k (return (Just $ File content,Up)) >>= zip'back'to'place shp z
>>= return . FSCZ
aux2 _ rest _ = abort shp
(return $ FSCS $ rest ++ " does not point to a file")
|zip'back'to'place z z1| brings z1 to the same place as z
zip'back'to'place :: forall r
(m :: * -> *)
r1
(m1 :: * -> *)
term.
(Monad m) =>
Prompt r (FSCmdResp r m)
-> DZipper r1 m1 term Path
-> DZipper r m Term Path
-> CCT r m (FSZipper r m)
zip'back'to'place shp z (DZipDone term) =
dzip'term term >>= zip'back'to'place shp z
zip'back'to'place shp z z1 = cd'zipper z1 shp (show_path (dz_path z))
del'zipper :: forall r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (FSCmdResp r m)
-> String
-> CCT r m (FSZipper r m, Term)
del'zipper z shp path = cd'zipper z shp path >>=
\z -> dz_k z (return (Nothing,Up)) >>= aux (dz_dir z)
where
aux _ (DZipDone _) =
abort shp (return $ FSCS $ "cannot remove the root folder")
aux (DownTo pc) DZipper{dz_term = Folder fld, dz_k = k} =
let (Just old'node, fld') = Map.updateLookupWithKey (\_ _ -> Nothing) pc fld
in k (return (Just $ Folder $ fld',Up))
>>= zip'back'to'place shp z >>= \z -> return (z,old'node)
ins'zipper :: forall r (m :: * -> *).
(Monad m) =>
Term
-> FSZipper r m
-> Prompt r (FSCmdResp r m)
-> String
-> CCT r m (FSCmdResp r m)
ins'zipper node z0 shp path =
do
let (dirname,basename) = split'path path
z <- if dirname == "" then return z0 else cd'zipper z0 shp dirname
FSCZ z <- cmd'mknode node z shp "mv" basename
zip'back'to'place shp z0 z >>= return . FSCZ
cmd'rm :: forall t r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (FSCmdResp r m)
-> t
-> String
-> CCT r m (FSCmdResp r m)
cmd'rm z shp _ path = del'zipper z shp path >>= return . FSCZ . fst
mv path_from path_to
cmd'mv :: forall t r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (FSCmdResp r m)
-> t
-> String
-> CCT r m (FSCmdResp r m)
cmd'mv z shp _ args = aux $ breakspan is'whitespace args
where
aux ("",_) = abort shp (return $ FSCS $ "mv: from-path is empty")
aux (_,"") = abort shp (return $ FSCS $ "mv: to-path is empty")
aux (pfrom,pto) = del'zipper z shp pfrom >>=
\ (z,node) -> ins'zipper node z shp pto
cp path_from path_to
The copy - on - write semantics of ZFS does the rest .
So , in ZFS , we can copy arbitrary file systems trees in constant time !
cmd'cp :: forall t r (m :: * -> *).
(Monad m) =>
DZipper r m Term Path
-> Prompt r (FSCmdResp r m)
-> t
-> String
-> CCT r m (FSCmdResp r m)
cmd'cp z0 shp _ args = aux $ breakspan is'whitespace args
where
aux ("",_) = abort shp (return $ FSCS $ "cp: from-path is empty")
aux (_,"") = abort shp (return $ FSCS $ "cp: to-path is empty")
aux (pfrom,pto) = cd'zipper z0 shp pfrom >>=
\z -> dz_k z (return (Nothing,Up)) >>=
aux' (dz_dir z) pto
aux' _ pto (DZipDone term) =
dzip'term term >>= zip'back'to'place shp z0 >>=
\z -> ins'zipper term z shp pto
aux' (DownTo pc) pto z@DZipper{dz_term = Folder fld} =
zip'back'to'place shp z0 z >>=
\z -> ins'zipper ((Map.!) fld pc) z shp pto
svc :: (Monad m) => Prompt r b -> ((CCT r m a -> CCT r m b) -> b) -> CCT r m a
svc p req = ZipperM.shift p (return . req)
is'nl, is'whitespace :: Char -> Bool
is'whitespace c = c == ' ' || c == '\t'
is'nl c = c == '\n' || c == '\r'
breakspan :: (a -> Bool) -> [a] -> ([a], [a])
breakspan pred l = let (p1,p2) = break pred l
in (p1,snd $ span pred p2)
split'path :: String -> (String, String)
split'path path = let (p1,p2) = breakspan (=='/') (reverse path)
in (reverse p2, reverse p1)
type FDSET = CUInt
Two longs
foreign import ccall "unistd.h select" c_select
:: CInt -> Ptr FDSET -> Ptr FDSET -> Ptr FDSET -> Ptr TIMEVAL -> IO CInt
Convert a file descriptor to an FDSet ( for use with select )
fd2fds :: CInt -> [FDSET]
fd2fds fd = (replicate nb 0) ++ [setBit 0 off]
where
(nb,off) = quotRem (fromIntegral fd) (bitSize (undefined::FDSET))
fds2mfd :: [FDSET] -> [CInt]
fds2mfd fds = [fromIntegral (j+i*bitsize) |
(afds,i) <- zip fds [0..], j <- [0..bitsize],
testBit afds j]
where bitsize = bitSize (undefined::FDSET)
test_fd_conv, test_fd_conv' :: Bool
test_fd_conv = and $ List.map (\e -> [e] == (fds2mfd $ fd2fds e)) lst
where
lst = [0,1,5,7,8,9,16,17,63,64,65]
test_fd_conv' = mfd == fds2mfd fds
where
mfd = [0,1,5,7,8,9,16,17,63,64,65]
fds :: [FDSET] = foldr ormax [] (List.map fd2fds mfd)
maximum $ List.map fromIntegral mfd
ormax [] x = x
ormax x [] = x
ormax (a:ar) (b:br) = (a .|. b) : ormax ar br
select'read'pending :: [CInt] -> IO [CInt]
select'read'pending mfd =
\_ ->
withArray fds (
\readfs ->
do
_ <- throwErrnoIfMinus1 "select"
(c_select (fdmax+1) readfs nullPtr nullPtr nullPtr)
peekArray (length fds) readfs))
>>= (return . fds2mfd)
where
fds :: [FDSET] = foldr ormax [] (List.map fd2fds mfd)
fdmax = maximum $ List.map fromIntegral mfd
ormax [] x = x
ormax x [] = x
ormax (a:ar) (b:br) = (a .|. b) : ormax ar br
foreign import ccall "fcntl.h fcntl" fcntl :: CInt -> CInt -> CInt -> IO CInt
use it as cleanup'fd [ 5 .. 6 ] to clean up the sockets left hanging ...
cleanup'fd :: [CInt] -> IO ()
cleanup'fd = mapM_ (closeFd . Fd)
|
986d80b2dcdd228b4efa70692e214b6dabb67c60a1d22bd7353769020d1130bf | mokus0/dependent-sum-template | Internal.hs | # LANGUAGE CPP #
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 708
{-# LANGUAGE PolyKinds #-}
#endif
-- | Shared functions for dependent-sum-template
module Data.Dependent.Sum.TH.Internal where
import Control.Monad
import Language.Haskell.TH
import Language.Haskell.TH.Extras
-- | Split an instance/class head such as @C a b c@ into the head's name
-- and its type parameters in left-to-right application order.
classHeadToParams :: Type -> (Name, [Type])
classHeadToParams t = (h, reverse reversedParams)
    where (h, reversedParams) = go t
          go :: Type -> (Name, [Type])
          -- Recurse with 'go', not 'classHeadToParams': the accumulated
          -- parameter list must be reversed exactly once, at the top level.
          -- The previous version recursed through 'classHeadToParams',
          -- re-reversing each sub-list and scrambling the order for heads
          -- with three or more parameters (@C a b c@ came back as [b,a,c]).
          go (AppT f x) =
              let (h', revParams) = go f
              in (h', x : revParams)
          go t' = (headOfType t', [])
-- Invoke the deriver for the given class instance. We assume that the type
we 're deriving for is always the first typeclass parameter , if there are
-- multiple.
-- Clause 1: a prototype instance declaration @instance C T where ...@.
-- Check the class name, reify the data type named by the first class
-- parameter, hand its binders and constructors to the worker 'f', and
-- re-emit the instance with the generated declaration spliced in.
deriveForDec :: Name -> (Q Type -> Q Type) -> ([TyVarBndr] -> [Con] -> Q Dec) -> Dec -> Q [Dec]
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 800
deriveForDec className _ f (InstanceD overlaps cxt classHead decs) = do
#else
deriveForDec className _ f (InstanceD cxt classHead decs) = do
#endif
    -- the type being derived for is the first class parameter
    let (givenClassName, firstParam : _) = classHeadToParams classHead
    when (givenClassName /= className) $
        fail $ "while deriving " ++ show className ++ ": wrong class name in prototype declaration: " ++ show givenClassName
    let dataTypeName = headOfType firstParam
    dataTypeInfo <- reify dataTypeName
    case dataTypeInfo of
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 800
        TyConI (DataD dataCxt name bndrs _ cons _) -> do
#else
        TyConI (DataD dataCxt name bndrs cons _) -> do
#endif
            dec <- f bndrs cons
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 800
            return [InstanceD overlaps cxt classHead [dec]]
#else
            return [InstanceD cxt classHead [dec]]
#endif
        _ -> fail $ "while deriving " ++ show className ++ ": the name of an algebraic data type constructor is required"
-- Clause 2: a plain @data@ declaration; build the instance head with
-- 'makeClassHead' applied to the type constructor.
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 800
deriveForDec className makeClassHead f (DataD dataCxt name bndrs _ cons _) = return <$> inst
#else
deriveForDec className makeClassHead f (DataD dataCxt name bndrs cons _) = return <$> inst
#endif
    where
        inst = instanceD (cxt (map return dataCxt)) (makeClassHead $ conT name) [dec]
        dec = f bndrs cons
-- Clause 3: a @data instance@ of a data family (only on GHC >= 6.12).
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 612
#if __GLASGOW_HASKELL__ >= 800
deriveForDec className makeClassHead f (DataInstD dataCxt name tyArgs _ cons _) = return <$> inst
#else
deriveForDec className makeClassHead f (DataInstD dataCxt name tyArgs cons _) = return <$> inst
#endif
    where
        inst = instanceD (cxt (map return dataCxt)) (makeClassHead $ foldl1 appT (map return $ (ConT name : init tyArgs))) [dec]
        -- TODO: figure out proper number of family parameters vs instance parameters
        bndrs = [PlainTV v | VarT v <- tail tyArgs ]
        dec = f bndrs cons
#endif
| null | https://raw.githubusercontent.com/mokus0/dependent-sum-template/4b85b8fc6fd016ef03f4ea97e31a2a573d30f2ae/src/Data/Dependent/Sum/TH/Internal.hs | haskell | # LANGUAGE GADTs #
| Shared functions for dependent-sum-template
Invoke the deriver for the given class instance. We assume that the type
multiple.
TODO: figure out proper number of family parameters vs instance parameters | # LANGUAGE CPP #
# LANGUAGE FlexibleInstances #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeOperators #
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 708
# LANGUAGE PolyKinds #
#endif
module Data.Dependent.Sum.TH.Internal where
import Control.Monad
import Language.Haskell.TH
import Language.Haskell.TH.Extras
classHeadToParams :: Type -> (Name, [Type])
classHeadToParams t = (h, reverse reversedParams)
where (h, reversedParams) = go t
go :: Type -> (Name, [Type])
go t = case t of
AppT f x ->
let (h, reversedParams) = classHeadToParams f
in (h, x : reversedParams)
_ -> (headOfType t, [])
we 're deriving for is always the first typeclass parameter , if there are
deriveForDec :: Name -> (Q Type -> Q Type) -> ([TyVarBndr] -> [Con] -> Q Dec) -> Dec -> Q [Dec]
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 800
deriveForDec className _ f (InstanceD overlaps cxt classHead decs) = do
#else
deriveForDec className _ f (InstanceD cxt classHead decs) = do
#endif
let (givenClassName, firstParam : _) = classHeadToParams classHead
when (givenClassName /= className) $
fail $ "while deriving " ++ show className ++ ": wrong class name in prototype declaration: " ++ show givenClassName
let dataTypeName = headOfType firstParam
dataTypeInfo <- reify dataTypeName
case dataTypeInfo of
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 800
TyConI (DataD dataCxt name bndrs _ cons _) -> do
#else
TyConI (DataD dataCxt name bndrs cons _) -> do
#endif
dec <- f bndrs cons
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 800
return [InstanceD overlaps cxt classHead [dec]]
#else
return [InstanceD cxt classHead [dec]]
#endif
_ -> fail $ "while deriving " ++ show className ++ ": the name of an algebraic data type constructor is required"
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 800
deriveForDec className makeClassHead f (DataD dataCxt name bndrs _ cons _) = return <$> inst
#else
deriveForDec className makeClassHead f (DataD dataCxt name bndrs cons _) = return <$> inst
#endif
where
inst = instanceD (cxt (map return dataCxt)) (makeClassHead $ conT name) [dec]
dec = f bndrs cons
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 612
#if __GLASGOW_HASKELL__ >= 800
deriveForDec className makeClassHead f (DataInstD dataCxt name tyArgs _ cons _) = return <$> inst
#else
deriveForDec className makeClassHead f (DataInstD dataCxt name tyArgs cons _) = return <$> inst
#endif
where
inst = instanceD (cxt (map return dataCxt)) (makeClassHead $ foldl1 appT (map return $ (ConT name : init tyArgs))) [dec]
bndrs = [PlainTV v | VarT v <- tail tyArgs ]
dec = f bndrs cons
#endif
|
8953307434de2b826def8e0f4e95dbb8ae3ef9354388688aae38ff8130220c20 | aviaviavi/toodles | Config.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module Config
( toodlesArgs
, ToodlesArgs(..)
, SearchFilter(..)
, AssigneeFilterRegex(..)
)
where
import Paths_toodles
import Types
import Data.Text (Text)
import Data.Version (showVersion)
import System.Console.CmdArgs
-- | Parse the process command line into 'ToodlesArgs' via
-- 'System.Console.CmdArgs.cmdArgs' and the 'argParser' annotation spec.
toodlesArgs :: IO ToodlesArgs
toodlesArgs = cmdArgs argParser
-- | Parsed command-line options for a toodles run (field meanings mirror
-- the help texts attached in 'argParser').
data ToodlesArgs = ToodlesArgs
  { directory :: FilePath
    -- ^ root directory of the project to scan
  , assignee_search :: Maybe SearchFilter
    -- ^ restrict results to TODOs matching one assignee
  , limit_results :: Int
    -- ^ cap on the number of search results (cmdargs 'def' when omitted)
  , port :: Maybe Int
    -- ^ port to run the server on, if overridden
  , no_server :: Bool
    -- ^ print matching TODOs to the command line and exit instead of serving
  , userFlag :: [UserFlag]
    -- ^ additional flag words to recognize (e.g. MAYBE)
  } deriving (Show, Data, Typeable, Eq)
-- | Ways the TODO listing can be filtered from the command line
-- (currently only by assignee).
newtype SearchFilter =
  AssigneeFilter AssigneeFilterRegex
  deriving (Show, Data, Eq)
-- | Pattern text matched against a TODO's assignee.
newtype AssigneeFilterRegex = AssigneeFilterRegex Text
  deriving (Show, Data, Eq)
-- | cmdargs annotation spec: one record field per command-line flag, with
-- the help text shown by @toodles --help@; the summary carries the
-- package version from Paths_toodles.
argParser :: ToodlesArgs
argParser = ToodlesArgs
  { directory = def &= typFile &= help "Root directory of your project"
  , assignee_search = def &= help "Filter todo's by assignee"
  , limit_results = def &= help "Limit number of search results"
  , port = def &= help "Run server on port"
  , no_server = def &= help "Output matching todos to the command line and exit"
  , userFlag = def &= help "Additional flagword (e.g.: MAYBE)"
  } &= summary ("toodles " ++ showVersion version)
    &= program "toodles"
    &= verbosity
    &= help "Manage TODO's directly from your codebase"
| null | https://raw.githubusercontent.com/aviaviavi/toodles/cc614616aef18e335f8d920d62371ba2a882d383/src/Config.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE DeriveDataTypeable #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeOperators # | # LANGUAGE ScopedTypeVariables #
module Config
( toodlesArgs
, ToodlesArgs(..)
, SearchFilter(..)
, AssigneeFilterRegex(..)
)
where
import Paths_toodles
import Types
import Data.Text (Text)
import Data.Version (showVersion)
import System.Console.CmdArgs
toodlesArgs :: IO ToodlesArgs
toodlesArgs = cmdArgs argParser
data ToodlesArgs = ToodlesArgs
{ directory :: FilePath
, assignee_search :: Maybe SearchFilter
, limit_results :: Int
, port :: Maybe Int
, no_server :: Bool
, userFlag :: [UserFlag]
} deriving (Show, Data, Typeable, Eq)
newtype SearchFilter =
AssigneeFilter AssigneeFilterRegex
deriving (Show, Data, Eq)
newtype AssigneeFilterRegex = AssigneeFilterRegex Text
deriving (Show, Data, Eq)
argParser :: ToodlesArgs
argParser = ToodlesArgs
{ directory = def &= typFile &= help "Root directory of your project"
, assignee_search = def &= help "Filter todo's by assignee"
, limit_results = def &= help "Limit number of search results"
, port = def &= help "Run server on port"
, no_server = def &= help "Output matching todos to the command line and exit"
, userFlag = def &= help "Additional flagword (e.g.: MAYBE)"
} &= summary ("toodles " ++ showVersion version)
&= program "toodles"
&= verbosity
&= help "Manage TODO's directly from your codebase"
|
90e8734ab947a140d75159232a513054214345ad72a16a4610c4cee4233844c7 | zadean/basexerl | binary_example.erl | -module(binary_example).
-export([run/0]).
%% Round-trip a 256-byte binary through a BaseX server: connect, create a
%% scratch database, store the blob, read it back, report whether it
%% matches, then drop the database again.
run() ->
    {ok, Db} = basexerl:connect("localhost", 6001, "admin", "admin"),
    %% bytes 1..255 followed by 0 (256 is truncated to 8 bits by <<X>>)
    Bin = list_to_binary([<<X>> || X <- lists:seq(1,256)]),
    {ok, Info1} = basexerl:create(Db, "database2"),
    io:format("~s~n", [Info1]),
    {ok, Info2} = basexerl:store(Db, "test.bin", Bin),
    io:format("~s~n", [Info2]),
    {ok, Bin2} = basexerl:retrieve(Db, "test.bin"),
    io:format("Bin : ~p~n", [Bin]),
    io:format("Bin2: ~p~n", [Bin2]),
    io:format("They match?: ~s~n", [matches(Bin2)]),
    %% remove the scratch database before returning
    {ok, Info3} = basexerl:execute(Db, "drop db database2"),
    io:format("~s~n", [Info3]),
    ok.
%% True iff Bin is exactly the 256-byte test pattern written by run/0.
matches(Bin) ->
    Expected = list_to_binary([<<X>> || X <- lists:seq(1, 256)]),
    Bin =:= Expected.
| null | https://raw.githubusercontent.com/zadean/basexerl/15ce1990eb334268eb0f615be0d55136d537c087/examples/binary_example.erl | erlang | -module(binary_example).
-export([run/0]).
run() ->
{ok, Db} = basexerl:connect("localhost", 6001, "admin", "admin"),
Bin = list_to_binary([<<X>> || X <- lists:seq(1,256)]),
{ok, Info1} = basexerl:create(Db, "database2"),
io:format("~s~n", [Info1]),
{ok, Info2} = basexerl:store(Db, "test.bin", Bin),
io:format("~s~n", [Info2]),
{ok, Bin2} = basexerl:retrieve(Db, "test.bin"),
io:format("Bin : ~p~n", [Bin]),
io:format("Bin2: ~p~n", [Bin2]),
io:format("They match?: ~s~n", [matches(Bin2)]),
{ok, Info3} = basexerl:execute(Db, "drop db database2"),
io:format("~s~n", [Info3]),
ok.
matches(Bin) ->
case list_to_binary([<<X>> || X <- lists:seq(1,256)]) of
Bin ->
true;
_ ->
false
end.
| |
7f26eee42d0f7f19809596f4d79d906832a836291408afbf3eceab3aeeda2ac2 | ghc/packages-Cabal | setup.test.hs | import Test.Cabal.Prelude
-- Configuring must fail, and the failure output must mention the
-- dependency cycle and both packages involved in it.
main = setupAndCabalTest $ do
    res <- fails $ setup' "configure" []
    assertOutputContains "cycl" res -- matches "cyclic" as well as "cycle"
    assertOutputContains "bar" res
    assertOutputContains "foo" res
| null | https://raw.githubusercontent.com/ghc/packages-Cabal/6f22f2a789fa23edb210a2591d74ea6a5f767872/cabal-testsuite/PackageTests/BuildDeps/DepCycle/setup.test.hs | haskell | match cyclic or cycle | import Test.Cabal.Prelude
main = setupAndCabalTest $ do
r <- fails $ setup' "configure" []
assertOutputContains "bar" r
assertOutputContains "foo" r
|
c01506c264c9818e4e1fdb1378ce6c179f2e16d61a8c39e74a91df19fdcf4ee5 | opentelemetry-beam/opentelemetry_zipkin | opentelemetry_zipkin_SUITE.erl | -module(opentelemetry_zipkin_SUITE).
-compile(export_all).
-include_lib("stdlib/include/assert.hrl").
-include_lib("opentelemetry_api/include/opentelemetry.hrl").
-include_lib("opentelemetry/include/otel_span.hrl").
%% Common Test callback: the list of test cases in this suite.
all() ->
    [verify_export].
%% insert a couple spans and export to locally running zipkin
%% Integration test: requires a Zipkin server listening on port 9411.
verify_export(_Config) ->
    %% NOTE(review): both branches of this case yield the same URL;
    %% presumably the GitHub-Actions branch was meant to differ -- confirm.
    Address = case os:getenv("GITHUB_ACTION") of
                  false ->
                      ":9411/api/v2/spans";
                  _ ->
                      ":9411/api/v2/spans"
              end,
    Resource = otel_resource:create([{"service.name",
                                      "my-test-service"}]),
    {ok, State} = opentelemetry_zipkin:init(#{address => Address,
                                              local_endpoint => #{service_name => my_service,
                                                                  ip4 => {1,2,3,4},
                                                                  port => 8000}}),
    %% exporting an empty span table must succeed
    Tid = ets:new(span_tab, [{keypos, #span.span_id}]),
    ?assertMatch(ok, opentelemetry_zipkin:export(Tid, Resource, State)),
    TraceId = opentelemetry:generate_trace_id(),
    SpanId = opentelemetry:generate_span_id(),
    %% a client span carrying two events, a status and an attribute
    ParentSpan =
        #span{name = <<"span-1">>,
              trace_id = TraceId,
              span_id = SpanId,
              kind = ?SPAN_KIND_CLIENT,
              start_time = opentelemetry:timestamp(),
              end_time = opentelemetry:timestamp(),
              events = [#event{system_time_nano=erlang:system_time(nanosecond),
                               name = <<"event-1">>,
                               attributes = [{<<"attr-1">>, <<"value-1">>}]},
                        #event{system_time_nano=erlang:system_time(nanosecond),
                               name = <<"event-2">>,
                               attributes = [{<<"attr-3">>, <<"value-3">>}]}],
              status=opentelemetry:status(?SPAN_KIND_INTERNAL, <<"some message about status">>),
              attributes = [{<<"attr-2">>, <<"value-2">>}]},
    true = ets:insert(Tid, ParentSpan),
    %% a server span that is a child of the span above (same trace id)
    ChildSpan = #span{name = <<"span-2">>,
                      trace_id = TraceId,
                      span_id = opentelemetry:generate_span_id(),
                      parent_span_id = SpanId,
                      kind = ?SPAN_KIND_SERVER,
                      start_time = opentelemetry:timestamp(),
                      end_time = opentelemetry:timestamp(),
                      events = [#event{system_time_nano=erlang:system_time(nanosecond),
                                       name = <<"event-1">>,
                                       attributes = [{<<"attr-1">>, <<"value-1">>}]},
                                #event{system_time_nano=erlang:system_time(nanosecond),
                                       name = <<"event-2">>,
                                       attributes = [{<<"attr-3">>, <<"value-3">>}]}],
                      attributes = [{<<"attr-2">>, <<"value-2">>}]},
    true = ets:insert(Tid, ChildSpan),
    %% both spans must export cleanly as well
    ?assertMatch(ok, opentelemetry_zipkin:export(Tid, Resource, State)),
    ok.
| null | https://raw.githubusercontent.com/opentelemetry-beam/opentelemetry_zipkin/ee785d294e7f4b324b333a94fcbde5ef285004ad/test/opentelemetry_zipkin_SUITE.erl | erlang | insert a couple spans and export to locally running zipkin | -module(opentelemetry_zipkin_SUITE).
-compile(export_all).
-include_lib("stdlib/include/assert.hrl").
-include_lib("opentelemetry_api/include/opentelemetry.hrl").
-include_lib("opentelemetry/include/otel_span.hrl").
all() ->
[verify_export].
verify_export(_Config) ->
Address = case os:getenv("GITHUB_ACTION") of
false ->
":9411/api/v2/spans";
_ ->
":9411/api/v2/spans"
end,
Resource = otel_resource:create([{"service.name",
"my-test-service"}]),
{ok, State} = opentelemetry_zipkin:init(#{address => Address,
local_endpoint => #{service_name => my_service,
ip4 => {1,2,3,4},
port => 8000}}),
Tid = ets:new(span_tab, [{keypos, #span.span_id}]),
?assertMatch(ok, opentelemetry_zipkin:export(Tid, Resource, State)),
TraceId = opentelemetry:generate_trace_id(),
SpanId = opentelemetry:generate_span_id(),
ParentSpan =
#span{name = <<"span-1">>,
trace_id = TraceId,
span_id = SpanId,
kind = ?SPAN_KIND_CLIENT,
start_time = opentelemetry:timestamp(),
end_time = opentelemetry:timestamp(),
events = [#event{system_time_nano=erlang:system_time(nanosecond),
name = <<"event-1">>,
attributes = [{<<"attr-1">>, <<"value-1">>}]},
#event{system_time_nano=erlang:system_time(nanosecond),
name = <<"event-2">>,
attributes = [{<<"attr-3">>, <<"value-3">>}]}],
status=opentelemetry:status(?SPAN_KIND_INTERNAL, <<"some message about status">>),
attributes = [{<<"attr-2">>, <<"value-2">>}]},
true = ets:insert(Tid, ParentSpan),
ChildSpan = #span{name = <<"span-2">>,
trace_id = TraceId,
span_id = opentelemetry:generate_span_id(),
parent_span_id = SpanId,
kind = ?SPAN_KIND_SERVER,
start_time = opentelemetry:timestamp(),
end_time = opentelemetry:timestamp(),
events = [#event{system_time_nano=erlang:system_time(nanosecond),
name = <<"event-1">>,
attributes = [{<<"attr-1">>, <<"value-1">>}]},
#event{system_time_nano=erlang:system_time(nanosecond),
name = <<"event-2">>,
attributes = [{<<"attr-3">>, <<"value-3">>}]}],
attributes = [{<<"attr-2">>, <<"value-2">>}]},
true = ets:insert(Tid, ChildSpan),
?assertMatch(ok, opentelemetry_zipkin:export(Tid, Resource, State)),
ok.
|
6e057a4c605d3603b23ba29792fa935511d0e0eae1cd18f7072048f2e048b000 | leksah/leksah | SourceBuffer.hs | # LANGUAGE TupleSections #
# LANGUAGE ViewPatterns #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE RecordWildCards #
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiWayIf #-}
# LANGUAGE LambdaCase #
# OPTIONS_GHC -fno - warn - orphans #
-----------------------------------------------------------------------------
--
-- Module : IDE.Pane.SourceBuffer
Copyright : ( c ) ,
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
| The source editor part of Leksah
--
-----------------------------------------------------------------------------------
module IDE.Pane.SourceBuffer (
IDEBuffer(..)
, BufferState(..)
, allBuffers
, maybeActiveBuf
, selectSourceBuf
, goToSourceDefinition
, goToSourceDefinition'
, goToDefinition
, goToLocation
, insertInBuffer
, fileNew
, fileOpenThis
, filePrint
, fileRevert
, fileClose
, fileCloseAll
, fileCloseAllButPackage
, fileCloseAllButWorkspace
, fileSave
, fileSaveAll
, fileSaveBuffer
, fileCheckAll
, editUndo
, editRedo
, editCut
, editCopy
, editPaste
, editDelete
, editSelectAll
, editReformat
, editComment
, editUncomment
, editShiftRight
, editShiftLeft
, editToCandy
, editFromCandy
, editKeystrokeCandy
, switchBuffersCandy
, updateStyle
, updateStyle'
, addLogRef
, removeLogRefs
, removeBuildLogRefs
, removeFileExtLogRefs
, removeTestLogRefs
, removeLintLogRefs
, markRefInSourceBuf
, unmarkRefInSourceBuf
, inBufContext
, inActiveBufContext
, align
, startComplete
, selectedText
, selectedTextOrCurrentLine
, selectedTextOrCurrentIdentifier
, insertTextAfterSelection
, selectedModuleName
, selectedLocation
, recentSourceBuffers
, newTextBuffer
, belongsToPackages
, belongsToPackages'
, belongsToPackage
, belongsToWorkspace
, belongsToWorkspace'
, getIdentifierUnderCursorFromIter
, useCandyFor
, setModifiedOnDisk
) where
import Prelude ()
import Prelude.Compat hiding(getChar, getLine)
import Control.Applicative ((<|>))
import Control.Concurrent (modifyMVar_, putMVar, takeMVar, newMVar, tryPutMVar)
import Control.Event (triggerEvent)
import Control.Exception as E (catch, SomeException)
import Control.Lens ((.~), (%~), (^.), to)
import Control.Monad (filterM, void, unless, when, forM_)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.Reader (ask)
import Data.Char (isSymbol, isSpace, isAlphaNum)
import qualified Data.Foldable as F (Foldable(..), forM_)
import Data.IORef (writeIORef,readIORef,newIORef)
import Data.List (isPrefixOf)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
(mapMaybe, fromJust, isNothing, isJust, fromMaybe)
import Data.Sequence (ViewR(..))
import qualified Data.Sequence as Seq
import Data.Text (Text)
import qualified Data.Text as T
(singleton, isInfixOf, breakOn, length, replicate,
lines, dropWhileEnd, unlines, strip, null, pack, unpack)
import qualified Data.Text.IO as T (writeFile, readFile)
import Data.Time (UTCTime(..))
import Data.Time.Clock (addUTCTime, diffUTCTime)
import Data . Time . Clock . POSIX ( getPOSIXTime )
import Data.Traversable (forM)
import Data.Typeable (cast)
import System.Directory
(getHomeDirectory, getModificationTime, doesFileExist)
import System.FilePath
(splitDirectories, (</>), dropFileName,
equalFilePath, takeFileName)
import System.Log.Logger (errorM, warningM, debugM)
import Data.GI.Base.GObject (new')
import Data.GI.Base.ManagedPtr (unsafeCastTo)
import GI.Gdk (windowGetOrigin)
import GI.Gdk.Enums (EventType(..))
import GI.Gdk.Functions (keyvalName)
import GI.Gdk.Flags (ModifierType(..))
import GI.Gdk.Structs.Atom (atomIntern)
import GI.Gdk.Structs.EventButton (getEventButtonType)
import GI.Gdk.Structs.EventKey
(getEventKeyState, getEventKeyKeyval)
import GI.Gtk
(bindingsActivateEvent, onDialogResponse, widgetShowAll,
boxPackStart, boxNew, Container(..), containerAdd,
infoBarGetContentArea, labelNew, infoBarNew)
import GI.Gtk.Enums
(FileChooserAction(..), WindowPosition(..), ResponseType(..),
ButtonsType(..), MessageType(..), ShadowType(..), PolicyType(..),
Orientation(..))
import GI.Gtk.Flags (TextSearchFlags(..))
import GI.Gtk.Interfaces.FileChooser
(fileChooserGetFilename, fileChooserSelectFilename,
fileChooserSetAction)
import GI.Gtk.Objects.Clipboard (clipboardGet)
import GI.Gtk.Objects.Dialog
(constructDialogUseHeaderBar)
import GI.Gtk.Objects.FileChooserDialog (FileChooserDialog(..))
import GI.Gtk.Objects.MessageDialog
(setMessageDialogText, constructMessageDialogButtons, setMessageDialogMessageType,
MessageDialog(..))
import GI.Gtk.Objects.Notebook
(Notebook(..))
import qualified GI.Gtk.Objects.Notebook as Gtk (Notebook(..))
import GI.Gtk.Objects.ScrolledWindow
(setScrolledWindowShadowType, scrolledWindowSetPolicy)
import GI.Gtk.Objects.Widget
(widgetShow, widgetDestroy)
import GI.Gtk.Objects.Window
(setWindowTitle, setWindowWindowPosition, windowSetTransientFor)
import qualified GI.Gtk.Objects.Window as Gtk (Window(..))
import Graphics.UI.Editor.Parameters
(dialogRun', dialogSetDefaultResponse', dialogAddButton')
import Graphics.UI.Frame.Panes (IDEPane(..))
import Graphics.UI.Frame.Rectangle (getRectangleY, getRectangleX)
import IDE.Core.State
(Log, MonadIDE, IDEM, IDEAction, Descr, PackModule, Location(..),
GenScope(..), PackScope(..), LogRefType(..), LogRef(..),
CandyTable, Prefs, IDEEvent(..), IDEState(..), autoLoad, Project,
IDEPackage, liftIDE, readIDE, candy, triggerBuild,
triggerEventIDE_, SensitivityMask(..), ideMessage,
MessageLevel(..), __, dscMbModu, dscMbLocation, pack,
pdMbSourcePath, mdModuleId, pdModules, mdMbSourcePath, dscName,
modifyIDE_, logRefFullFilePath, contextRefs, srcSpanStartColumn,
srcSpanStartLine, srcSpanEndColumn, srcSpanEndLine, prefs,
candyState, textEditorType, textviewFont, unpackDirectory,
showLineNumbers, rightMargin, tabWidth, wrapLines, sysMessage,
SrcSpan(..), currentState, reflectIDE, StatusbarCompartment(..),
SymbolEvent(..), forceLineEnds, removeTBlanks, reifyIDE,
ipdPackageDir, activePack, workspace, wsPackages, recentFiles,
addLogRef', belongsToPackage, belongsToPackages,
belongsToWorkspace, removeLintLogRefs', removeTestLogRefs',
removeBuildLogRefs', removeFileExtLogRefs', removeLogRefs')
import IDE.Gtk.State
(PanePath, Connections, IDEGtkEvent(..), getPanes,
postAsyncIDEIdle, activateThisPane, getBestPathForId,
paneFromName, getNotebook, figureOutPaneName, buildThisPane,
paneName, getMainWindow, getActivePanePath, getTopWidget,
markLabel, guiPropertiesFromName, closeThisPane,
isStartingOrClosing, RecoverablePane(..))
import qualified IDE.Command.Print as Print
import IDE.Utils.GUIUtils (showDialog, showConfirmDialog, showDialogAndGetResponse, showErrorDialog)
import IDE.Utils.FileUtils (isSubPath, myCanonicalizePath)
import IDE.Utils.DirectoryUtils (setModificationTimeOnOSX)
import IDE.Gtk.SourceCandy
(stringToCandy, positionFromCandy, getCandylessPart,
positionToCandy, getCandylessText)
import IDE.SymbolNavigation
(createHyperLinkSupport, mapControlCommand)
import IDE.Completion as Completion (complete,cancel, smartIndent)
import IDE.TextEditor
(newDefaultBuffer, newCMBuffer, newYiBuffer, newGtkBuffer,
TextEditor(..), EditorBuffer, EditorView, EditorIter,
scrollToCursor, updateStyle)
import IDE.Metainfo.Provider (getSystemInfo, getWorkspaceInfo)
import IDE.BufferMode
(recentSourceBuffers, selectedModuleName, editKeystrokeCandy,
editFromCandy, editToCandy, editUncomment, editComment,
editReformat, Mode(..), getStartAndEndLineOfSelection,
doForSelectedLines, inBufContext, inActiveBufContext',
isHaskellMode, modeFromFileName, lastActiveBufferPane,
inActiveBufContext, maybeActiveBuf, IDEBuffer(..), BufferState(..))
import IDE.Utils.DebugUtils (traceTimeTaken)
--time name action = do
-- liftIO . debugM "leksah" $ name <> " start"
-- start <- liftIO $ realToFrac <$> getPOSIXTime
-- result <- action
-- end <- liftIO $ realToFrac <$> getPOSIXTime
liftIO . debugM " leksah " $ name < > " took " < > show ( ( end - start ) * 1000000 ) < > " us "
-- return result
allBuffers :: MonadIDE m => m [IDEBuffer]
allBuffers = liftIDE getPanes
instance RecoverablePane IDEBuffer BufferState IDEM where
saveState p@IDEBuffer{sourceView = v} = do
buf <- getBuffer v
ins <- getInsertMark buf
iter <- getIterAtMark buf ins
offset <- getOffset iter
case fileName p of
Nothing -> do
ct <- readIDE candy
text <- getCandylessText ct buf
return (Just (BufferStateTrans (bufferName p) text offset))
Just fn -> return (Just (BufferState fn offset))
recoverState pp (BufferState n i) = do
mbbuf <- newTextBuffer pp (T.pack $ takeFileName n) (Just n)
case mbbuf of
Just IDEBuffer {sourceView=v} -> do
postAsyncIDEIdle $ do
liftIO $ debugM "leksah" "SourceBuffer recoverState idle callback"
gtkBuf <- getBuffer v
iter <- getIterAtOffset gtkBuf i
placeCursor gtkBuf iter
mark <- getInsertMark gtkBuf
scrollToMark v mark 0.0 (Just (1.0,0.3))
liftIO $ debugM "leksah" "SourceBuffer recoverState done"
return mbbuf
Nothing -> return Nothing
recoverState pp (BufferStateTrans bn text i) = do
mbbuf <- newTextBuffer pp bn Nothing
case mbbuf of
Just buf@IDEBuffer{sourceView = v} -> do
postAsyncIDEIdle $ do
liftIO $ debugM "leksah" "SourceBuffer recoverState idle callback"
useCandy <- useCandyFor buf
gtkBuf <- getBuffer v
setText gtkBuf text
when useCandy $ modeTransformToCandy (mode buf)
(modeEditInCommentOrString (mode buf)) gtkBuf
iter <- getIterAtOffset gtkBuf i
placeCursor gtkBuf iter
mark <- getInsertMark gtkBuf
scrollToMark v mark 0.0 (Just (1.0,0.3))
liftIO $ debugM "leksah" "SourceBuffer recoverState done"
return (Just buf)
Nothing -> return Nothing
makeActive actbuf@IDEBuffer{sourceView = sv} = do
eBuf <- getBuffer sv
writeCursorPositionInStatusbar sv
writeOverwriteInStatusbar sv
ids1 <- eBuf `afterModifiedChanged` markActiveLabelAsChanged
ids2 <- sv `afterMoveCursor` writeCursorPositionInStatusbar sv
-- ids3 <- sv `onLookupInfo` selectInfo sv -- obsolete by hyperlinks
ids4 <- sv `afterToggleOverwrite` writeOverwriteInStatusbar sv
ids5 <- eBuf `afterChanged` do
tb <- readIDE triggerBuild
void . liftIO $ tryPutMVar tb ()
activateThisPane actbuf $ concat [ids1, ids2, ids4, ids5]
triggerEventIDE_ (Sensitivity [(SensitivityEditor, True)])
grabFocus sv
void $ checkModTime actbuf
closePane pane = do makeActive pane
fileClose
buildPane _panePath _notebook _builder = return Nothing
builder _pp _nb _w = return (Nothing,[])
startComplete :: IDEAction
startComplete = do
mbBuf <- maybeActiveBuf
case mbBuf of
Nothing -> return ()
Just IDEBuffer{sourceView = v} -> complete v True
findSourceBuf :: MonadIDE m => FilePath -> m [IDEBuffer]
findSourceBuf fp = do
fpc <- liftIO $ myCanonicalizePath fp
filter (maybe False (equalFilePath fpc) . fileName) <$> allBuffers
selectSourceBuf :: MonadIDE m => FilePath -> m (Maybe IDEBuffer)
selectSourceBuf fp =
findSourceBuf fp >>= \case
hdb:_ -> liftIDE $ do
makeActive hdb
return (Just hdb)
_ -> liftIDE $ do
fpc <- liftIO $ myCanonicalizePath fp
fe <- liftIO $ doesFileExist fpc
if fe
then do
pp <- getBestPathForId "*Buffer"
liftIO $ debugM "lekash" "selectSourceBuf calling newTextBuffer"
nbuf <- newTextBuffer pp (T.pack $ takeFileName fpc) (Just fpc)
liftIO $ debugM "lekash" "selectSourceBuf newTextBuffer returned"
return nbuf
else do
ideMessage Normal (__ "File path not found " <> T.pack fpc)
return Nothing
goToDefinition :: Descr -> IDEAction
goToDefinition idDescr = goToLocation (dscMbModu idDescr) (dscMbLocation idDescr)
goToLocation :: Maybe PackModule -> Maybe Location -> IDEAction
goToLocation mbMod mbLoc = do
mbWorkspaceInfo <- getWorkspaceInfo
mbSystemInfo <- getSystemInfo
let mbPackagePath = (mbWorkspaceInfo >>= (packagePathFromScope . fst))
<|> (mbSystemInfo >>= packagePathFromScope)
mbSourcePath = (mbWorkspaceInfo >>= (sourcePathFromScope . fst))
<|> (mbSystemInfo >>= sourcePathFromScope)
liftIO . debugM "leksah" $ show (mbPackagePath, mbLoc, mbSourcePath)
case (mbPackagePath, mbLoc, mbSourcePath) of
(Just packagePath, Just loc, _) -> void (goToSourceDefinition (dropFileName packagePath) loc)
(_, Just loc, Just sourcePath) -> void (goToSourceDefinition' sourcePath loc)
(_, _, Just sp) -> void (selectSourceBuf sp)
_ -> return ()
where
packagePathFromScope :: GenScope -> Maybe FilePath
packagePathFromScope (GenScopeC (PackScope l _)) =
case mbMod of
Just mod' -> case pack mod' `Map.lookup` l of
Just pack -> pdMbSourcePath pack
Nothing -> Nothing
Nothing -> Nothing
sourcePathFromScope :: GenScope -> Maybe FilePath
sourcePathFromScope (GenScopeC (PackScope l _)) =
case mbMod of
Just mod' -> case pack mod' `Map.lookup` l of
Just pack ->
case filter (\md -> mdModuleId md == mod')
(pdModules pack) of
(mod'' : _) -> mdMbSourcePath mod''
[] -> Nothing
Nothing -> Nothing
Nothing -> Nothing
goToSourceDefinition :: FilePath -> Location -> IDEM (Maybe IDEBuffer)
goToSourceDefinition packagePath loc =
goToSourceDefinition' (packagePath </> locationFile loc) loc
goToSourceDefinition' :: FilePath -> Location -> IDEM (Maybe IDEBuffer)
goToSourceDefinition' sourcePath Location{..} = do
mbBuf <- selectSourceBuf sourcePath
case mbBuf of
Just _ ->
inActiveBufContext () $ \sv ebuf _ -> do
liftIO $ debugM "lekash" "goToSourceDefinition calculating range"
lines' <- getLineCount ebuf
iterTemp <- getIterAtLine ebuf (max 0 (min (lines'-1)
(locationSLine -1)))
chars <- getCharsInLine iterTemp
iter <- atLineOffset iterTemp (max 0 (min (chars-1) (locationSCol -1)))
iter2Temp <- getIterAtLine ebuf (max 0 (min (lines'-1) (locationELine -1)))
chars2 <- getCharsInLine iter2Temp
iter2 <- atLineOffset iter2Temp (max 0 (min (chars2-1) locationECol))
# # # we had a problem before using postAsyncIDEIdle
postAsyncIDEIdle $ do
liftIO $ debugM "lekash" "goToSourceDefinition triggered selectRange"
selectRange ebuf iter iter2
liftIO $ debugM "lekash" "goToSourceDefinition triggered scrollToIter"
scrollToIter sv iter 0.0 (Just (1.0,0.3))
return ()
Nothing -> return ()
return mbBuf
insertInBuffer :: Descr -> IDEAction
insertInBuffer idDescr = do
mbPaneName <- lastActiveBufferPane
case mbPaneName of
Nothing -> return ()
Just name -> do
PaneC p <- paneFromName name
let mbBuf = cast p
case mbBuf of
Nothing -> return ()
Just IDEBuffer{sourceView = v} -> do
ebuf <- getBuffer v
mark <- getInsertMark ebuf
iter <- getIterAtMark ebuf mark
insert ebuf iter (dscName idDescr)
updateStyle' :: IDEBuffer -> IDEAction
updateStyle' IDEBuffer {sourceView = sv} = getBuffer sv >>= updateStyle
removeFromBuffers :: Map FilePath [LogRefType] -> IDEAction
removeFromBuffers removeDetails = do
buffers <- allBuffers
let matchingBufs = filter (maybe False (`Map.member` removeDetails) . fileName) buffers
F.forM_ matchingBufs $ \ IDEBuffer {..} -> do
buf <- getBuffer sourceView
F.forM_ (maybe [] (fromMaybe [] . (`Map.lookup` removeDetails)) fileName) $
removeTagByName buf . T.pack . show
removeLogRefs :: (Log -> FilePath -> Bool) -> [LogRefType] -> IDEAction
removeLogRefs toRemove' types =
removeLogRefs' toRemove' types removeFromBuffers
removeFileLogRefs : : FilePath - > [ LogRefType ] - > IDEAction
--removeFileLogRefs file types =
-- removeFileLogRefs' file types removeFromBuffers
removeFileExtLogRefs :: Log -> String -> [LogRefType] -> IDEAction
removeFileExtLogRefs log' fileExt types =
removeFileExtLogRefs' log' fileExt types removeFromBuffers
removePackageLogRefs : : Log - > [ LogRefType ] - > IDEAction
--removePackageLogRefs log' types =
-- removePackageLogRefs' log' types removeFromBuffers
removeBuildLogRefs :: FilePath -> IDEAction
removeBuildLogRefs file =
removeBuildLogRefs' file removeFromBuffers
removeTestLogRefs :: Log -> IDEAction
removeTestLogRefs log' =
removeTestLogRefs' log' removeFromBuffers
removeLintLogRefs :: FilePath -> IDEAction
removeLintLogRefs file =
removeLintLogRefs' file removeFromBuffers
addLogRef :: Bool -> Bool -> LogRef -> IDEAction
addLogRef hlintFileScope backgroundBuild ref =
addLogRef' hlintFileScope backgroundBuild ref $ do
buffers <- allBuffers
let matchingBufs = filter (maybe False (equalFilePath (logRefFullFilePath ref)) . fileName) buffers
F.forM_ matchingBufs $ \ buf -> markRefInSourceBuf buf ref False
markRefInSourceBuf :: IDEBuffer -> LogRef -> Bool -> IDEAction
markRefInSourceBuf buf@IDEBuffer{sourceView = sv} logRef scrollTo = traceTimeTaken "markRefInSourceBuf" $ do
useCandy <- useCandyFor buf
candy' <- readIDE candy
contextRefs' <- readIDE contextRefs
ebuf <- getBuffer sv
let tagName = T.pack $ show (logRefType logRef)
liftIO . debugM "lekash" . T.unpack $ "markRefInSourceBuf getting or creating tag " <> tagName
liftIO $ debugM "lekash" "markRefInSourceBuf calculating range"
let start' = (srcSpanStartLine (logRefSrcSpan logRef),
srcSpanStartColumn (logRefSrcSpan logRef))
let end' = (srcSpanEndLine (logRefSrcSpan logRef),
srcSpanEndColumn (logRefSrcSpan logRef))
start <- if useCandy
then positionToCandy candy' ebuf start'
else return start'
end <- if useCandy
then positionToCandy candy' ebuf end'
else return end'
lines' <- getLineCount ebuf
iterTmp <- getIterAtLine ebuf (max 0 (min (lines'-1) (fst start - 1)))
chars <- getCharsInLine iterTmp
iter <- atLineOffset iterTmp (max 0 (min (chars-1) (snd start)))
iter2 <- if start == end
then do
maybeWE <- forwardWordEndC iter
case maybeWE of
Nothing -> atEnd iter
Just we -> return we
else do
newTmp <- getIterAtLine ebuf (max 0 (min (lines'-1) (fst end - 1)))
chars' <- getCharsInLine newTmp
new <- atLineOffset newTmp (max 0 (min (chars'-1) (snd end)))
forwardCharC new
let last' (Seq.viewr -> EmptyR) = Nothing
last' (Seq.viewr -> _xs :> x) = Just x
last' _ = Nothing
latest = last' contextRefs'
isOldContext = case (logRefType logRef, latest) of
(ContextRef, Just ctx) | ctx /= logRef -> True
_ -> False
unless isOldContext $ do
liftIO $ debugM "lekash" "markRefInSourceBuf calling applyTagByName"
traceTimeTaken "createMark" $ createMark sv (logRefType logRef) iter . T.unlines
. zipWith ($) (replicate 30 id <> [const "..."]) . T.lines $ refDescription logRef
traceTimeTaken "applyTagByName" $ applyTagByName ebuf tagName iter iter2
when scrollTo $ do
liftIO $ debugM "lekash" "markRefInSourceBuf triggered placeCursor"
placeCursor ebuf iter
mark <- getInsertMark ebuf
liftIO $ debugM "lekash" "markRefInSourceBuf trigged scrollToMark"
scrollToMark sv mark 0.3 Nothing
when isOldContext $ selectRange ebuf iter iter2
unmarkRefInSourceBuf :: IDEBuffer -> LogRef -> IDEAction
unmarkRefInSourceBuf IDEBuffer {sourceView = sv} logRef = do
buf <- getBuffer sv
removeTagByName buf (T.pack $ show (logRefType logRef))
| Tries to create a new text buffer , fails when the given filepath
-- does not exist or when it is not a text file.
newTextBuffer :: PanePath -> Text -> Maybe FilePath -> IDEM (Maybe IDEBuffer)
newTextBuffer panePath bn mbfn =
case mbfn of
Nothing -> buildPane' "" Nothing
Just fn ->
do eErrorContents <- liftIO $
catch (Right <$> T.readFile fn)
(\e -> return $ Left (show (e :: IOError)))
case eErrorContents of
Right contents -> do
modTime <- liftIO $ getModificationTime fn
buildPane' contents (Just modTime)
Left err -> do
ideMessage Normal (__ "Error reading file " <> T.pack err)
return Nothing
where buildPane' contents mModTime = do
nb <- getNotebook panePath
prefs' <- readIDE prefs
let useCandy = candyState prefs'
ct <- readIDE candy
(ind,rbn) <- figureOutPaneName bn
buildThisPane panePath nb (builder' useCandy mbfn ind bn rbn ct prefs' contents mModTime)
data CharacterCategory = IdentifierCharacter | SpaceCharacter | SyntaxCharacter
deriving (Eq)
getCharacterCategory :: Maybe Char -> CharacterCategory
getCharacterCategory Nothing = SpaceCharacter
getCharacterCategory (Just c)
| isAlphaNum c || c == '\'' || c == '_' = IdentifierCharacter
| isSpace c = SpaceCharacter
| otherwise = SyntaxCharacter
builder' :: Bool ->
Maybe FilePath ->
Int ->
Text ->
Text ->
CandyTable ->
Prefs ->
Text ->
Maybe UTCTime ->
PanePath ->
Gtk.Notebook ->
Gtk.Window ->
IDEM (Maybe IDEBuffer,Connections)
builder' useCandy mbfn ind bn _rbn _ct prefs' fileContents modTime _pp _nb _windows =
case textEditorType prefs' of
"GtkSourceView" -> newGtkBuffer mbfn fileContents >>= makeBuffer
"Yi" -> newYiBuffer mbfn fileContents >>= makeBuffer
"CodeMirror" -> newCMBuffer mbfn fileContents >>= makeBuffer
_ -> newDefaultBuffer mbfn fileContents >>= makeBuffer
where
makeBuffer :: TextEditor editor => EditorBuffer editor -> IDEM (Maybe IDEBuffer,Connections)
makeBuffer buffer = do
liftIO $ debugM "lekash" "makeBuffer"
ideR <- ask
beginNotUndoableAction buffer
let mode = modeFromFileName mbfn
when (useCandy && isHaskellMode mode) $ modeTransformToCandy mode
(modeEditInCommentOrString mode) buffer
endNotUndoableAction buffer
setModified buffer False
siter <- getStartIter buffer
placeCursor buffer siter
-- create a new SourceView Widget
(sv, sw, grid) <- newViewWithMap buffer (textviewFont prefs')
-- Files opened from the unpackDirectory are meant for documentation
-- and are not actually a source dependency, they should not be editable.
homeDir <- liftIO getHomeDirectory
let isEditable = fromMaybe True $ do
dir <- unpackDirectory prefs'
let expandedDir = case dir of
'~':rest -> homeDir ++ rest
rest -> rest
file <- mbfn
return (not $ splitDirectories expandedDir `isPrefixOf` splitDirectories file)
setEditable sv isEditable
setShowLineNumbers sv $ showLineNumbers prefs'
setRightMargin sv $ case rightMargin prefs' of
(False,_) -> Nothing
(True,v) -> Just v
setIndentWidth sv $ tabWidth prefs'
GHC treats tabs as 8 we should display them that way
drawTabs sv
updateStyle buffer
if wrapLines prefs'
then scrolledWindowSetPolicy sw PolicyTypeNever PolicyTypeAutomatic
else scrolledWindowSetPolicy sw PolicyTypeAutomatic PolicyTypeAutomatic
liftIO $ debugM "lekash" "makeBuffer setScrolledWindowShadowType"
setScrolledWindowShadowType sw ShadowTypeIn
liftIO $ debugM "lekash" "makeBuffer setScrolledWindowShadowType done"
box <- boxNew OrientationVertical 0
unless isEditable $ liftIO $ do
bar <- infoBarNew
lab <- labelNew (Just "This file is opened in read-only mode because it comes from a non-local package")
area <- infoBarGetContentArea bar >>= unsafeCastTo Container
containerAdd area lab
infoBarAddButton bar " Enable editing " ( fromIntegral . fromEnum $ ResponseTypeReject )
-- infoBarSetShowCloseButton bar True
boxPackStart box bar False False 0
widgetShow bar
boxPackStart box grid True True 0
reloadDialog <- liftIO $ newMVar Nothing
modTimeRef <- liftIO $ newIORef modTime
modifiedOnDiskRef <- liftIO $ newIORef False
let buf = IDEBuffer {
fileName = mbfn,
bufferName = bn,
addedIndex = ind,
sourceView =sv,
vBox = box,
modTime = modTimeRef,
modifiedOnDisk = modifiedOnDiskRef,
mode = mode,
reloadDialog = reloadDialog}
-- events
ids1 <- afterFocusIn sv $ makeActive buf
ids2 <- onCompletion sv (Completion.complete sv False) Completion.cancel
ids3 <- onButtonPress sv $ do
e <- lift ask
click <- getEventButtonType e
liftIDE $
case click of
EventType2buttonPress -> do
(start, end) <- getIdentifierUnderCursor buffer
selectRange buffer start end
return True
_ -> return False
(GtkEvent (GetTextPopup mbTpm)) <- triggerEvent ideR (GtkEvent $ GetTextPopup Nothing)
ids4 <- case mbTpm of
Just tpm -> sv `onPopulatePopup` \menu -> liftIO $ tpm ideR menu
Nothing -> do
sysMessage Normal "SourceBuffer>> no text popup"
return []
hasMatch <- liftIO $ newIORef False
ids5 <- onSelectionChanged buffer $ do
(iStart, iEnd) <- getSelectionBounds buffer
lStart <- (+1) <$> getLine iStart
cStart <- getLineOffset iStart
lEnd <- (+1) <$> getLine iEnd
cEnd <- getLineOffset iEnd
triggerEventIDE_ . SelectSrcSpan $
case mbfn of
Just fn -> Just (SrcSpan fn lStart cStart lEnd cEnd)
Nothing -> Nothing
let tagName = "selection-match"
hasSel <- hasSelection buffer
m <- liftIO $ readIORef hasMatch
when m $ removeTagByName buffer tagName
r <- if hasSel
then do
candy' <- readIDE candy
sTxt <- getCandylessPart candy' buffer iStart iEnd
let strippedSTxt = T.strip sTxt
if T.null strippedSTxt
then return False
else do
bi1 <- getStartIter buffer
bi2 <- getEndIter buffer
r1 <- forwardApplying bi1 strippedSTxt (Just iStart) tagName buffer
r2 <- forwardApplying iEnd strippedSTxt (Just bi2) tagName buffer
return (r1 || r2)
else return False
liftIO $ writeIORef hasMatch r
return ()
ids6 <- onKeyPress sv $ do
e <- lift ask
keyval <- getEventKeyKeyval e
name <- keyvalName keyval
modifier <- getEventKeyState e
liftIDE $ do
let moveToNextWord iterOp sel = do
sel' <- iterOp sel
rs <- isRangeStart sel'
if rs then return sel' else moveToNextWord iterOp sel'
let calculateNewPosition iterOp = getInsertIter buffer >>= moveToNextWord iterOp
let continueSelection keepSelBound nsel = do
if keepSelBound
then do
sb <- getSelectionBoundMark buffer >>= getIterAtMark buffer
selectRange buffer nsel sb
else
placeCursor buffer nsel
scrollToIter sv nsel 0 Nothing
case (name, map mapControlCommand modifier, keyval) of
(Just "Left",[ModifierTypeControlMask],_) -> do
calculateNewPosition backwardCharC >>= continueSelection False
return True
(Just "Left",[ModifierTypeShiftMask, ModifierTypeControlMask],_) -> do
calculateNewPosition backwardCharC >>= continueSelection True
return True
(Just "Right",[ModifierTypeControlMask],_) -> do
calculateNewPosition forwardCharC >>= continueSelection False --placeCursor buffer
return True
(Just "Right",[ModifierTypeControlMask, ModifierTypeControlMask],_) -> do
calculateNewPosition forwardCharC >>= continueSelection True
return True
(Just "BackSpace",[ModifierTypeControlMask],_) -> do -- delete word
here <- getInsertIter buffer
there <- calculateNewPosition backwardCharC
delete buffer here there
return True
(Just "underscore",[ModifierTypeControlMask, ModifierTypeControlMask],_) -> do
selectInfo buf buffer sv True False
return True
-- Redundant should become a go to definition directly
(Just "minus",[ModifierTypeControlMask],_) -> do
selectInfo buf buffer sv True True
return True
(Just "Return", [], _) ->
readIDE currentState >>= \case
IsCompleting _ -> return False
_ -> smartIndent sv >> return True
-- Avoid passing these directly to bindinsActivateEvent because that seems
-- to hide them from the auto complete code (well up and down anyway)
(Just key, _, _) | key `elem`
["Tab", "Return", "Down", "Up", "BackSpace"
,"Shift_L", "Shift_R", "Super_L", "Super_R"] -> return False
_ -> do
w <- getEditorWidget sv
bindingsActivateEvent w e
ids7 <-
createHyperLinkSupport sv sw
(\ctrl _shift iter -> do
(beg, en) <- getIdentifierUnderCursorFromIter (iter, iter)
when ctrl $ selectInfo' buf buffer sv beg en False False
return (beg, if ctrl then en else beg))
(\_ _shift (beg, en) -> selectInfo' buf buffer sv beg en True True)
return (Just buf,concat [ids1, ids2, ids3, ids4, ids5, ids6, ids7])
forwardApplying :: TextEditor editor
=> EditorIter editor
-> Text -- txt
-> Maybe (EditorIter editor)
-> Text -- tagname
-> EditorBuffer editor
-> IDEM Bool
forwardApplying tI txt mbTi tagName ebuf = do
mbFTxt <- forwardSearch tI txt [TextSearchFlagsVisibleOnly, TextSearchFlagsTextOnly] mbTi
case mbFTxt of
Just (start, end) -> do
startsW <- startsWord start
endsW <- endsWord end
when (startsW && endsW) $
applyTagByName ebuf tagName start end
(|| (startsW && endsW)) <$> forwardApplying end txt mbTi tagName ebuf
Nothing -> return False
isRangeStart
:: TextEditor editor
=> EditorIter editor
-> IDEM Bool
isRangeStart sel = do -- if char and previous char are of different char categories
currentChar <- getChar sel
let mbStartCharCat = getCharacterCategory currentChar
mbPrevCharCat <- getCharacterCategory <$> (backwardCharC sel >>= getChar)
return $ isNothing currentChar || currentChar == Just '\n' || mbStartCharCat /= mbPrevCharCat && (mbStartCharCat == SyntaxCharacter || mbStartCharCat == IdentifierCharacter)
-- | Get an iterator pair (start,end) delimiting the identifier currently under the cursor
getIdentifierUnderCursor :: forall editor. TextEditor editor => EditorBuffer editor -> IDEM (EditorIter editor, EditorIter editor)
getIdentifierUnderCursor buffer = do
(startSel, endSel) <- getSelectionBounds buffer
getIdentifierUnderCursorFromIter (startSel, endSel)
-- | Get an iterator pair (start,end) delimiting the identifier currently contained inside the provided iterator pair
getIdentifierUnderCursorFromIter :: TextEditor editor => (EditorIter editor, EditorIter editor) -> IDEM (EditorIter editor, EditorIter editor)
getIdentifierUnderCursorFromIter (startSel, endSel) = do
    -- Characters allowed inside a Haskell identifier / operator.
    let isIdent a = isAlphaNum a || a == '\'' || a == '_'
    let isOp    a = isSymbol a || a == ':' || a == '\\' || a == '*' || a == '/' || a == '-'
                              || a == '!' || a == '@' || a == '%' || a == '&' || a == '?'
    mbStartChar <- getChar startSel
    mbEndChar <- getChar endSel
    -- Pick the character class to extend over, based on what is under
    -- the start of the selection ('.' is allowed for qualified names).
    let isSelectChar =
            case mbStartChar of
                Just startChar | isIdent startChar -> \a -> isIdent a || a == '.'
                Just startChar | isOp    startChar -> isOp
                _                                  -> const False
    -- Walk backwards to the first char that is not part of the identifier.
    start <- case mbStartChar of
        Just startChar | isSelectChar startChar -> do
            maybeIter <- backwardFindCharC startSel (not.isSelectChar) Nothing
            case maybeIter of
                Just iter -> forwardCharC iter
                Nothing   -> return startSel
        _ -> return startSel
    -- And forwards to the first char past the identifier.
    end <- case mbEndChar of
        Just endChar | isSelectChar endChar -> do
            maybeIter <- forwardFindCharC endSel (not.isSelectChar) Nothing
            case maybeIter of
                Just iter -> return iter
                Nothing   -> return endSel
        _ -> return endSel
    return (start, end)
-- | Flag every open buffer showing @fp@ as "changed on disk" so the
-- next 'checkModTime' offers a reload.  True when at least one buffer
-- was showing that file.
setModifiedOnDisk :: MonadIDE m => FilePath -> m Bool
setModifiedOnDisk fp = do
    matching <- findSourceBuf fp
    mapM_ (\b -> liftIO $ writeIORef (modifiedOnDisk b) True) matching
    pure (not (null matching))
-- | If the file behind @buf@ was flagged as changed on disk (see
-- 'setModifiedOnDisk'), either auto-reload it (when the preference is
-- on) or ask the user what to do.  Returns True when the on-disk copy
-- was newer than the buffer's recorded modification time.
checkModTime :: MonadIDE m => IDEBuffer -> m Bool
checkModTime buf = do
    currentState' <- readIDE currentState
    case currentState' of
        IsShuttingDown -> return False -- never pop dialogs during shutdown
        _ ->
            liftIO (readIORef (modifiedOnDisk buf)) >>= \case
                False -> return False
                True -> do
                    -- Clear the flag first so we only react once per change.
                    liftIO $ writeIORef (modifiedOnDisk buf) False
                    let name = paneName buf
                    case fileName buf of
                        Just fn -> do
                            exists <- liftIO $ doesFileExist fn
                            if exists
                                then do
                                    nmt <- liftIO $ getModificationTime fn
                                    modTime' <- liftIO $ readIORef (modTime buf)
                                    case modTime' of
                                        Nothing -> error $"checkModTime: time not set " ++ show (fileName buf)
                                        Just mt ->
                                            if nmt /= mt -- Fonts get messed up under windows when adding this line.
                                                         -- Praises to whoever finds out what happens and how to fix this
                                            then do
                                                load <- readIDE (prefs . to autoLoad)
                                                if load
                                                    then do
                                                        ideMessage Normal $ __ "Auto Loading " <> T.pack fn
                                                        revert buf
                                                        return True
                                                    else
                                                        -- A reload dialog may already be open for this
                                                        -- buffer; if so just put it back and bail out.
                                                        liftIO (takeMVar $ reloadDialog buf) >>= \case
                                                            Just md -> do
                                                                liftIO $ putMVar (reloadDialog buf) (Just md)
                                                                return True
                                                            Nothing -> do
                                                                window <- liftIDE getMainWindow
                                                                md <- new' MessageDialog [
                                                                    constructDialogUseHeaderBar 0,
                                                                    constructMessageDialogButtons ButtonsTypeNone]
                                                                liftIO $ putMVar (reloadDialog buf) (Just md)
                                                                setMessageDialogMessageType md MessageTypeQuestion
                                                                setMessageDialogText md (__ "File \"" <> name <> __ "\" has changed on disk.")
                                                                windowSetTransientFor md (Just window)
                                                                _ <- dialogAddButton' md (__ "_Load From Disk") (AnotherResponseType 1)
                                                                _ <- dialogAddButton' md (__ "_Always Load From Disk") (AnotherResponseType 2)
                                                                _ <- dialogAddButton' md (__ "_Don't Load") (AnotherResponseType 3)
                                                                dialogSetDefaultResponse' md (AnotherResponseType 1)
                                                                setWindowWindowPosition md WindowPositionCenterOnParent
                                                                widgetShowAll md
                                                                ideR <- liftIDE ask
                                                                -- React asynchronously so the UI stays live.
                                                                _ <- onDialogResponse md $ \n32 -> (`reflectIDE` ideR) $ do
                                                                    liftIO $ modifyMVar_ (reloadDialog buf) . const $ return Nothing
                                                                    widgetDestroy md
                                                                    case toEnum (fromIntegral n32) of
                                                                        AnotherResponseType 1 ->
                                                                            revert buf
                                                                        AnotherResponseType 2 -> do
                                                                            revert buf
                                                                            modifyIDE_ $ prefs %~ (\p -> p {autoLoad = True})
                                                                        AnotherResponseType 3 -> dontLoad fn
                                                                        ResponseTypeDeleteEvent -> dontLoad fn
                                                                        _ -> return ()
                                                                return True
                                            else return False
                                else return False
                        Nothing -> return False
  where
    -- The user declined the reload: accept the on-disk time as current
    -- so we do not ask again for the same change.
    dontLoad fn = do
        nmt2 <- liftIO $ getModificationTime fn
        liftIO $ writeIORef (modTime buf) (Just nmt2)
-- | Record the file's current on-disk modification time in @buf@.
-- A failure to stat the file is reported and otherwise ignored.
setModTime :: IDEBuffer -> IDEAction
setModTime buf =
    F.forM_ (fileName buf) $ \fn -> liftIO $
        E.catch
            (getModificationTime fn >>= writeIORef (modTime buf) . Just)
            (\(e :: SomeException) -> sysMessage Normal (T.pack $ show e))
-- | Discard in-memory changes of the active buffer and reload its
-- contents from disk.
fileRevert :: IDEAction
fileRevert = inActiveBufContext () $ \_ _ currentBuffer ->
    revert currentBuffer
-- | Reload the buffer's contents from its file, re-applying source
-- candy when enabled, and record the new modification time.  The
-- reload is wrapped in a not-undoable action so it cannot be undone
-- into a stale state.  Buffers without a file name are left alone.
revert :: MonadIDE m => IDEBuffer -> m ()
revert buf@IDEBuffer{sourceView = sv} = do
    useCandy <- useCandyFor buf
    case fileName buf of
        Nothing -> return ()
        Just fn -> liftIDE $ do
            buffer <- getBuffer sv
            -- Use strict Text I/O instead of lazy String readFile: the
            -- file handle is closed immediately and no lazy thunks keep
            -- the old contents alive while the file may change again.
            fc <- liftIO $ T.readFile fn
            mt <- liftIO $ getModificationTime fn
            beginNotUndoableAction buffer
            setText buffer fc
            when useCandy $
                modeTransformToCandy (mode buf)
                    (modeEditInCommentOrString (mode buf))
                    buffer
            endNotUndoableAction buffer
            setModified buffer False
            liftIO $ writeIORef (modTime buf) (Just mt)
-- | Publish the current cursor line/column of @sv@ to the status bar.
writeCursorPositionInStatusbar :: TextEditor editor => EditorView editor -> IDEAction
writeCursorPositionInStatusbar sv = do
    ebuf <- getBuffer sv
    insertIter <- getIterAtMark ebuf =<< getInsertMark ebuf
    pos <- (,) <$> getLine insertIter <*> getLineOffset insertIter
    triggerEventIDE_ (StatusbarChanged [CompartmentBufferPos pos])
    return ()
-- | Publish the view's insert/overwrite state to the status bar.
writeOverwriteInStatusbar :: TextEditor editor => EditorView editor -> IDEAction
writeOverwriteInStatusbar sv =
    getOverwrite sv >>= \overwriteOn ->
        triggerEventIDE_ (StatusbarChanged [CompartmentOverlay overwriteOn])
-- | Trigger a SelectInfo event for the (candyless) text between
-- @start@ and @end@, reporting the absolute screen position of the end
-- iter so the symbol info popup can be placed next to it.
selectInfo' :: TextEditor e => IDEBuffer -> EditorBuffer e -> EditorView e -> EditorIter e -> EditorIter e -> Bool -> Bool -> IDEAction
selectInfo' buf ebuf view start end activatePanes gotoSource = do
    candy' <- readIDE candy
    sTxt <- getCandylessPart candy' ebuf start end
    startPos <- getLocation buf ebuf start
    endPos <- getLocation buf ebuf end
    unless (T.null sTxt) $ do
        -- Buffer coords -> window coords -> absolute screen coords.
        rect <- getIterLocation view end
        bx <- getRectangleX rect
        by <- getRectangleY rect
        (x, y) <- bufferToWindowCoords view (fromIntegral bx, fromIntegral by)
        getWindow view >>= \case
            Nothing -> return ()
            Just drawWindow -> do
                (_, ox, oy) <- windowGetOrigin drawWindow
                triggerEventIDE_ (SelectInfo (SymbolEvent sTxt ((, startPos, endPos) <$> fileName buf) activatePanes gotoSource (ox + fromIntegral x, oy + fromIntegral y)))
-- | Like 'selectInfo'' but operating on the identifier currently under
-- the cursor in @ebuf@.
selectInfo :: TextEditor e => IDEBuffer -> EditorBuffer e -> EditorView e -> Bool -> Bool -> IDEAction
selectInfo buf ebuf view activatePanes gotoSource = do
    (l,r) <- getIdentifierUnderCursor ebuf
    selectInfo' buf ebuf view l r activatePanes gotoSource
-- | Refresh the modified-marker on the tab label of the active buffer,
-- if there is one.
markActiveLabelAsChanged :: IDEAction
markActiveLabelAsChanged =
    getActivePanePath >>= \case
        Nothing -> return ()
        Just path -> do
            nb <- getNotebook path
            mbBuf <- maybeActiveBuf
            F.forM_ mbBuf (markLabelAsChanged nb)
-- | Update the notebook tab label of @buf@ to reflect whether its
-- editor buffer currently has unsaved modifications.
markLabelAsChanged :: Notebook -> IDEBuffer -> IDEAction
markLabelAsChanged nb buf@IDEBuffer{sourceView = sv} = do
    liftIO $ debugM "leksah" "markLabelAsChanged"
    ebuf <- getBuffer sv
    modified <- getModified ebuf
    w <- getTopWidget buf
    markLabel nb w modified
-- | Save the contents of @ebuf@ for the pane @ideBuf@.  When @query@
-- is False and the buffer already has a file name, the file is written
-- in place (after offering a reload if it changed on disk); otherwise
-- a file chooser is shown first ("Save As").  Returns True when
-- something was (or needs to be considered) saved.
--
-- Fix: several pure-comment lines inside 'fileSave'' had lost their
-- @--@ prefixes, which made them syntax errors; the markers are
-- restored below.
fileSaveBuffer :: (MonadIDE m, TextEditor editor) => Bool -> Notebook -> EditorView editor -> EditorBuffer editor -> IDEBuffer -> Int -> m Bool
fileSaveBuffer query nb _ ebuf ideBuf@IDEBuffer{sourceView = sv} _i = liftIDE $ do
    window <- getMainWindow
    prefs' <- readIDE prefs
    useCandy <- useCandyFor ideBuf
    candy' <- readIDE candy
    (panePath,_connects) <- guiPropertiesFromName (paneName ideBuf)
    case fileName ideBuf of
        Just fn | not query -> do
            modifiedOnDisk <- checkModTime ideBuf -- The user is given option to reload
            modifiedInBuffer <- getModified ebuf
            if modifiedInBuffer
                then do
                    fileSave' (forceLineEnds prefs') (removeTBlanks prefs')
                        useCandy candy' fn
                    setModTime ideBuf
                    return True
                else return modifiedOnDisk
        mbfn -> reifyIDE $ \ideR -> do
            -- No file name yet (or an explicit "Save As"): ask the user.
            dialog <- new' FileChooserDialog [constructDialogUseHeaderBar 1]
            setWindowTitle dialog (__ "Save File")
            windowSetTransientFor dialog $ Just window
            fileChooserSetAction dialog FileChooserActionSave
            _ <- dialogAddButton' dialog "gtk-cancel" ResponseTypeCancel
            _ <- dialogAddButton' dialog "gtk-save" ResponseTypeAccept
            forM_ mbfn $ fileChooserSelectFilename dialog
            widgetShow dialog
            response <- dialogRun' dialog
            mbFileName <- case response of
                ResponseTypeAccept -> fileChooserGetFilename dialog
                ResponseTypeCancel -> return Nothing
                ResponseTypeDeleteEvent -> return Nothing
                _ -> return Nothing
            widgetDestroy dialog
            case mbFileName of
                Nothing -> return False
                Just fn -> do
                    fileExists <- doesFileExist fn
                    shouldOverwrite <-
                        if not fileExists
                            then return True
                            else showConfirmDialog (Just window) False (__ "_Overwrite") (__ "File already exist.")
                    when shouldOverwrite $
                        reflectIDE (do
                            fileSave' (forceLineEnds prefs') (removeTBlanks prefs')
                                useCandy candy' fn
                            -- Reopen under the new (canonical) name so the
                            -- pane name and file watching match the file.
                            _ <- closePane ideBuf
                            cfn <- liftIO $ myCanonicalizePath fn
                            void $ newTextBuffer panePath (T.pack $ takeFileName cfn) (Just cfn)
                            ) ideR
                    return shouldOverwrite
  where
    -- Write the buffer text (candy removed, optionally with trailing
    -- blanks stripped) to @fn@ and make sure the modification time
    -- moves forward so GHC notices the change.
    fileSave' :: Bool -> Bool -> Bool -> CandyTable -> FilePath -> IDEAction
    fileSave' _forceLineEnds removeTBlanks _useCandy candyTable fn = do
        buf <- getBuffer sv
        text <- getCandylessText candyTable buf
        let text' = if removeTBlanks
                        then T.unlines $ map (T.dropWhileEnd (== ' ')) $ T.lines text
                        else text
        alreadyExists <- liftIO $ doesFileExist fn
        mbModTimeBefore <- if alreadyExists
                               then liftIO $ Just <$> getModificationTime fn
                               else return Nothing
        succ' <- liftIO $ E.catch (do T.writeFile fn text'; return True)
            (\(e :: SomeException) -> do
                sysMessage Normal . T.pack $ show e
                return False)
        -- Truely horrible hack to work around HFS+ only having 1sec resolution
        -- and ghc ignoring files unless the modifiction time has moved forward.
        -- The limitation means we can do at most 1 reload a second, but
        -- this hack allows us to take an advance of up to 30 reloads (by
        -- moving the modidification time up to 30s into the future).
        modTimeChanged <- liftIO $ case mbModTimeBefore of
            Nothing -> return True
            Just modTime -> do
                newModTime <- getModificationTime fn
                let diff = diffUTCTime modTime newModTime
                if
                    | (newModTime > modTime) -> return True -- All good mode time has moved on
                    | diff < 30 -> do
                        setModificationTimeOnOSX fn (addUTCTime 1 modTime)
                        updatedModTime <- getModificationTime fn
                        return (updatedModTime > modTime)
                    | diff < 32 -> do
                        -- Reached our limit of how far in the future we want to set the modifiction time.
                        -- Using 32 instead of 31 in case or something is adjusting the clock back.
                        warningM "leksah" $ "Modification time for " <> fn
                            <> " was already " <> show (diffUTCTime modTime newModTime)
                            <> " in the future"
                        -- We still want to keep the modification time the same though.
                        -- If it went back the future date ghc has might cause it to
                        -- continue to ignore the file.
                        setModificationTimeOnOSX fn modTime
                        return False
                    | otherwise -> do
                        -- This should never happen unless something else is messing
                        -- with the modification time or the clock.
                        -- If it does happen we will leave the modifiction time alone.
                        errorM "leksah" $ "Modification time for " <> fn
                            <> " was already " <> show (diffUTCTime modTime newModTime)
                            <> " in the future"
                        return True
        -- Only consider the file saved if the modification time changed
        -- otherwise another save is really needed to trigger ghc.
        when modTimeChanged $ do
            setModified buf (not succ')
            markLabelAsChanged nb ideBuf
            triggerEventIDE_ $ SavedFile fn
-- | Save the active buffer.  @query@ True always opens the "Save As"
-- file chooser; False saves in place when a file name is known.
fileSave :: Bool -> IDEM Bool
fileSave query = inActiveBufContext' False $ fileSaveBuffer query
-- | Save every open buffer accepted by @filterFunc@ that actually
-- needs saving (modified in memory or changed on disk).  Returns True
-- if at least one buffer reported a save.
fileSaveAll :: MonadIDE m => (IDEBuffer -> m Bool) -> m Bool
fileSaveAll filterFunc = do
    bufs <- allBuffers
    filtered <- filterM filterFunc bufs
    -- Only touch buffers that 'fileCheckBuffer' says need attention.
    modified <- filterM fileCheckBuffer filtered
    results <- forM modified (\buf -> inBufContext False buf (fileSaveBuffer False))
    -- 'or' is the idiomatic form of the previous ``True `elem` results``.
    return (or results)
-- | True when the buffer needs attention before e.g. a build: it is
-- modified in memory, or its file changed on disk (in which case
-- 'checkModTime' may prompt the user to reload).
fileCheckBuffer :: (MonadIDE m) => IDEBuffer -> m Bool
fileCheckBuffer ideBuf@IDEBuffer{sourceView = v} =
    case fileName ideBuf of
        Just _fn -> do
            modifiedOnDisk <- checkModTime ideBuf -- The user is given option to reload
            modifiedInBuffer <- liftIDE $ getModified =<< getBuffer v
            return (modifiedOnDisk || modifiedInBuffer)
        _ -> return False
-- | For each open buffer run @filterFunc@; when it yields a non-empty
-- list and the buffer is modified, keep that list.  The results of all
-- buffers are concatenated.
fileCheckAll :: MonadIDE m => (IDEBuffer -> m [alpha]) -> m [alpha]
fileCheckAll filterFunc = do
    bufs <- allBuffers
    fmap concat . forM bufs $ \buf -> do
        ps <- filterFunc buf
        if null ps
            then return []
            else do
                modified <- fileCheckBuffer buf
                return (if modified then ps else [])
-- | Open a fresh unnamed buffer in the preferred pane for source
-- buffers.
fileNew :: IDEAction
fileNew = do
    pp <- getBestPathForId "*Buffer"
    void $ newTextBuffer pp (__ "Unnamed") Nothing
-- | Close the active buffer, prompting to save when it has unsaved
-- changes.  False when the user cancelled.
fileClose :: IDEM Bool
fileClose = inActiveBufContext True fileClose'
-- | Close the pane for @currentBuffer@.  When the editor buffer has
-- unsaved changes the user is asked to save, discard, or cancel.
-- Returns False when the close was cancelled.
fileClose' :: TextEditor editor => EditorView editor -> EditorBuffer editor -> IDEBuffer -> IDEM Bool
fileClose' _ ebuf currentBuffer = do
    window <- getMainWindow
    modified <- getModified ebuf
    shouldContinue <- reifyIDE $ \ideR ->
        if modified
            then do
                resp <- showDialogAndGetResponse
                    (Just window)
                    (__ "Save changes to document: "
                        <> paneName currentBuffer
                        <> "?")
                    MessageTypeQuestion
                    ResponseTypeYes
                    [ constructDialogUseHeaderBar 0, constructMessageDialogButtons ButtonsTypeCancel ]
                    [ (__ "_Save", ResponseTypeYes)
                    , (__ "_Don't Save", ResponseTypeNo) ]
                case resp of
                    ResponseTypeYes -> do
                        _ <- reflectIDE (fileSave False) ideR
                        return True
                    ResponseTypeNo -> return True
                    ResponseTypeCancel -> return False
                    _ -> return False
            else return True
    when shouldContinue $ do
        _ <- closeThisPane currentBuffer
        -- Remember the file so it shows up in the "recent files" menu.
        F.forM_ (fileName currentBuffer) addRecentlyUsedFile
    return shouldContinue
-- | Close every buffer accepted by @filterFunc@, one at a time.
-- Stops (returning False) as soon as the user cancels a close.
fileCloseAll :: (IDEBuffer -> IDEM Bool) -> IDEM Bool
fileCloseAll filterFunc = do
    bufs <- allBuffers
    filtered <- filterM filterFunc bufs
    case filtered of
        [] -> return True
        (h:_) -> do
            -- Activate first so 'fileClose' targets this pane.
            makeActive h
            r <- fileClose
            if r
                then fileCloseAll filterFunc
                else return False
-- | Close all unmodified buffers whose files live outside the active
-- package's directory.  Does nothing when no package is active.
fileCloseAllButPackage :: IDEAction
fileCloseAllButPackage = do
    mbActivePath <- fmap ipdPackageDir <$> readIDE activePack
    bufs <- allBuffers
    case mbActivePath of
        Just p -> mapM_ (close' p) bufs
        Nothing -> return ()
  where
    close' dir buf@IDEBuffer{sourceView = sv} = do
        ebuf <- getBuffer sv
        when (isJust (fileName buf)) $ do
            modified <- getModified ebuf
            -- Never close modified buffers or files inside the package.
            when (not modified && not (isSubPath dir (fromJust (fileName buf))))
                $ void $ fileClose' sv ebuf buf
-- | Close all unmodified buffers whose files do not belong to any
-- package of the current workspace.  Does nothing without a workspace.
fileCloseAllButWorkspace :: IDEAction
fileCloseAllButWorkspace = do
    bufs <- allBuffers
    readIDE workspace >>= mapM_ (\ws ->
        unless (null bufs) $ mapM_ (close' ws) bufs)
  where
    close' ws buf@IDEBuffer{sourceView = sv} = do
        ebuf <- getBuffer sv
        when (isJust (fileName buf)) $ do
            modified <- getModified ebuf
            -- Never close modified buffers or workspace files.
            when (not modified && not (isSubPathOfAny ws (fromJust (fileName buf))))
                $ void $ fileClose' sv ebuf buf
    -- True when the file is inside any workspace package directory.
    isSubPathOfAny ws fileName =
        let paths = ipdPackageDir <$> (ws ^. wsPackages)
        in  any (`isSubPath` fileName) paths
-- | Open the given file in a new buffer.  If a buffer for it is
-- already open, ask whether to activate the existing one or open a
-- second view.
fileOpenThis :: FilePath -> IDEAction
fileOpenThis fp = do
    liftIO . debugM "leksah" $ "fileOpenThis " ++ fp
    fpc <- liftIO $ myCanonicalizePath fp
    -- NOTE(review): the lookup uses the raw @fp@ while opening uses the
    -- canonicalized @fpc@ -- presumably 'findSourceBuf' canonicalizes
    -- internally; confirm before relying on it.
    findSourceBuf fp >>= \case
        hdb:_ -> do
            window <- getMainWindow
            resp <- showDialogAndGetResponse
                (Just window)
                (__ "Buffer already open.")
                MessageTypeQuestion
                (AnotherResponseType 1)
                [ constructDialogUseHeaderBar 0, constructMessageDialogButtons ButtonsTypeNone ]
                [ (__ "Make _Active", AnotherResponseType 1)
                , (__ "_Open Second", AnotherResponseType 2) ]
            case resp of
                AnotherResponseType 2 -> reallyOpen fpc
                _ -> makeActive hdb
        [] -> reallyOpen fpc
  where
    reallyOpen fpc = do
        pp <- getBestPathForId "*Buffer"
        void $ newTextBuffer pp (T.pack $ takeFileName fpc) (Just fpc)
-- | Print the active buffer's file after confirmation.
filePrint :: IDEAction
filePrint = inActiveBufContext' () filePrint'
-- | Confirm, optionally save, then send the buffer's file to the
-- printer.  Printing works on the on-disk file, which is why unsaved
-- changes trigger a save prompt first.
filePrint' :: TextEditor editor => Notebook -> EditorView view -> EditorBuffer editor -> IDEBuffer -> Int -> IDEM ()
filePrint' _nb _ ebuf currentBuffer _ = do
    let pName = paneName currentBuffer
    window <- getMainWindow
    yesPrint <- liftIO $ do
        resp <- showDialogAndGetResponse
            (Just window)
            (__"Print document: "
                <> pName
                <> "?")
            MessageTypeQuestion
            ResponseTypeYes
            [ constructDialogUseHeaderBar 0, constructMessageDialogButtons ButtonsTypeNone ]
            [ (__"_Print", ResponseTypeYes)
            , (__"_Don't Print", ResponseTypeNo) ]
        case resp of
            ResponseTypeYes -> return True
            ResponseTypeNo -> return False
            ResponseTypeCancel -> return False
            _ -> return False
    when yesPrint $ do
        --real code
        modified <- getModified ebuf
        -- Offer to save first; printing reads the file from disk.
        shouldContinue <- reifyIDE $ \ideR ->
            if modified
                then do
                    resp <- showDialogAndGetResponse
                        (Just window)
                        (__"Save changes to document: "
                            <> pName
                            <> "?")
                        MessageTypeQuestion
                        ResponseTypeYes
                        [ constructDialogUseHeaderBar 0, constructMessageDialogButtons ButtonsTypeNone ]
                        [ (__"_Save", ResponseTypeYes)
                        , (__"_Don't Save", ResponseTypeNo)
                        , (__"_Cancel Printing", ResponseTypeCancel) ]
                    case resp of
                        ResponseTypeYes -> do
                            _ <- reflectIDE (fileSave False) ideR
                            return True
                        ResponseTypeNo -> return True
                        ResponseTypeCancel -> return False
                        _ -> return False
                else
                    return True
        when shouldContinue $
            case fileName currentBuffer of
                Just name -> do
                    status <- liftIO $ Print.print name
                    case status of
                        Left err -> liftIO $ showErrorDialog (Just window) (T.pack $ show err)
                        Right _ -> liftIO $ showDialog (Just window) "Print job has been sent successfully" MessageTypeInfo
                    return ()
                Nothing -> return ()
-- | Undo the last edit in the active buffer and keep the cursor
-- visible; does nothing when there is nothing to undo.
editUndo :: IDEAction
editUndo = inActiveBufContext () $ \view buf _ -> do
    can <- canUndo buf
    when can $ do
        undo buf
        scrollToCursor view
-- | Redo the last undone edit in the active buffer and keep the cursor
-- visible.  Fix: the scroll used to run even when there was nothing to
-- redo; it is now inside the guard, matching 'editUndo'.
editRedo :: IDEAction
editRedo = inActiveBufContext () $ \view buf _ -> do
    can <- canRedo buf
    when can $ do
        redo buf
        scrollToCursor view
-- | Delete the selected text in the active buffer and keep the cursor
-- visible.
editDelete :: IDEAction
editDelete = inActiveBufContext () $ \view ebuf _ -> do
    deleteSelection ebuf
    scrollToCursor view
-- | Select the entire contents of the active buffer.
editSelectAll :: IDEAction
editSelectAll = inActiveBufContext () $ \_ ebuf _ ->
    getStartIter ebuf >>= \begin ->
        getEndIter ebuf >>= selectRange ebuf begin
-- | Cut the selection of the active buffer to the system clipboard.
editCut :: IDEAction
editCut = inActiveBufContext () $ \_ ebuf _ -> do
    clip <- clipboardGet =<< atomIntern "CLIPBOARD" False
    cutClipboard ebuf clip True
-- | Copy the selection of the active buffer to the system clipboard
-- and keep the cursor visible.
editCopy :: IDEAction
editCopy = inActiveBufContext () $ \view ebuf _ -> do
    clip <- clipboardGet =<< atomIntern "CLIPBOARD" False
    copyClipboard ebuf clip
    scrollToCursor view
-- | Paste the system clipboard at the cursor of the active buffer.
editPaste :: IDEAction
editPaste = inActiveBufContext () $ \_ ebuf _ -> do
    mark <- getInsertMark ebuf
    iter <- getIterAtMark ebuf mark
    clip <- clipboardGet =<< atomIntern "CLIPBOARD" False
    pasteClipboard ebuf clip iter True
-- | Shift every selected line left by one tab width, but only when all
-- selected lines start with a full tab width of spaces (otherwise
-- non-blank characters would be deleted).
editShiftLeft :: IDEAction
editShiftLeft = do
    prefs' <- readIDE prefs
    let str = T.replicate (tabWidth prefs') " "
    b <- canShiftLeft str prefs'
    when b $ do
        _ <- doForSelectedLines [] $ \ebuf lineNr -> do
            sol <- getIterAtLine ebuf lineNr
            sol2 <- forwardCharsC sol (tabWidth prefs')
            delete ebuf sol sol2
        return ()
  where
    -- Every selected line must start with @str@ (tabWidth spaces).
    canShiftLeft str prefs' = do
        boolList <- doForSelectedLines [] $ \ebuf lineNr -> do
            sol <- getIterAtLine ebuf lineNr
            sol2 <- forwardCharsC sol (tabWidth prefs')
            str1 <- getText ebuf sol sol2 True
            return (str1 == str)
        -- 'and' is the idiomatic form of the old F.foldl' (&&) True.
        return (and boolList)
-- | Shift every selected line right by inserting one tab width of
-- spaces at its start.
editShiftRight :: IDEAction
editShiftRight = do
    width <- tabWidth <$> readIDE prefs
    void . doForSelectedLines [] $ \ebuf lineNr ->
        getIterAtLine ebuf lineNr >>= \sol ->
            insert ebuf sol (T.replicate width " ")
-- | Align the first occurrence of @pat'@ on every selected line to the
-- same column by padding with spaces (e.g. lining up all "=" signs).
align :: Text -> IDEAction
align pat' = inActiveBufContext () $ \_ ebuf ideBuf -> do
    useCandy <- useCandyFor ideBuf
    -- With candy enabled the buffer shows the candy form of the
    -- pattern, so translate it before searching.
    let pat = if useCandy
                  then transChar pat'
                  else pat'
    (start,end) <- getStartAndEndLineOfSelection ebuf
    beginUserAction ebuf
        -- Column of the first occurrence of @pat@ on each line (Nothing
        -- when the line does not contain it).
    let positionsOfChar :: IDEM [(Int, Maybe Int)]
        positionsOfChar = forM [start .. end] $ \lineNr -> do
            sol <- getIterAtLine ebuf lineNr
            eol <- forwardToLineEndC sol
            line <- getText ebuf sol eol True
            return (lineNr,
                    if pat `T.isInfixOf` line
                        then Just . T.length . fst $ T.breakOn pat line
                        else Nothing)
        -- Pad each matching line so the pattern starts at @alignTo@.
        alignChar :: Map Int (Maybe Int) -> Int -> IDEM ()
        alignChar positions alignTo =
            forM_ [start .. end] $ \lineNr ->
                case lineNr `Map.lookup` positions of
                    Just (Just n) -> do
                        sol <- getIterAtLine ebuf lineNr
                        insertLoc <- forwardCharsC sol n
                        insert ebuf insertLoc (T.replicate (alignTo - n) " ")
                    _ -> return ()
    positions <- positionsOfChar
    let alignTo = F.foldl' max 0 (mapMaybe snd positions)
    when (alignTo > 0) $ alignChar (Map.fromList positions) alignTo
    -- End the user action so the whole alignment undoes in one step.
    endUserAction ebuf
-- | Translate an alignment pattern into the character that source
-- candy displays for it, so 'align' can find the pattern in a
-- candy-rendered buffer.  Anything without a candy form is returned
-- unchanged.  (The comment markers on the code-point names below had
-- been lost; the translations are restored to match them.)
transChar :: Text -> Text
transChar t
    | t == T.pack "::" = T.singleton (toEnum 0x2237) -- PROPORTION
    | t == T.pack "->" = T.singleton (toEnum 0x2192) -- RIGHTWARDS ARROW
    | t == T.pack "<-" = T.singleton (toEnum 0x2190) -- LEFTWARDS ARROW
    | otherwise        = t
-- | Put @fp@ at the front of the "recent files" list (capped at 12
-- entries, no duplicates) and refresh the menu.  Skipped while the IDE
-- is starting or closing.
addRecentlyUsedFile :: FilePath -> IDEAction
addRecentlyUsedFile fp = do
    state <- readIDE currentState
    unless (isStartingOrClosing state) $ do
        recentFiles' <- readIDE recentFiles
        unless (fp `elem` recentFiles') $
            modifyIDE_ $ recentFiles .~ take 12 (fp : recentFiles')
        triggerEventIDE_ UpdateRecent
--removeRecentlyUsedFile :: FilePath -> IDEAction
--removeRecentlyUsedFile fp = do
--    state <- readIDE currentState
--    unless (isStartingOrClosing state) $ do
--        recentFiles' <- readIDE recentFiles
--        when (fp `elem` recentFiles') $
--            modifyIDE_ $ recentFiles .~ filter (/= fp) recentFiles'
--        triggerEventIDE_ UpdateRecent
-- | Get the currently selected text or Nothing is no text is selected
selectedText :: IDEM (Maybe IDEBuffer, Maybe Text)
selectedText = do
    candy' <- readIDE candy
    inActiveBufContext (Nothing, Nothing) $ \_ ebuf currentBuffer ->
        hasSelection ebuf >>= \case
            True -> do
                (i1,i2) <- getSelectionBounds ebuf
                -- Return the text as stored on disk, not its candy form.
                text <- getCandylessPart candy' ebuf i1 i2
                return (Just currentBuffer, Just text)
            False -> return (Just currentBuffer, Nothing)
-- | Get the currently selected text, or, if none, the current line text
selectedTextOrCurrentLine :: IDEM (Maybe (IDEBuffer, Text))
selectedTextOrCurrentLine = do
    candy' <- readIDE candy
    inActiveBufContext Nothing $ \_ ebuf currentBuffer -> do
        (i1, i2) <- hasSelection ebuf >>= \case
            True -> getSelectionBounds ebuf
            False -> do
                -- No selection: use the whole line under the cursor.
                (i, _) <- getSelectionBounds ebuf
                line <- getLine i
                iStart <- getIterAtLine ebuf line
                iEnd <- forwardToLineEndC iStart
                return (iStart, iEnd)
        Just . (currentBuffer,) <$> getCandylessPart candy' ebuf i1 i2
-- | Get the currently selected text, or, if none, tries to selected the current identifier (the one under the cursor)
selectedTextOrCurrentIdentifier :: IDEM (Maybe IDEBuffer, Maybe Text)
selectedTextOrCurrentIdentifier = do
    st <- selectedText
    case snd st of
        Just _ -> return st
        Nothing -> do
            candy' <- readIDE candy
            inActiveBufContext (Nothing, Nothing) $ \_ ebuf currentBuffer -> do
                (l,r) <- getIdentifierUnderCursor ebuf
                t <- getCandylessPart candy' ebuf l r
                -- An empty identifier (e.g. cursor on whitespace) is
                -- reported as Nothing rather than "".
                return ( Just currentBuffer
                       , if T.null t
                             then Nothing
                             else Just t)
-- | Line/column of @iter@, translated back through source candy when
-- the buffer is displayed with candy enabled, so positions refer to
-- the file as stored on disk.
getLocation :: TextEditor e => IDEBuffer -> EditorBuffer e -> EditorIter e -> IDEM (Int, Int)
getLocation buf ebuf iter = do
    candy' <- readIDE candy
    useCandy <- useCandyFor buf
    pos <- (,) <$> getLine iter <*> getLineOffset iter
    if useCandy
        then positionFromCandy candy' ebuf pos
        else return pos
-- | Candy-corrected (line, column) of the selection start (or the
-- cursor) in the active buffer; Nothing when no buffer is active.
selectedLocation :: IDEM (Maybe (Int, Int))
selectedLocation =
    inActiveBufContext Nothing $ \_ ebuf currentBuffer -> do
        (start, _) <- getSelectionBounds ebuf
        Just <$> getLocation currentBuffer ebuf start
-- | Insert @str@ directly after the current selection and select the
-- inserted text.  Does nothing when there is no selection.  The text
-- is converted to its candy form when candy is on for the buffer.
insertTextAfterSelection :: Text -> IDEAction
insertTextAfterSelection str = do
    candy' <- readIDE candy
    inActiveBufContext () $ \_ ebuf currentBuffer -> do
        useCandy <- useCandyFor currentBuffer
        hasSelection ebuf >>= (`when` do
            realString <- if useCandy then stringToCandy candy' str else return str
            (_,i) <- getSelectionBounds ebuf
            insert ebuf i realString
            -- Re-query the bounds: the insert moved the end iter.
            (_,i1) <- getSelectionBounds ebuf
            i2 <- forwardCharsC i1 (T.length realString)
            selectRange ebuf i1 i2)
-- | Returns the packages to which this buffer belongs
--   uses the 'bufferProjCache' and might extend it.
--   Buffers without a file name belong to no package.
belongsToPackages' :: MonadIDE m => IDEBuffer -> m [(Project, IDEPackage)]
belongsToPackages' = maybe (return []) belongsToPackages . fileName
-- | Checks whether a file belongs to the workspace.
--   Buffers without a file name never belong to the workspace.
belongsToWorkspace' :: MonadIDE m => IDEBuffer -> m Bool
belongsToWorkspace' = maybe (return False) belongsToWorkspace . fileName
-- | Source candy applies only to Haskell buffers, and only when it is
-- enabled in the preferences.
useCandyFor :: MonadIDE m => IDEBuffer -> m Bool
useCandyFor aBuffer =
    (\p -> candyState p && isHaskellMode (mode aBuffer)) <$> readIDE prefs
-- | Apply or remove source candy in every open buffer, following the
-- current candy preference.
switchBuffersCandy :: IDEAction
switchBuffersCandy = do
    prefs' <- readIDE prefs
    buffers <- allBuffers
    forM_ buffers $ \b@IDEBuffer{sourceView=sv} -> do
        buf <- getBuffer sv
        if candyState prefs'
            then modeTransformToCandy (mode b) (modeEditInCommentOrString (mode b)) buf
            else modeTransformFromCandy (mode b) buf
| null | https://raw.githubusercontent.com/leksah/leksah/ec95f33af27fea09cba140d7cddd010935a2cf52/src-gtk/IDE/Pane/SourceBuffer.hs | haskell | # LANGUAGE TypeSynonymInstances #
# LANGUAGE RankNTypes #
# LANGUAGE OverloadedStrings #
# LANGUAGE MultiWayIf #
---------------------------------------------------------------------------
Module : IDE.Pane.SourceBuffer
License : GNU-GPL
Maintainer : <maintainer at leksah.org>
Stability : provisional
Portability : portable
---------------------------------------------------------------------------------
time name action = do
liftIO . debugM "leksah" $ name <> " start"
start <- liftIO $ realToFrac <$> getPOSIXTime
result <- action
end <- liftIO $ realToFrac <$> getPOSIXTime
return result
ids3 <- sv `onLookupInfo` selectInfo sv -- obsolete by hyperlinks
removeFileLogRefs file types =
removeFileLogRefs' file types removeFromBuffers
removePackageLogRefs log' types =
removePackageLogRefs' log' types removeFromBuffers
does not exist or when it is not a text file.
create a new SourceView Widget
Files opened from the unpackDirectory are meant for documentation
and are not actually a source dependency, they should not be editable.
infoBarSetShowCloseButton bar True
events
placeCursor buffer
delete word
Redundant should become a go to definition directly
Avoid passing these directly to bindinsActivateEvent because that seems
to hide them from the auto complete code (well up and down anyway)
txt
tagname
if char and previous char are of different char categories
| Get an iterator pair (start,end) delimiting the identifier currently under the cursor
| Get an iterator pair (start,end) delimiting the identifier currently contained inside the provided iterator pair
Fonts get messed up under windows when adding this line.
Praises to whoever finds out what happens and how to fix this
The user is given option to reload
All good mode time has moved on
Reached our limit of how far in the future we want to set the modifiction time.
We still want to keep the modification time the same though.
continue to ignore the file.
This should never happen unless something else is messing
with the modification time or the clock.
If it does happen we will leave the modifiction time alone.
Only consider the file saved if the modification time changed
The user is given option to reload
real code
removeRecentlyUsedFile :: FilePath -> IDEAction
removeRecentlyUsedFile fp = do
state <- readIDE currentState
unless (isStartingOrClosing state) $ do
when (fp `elem` recentFiles') $
triggerEventIDE_ UpdateRecent
| Get the currently selected text or Nothing is no text is selected
| Get the currently selected text, or, if none, the current line text
| Get the currently selected text, or, if none, tries to selected the current identifier (the one under the cursor)
| Returns the packages to which this buffer belongs
uses the 'bufferProjCache' and might extend it
| Checks whether a file belongs to the workspace | # LANGUAGE TupleSections #
# LANGUAGE ViewPatterns #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE RecordWildCards #
# LANGUAGE LambdaCase #
# OPTIONS_GHC -fno - warn - orphans #
Copyright : ( c ) ,
| The source editor part of Leksah
module IDE.Pane.SourceBuffer (
IDEBuffer(..)
, BufferState(..)
, allBuffers
, maybeActiveBuf
, selectSourceBuf
, goToSourceDefinition
, goToSourceDefinition'
, goToDefinition
, goToLocation
, insertInBuffer
, fileNew
, fileOpenThis
, filePrint
, fileRevert
, fileClose
, fileCloseAll
, fileCloseAllButPackage
, fileCloseAllButWorkspace
, fileSave
, fileSaveAll
, fileSaveBuffer
, fileCheckAll
, editUndo
, editRedo
, editCut
, editCopy
, editPaste
, editDelete
, editSelectAll
, editReformat
, editComment
, editUncomment
, editShiftRight
, editShiftLeft
, editToCandy
, editFromCandy
, editKeystrokeCandy
, switchBuffersCandy
, updateStyle
, updateStyle'
, addLogRef
, removeLogRefs
, removeBuildLogRefs
, removeFileExtLogRefs
, removeTestLogRefs
, removeLintLogRefs
, markRefInSourceBuf
, unmarkRefInSourceBuf
, inBufContext
, inActiveBufContext
, align
, startComplete
, selectedText
, selectedTextOrCurrentLine
, selectedTextOrCurrentIdentifier
, insertTextAfterSelection
, selectedModuleName
, selectedLocation
, recentSourceBuffers
, newTextBuffer
, belongsToPackages
, belongsToPackages'
, belongsToPackage
, belongsToWorkspace
, belongsToWorkspace'
, getIdentifierUnderCursorFromIter
, useCandyFor
, setModifiedOnDisk
) where
import Prelude ()
import Prelude.Compat hiding(getChar, getLine)
import Control.Applicative ((<|>))
import Control.Concurrent (modifyMVar_, putMVar, takeMVar, newMVar, tryPutMVar)
import Control.Event (triggerEvent)
import Control.Exception as E (catch, SomeException)
import Control.Lens ((.~), (%~), (^.), to)
import Control.Monad (filterM, void, unless, when, forM_)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.Reader (ask)
import Data.Char (isSymbol, isSpace, isAlphaNum)
import qualified Data.Foldable as F (Foldable(..), forM_)
import Data.IORef (writeIORef,readIORef,newIORef)
import Data.List (isPrefixOf)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
(mapMaybe, fromJust, isNothing, isJust, fromMaybe)
import Data.Sequence (ViewR(..))
import qualified Data.Sequence as Seq
import Data.Text (Text)
import qualified Data.Text as T
(singleton, isInfixOf, breakOn, length, replicate,
lines, dropWhileEnd, unlines, strip, null, pack, unpack)
import qualified Data.Text.IO as T (writeFile, readFile)
import Data.Time (UTCTime(..))
import Data.Time.Clock (addUTCTime, diffUTCTime)
import Data . Time . Clock . POSIX ( getPOSIXTime )
import Data.Traversable (forM)
import Data.Typeable (cast)
import System.Directory
(getHomeDirectory, getModificationTime, doesFileExist)
import System.FilePath
(splitDirectories, (</>), dropFileName,
equalFilePath, takeFileName)
import System.Log.Logger (errorM, warningM, debugM)
import Data.GI.Base.GObject (new')
import Data.GI.Base.ManagedPtr (unsafeCastTo)
import GI.Gdk (windowGetOrigin)
import GI.Gdk.Enums (EventType(..))
import GI.Gdk.Functions (keyvalName)
import GI.Gdk.Flags (ModifierType(..))
import GI.Gdk.Structs.Atom (atomIntern)
import GI.Gdk.Structs.EventButton (getEventButtonType)
import GI.Gdk.Structs.EventKey
(getEventKeyState, getEventKeyKeyval)
import GI.Gtk
(bindingsActivateEvent, onDialogResponse, widgetShowAll,
boxPackStart, boxNew, Container(..), containerAdd,
infoBarGetContentArea, labelNew, infoBarNew)
import GI.Gtk.Enums
(FileChooserAction(..), WindowPosition(..), ResponseType(..),
ButtonsType(..), MessageType(..), ShadowType(..), PolicyType(..),
Orientation(..))
import GI.Gtk.Flags (TextSearchFlags(..))
import GI.Gtk.Interfaces.FileChooser
(fileChooserGetFilename, fileChooserSelectFilename,
fileChooserSetAction)
import GI.Gtk.Objects.Clipboard (clipboardGet)
import GI.Gtk.Objects.Dialog
(constructDialogUseHeaderBar)
import GI.Gtk.Objects.FileChooserDialog (FileChooserDialog(..))
import GI.Gtk.Objects.MessageDialog
(setMessageDialogText, constructMessageDialogButtons, setMessageDialogMessageType,
MessageDialog(..))
import GI.Gtk.Objects.Notebook
(Notebook(..))
import qualified GI.Gtk.Objects.Notebook as Gtk (Notebook(..))
import GI.Gtk.Objects.ScrolledWindow
(setScrolledWindowShadowType, scrolledWindowSetPolicy)
import GI.Gtk.Objects.Widget
(widgetShow, widgetDestroy)
import GI.Gtk.Objects.Window
(setWindowTitle, setWindowWindowPosition, windowSetTransientFor)
import qualified GI.Gtk.Objects.Window as Gtk (Window(..))
import Graphics.UI.Editor.Parameters
(dialogRun', dialogSetDefaultResponse', dialogAddButton')
import Graphics.UI.Frame.Panes (IDEPane(..))
import Graphics.UI.Frame.Rectangle (getRectangleY, getRectangleX)
import IDE.Core.State
(Log, MonadIDE, IDEM, IDEAction, Descr, PackModule, Location(..),
GenScope(..), PackScope(..), LogRefType(..), LogRef(..),
CandyTable, Prefs, IDEEvent(..), IDEState(..), autoLoad, Project,
IDEPackage, liftIDE, readIDE, candy, triggerBuild,
triggerEventIDE_, SensitivityMask(..), ideMessage,
MessageLevel(..), __, dscMbModu, dscMbLocation, pack,
pdMbSourcePath, mdModuleId, pdModules, mdMbSourcePath, dscName,
modifyIDE_, logRefFullFilePath, contextRefs, srcSpanStartColumn,
srcSpanStartLine, srcSpanEndColumn, srcSpanEndLine, prefs,
candyState, textEditorType, textviewFont, unpackDirectory,
showLineNumbers, rightMargin, tabWidth, wrapLines, sysMessage,
SrcSpan(..), currentState, reflectIDE, StatusbarCompartment(..),
SymbolEvent(..), forceLineEnds, removeTBlanks, reifyIDE,
ipdPackageDir, activePack, workspace, wsPackages, recentFiles,
addLogRef', belongsToPackage, belongsToPackages,
belongsToWorkspace, removeLintLogRefs', removeTestLogRefs',
removeBuildLogRefs', removeFileExtLogRefs', removeLogRefs')
import IDE.Gtk.State
(PanePath, Connections, IDEGtkEvent(..), getPanes,
postAsyncIDEIdle, activateThisPane, getBestPathForId,
paneFromName, getNotebook, figureOutPaneName, buildThisPane,
paneName, getMainWindow, getActivePanePath, getTopWidget,
markLabel, guiPropertiesFromName, closeThisPane,
isStartingOrClosing, RecoverablePane(..))
import qualified IDE.Command.Print as Print
import IDE.Utils.GUIUtils (showDialog, showConfirmDialog, showDialogAndGetResponse, showErrorDialog)
import IDE.Utils.FileUtils (isSubPath, myCanonicalizePath)
import IDE.Utils.DirectoryUtils (setModificationTimeOnOSX)
import IDE.Gtk.SourceCandy
(stringToCandy, positionFromCandy, getCandylessPart,
positionToCandy, getCandylessText)
import IDE.SymbolNavigation
(createHyperLinkSupport, mapControlCommand)
import IDE.Completion as Completion (complete,cancel, smartIndent)
import IDE.TextEditor
(newDefaultBuffer, newCMBuffer, newYiBuffer, newGtkBuffer,
TextEditor(..), EditorBuffer, EditorView, EditorIter,
scrollToCursor, updateStyle)
import IDE.Metainfo.Provider (getSystemInfo, getWorkspaceInfo)
import IDE.BufferMode
(recentSourceBuffers, selectedModuleName, editKeystrokeCandy,
editFromCandy, editToCandy, editUncomment, editComment,
editReformat, Mode(..), getStartAndEndLineOfSelection,
doForSelectedLines, inBufContext, inActiveBufContext',
isHaskellMode, modeFromFileName, lastActiveBufferPane,
inActiveBufContext, maybeActiveBuf, IDEBuffer(..), BufferState(..))
import IDE.Utils.DebugUtils (traceTimeTaken)
-- liftIO . debugM "leksah" $ name <> " took " <> show ((end - start) * 1000000) <> " us"
-- | Every currently open source-editor pane ('IDEBuffer') in the frame,
-- i.e. all panes of this pane type known to 'getPanes'.
allBuffers :: MonadIDE m => m [IDEBuffer]
allBuffers = liftIDE getPanes
-- | How a source buffer pane is persisted across sessions and what
-- happens when it becomes active or is closed.
instance RecoverablePane IDEBuffer BufferState IDEM where
    -- Persist either the file name plus cursor offset, or (for buffers
    -- without a file) the full de-candied text plus cursor offset.
    saveState p@IDEBuffer{sourceView = v} = do
        buf <- getBuffer v
        ins <- getInsertMark buf
        iter <- getIterAtMark buf ins
        offset <- getOffset iter
        case fileName p of
            Nothing -> do
                ct <- readIDE candy
                text <- getCandylessText ct buf
                return (Just (BufferStateTrans (bufferName p) text offset))
            Just fn -> return (Just (BufferState fn offset))
    -- Restore a file-backed buffer: reopen the file, then (in an idle
    -- callback, once the view exists) put the cursor back and scroll to it.
    recoverState pp (BufferState n i) = do
        mbbuf <- newTextBuffer pp (T.pack $ takeFileName n) (Just n)
        case mbbuf of
            Just IDEBuffer {sourceView=v} -> do
                postAsyncIDEIdle $ do
                    liftIO $ debugM "leksah" "SourceBuffer recoverState idle callback"
                    gtkBuf <- getBuffer v
                    iter <- getIterAtOffset gtkBuf i
                    placeCursor gtkBuf iter
                    mark <- getInsertMark gtkBuf
                    scrollToMark v mark 0.0 (Just (1.0,0.3))
                    liftIO $ debugM "leksah" "SourceBuffer recoverState done"
                return mbbuf
            Nothing -> return Nothing
    -- Restore an unsaved buffer from its persisted text, reapplying
    -- source candy when it is enabled for this buffer's mode.
    recoverState pp (BufferStateTrans bn text i) = do
        mbbuf <- newTextBuffer pp bn Nothing
        case mbbuf of
            Just buf@IDEBuffer{sourceView = v} -> do
                postAsyncIDEIdle $ do
                    liftIO $ debugM "leksah" "SourceBuffer recoverState idle callback"
                    useCandy <- useCandyFor buf
                    gtkBuf <- getBuffer v
                    setText gtkBuf text
                    when useCandy $ modeTransformToCandy (mode buf)
                        (modeEditInCommentOrString (mode buf)) gtkBuf
                    iter <- getIterAtOffset gtkBuf i
                    placeCursor gtkBuf iter
                    mark <- getInsertMark gtkBuf
                    scrollToMark v mark 0.0 (Just (1.0,0.3))
                    liftIO $ debugM "leksah" "SourceBuffer recoverState done"
                return (Just buf)
            Nothing -> return Nothing
    -- Wire statusbar updates, build triggering and editor sensitivity
    -- when this buffer becomes the active pane.
    makeActive actbuf@IDEBuffer{sourceView = sv} = do
        eBuf <- getBuffer sv
        writeCursorPositionInStatusbar sv
        writeOverwriteInStatusbar sv
        ids1 <- eBuf `afterModifiedChanged` markActiveLabelAsChanged
        ids2 <- sv `afterMoveCursor` writeCursorPositionInStatusbar sv
        ids4 <- sv `afterToggleOverwrite` writeOverwriteInStatusbar sv
        -- Any edit pokes the build-trigger MVar (non-blocking).
        ids5 <- eBuf `afterChanged` do
            tb <- readIDE triggerBuild
            void . liftIO $ tryPutMVar tb ()
        activateThisPane actbuf $ concat [ids1, ids2, ids4, ids5]
        triggerEventIDE_ (Sensitivity [(SensitivityEditor, True)])
        grabFocus sv
        void $ checkModTime actbuf
    -- Closing is routed through 'fileClose' so unsaved changes prompt
    -- the user first.
    closePane pane = do
        makeActive pane
        fileClose
    -- Source buffers are only created via 'newTextBuffer', never by the
    -- generic pane machinery.
    buildPane _panePath _notebook _builder = return Nothing
    builder _pp _nb _w = return (Nothing,[])
-- | Open the completion popup for the active source buffer, if there is one.
startComplete :: IDEAction
startComplete =
    maybeActiveBuf >>= \case
        Just IDEBuffer{sourceView = v} -> complete v True
        Nothing -> return ()
-- | All open buffers whose backing file matches the given path
-- (compared with 'equalFilePath' after canonicalising).
findSourceBuf :: MonadIDE m => FilePath -> m [IDEBuffer]
findSourceBuf fp = do
    canonical <- liftIO (myCanonicalizePath fp)
    let matches b = maybe False (equalFilePath canonical) (fileName b)
    filter matches <$> allBuffers
-- | Bring the buffer for the given file to the front, opening the file
-- in a new buffer when it is not open yet.  Returns Nothing when the
-- file does not exist on disk.
selectSourceBuf :: MonadIDE m => FilePath -> m (Maybe IDEBuffer)
selectSourceBuf fp =
    findSourceBuf fp >>= \case
        hdb:_ -> liftIDE $ do
            makeActive hdb
            return (Just hdb)
        _ -> liftIDE $ do
            fpc <- liftIO $ myCanonicalizePath fp
            fe <- liftIO $ doesFileExist fpc
            if fe
                then do
                    pp <- getBestPathForId "*Buffer"
                    liftIO $ debugM "lekash" "selectSourceBuf calling newTextBuffer"
                    nbuf <- newTextBuffer pp (T.pack $ takeFileName fpc) (Just fpc)
                    liftIO $ debugM "lekash" "selectSourceBuf newTextBuffer returned"
                    return nbuf
                else do
                    ideMessage Normal (__ "File path not found " <> T.pack fpc)
                    return Nothing
-- | Jump to the definition site of a symbol descriptor
-- (module + location taken from the 'Descr').
goToDefinition :: Descr -> IDEAction
goToDefinition idDescr = goToLocation (dscMbModu idDescr) (dscMbLocation idDescr)
-- | Jump to a module location, resolving the module to a file via the
-- workspace scope first and falling back to the system (package) scope.
goToLocation :: Maybe PackModule -> Maybe Location -> IDEAction
goToLocation mbMod mbLoc = do
    mbWorkspaceInfo <- getWorkspaceInfo
    mbSystemInfo <- getSystemInfo
    let mbPackagePath = (mbWorkspaceInfo >>= (packagePathFromScope . fst))
                    <|> (mbSystemInfo >>= packagePathFromScope)
        mbSourcePath = (mbWorkspaceInfo >>= (sourcePathFromScope . fst))
                   <|> (mbSystemInfo >>= sourcePathFromScope)
    liftIO . debugM "leksah" $ show (mbPackagePath, mbLoc, mbSourcePath)
    case (mbPackagePath, mbLoc, mbSourcePath) of
        (Just packagePath, Just loc, _) -> void (goToSourceDefinition (dropFileName packagePath) loc)
        (_, Just loc, Just sourcePath) -> void (goToSourceDefinition' sourcePath loc)
        (_, _, Just sp) -> void (selectSourceBuf sp)
        _ -> return ()
    where
        -- Cabal-file path of the package containing the module (note:
        -- the local 'pack' binding shadows the imported field accessor).
        packagePathFromScope :: GenScope -> Maybe FilePath
        packagePathFromScope (GenScopeC (PackScope l _)) =
            case mbMod of
                Just mod' -> case pack mod' `Map.lookup` l of
                    Just pack -> pdMbSourcePath pack
                    Nothing -> Nothing
                Nothing -> Nothing
        -- Source file recorded for the module inside its package's
        -- module list, if any.
        sourcePathFromScope :: GenScope -> Maybe FilePath
        sourcePathFromScope (GenScopeC (PackScope l _)) =
            case mbMod of
                Just mod' -> case pack mod' `Map.lookup` l of
                    Just pack ->
                        case filter (\md -> mdModuleId md == mod')
                                (pdModules pack) of
                            (mod'' : _) -> mdMbSourcePath mod''
                            [] -> Nothing
                    Nothing -> Nothing
                Nothing -> Nothing
-- | Like 'goToSourceDefinition'' but with the location's file taken
-- relative to the directory of the given package path.
goToSourceDefinition :: FilePath -> Location -> IDEM (Maybe IDEBuffer)
goToSourceDefinition packagePath loc =
    goToSourceDefinition' (packagePath </> locationFile loc) loc
-- | Open the given source file and select the span described by the
-- 'Location'.  Line and column indices are clamped to the actual buffer
-- contents before building the iterators.
goToSourceDefinition' :: FilePath -> Location -> IDEM (Maybe IDEBuffer)
goToSourceDefinition' sourcePath Location{..} = do
    mbBuf <- selectSourceBuf sourcePath
    case mbBuf of
        Just _ ->
            inActiveBufContext () $ \sv ebuf _ -> do
                liftIO $ debugM "lekash" "goToSourceDefinition calculating range"
                lines' <- getLineCount ebuf
                iterTemp <- getIterAtLine ebuf (max 0 (min (lines'-1)
                                                          (locationSLine -1)))
                chars <- getCharsInLine iterTemp
                iter <- atLineOffset iterTemp (max 0 (min (chars-1) (locationSCol -1)))
                iter2Temp <- getIterAtLine ebuf (max 0 (min (lines'-1) (locationELine -1)))
                chars2 <- getCharsInLine iter2Temp
                iter2 <- atLineOffset iter2Temp (max 0 (min (chars2-1) locationECol))
                -- ### we had a problem before using postAsyncIDEIdle
                -- (restored comment marker; line was comment text left in code)
                postAsyncIDEIdle $ do
                    liftIO $ debugM "lekash" "goToSourceDefinition triggered selectRange"
                    selectRange ebuf iter iter2
                    liftIO $ debugM "lekash" "goToSourceDefinition triggered scrollToIter"
                    scrollToIter sv iter 0.0 (Just (1.0,0.3))
                return ()
        Nothing -> return ()
    return mbBuf
-- | Insert the name of the given symbol descriptor at the cursor of the
-- most recently active source buffer (no-op when there is none, or when
-- the last active pane is not an 'IDEBuffer').
insertInBuffer :: Descr -> IDEAction
insertInBuffer idDescr = do
    mbPaneName <- lastActiveBufferPane
    case mbPaneName of
        Nothing -> return ()
        Just name -> do
            PaneC p <- paneFromName name
            -- Runtime downcast: only succeeds for IDEBuffer panes.
            let mbBuf = cast p
            case mbBuf of
                Nothing -> return ()
                Just IDEBuffer{sourceView = v} -> do
                    ebuf <- getBuffer v
                    mark <- getInsertMark ebuf
                    iter <- getIterAtMark ebuf mark
                    insert ebuf iter (dscName idDescr)
-- | Re-apply the editor style (theme/fonts) to a buffer's underlying text buffer.
updateStyle' :: IDEBuffer -> IDEAction
updateStyle' IDEBuffer {sourceView = sv} = getBuffer sv >>= updateStyle
-- | Remove the highlight tags for the given log-ref types from every
-- open buffer whose file appears in the map.
removeFromBuffers :: Map FilePath [LogRefType] -> IDEAction
removeFromBuffers removeDetails = do
    buffers <- allBuffers
    let matchingBufs = filter (maybe False (`Map.member` removeDetails) . fileName) buffers
    F.forM_ matchingBufs $ \ IDEBuffer {..} -> do
        buf <- getBuffer sourceView
        -- Tag names are the 'show' of the LogRefType.
        F.forM_ (maybe [] (fromMaybe [] . (`Map.lookup` removeDetails)) fileName) $
            removeTagByName buf . T.pack . show
-- | Remove matching log refs from the IDE state and clear their
-- highlights from any open buffers.
removeLogRefs :: (Log -> FilePath -> Bool) -> [LogRefType] -> IDEAction
removeLogRefs toRemove' types =
    removeLogRefs' toRemove' types removeFromBuffers
-- removeFileLogRefs :: FilePath -> [LogRefType] -> IDEAction
-- | Remove log refs of the given types for all files with a given
-- extension in the given log, clearing buffer highlights too.
removeFileExtLogRefs :: Log -> String -> [LogRefType] -> IDEAction
removeFileExtLogRefs log' fileExt types =
    removeFileExtLogRefs' log' fileExt types removeFromBuffers
-- removePackageLogRefs :: Log -> [LogRefType] -> IDEAction
-- | Remove build (error/warning) log refs for one file and clear their
-- buffer highlights.
removeBuildLogRefs :: FilePath -> IDEAction
removeBuildLogRefs file =
    removeBuildLogRefs' file removeFromBuffers
-- | Remove test-failure log refs belonging to the given log and clear
-- their buffer highlights.
removeTestLogRefs :: Log -> IDEAction
removeTestLogRefs log' =
    removeTestLogRefs' log' removeFromBuffers
-- | Remove lint (hlint) log refs for one file and clear their buffer
-- highlights.
removeLintLogRefs :: FilePath -> IDEAction
removeLintLogRefs file =
    removeLintLogRefs' file removeFromBuffers
-- | Register a new log ref and mark it in every open buffer showing the
-- file it refers to (without scrolling to it).
addLogRef :: Bool -> Bool -> LogRef -> IDEAction
addLogRef hlintFileScope backgroundBuild ref =
    addLogRef' hlintFileScope backgroundBuild ref $ do
        buffers <- allBuffers
        let matchingBufs = filter (maybe False (equalFilePath (logRefFullFilePath ref)) . fileName) buffers
        F.forM_ matchingBufs $ \ buf -> markRefInSourceBuf buf ref False
-- | Highlight a 'LogRef' (error / warning / lint / context span) in the
-- given buffer by applying the tag named after its 'LogRefType', and
-- optionally scroll the view to it.
markRefInSourceBuf :: IDEBuffer -> LogRef -> Bool -> IDEAction
markRefInSourceBuf buf@IDEBuffer{sourceView = sv} logRef scrollTo = traceTimeTaken "markRefInSourceBuf" $ do
    useCandy <- useCandyFor buf
    candy' <- readIDE candy
    contextRefs' <- readIDE contextRefs
    ebuf <- getBuffer sv
    -- Tag names are the 'show' of the LogRefType.
    let tagName = T.pack $ show (logRefType logRef)
    liftIO . debugM "lekash" . T.unpack $ "markRefInSourceBuf getting or creating tag " <> tagName
    liftIO $ debugM "lekash" "markRefInSourceBuf calculating range"
    -- Source span from the log ref, translated to candy positions when
    -- source candy is active for this buffer.
    let start' = (srcSpanStartLine (logRefSrcSpan logRef),
                  srcSpanStartColumn (logRefSrcSpan logRef))
    let end' = (srcSpanEndLine (logRefSrcSpan logRef),
                srcSpanEndColumn (logRefSrcSpan logRef))
    start <- if useCandy
                then positionToCandy candy' ebuf start'
                else return start'
    end <- if useCandy
                then positionToCandy candy' ebuf end'
                else return end'
    -- Clamp line/column to the actual buffer contents.
    lines' <- getLineCount ebuf
    iterTmp <- getIterAtLine ebuf (max 0 (min (lines'-1) (fst start - 1)))
    chars <- getCharsInLine iterTmp
    iter <- atLineOffset iterTmp (max 0 (min (chars-1) (snd start)))
    iter2 <- if start == end
        then do
            -- Zero-width span: extend to the end of the current word.
            maybeWE <- forwardWordEndC iter
            case maybeWE of
                Nothing -> atEnd iter
                Just we -> return we
        else do
            newTmp <- getIterAtLine ebuf (max 0 (min (lines'-1) (fst end - 1)))
            chars' <- getCharsInLine newTmp
            new <- atLineOffset newTmp (max 0 (min (chars'-1) (snd end)))
            forwardCharC new
    -- A context ref is "old" when it is no longer the newest one.
    let last' (Seq.viewr -> EmptyR) = Nothing
        last' (Seq.viewr -> _xs :> x) = Just x
        last' _ = Nothing
        latest = last' contextRefs'
        isOldContext = case (logRefType logRef, latest) of
                            (ContextRef, Just ctx) | ctx /= logRef -> True
                            _ -> False
    unless isOldContext $ do
        liftIO $ debugM "lekash" "markRefInSourceBuf calling applyTagByName"
        -- Margin mark; description is truncated to 30 lines plus "...".
        traceTimeTaken "createMark" $ createMark sv (logRefType logRef) iter . T.unlines
            . zipWith ($) (replicate 30 id <> [const "..."]) . T.lines $ refDescription logRef
        traceTimeTaken "applyTagByName" $ applyTagByName ebuf tagName iter iter2
    when scrollTo $ do
        liftIO $ debugM "lekash" "markRefInSourceBuf triggered placeCursor"
        placeCursor ebuf iter
        mark <- getInsertMark ebuf
        liftIO $ debugM "lekash" "markRefInSourceBuf trigged scrollToMark"
        scrollToMark sv mark 0.3 Nothing
    when isOldContext $ selectRange ebuf iter iter2
-- | Remove the highlight tag for one log ref from the given buffer.
unmarkRefInSourceBuf :: IDEBuffer -> LogRef -> IDEAction
unmarkRefInSourceBuf IDEBuffer {sourceView = sv} logRef = do
    buf <- getBuffer sv
    removeTagByName buf (T.pack $ show (logRefType logRef))
-- | Tries to create a new text buffer; fails (returning Nothing) when the given filepath
-- cannot be read (an error message is shown in that case).
newTextBuffer :: PanePath -> Text -> Maybe FilePath -> IDEM (Maybe IDEBuffer)
newTextBuffer panePath bn mbfn =
    case mbfn of
        -- No file: start with an empty, unsaved buffer.
        Nothing -> buildPane' "" Nothing
        Just fn ->
            do eErrorContents <- liftIO $
                    catch (Right <$> T.readFile fn)
                          (\e -> return $ Left (show (e :: IOError)))
               case eErrorContents of
                   Right contents -> do
                       modTime <- liftIO $ getModificationTime fn
                       buildPane' contents (Just modTime)
                   Left err -> do
                       ideMessage Normal (__ "Error reading file " <> T.pack err)
                       return Nothing
    -- Build the actual pane via 'builder'' with the loaded contents and
    -- the file's modification time (if any).
    where buildPane' contents mModTime = do
              nb <- getNotebook panePath
              prefs' <- readIDE prefs
              let useCandy = candyState prefs'
              ct <- readIDE candy
              (ind,rbn) <- figureOutPaneName bn
              buildThisPane panePath nb (builder' useCandy mbfn ind bn rbn ct prefs' contents mModTime)
-- | Rough lexical class of a character, used to detect word/identifier
-- boundaries for word-wise cursor movement.
data CharacterCategory = IdentifierCharacter | SpaceCharacter | SyntaxCharacter
    deriving (Eq)
-- | Classify a character for word-boundary detection.  'Nothing'
-- (no character, e.g. buffer end) counts as white space.
getCharacterCategory :: Maybe Char -> CharacterCategory
getCharacterCategory mbChar =
    case mbChar of
        Nothing -> SpaceCharacter
        Just ch
            | isIdentChar ch -> IdentifierCharacter
            | isSpace ch     -> SpaceCharacter
            | otherwise      -> SyntaxCharacter
  where
    -- Identifier characters: alphanumerics plus prime and underscore.
    isIdentChar ch = isAlphaNum ch || ch == '\'' || ch == '_'
-- | Build the editor widget, scrolled window and all event handlers for
-- a source buffer pane.  The editor backend is chosen from the
-- preferences ("GtkSourceView", "Yi", "CodeMirror", or the default).
builder' :: Bool ->            -- ^ use source candy
    Maybe FilePath ->          -- ^ file backing the buffer, if any
    Int ->                     -- ^ added index for duplicate pane names
    Text ->                    -- ^ buffer name
    Text ->                    -- ^ real pane name (unused here)
    CandyTable ->
    Prefs ->
    Text ->                    -- ^ initial file contents
    Maybe UTCTime ->           -- ^ modification time of the file
    PanePath ->
    Gtk.Notebook ->
    Gtk.Window ->
    IDEM (Maybe IDEBuffer,Connections)
builder' useCandy mbfn ind bn _rbn _ct prefs' fileContents modTime _pp _nb _windows =
    case textEditorType prefs' of
        "GtkSourceView" -> newGtkBuffer mbfn fileContents >>= makeBuffer
        "Yi" -> newYiBuffer mbfn fileContents >>= makeBuffer
        "CodeMirror" -> newCMBuffer mbfn fileContents >>= makeBuffer
        _ -> newDefaultBuffer mbfn fileContents >>= makeBuffer
  where
    makeBuffer :: TextEditor editor => EditorBuffer editor -> IDEM (Maybe IDEBuffer,Connections)
    makeBuffer buffer = do
        liftIO $ debugM "lekash" "makeBuffer"
        ideR <- ask
        -- Apply source candy without polluting the undo history.
        beginNotUndoableAction buffer
        let mode = modeFromFileName mbfn
        when (useCandy && isHaskellMode mode) $ modeTransformToCandy mode
            (modeEditInCommentOrString mode) buffer
        endNotUndoableAction buffer
        setModified buffer False
        siter <- getStartIter buffer
        placeCursor buffer siter
        (sv, sw, grid) <- newViewWithMap buffer (textviewFont prefs')
        homeDir <- liftIO getHomeDirectory
        -- Files inside the unpack directory come from other packages and
        -- are opened read-only.
        let isEditable = fromMaybe True $ do
                dir <- unpackDirectory prefs'
                let expandedDir = case dir of
                        '~':rest -> homeDir ++ rest
                        rest -> rest
                file <- mbfn
                return (not $ splitDirectories expandedDir `isPrefixOf` splitDirectories file)
        setEditable sv isEditable
        setShowLineNumbers sv $ showLineNumbers prefs'
        setRightMargin sv $ case rightMargin prefs' of
            (False,_) -> Nothing
            (True,v) -> Just v
        setIndentWidth sv $ tabWidth prefs'
        -- GHC treats tabs as 8 we should display them that way
        drawTabs sv
        updateStyle buffer
        if wrapLines prefs'
            then scrolledWindowSetPolicy sw PolicyTypeNever PolicyTypeAutomatic
            else scrolledWindowSetPolicy sw PolicyTypeAutomatic PolicyTypeAutomatic
        liftIO $ debugM "lekash" "makeBuffer setScrolledWindowShadowType"
        setScrolledWindowShadowType sw ShadowTypeIn
        liftIO $ debugM "lekash" "makeBuffer setScrolledWindowShadowType done"
        box <- boxNew OrientationVertical 0
        unless isEditable $ liftIO $ do
            bar <- infoBarNew
            lab <- labelNew (Just "This file is opened in read-only mode because it comes from a non-local package")
            area <- infoBarGetContentArea bar >>= unsafeCastTo Container
            containerAdd area lab
            -- infoBarAddButton bar "Enable editing" (fromIntegral . fromEnum $ ResponseTypeReject)
            boxPackStart box bar False False 0
            widgetShow bar
        boxPackStart box grid True True 0
        reloadDialog <- liftIO $ newMVar Nothing
        modTimeRef <- liftIO $ newIORef modTime
        modifiedOnDiskRef <- liftIO $ newIORef False
        let buf = IDEBuffer {
            fileName = mbfn,
            bufferName = bn,
            addedIndex = ind,
            sourceView =sv,
            vBox = box,
            modTime = modTimeRef,
            modifiedOnDisk = modifiedOnDiskRef,
            mode = mode,
            reloadDialog = reloadDialog}
        -- Focus, completion, mouse and keyboard handlers.
        ids1 <- afterFocusIn sv $ makeActive buf
        ids2 <- onCompletion sv (Completion.complete sv False) Completion.cancel
        ids3 <- onButtonPress sv $ do
            e <- lift ask
            click <- getEventButtonType e
            liftIDE $
                case click of
                    -- Double click selects the identifier under the cursor.
                    EventType2buttonPress -> do
                        (start, end) <- getIdentifierUnderCursor buffer
                        selectRange buffer start end
                        return True
                    _ -> return False
        (GtkEvent (GetTextPopup mbTpm)) <- triggerEvent ideR (GtkEvent $ GetTextPopup Nothing)
        ids4 <- case mbTpm of
            Just tpm -> sv `onPopulatePopup` \menu -> liftIO $ tpm ideR menu
            Nothing -> do
                sysMessage Normal "SourceBuffer>> no text popup"
                return []
        -- Highlight other whole-word occurrences of the selected text.
        hasMatch <- liftIO $ newIORef False
        ids5 <- onSelectionChanged buffer $ do
            (iStart, iEnd) <- getSelectionBounds buffer
            lStart <- (+1) <$> getLine iStart
            cStart <- getLineOffset iStart
            lEnd <- (+1) <$> getLine iEnd
            cEnd <- getLineOffset iEnd
            triggerEventIDE_ . SelectSrcSpan $
                case mbfn of
                    Just fn -> Just (SrcSpan fn lStart cStart lEnd cEnd)
                    Nothing -> Nothing
            let tagName = "selection-match"
            hasSel <- hasSelection buffer
            m <- liftIO $ readIORef hasMatch
            when m $ removeTagByName buffer tagName
            r <- if hasSel
                then do
                    candy' <- readIDE candy
                    sTxt <- getCandylessPart candy' buffer iStart iEnd
                    let strippedSTxt = T.strip sTxt
                    if T.null strippedSTxt
                        then return False
                        else do
                            -- Search before and after the selection itself.
                            bi1 <- getStartIter buffer
                            bi2 <- getEndIter buffer
                            r1 <- forwardApplying bi1 strippedSTxt (Just iStart) tagName buffer
                            r2 <- forwardApplying iEnd strippedSTxt (Just bi2) tagName buffer
                            return (r1 || r2)
                else return False
            liftIO $ writeIORef hasMatch r
            return ()
        ids6 <- onKeyPress sv $ do
            e <- lift ask
            keyval <- getEventKeyKeyval e
            name <- keyvalName keyval
            modifier <- getEventKeyState e
            liftIDE $ do
                -- Step with iterOp until a word boundary ('isRangeStart').
                let moveToNextWord iterOp sel = do
                        sel' <- iterOp sel
                        rs <- isRangeStart sel'
                        if rs then return sel' else moveToNextWord iterOp sel'
                let calculateNewPosition iterOp = getInsertIter buffer >>= moveToNextWord iterOp
                -- Move the cursor, optionally keeping the selection anchor.
                let continueSelection keepSelBound nsel = do
                        if keepSelBound
                            then do
                                sb <- getSelectionBoundMark buffer >>= getIterAtMark buffer
                                selectRange buffer nsel sb
                            else
                                placeCursor buffer nsel
                        scrollToIter sv nsel 0 Nothing
                case (name, map mapControlCommand modifier, keyval) of
                    (Just "Left",[ModifierTypeControlMask],_) -> do
                        calculateNewPosition backwardCharC >>= continueSelection False
                        return True
                    (Just "Left",[ModifierTypeShiftMask, ModifierTypeControlMask],_) -> do
                        calculateNewPosition backwardCharC >>= continueSelection True
                        return True
                    (Just "Right",[ModifierTypeControlMask],_) -> do
                        -- NOTE(review): restored; this statement was missing in
                        -- the corrupted source (Ctrl+Right moves a word right).
                        calculateNewPosition forwardCharC >>= continueSelection False
                        return True
                    (Just "Right",[ModifierTypeControlMask, ModifierTypeControlMask],_) -> do
                        -- NOTE(review): modifier list looks garbled (duplicate
                        -- ControlMask; presumably Shift+Control) — TODO confirm.
                        calculateNewPosition forwardCharC >>= continueSelection True
                        return True
                    (Just "BackSpace",[ModifierTypeControlMask],_) -> do
                        -- NOTE(review): restored case header missing in the
                        -- corrupted source (Ctrl+BackSpace deletes to the
                        -- previous word boundary).
                        here <- getInsertIter buffer
                        there <- calculateNewPosition backwardCharC
                        delete buffer here there
                        return True
                    (Just "underscore",[ModifierTypeControlMask, ModifierTypeControlMask],_) -> do
                        selectInfo buf buffer sv True False
                        return True
                    (Just "minus",[ModifierTypeControlMask],_) -> do
                        selectInfo buf buffer sv True True
                        return True
                    (Just "Return", [], _) ->
                        -- Plain Return indents smartly, unless completion is open.
                        readIDE currentState >>= \case
                            IsCompleting _ -> return False
                            _ -> smartIndent sv >> return True
                    (Just key, _, _) | key `elem`
                        ["Tab", "Return", "Down", "Up", "BackSpace"
                        ,"Shift_L", "Shift_R", "Super_L", "Super_R"] -> return False
                    _ -> do
                        w <- getEditorWidget sv
                        bindingsActivateEvent w e
        ids7 <-
            createHyperLinkSupport sv sw
                (\ctrl _shift iter -> do
                    (beg, en) <- getIdentifierUnderCursorFromIter (iter, iter)
                    when ctrl $ selectInfo' buf buffer sv beg en False False
                    return (beg, if ctrl then en else beg))
                (\_ _shift (beg, en) -> selectInfo' buf buffer sv beg en True True)
        return (Just buf,concat [ids1, ids2, ids3, ids4, ids5, ids6, ids7])
-- | Search forward from the given iter for the text (up to the optional
-- limit iter) and apply the named tag to every whole-word occurrence.
-- Returns True when at least one occurrence was tagged.
--
-- Fix: the original signature had only three parameters while the body
-- binds five — the two 'Text' parameters (search text and tag name) had
-- been dropped from the type.
forwardApplying :: TextEditor editor
                => EditorIter editor
                -> Text                        -- ^ text to search for
                -> Maybe (EditorIter editor)   -- ^ search limit
                -> Text                        -- ^ tag name to apply
                -> EditorBuffer editor
                -> IDEM Bool
forwardApplying tI txt mbTi tagName ebuf = do
    mbFTxt <- forwardSearch tI txt [TextSearchFlagsVisibleOnly, TextSearchFlagsTextOnly] mbTi
    case mbFTxt of
        Just (start, end) -> do
            -- Only tag matches that are whole words.
            startsW <- startsWord start
            endsW <- endsWord end
            when (startsW && endsW) $
                applyTagByName ebuf tagName start end
            (|| (startsW && endsW)) <$> forwardApplying end txt mbTi tagName ebuf
        Nothing -> return False
-- | True when the iter sits at the start of a word-movement "range":
-- end of buffer, a newline, or a category transition onto an identifier
-- or syntax character.
--
-- Fix: the binding line @isRangeStart sel = do@ was missing in the
-- corrupted source (the body referenced an unbound @sel@).
isRangeStart
    :: TextEditor editor
    => EditorIter editor
    -> IDEM Bool
isRangeStart sel = do
    currentChar <- getChar sel
    let mbStartCharCat = getCharacterCategory currentChar
    mbPrevCharCat <- getCharacterCategory <$> (backwardCharC sel >>= getChar)
    return $ isNothing currentChar || currentChar == Just '\n' || mbStartCharCat /= mbPrevCharCat && (mbStartCharCat == SyntaxCharacter || mbStartCharCat == IdentifierCharacter)
-- | Iterators spanning the identifier (or operator) under the current
-- selection/cursor of the buffer.
getIdentifierUnderCursor :: forall editor. TextEditor editor => EditorBuffer editor -> IDEM (EditorIter editor, EditorIter editor)
getIdentifierUnderCursor buffer = do
    (startSel, endSel) <- getSelectionBounds buffer
    getIdentifierUnderCursorFromIter (startSel, endSel)
-- | Expand the pair of iters outwards to cover the whole identifier
-- (alphanumeric/'/_/., qualified names included) or operator under them.
getIdentifierUnderCursorFromIter :: TextEditor editor => (EditorIter editor, EditorIter editor) -> IDEM (EditorIter editor, EditorIter editor)
getIdentifierUnderCursorFromIter (startSel, endSel) = do
    let isIdent a = isAlphaNum a || a == '\'' || a == '_'
    let isOp    a = isSymbol a || a == ':' || a == '\\' || a == '*' || a == '/' || a == '-'
                 || a == '!' || a == '@' || a == '%' || a == '&' || a == '?'
    mbStartChar <- getChar startSel
    mbEndChar <- getChar endSel
    -- Character class of the start position decides what counts as part
    -- of the selection ('.' is allowed inside identifiers for qualified names).
    let isSelectChar =
            case mbStartChar of
                Just startChar | isIdent startChar -> \a -> isIdent a || a == '.'
                Just startChar | isOp    startChar -> isOp
                _                                  -> const False
    start <- case mbStartChar of
        Just startChar | isSelectChar startChar -> do
            maybeIter <- backwardFindCharC startSel (not.isSelectChar) Nothing
            case maybeIter of
                Just iter -> forwardCharC iter
                Nothing   -> return startSel
        _ -> return startSel
    end <- case mbEndChar of
        Just endChar | isSelectChar endChar -> do
            maybeIter <- forwardFindCharC endSel (not.isSelectChar) Nothing
            case maybeIter of
                Just iter -> return iter
                Nothing   -> return endSel
        _ -> return endSel
    return (start, end)
-- | Flag every open buffer for the given file as "modified on disk".
-- Returns True when at least one matching buffer was found.
setModifiedOnDisk :: MonadIDE m => FilePath -> m Bool
setModifiedOnDisk fp = do
    matching <- findSourceBuf fp
    mapM_ (\b -> liftIO $ writeIORef (modifiedOnDisk b) True) matching
    return (not (null matching))
-- | If the buffer's file changed on disk since it was loaded, reload it
-- (automatically when the auto-load preference is on, otherwise after
-- asking the user).  Returns True when the file had changed.
--
-- Fix: the conditional line @if nmt /= mt@ was missing in the corrupted
-- source — the @then@/@else@ at the end of the block had no @if@.
checkModTime :: MonadIDE m => IDEBuffer -> m Bool
checkModTime buf = do
    currentState' <- readIDE currentState
    case currentState' of
        IsShuttingDown -> return False
        _ ->
            liftIO (readIORef (modifiedOnDisk buf)) >>= \case
                False -> return False
                True -> do
                    liftIO $ writeIORef (modifiedOnDisk buf) False
                    let name = paneName buf
                    case fileName buf of
                        Just fn -> do
                            exists <- liftIO $ doesFileExist fn
                            if exists
                                then do
                                    nmt <- liftIO $ getModificationTime fn
                                    modTime' <- liftIO $ readIORef (modTime buf)
                                    case modTime' of
                                        Nothing -> error $ "checkModTime: time not set " ++ show (fileName buf)
                                        Just mt ->
                                            if nmt /= mt
                                                then do
                                                    load <- readIDE (prefs . to autoLoad)
                                                    if load
                                                        then do
                                                            ideMessage Normal $ __ "Auto Loading " <> T.pack fn
                                                            revert buf
                                                            return True
                                                        else
                                                            -- Only one reload dialog per buffer at a time.
                                                            liftIO (takeMVar $ reloadDialog buf) >>= \case
                                                                Just md -> do
                                                                    liftIO $ putMVar (reloadDialog buf) (Just md)
                                                                    return True
                                                                Nothing -> do
                                                                    window <- liftIDE getMainWindow
                                                                    md <- new' MessageDialog [
                                                                        constructDialogUseHeaderBar 0,
                                                                        constructMessageDialogButtons ButtonsTypeNone]
                                                                    liftIO $ putMVar (reloadDialog buf) (Just md)
                                                                    setMessageDialogMessageType md MessageTypeQuestion
                                                                    setMessageDialogText md (__ "File \"" <> name <> __ "\" has changed on disk.")
                                                                    windowSetTransientFor md (Just window)
                                                                    _ <- dialogAddButton' md (__ "_Load From Disk") (AnotherResponseType 1)
                                                                    _ <- dialogAddButton' md (__ "_Always Load From Disk") (AnotherResponseType 2)
                                                                    _ <- dialogAddButton' md (__ "_Don't Load") (AnotherResponseType 3)
                                                                    dialogSetDefaultResponse' md (AnotherResponseType 1)
                                                                    setWindowWindowPosition md WindowPositionCenterOnParent
                                                                    widgetShowAll md
                                                                    ideR <- liftIDE ask
                                                                    -- Handle the response asynchronously so the dialog
                                                                    -- does not block the IDE.
                                                                    _ <- onDialogResponse md $ \n32 -> (`reflectIDE` ideR) $ do
                                                                        liftIO $ modifyMVar_ (reloadDialog buf) . const $ return Nothing
                                                                        widgetDestroy md
                                                                        case toEnum (fromIntegral n32) of
                                                                            AnotherResponseType 1 ->
                                                                                revert buf
                                                                            AnotherResponseType 2 -> do
                                                                                revert buf
                                                                                modifyIDE_ $ prefs %~ (\p -> p {autoLoad = True})
                                                                            AnotherResponseType 3 -> dontLoad fn
                                                                            ResponseTypeDeleteEvent -> dontLoad fn
                                                                            _ -> return ()
                                                                    return True
                                                else return False
                                else return False
                        Nothing -> return False
  where
    -- Remember the on-disk time so we do not keep asking about the same change.
    dontLoad fn = do
        nmt2 <- liftIO $ getModificationTime fn
        liftIO $ writeIORef (modTime buf) (Just nmt2)
-- | Record the file's current on-disk modification time in the buffer
-- (best effort: exceptions are logged, not raised).
setModTime :: IDEBuffer -> IDEAction
setModTime buf =
    case fileName buf of
        Nothing -> return ()
        Just fn -> liftIO $ E.catch
            (do
                nmt <- getModificationTime fn
                writeIORef (modTime buf) (Just nmt))
            (\(e:: SomeException) -> do
                sysMessage Normal (T.pack $ show e)
                return ())
-- | Reload the active buffer from its file on disk.
fileRevert :: IDEAction
fileRevert = inActiveBufContext () $ \_ _ currentBuffer ->
    revert currentBuffer
-- | Replace the buffer's contents with the file on disk (no-op for
-- buffers without a file), reapplying source candy and updating the
-- stored modification time.  The reload is not undoable.
revert :: MonadIDE m => IDEBuffer -> m ()
revert buf@IDEBuffer{sourceView = sv} = do
    useCandy <- useCandyFor buf
    case fileName buf of
        Nothing -> return ()
        Just fn -> liftIDE $ do
            buffer <- getBuffer sv
            fc <- liftIO $ readFile fn
            mt <- liftIO $ getModificationTime fn
            beginNotUndoableAction buffer
            setText buffer $ T.pack fc
            when useCandy $
                modeTransformToCandy (mode buf)
                    (modeEditInCommentOrString (mode buf))
                    buffer
            endNotUndoableAction buffer
            setModified buffer False
            liftIO $ writeIORef (modTime buf) (Just mt)
-- | Publish the current cursor line/column to the statusbar compartment.
writeCursorPositionInStatusbar :: TextEditor editor => EditorView editor -> IDEAction
writeCursorPositionInStatusbar sv = do
    ebuf <- getBuffer sv
    insMark <- getInsertMark ebuf
    pos <- getIterAtMark ebuf insMark
    ln <- getLine pos
    cl <- getLineOffset pos
    triggerEventIDE_ (StatusbarChanged [CompartmentBufferPos (ln, cl)])
    return ()
-- | Publish the view's overwrite/insert state to the statusbar compartment.
writeOverwriteInStatusbar :: TextEditor editor => EditorView editor -> IDEAction
writeOverwriteInStatusbar sv = do
    overwriteOn <- getOverwrite sv
    triggerEventIDE_ (StatusbarChanged [CompartmentOverlay overwriteOn])
    return ()
-- | Fire a 'SelectInfo' event for the (de-candied) text between the two
-- iters, including its screen coordinates so info can pop up next to it.
selectInfo' :: TextEditor e => IDEBuffer -> EditorBuffer e -> EditorView e -> EditorIter e -> EditorIter e -> Bool -> Bool -> IDEAction
selectInfo' buf ebuf view start end activatePanes gotoSource = do
    candy' <- readIDE candy
    sTxt <- getCandylessPart candy' ebuf start end
    startPos <- getLocation buf ebuf start
    endPos <- getLocation buf ebuf end
    unless (T.null sTxt) $ do
        -- Translate buffer coordinates to absolute screen coordinates.
        rect <- getIterLocation view end
        bx <- getRectangleX rect
        by <- getRectangleY rect
        (x, y) <- bufferToWindowCoords view (fromIntegral bx, fromIntegral by)
        getWindow view >>= \case
            Nothing -> return ()
            Just drawWindow -> do
                (_, ox, oy) <- windowGetOrigin drawWindow
                triggerEventIDE_ (SelectInfo (SymbolEvent sTxt ((, startPos, endPos) <$> fileName buf) activatePanes gotoSource (ox + fromIntegral x, oy + fromIntegral y)))
-- | Fire symbol info for the identifier currently under the cursor.
selectInfo :: TextEditor e => IDEBuffer -> EditorBuffer e -> EditorView e -> Bool -> Bool -> IDEAction
selectInfo buf ebuf view activatePanes gotoSource = do
    (identStart, identEnd) <- getIdentifierUnderCursor ebuf
    selectInfo' buf ebuf view identStart identEnd activatePanes gotoSource
-- | Update the tab label of the active buffer to reflect its
-- modified state (no-op when no pane is active).
markActiveLabelAsChanged :: IDEAction
markActiveLabelAsChanged =
    getActivePanePath >>= \case
        Nothing -> return ()
        Just path -> do
            nb <- getNotebook path
            maybeActiveBuf >>= F.mapM_ (markLabelAsChanged nb)
-- | Update one buffer's notebook tab label with its modified state.
markLabelAsChanged :: Notebook -> IDEBuffer -> IDEAction
markLabelAsChanged nb buf@IDEBuffer{sourceView = sv} = do
    liftIO $ debugM "leksah" "markLabelAsChanged"
    ebuf <- getBuffer sv
    modified <- getModified ebuf
    w <- getTopWidget buf
    markLabel nb w modified
-- | Save a buffer to disk.  With @query@ (or when the buffer has no
-- file yet) a "Save File" chooser is shown first.  Returns True when
-- the buffer was (or will be) written.
--
-- Fix: the binding @modifiedOnDisk <- checkModTime ideBuf@ was missing
-- in the corrupted source — 'modifiedOnDisk' is used as a Bool below,
-- but only the record accessor of that name was in scope.
fileSaveBuffer :: (MonadIDE m, TextEditor editor) => Bool -> Notebook -> EditorView editor -> EditorBuffer editor -> IDEBuffer -> Int -> m Bool
fileSaveBuffer query nb _ ebuf ideBuf@IDEBuffer{sourceView = sv} _i = liftIDE $ do
    window <- getMainWindow
    prefs' <- readIDE prefs
    useCandy <- useCandyFor ideBuf
    candy' <- readIDE candy
    (panePath,_connects) <- guiPropertiesFromName (paneName ideBuf)
    case fileName ideBuf of
        Just fn | not query -> do
            modifiedOnDisk <- checkModTime ideBuf
            modifiedInBuffer <- getModified ebuf
            if modifiedInBuffer
                then do
                    fileSave' (forceLineEnds prefs') (removeTBlanks prefs')
                        useCandy candy' fn
                    setModTime ideBuf
                    return True
                else return modifiedOnDisk
        mbfn -> reifyIDE $ \ideR -> do
            -- No file yet, or an explicit "save as": ask for a file name.
            dialog <- new' FileChooserDialog [constructDialogUseHeaderBar 1]
            setWindowTitle dialog (__ "Save File")
            windowSetTransientFor dialog $ Just window
            fileChooserSetAction dialog FileChooserActionSave
            _ <- dialogAddButton' dialog "gtk-cancel" ResponseTypeCancel
            _ <- dialogAddButton' dialog "gtk-save" ResponseTypeAccept
            forM_ mbfn $ fileChooserSelectFilename dialog
            widgetShow dialog
            response <- dialogRun' dialog
            mbFileName <- case response of
                ResponseTypeAccept -> fileChooserGetFilename dialog
                ResponseTypeCancel -> return Nothing
                ResponseTypeDeleteEvent -> return Nothing
                _ -> return Nothing
            widgetDestroy dialog
            case mbFileName of
                Nothing -> return False
                Just fn -> do
                    fileExists <- doesFileExist fn
                    shouldOverwrite <-
                        if not fileExists
                            then return True
                            else showConfirmDialog (Just window) False (__ "_Overwrite") (__ "File already exist.")
                    when shouldOverwrite $ do
                        -- Save, then reopen under the canonical new name so
                        -- the pane is properly associated with the file.
                        reflectIDE (do
                            fileSave' (forceLineEnds prefs') (removeTBlanks prefs')
                                useCandy candy' fn
                            _ <- closePane ideBuf
                            cfn <- liftIO $ myCanonicalizePath fn
                            void $ newTextBuffer panePath (T.pack $ takeFileName cfn) (Just cfn)
                            ) ideR
                    return shouldOverwrite
  where
    -- Write the (de-candied) buffer text to the file and update the
    -- modified markers.
    fileSave' :: Bool -> Bool -> Bool -> CandyTable -> FilePath -> IDEAction
    fileSave' _forceLineEnds removeTBlanks _useCandy candyTable fn = do
        buf <- getBuffer sv
        text <- getCandylessText candyTable buf
        let text' = if removeTBlanks
                        then T.unlines $ map (T.dropWhileEnd $ \c -> c == ' ') $ T.lines text
                        else text
        alreadyExists <- liftIO $ doesFileExist fn
        mbModTimeBefore <- if alreadyExists
            then liftIO $ Just <$> getModificationTime fn
            else return Nothing
        succ' <- liftIO $ E.catch (do T.writeFile fn text'; return True)
            (\(e :: SomeException) -> do
                sysMessage Normal . T.pack $ show e
                return False)
        -- Truely horrible hack to work around HFS+ only having 1sec resolution
        -- and ghc ignoring files unless the modifiction time has moved forward.
        -- The limitation means we can do at most 1 reload a second, but
        -- this hack allows us to take an advance of up to 30 reloads (by
        -- moving the modidification time up to 30s into the future).
        modTimeChanged <- liftIO $ case mbModTimeBefore of
            Nothing -> return True
            Just modTime -> do
                newModTime <- getModificationTime fn
                let diff = diffUTCTime modTime newModTime
                if
                    | diff < 30 -> do
                        setModificationTimeOnOSX fn (addUTCTime 1 modTime)
                        updatedModTime <- getModificationTime fn
                        return (updatedModTime > modTime)
                    | diff < 32 -> do
                        -- Using 32 instead of 31 in case or something is adjusting the clock back.
                        warningM "leksah" $ "Modification time for " <> fn
                            <> " was already " <> show (diffUTCTime modTime newModTime)
                            <> " in the future"
                        -- If it went back the future date ghc has might cause it to
                        setModificationTimeOnOSX fn modTime
                        return False
                    | otherwise -> do
                        errorM "leksah" $ "Modification time for " <> fn
                            <> " was already " <> show (diffUTCTime modTime newModTime)
                            <> " in the future"
                        return True
        -- otherwise another save is really needed to trigger ghc.
        when modTimeChanged $ do
            setModified buf (not succ')
            markLabelAsChanged nb ideBuf
            triggerEventIDE_ $ SavedFile fn
-- | Save the active buffer; with True always ask for a file name.
fileSave :: Bool -> IDEM Bool
fileSave query = inActiveBufContext' False $ fileSaveBuffer query
-- | Save every open buffer accepted by the filter that needs saving.
-- Returns True when at least one buffer was saved.
fileSaveAll :: MonadIDE m => (IDEBuffer -> m Bool) -> m Bool
fileSaveAll filterFunc = do
    bufs <- allBuffers
    filtered <- filterM filterFunc bufs
    modified <- filterM fileCheckBuffer filtered
    results <- forM modified (\buf -> inBufContext False buf (fileSaveBuffer False))
    return $ True `elem` results
-- | Does this buffer need attention?  True when it is backed by a file
-- and is either modified in memory or has changed on disk.
--
-- Fix: the binding @modifiedOnDisk <- checkModTime ideBuf@ was missing
-- in the corrupted source — 'modifiedOnDisk' is used as a Bool below,
-- but only the record accessor of that name was in scope.
fileCheckBuffer :: (MonadIDE m) => IDEBuffer -> m Bool
fileCheckBuffer ideBuf@IDEBuffer{sourceView = v} =
    case fileName ideBuf of
        Just _fn -> do
            modifiedOnDisk <- checkModTime ideBuf
            modifiedInBuffer <- liftIDE $ getModified =<< getBuffer v
            return (modifiedOnDisk || modifiedInBuffer)
        _ -> return False
-- | For every open buffer, collect the filter's results but only for
-- buffers that actually need saving/reloading (see 'fileCheckBuffer').
fileCheckAll :: MonadIDE m => (IDEBuffer -> m [alpha]) -> m [alpha]
fileCheckAll filterFunc = do
    bufs <- allBuffers
    fmap concat . forM bufs $ \ buf -> do
        ps <- filterFunc buf
        case ps of
            [] -> return []
            _ -> do
                modified <- fileCheckBuffer buf
                if modified
                    then return ps
                    else return []
-- | Open a fresh unnamed buffer in the best pane path for buffers.
fileNew :: IDEAction
fileNew = do
    targetPath <- getBestPathForId "*Buffer"
    void $ newTextBuffer targetPath (__ "Unnamed") Nothing
-- | Close the active buffer, asking about unsaved changes.
--   Returns 'False' when the user cancels.
fileClose :: IDEM Bool
fileClose = inActiveBufContext True fileClose'
-- | Close the given buffer pane.  If the buffer has unsaved changes the
--   user may save, discard or cancel; returns 'False' on cancel.
fileClose' :: TextEditor editor => EditorView editor -> EditorBuffer editor -> IDEBuffer -> IDEM Bool
fileClose' _ ebuf currentBuffer = do
    window <- getMainWindow
    modified <- getModified ebuf
    shouldContinue <- reifyIDE $ \ideR ->
        if modified
            then do
                resp <- showDialogAndGetResponse
                    (Just window)
                    (__ "Save changes to document: "
                        <> paneName currentBuffer
                        <> "?")
                    MessageTypeQuestion
                    ResponseTypeYes
                    [ constructDialogUseHeaderBar 0, constructMessageDialogButtons ButtonsTypeCancel ]
                    [ (__ "_Save", ResponseTypeYes)
                    , (__ "_Don't Save", ResponseTypeNo) ]
                case resp of
                    ResponseTypeYes -> do
                        _ <- reflectIDE (fileSave False) ideR
                        return True
                    ResponseTypeNo -> return True
                    ResponseTypeCancel -> return False
                    _ -> return False
            else return True
    -- Only tear the pane down (and remember the file as recently used)
    -- when the user did not cancel.
    when shouldContinue $ do
        _ <- closeThisPane currentBuffer
        F.forM_ (fileName currentBuffer) addRecentlyUsedFile
    return shouldContinue
-- | Close every buffer accepted by the filter, one at a time, stopping
--   (and returning 'False') as soon as the user cancels one close.
fileCloseAll :: (IDEBuffer -> IDEM Bool) -> IDEM Bool
fileCloseAll filterFunc =
    allBuffers >>= filterM filterFunc >>= \case
        [] -> return True
        (h:_) -> do
            makeActive h
            closed <- fileClose
            if closed
                then fileCloseAll filterFunc
                else return False
-- | Close every unmodified, file-backed buffer whose file lies outside
--   the active package's directory.  Does nothing without an active package.
fileCloseAllButPackage :: IDEAction
fileCloseAllButPackage = do
    mbActivePath <- fmap ipdPackageDir <$> readIDE activePack
    bufs <- allBuffers
    case mbActivePath of
        Just p -> mapM_ (close' p) bufs
        Nothing -> return ()
    where
        close' dir buf@IDEBuffer{sourceView = sv} = do
            ebuf <- getBuffer sv
            when (isJust (fileName buf)) $ do
                modified <- getModified ebuf
                when (not modified && not (isSubPath dir (fromJust (fileName buf))))
                    $ void $ fileClose' sv ebuf buf
-- | Close every unmodified, file-backed buffer whose file belongs to no
--   package of the current workspace.  Does nothing without a workspace.
fileCloseAllButWorkspace :: IDEAction
fileCloseAllButWorkspace = do
    bufs <- allBuffers
    readIDE workspace >>= mapM_ (\ws ->
        unless (null bufs) $ mapM_ (close' ws) bufs)
    where
        close' ws buf@IDEBuffer{sourceView = sv} = do
            ebuf <- getBuffer sv
            when (isJust (fileName buf)) $ do
                modified <- getModified ebuf
                when (not modified && not (isSubPathOfAny ws (fromJust (fileName buf))))
                    $ void $ fileClose' sv ebuf buf
        -- Is the file under any package directory of the workspace?
        isSubPathOfAny ws fileName =
            let paths = ipdPackageDir <$> (ws ^. wsPackages)
            in any (`isSubPath` fileName) paths
-- | Open the given file in a new buffer.  When the file is already open
--   the user may activate the existing buffer or open a second view.
fileOpenThis :: FilePath -> IDEAction
fileOpenThis fp = do
    liftIO . debugM "leksah" $ "fileOpenThis " ++ fp
    fpc <- liftIO $ myCanonicalizePath fp
    -- NOTE(review): existing buffers are looked up with the raw path while
    -- the canonicalized path is used for opening -- confirm this asymmetry.
    findSourceBuf fp >>= \case
        hdb:_ -> do
            window <- getMainWindow
            resp <- showDialogAndGetResponse
                (Just window)
                (__ "Buffer already open.")
                MessageTypeQuestion
                (AnotherResponseType 1)
                [ constructDialogUseHeaderBar 0, constructMessageDialogButtons ButtonsTypeNone ]
                [ (__ "Make _Active", AnotherResponseType 1)
                , (__ "_Open Second", AnotherResponseType 2) ]
            case resp of
                AnotherResponseType 2 -> reallyOpen fpc
                _ -> makeActive hdb
        [] -> reallyOpen fpc
    where
        reallyOpen fpc = do
            pp <- getBestPathForId "*Buffer"
            void $ newTextBuffer pp (T.pack $ takeFileName fpc) (Just fpc)
-- | Print the file behind the active buffer (see 'filePrint'').
filePrint :: IDEAction
filePrint = inActiveBufContext' () filePrint'
-- | Print the buffer's file after confirming with the user; offers to
--   save unsaved changes first (cancelling there cancels printing too).
filePrint' :: TextEditor editor => Notebook -> EditorView view -> EditorBuffer editor -> IDEBuffer -> Int -> IDEM ()
filePrint' _nb _ ebuf currentBuffer _ = do
    let pName = paneName currentBuffer
    window <- getMainWindow
    -- First dialog: confirm the print itself.
    yesPrint <- liftIO $ do
        resp <- showDialogAndGetResponse
            (Just window)
            (__"Print document: "
                <> pName
                <> "?")
            MessageTypeQuestion
            ResponseTypeYes
            [ constructDialogUseHeaderBar 0, constructMessageDialogButtons ButtonsTypeNone ]
            [ (__"_Print", ResponseTypeYes)
            , (__"_Don't Print", ResponseTypeNo) ]
        case resp of
            ResponseTypeYes -> return True
            ResponseTypeNo -> return False
            ResponseTypeCancel -> return False
            _ -> return False
    when yesPrint $ do
        modified <- getModified ebuf
        -- Second dialog: offer to save before printing the on-disk file.
        shouldContinue <- reifyIDE $ \ideR ->
            if modified
                then do
                    resp <- showDialogAndGetResponse
                        (Just window)
                        (__"Save changes to document: "
                            <> pName
                            <> "?")
                        MessageTypeQuestion
                        ResponseTypeYes
                        [ constructDialogUseHeaderBar 0, constructMessageDialogButtons ButtonsTypeNone ]
                        [ (__"_Save", ResponseTypeYes)
                        , (__"_Don't Save", ResponseTypeNo)
                        , (__"_Cancel Printing", ResponseTypeCancel) ]
                    case resp of
                        ResponseTypeYes -> do
                            _ <- reflectIDE (fileSave False) ideR
                            return True
                        ResponseTypeNo -> return True
                        ResponseTypeCancel -> return False
                        _ -> return False
                else
                    return True
        -- Only file-backed buffers can be printed.
        when shouldContinue $
            case fileName currentBuffer of
                Just name -> do
                    status <- liftIO $ Print.print name
                    case status of
                        Left err -> liftIO $ showErrorDialog (Just window) (T.pack $ show err)
                        Right _ -> liftIO $ showDialog (Just window) "Print job has been sent successfully" MessageTypeInfo
                    return ()
                Nothing -> return ()
-- | Undo the last edit in the active buffer and keep the cursor visible.
editUndo :: IDEAction
editUndo = inActiveBufContext () $ \view buf _ ->
    canUndo buf >>= \can ->
        when can $ do
            undo buf
            scrollToCursor view
-- | Redo the last undone edit in the active buffer.
--   NOTE(review): unlike 'editUndo', the scroll happens even when there is
--   nothing to redo (it sits outside the 'when') -- confirm this is intended.
editRedo :: IDEAction
editRedo = inActiveBufContext () $ \view buf _ -> do
    can <- canRedo buf
    when can $ redo buf
    scrollToCursor view
-- | Delete the current selection and keep the cursor visible.
editDelete :: IDEAction
editDelete = inActiveBufContext () $ \view ebuf _ ->
    deleteSelection ebuf >> scrollToCursor view
-- | Select the whole contents of the active buffer.
editSelectAll :: IDEAction
editSelectAll = inActiveBufContext () $ \_ ebuf _ -> do
    bounds <- (,) <$> getStartIter ebuf <*> getEndIter ebuf
    uncurry (selectRange ebuf) bounds
-- | Cut the current selection to the system clipboard.
editCut :: IDEAction
editCut = inActiveBufContext () $ \_ ebuf _ ->
    atomIntern "CLIPBOARD" False >>= clipboardGet >>= \clip ->
        cutClipboard ebuf clip True
-- | Copy the current selection to the system clipboard and keep the
--   cursor visible.
editCopy :: IDEAction
editCopy = inActiveBufContext () $ \view ebuf _ ->
    atomIntern "CLIPBOARD" False >>= clipboardGet >>= \clip ->
        copyClipboard ebuf clip >> scrollToCursor view
-- | Paste the system clipboard at the insert mark of the active buffer.
editPaste :: IDEAction
editPaste = inActiveBufContext () $ \_ ebuf _ -> do
    mark <- getInsertMark ebuf
    iter <- getIterAtMark ebuf mark
    clip <- clipboardGet =<< atomIntern "CLIPBOARD" False
    pasteClipboard ebuf clip iter True
-- | Dedent the selected lines by one tab width, but only when every
--   selected line actually starts with a full tab width of spaces.
editShiftLeft :: IDEAction
editShiftLeft = do
    prefs' <- readIDE prefs
    let str = T.replicate (tabWidth prefs') " "
    b <- canShiftLeft str prefs'
    when b $ do
        _ <- doForSelectedLines [] $ \ebuf lineNr -> do
            sol <- getIterAtLine ebuf lineNr
            sol2 <- forwardCharsC sol (tabWidth prefs')
            delete ebuf sol sol2
        return ()
    where
    -- A shift is allowed only if each selected line begins with exactly
    -- one tab width of spaces.
    canShiftLeft str prefs' = do
        boolList <- doForSelectedLines [] $ \ebuf lineNr -> do
            sol <- getIterAtLine ebuf lineNr
            sol2 <- forwardCharsC sol (tabWidth prefs')
            str1 <- getText ebuf sol sol2 True
            return (str1 == str)
        return (F.foldl' (&&) True boolList)
-- | Indent every selected line by one tab width of spaces.
editShiftRight :: IDEAction
editShiftRight = do
    prefs' <- readIDE prefs
    let indent = T.replicate (tabWidth prefs') " "
    void . doForSelectedLines [] $ \ebuf lineNr -> do
        sol <- getIterAtLine ebuf lineNr
        insert ebuf sol indent
-- | Align the first occurrence of @pat'@ in every selected line to the
--   same column (the right-most column at which it currently occurs).
align :: Text -> IDEAction
align pat' = inActiveBufContext () $ \_ ebuf ideBuf -> do
    useCandy <- useCandyFor ideBuf
    -- With source candy enabled, search for the on-screen (candy) form.
    let pat = if useCandy
                then transChar pat'
                else pat'
    (start,end) <- getStartAndEndLineOfSelection ebuf
    beginUserAction ebuf
    let -- Column of the pattern on each selected line (Nothing if absent).
        positionsOfChar :: IDEM [(Int, Maybe Int)]
        positionsOfChar = forM [start .. end] $ \lineNr -> do
            sol <- getIterAtLine ebuf lineNr
            eol <- forwardToLineEndC sol
            line <- getText ebuf sol eol True
            return (lineNr,
                if pat `T.isInfixOf` line
                    then Just . T.length . fst $ T.breakOn pat line
                    else Nothing)
        -- Pad each occurrence with spaces up to the alignment column.
        alignChar :: Map Int (Maybe Int) -> Int -> IDEM ()
        alignChar positions alignTo =
            forM_ [start .. end] $ \lineNr ->
                case lineNr `Map.lookup` positions of
                    Just (Just n) -> do
                        sol <- getIterAtLine ebuf lineNr
                        insertLoc <- forwardCharsC sol n
                        insert ebuf insertLoc (T.replicate (alignTo - n) " ")
                    _ -> return ()
    positions <- positionsOfChar
    let alignTo = F.foldl' max 0 (mapMaybe snd positions)
    when (alignTo > 0) $ alignChar (Map.fromList positions) alignTo
    endUserAction ebuf
-- | Translate an alignment pattern to its source-candy equivalent.
--   NOTE(review): the candy cases were lost in this copy (only their
--   trailing comments "PROPORTION" / "RIGHTWARDS ARROW" / "LEFTWARDS
--   ARROW" survived); reconstructed below -- confirm against upstream.
transChar :: Text -> Text
transChar "::" = T.singleton '\x2237'  -- PROPORTION (∷)
transChar "->" = T.singleton '\x2192'  -- RIGHTWARDS ARROW (→)
transChar "<-" = T.singleton '\x2190'  -- LEFTWARDS ARROW (←)
transChar t = t
-- | Prepend @fp@ to the recently-used file list (bounded to 12 entries)
--   and broadcast 'UpdateRecent'.  Skipped entirely while the IDE is
--   starting or closing; the list is left untouched when @fp@ is already
--   in it.  (Two lines of garbled commented-out code were removed here.)
addRecentlyUsedFile :: FilePath -> IDEAction
addRecentlyUsedFile fp = do
    state <- readIDE currentState
    unless (isStartingOrClosing state) $ do
        recentFiles' <- readIDE recentFiles
        unless (fp `elem` recentFiles') $
            modifyIDE_ $ recentFiles .~ take 12 (fp : recentFiles')
        triggerEventIDE_ UpdateRecent
-- | The active buffer together with its current selection (as
--   candy-expanded text); the text is 'Nothing' when nothing is selected.
selectedText :: IDEM (Maybe IDEBuffer, Maybe Text)
selectedText = do
    candy' <- readIDE candy
    inActiveBufContext (Nothing, Nothing) $ \_ ebuf currentBuffer ->
        hasSelection ebuf >>= \case
            True -> do
                (i1,i2) <- getSelectionBounds ebuf
                text <- getCandylessPart candy' ebuf i1 i2
                return (Just currentBuffer, Just text)
            False -> return (Just currentBuffer, Nothing)
-- | The current selection, or the whole current line when nothing is
--   selected, as candy-expanded text paired with the buffer.
selectedTextOrCurrentLine :: IDEM (Maybe (IDEBuffer, Text))
selectedTextOrCurrentLine = do
    candy' <- readIDE candy
    inActiveBufContext Nothing $ \_ ebuf currentBuffer -> do
        (i1, i2) <- hasSelection ebuf >>= \case
            True -> getSelectionBounds ebuf
            False -> do
                -- No selection: both bounds sit at the cursor, so take
                -- the cursor's whole line instead.
                (i, _) <- getSelectionBounds ebuf
                line <- getLine i
                iStart <- getIterAtLine ebuf line
                iEnd <- forwardToLineEndC iStart
                return (iStart, iEnd)
        Just . (currentBuffer,) <$> getCandylessPart candy' ebuf i1 i2
-- | The current selection, or the identifier under the cursor when
--   nothing is selected; 'Nothing' when that identifier is empty.
selectedTextOrCurrentIdentifier :: IDEM (Maybe IDEBuffer, Maybe Text)
selectedTextOrCurrentIdentifier = do
    st <- selectedText
    case snd st of
        Just _ -> return st
        Nothing -> do
            candy' <- readIDE candy
            inActiveBufContext (Nothing, Nothing) $ \_ ebuf currentBuffer -> do
                (l,r) <- getIdentifierUnderCursor ebuf
                t <- getCandylessPart candy' ebuf l r
                return ( Just currentBuffer
                       , if T.null t
                            then Nothing
                            else Just t)
-- | The (line, column) of an iter, corrected for source candy when the
--   buffer has candy enabled.
getLocation :: TextEditor e => IDEBuffer -> EditorBuffer e -> EditorIter e -> IDEM (Int, Int)
getLocation buf ebuf iter = do
    candy'   <- readIDE candy
    useCandy <- useCandyFor buf
    pos <- (,) <$> getLine iter <*> getLineOffset iter
    if useCandy
        then positionFromCandy candy' ebuf pos
        else return pos
-- | Candy-corrected (line, column) of the selection start (which is the
--   cursor position when nothing is selected).
selectedLocation :: IDEM (Maybe (Int, Int))
selectedLocation =
    inActiveBufContext Nothing $ \_ ebuf currentBuffer -> do
        (start, _) <- getSelectionBounds ebuf
        Just <$> getLocation currentBuffer ebuf start
-- | Insert text directly after the current selection (candy-converting
--   it first when candy is enabled) and select the inserted text.
--   Does nothing when there is no selection.
insertTextAfterSelection :: Text -> IDEAction
insertTextAfterSelection str = do
    candy' <- readIDE candy
    inActiveBufContext () $ \_ ebuf currentBuffer -> do
        useCandy <- useCandyFor currentBuffer
        hasSelection ebuf >>= (`when` do
            realString <- if useCandy then stringToCandy candy' str else return str
            (_,i) <- getSelectionBounds ebuf
            insert ebuf i realString
            -- Re-query the bounds: the insert above invalidated the iters.
            (_,i1) <- getSelectionBounds ebuf
            i2 <- forwardCharsC i1 (T.length realString)
            selectRange ebuf i1 i2)
-- | The (project, package) pairs the buffer's file belongs to; empty for
--   unnamed buffers.
belongsToPackages' :: MonadIDE m => IDEBuffer -> m [(Project, IDEPackage)]
belongsToPackages' = maybe (return []) belongsToPackages . fileName
-- | Does the buffer's file belong to the workspace?  'False' for
--   unnamed buffers.
belongsToWorkspace' :: MonadIDE m => IDEBuffer -> m Bool
belongsToWorkspace' = maybe (return False) belongsToWorkspace . fileName
-- | Is source candy in effect for this buffer?  Candy applies only to
--   Haskell-mode buffers and only when enabled in the preferences.
useCandyFor :: MonadIDE m => IDEBuffer -> m Bool
useCandyFor aBuffer =
    (\prefs' -> candyState prefs' && isHaskellMode (mode aBuffer)) <$> readIDE prefs
-- | Re-apply the current candy preference to every open buffer,
--   transforming each buffer's text to or from its candy form.
switchBuffersCandy :: IDEAction
switchBuffersCandy = do
    prefs' <- readIDE prefs
    buffers <- allBuffers
    forM_ buffers $ \b@IDEBuffer{sourceView=sv} -> do
        buf <- getBuffer sv
        if candyState prefs'
            then modeTransformToCandy (mode b) (modeEditInCommentOrString (mode b)) buf
            else modeTransformFromCandy (mode b) buf
|
6df8db3e8dc29728158bc50984b26dc15ac18a41f9ff1c58b9ee56fd6db4b537 | polyfy/polylith | test.clj | (ns polylith.clj.core.workspace.settings.test)
;; Fully qualified symbol of the default test-runner constructor.
(def default-test-runner
  'polylith.clj.core.clojure-test-test-runner.interface/create)
;; Configuration values treated as aliases for the default test runner.
(def default-test-runner-aliases
  #{nil :default})
(defn add-default-test-runner-if-missing
  "Give the project the default test-runner constructor when none is configured."
  [project-settings]
  (if (seq (get-in project-settings [:test :create-test-runner]))
    project-settings
    (update project-settings :test assoc :create-test-runner [default-test-runner])))
(defn convert-create-test-runner-to-vector
  "Wrap a single test-runner constructor in a vector so the configuration
   under [:test :create-test-runner] is always a collection."
  [project-settings]
  (let [configured (get-in project-settings [:test :create-test-runner])]
    (if (coll? configured)
      project-settings
      (update-in project-settings [:test :create-test-runner] vector))))
(defn alias->default-test-runner
  "Resolve a test-runner alias (nil or :default) to the real default
   constructor; any other value passes through untouched.
   Note: contains? is required here because the alias set contains nil."
  [test-runner-constructor]
  (if (contains? default-test-runner-aliases test-runner-constructor)
    default-test-runner
    test-runner-constructor))
(defn replace-default-test-runner-constructors
  "Resolve every alias in the given collection of constructors, returning a vector."
  [test-runner-constructors]
  (into [] (map alias->default-test-runner) test-runner-constructors))
(defn update-default-test-runner-constructors
  "Resolve aliases inside [:test :create-test-runner] of the project settings."
  [project-settings]
  (update-in project-settings [:test :create-test-runner]
             replace-default-test-runner-constructors))
(defn enrich-test-settings
  "Reducing step: normalise one project's test-runner configuration and
   add it to the accumulated map under its project name."
  [acc [project-name project-settings]]
  (->> project-settings
       (convert-create-test-runner-to-vector)
       (update-default-test-runner-constructors)
       (add-default-test-runner-if-missing)
       (assoc acc project-name)))
(defn enrich-settings
  "Enrich the project's test configuration with test runners. It replaces
   default test runner aliases with the actual default test runner constructor.
   It also adds the default test runner to the projects missing a test runner."
  [settings]
  (update settings :projects #(reduce enrich-test-settings {} %)))
| null | https://raw.githubusercontent.com/polyfy/polylith/ca7c38eb2df7a489202742ac28c0cfe9ef9ae9d2/components/workspace/src/polylith/clj/core/workspace/settings/test.clj | clojure | (ns polylith.clj.core.workspace.settings.test)
(def default-test-runner
'polylith.clj.core.clojure-test-test-runner.interface/create)
(def default-test-runner-aliases
#{nil :default})
(defn add-default-test-runner-if-missing [project-settings]
(cond-> project-settings
(empty? (-> project-settings :test :create-test-runner))
(update :test assoc :create-test-runner [default-test-runner])))
(defn convert-create-test-runner-to-vector
  "Wrap a single test-runner constructor in a vector so the configuration
   under [:test :create-test-runner] is always a collection."
  [project-settings]
  (let [configured (get-in project-settings [:test :create-test-runner])]
    (if (coll? configured)
      project-settings
      (update-in project-settings [:test :create-test-runner] vector))))
(defn alias->default-test-runner [test-runner-constructor]
(if (contains? default-test-runner-aliases test-runner-constructor)
default-test-runner
test-runner-constructor))
(defn replace-default-test-runner-constructors [test-runner-constructors]
(mapv alias->default-test-runner test-runner-constructors))
(defn update-default-test-runner-constructors [project-settings]
(update-in project-settings [:test :create-test-runner]
replace-default-test-runner-constructors))
(defn enrich-test-settings [acc [project-name project-settings]]
(let [enriched-project-settings (-> project-settings
(convert-create-test-runner-to-vector)
(update-default-test-runner-constructors)
(add-default-test-runner-if-missing))]
(assoc acc project-name enriched-project-settings)))
(defn enrich-settings
"Enrich the project's test configuration with test runners. It replaces
default test runner aliases with the actual default test runner constructor.
It also adds the default test runner to the projects missing a test runner."
[settings]
(let [project-settings (:projects settings)
enriched-projects (reduce enrich-test-settings
{}
project-settings)]
(assoc settings :projects enriched-projects)))
| |
a8dfae0374b12ba5b9014417c7e761e043f1f47eab0746c33bd10347f3352e81 | KMahoney/squee | QueryBuilder.hs | module Squee.QueryBuilder
( Expression(..)
, Source(..)
, Query(..)
, Settings(..)
, buildSqlNoPlaceholder
, buildSqlWithPlaceholder
, buildSqlExpressionNoPlaceholder
, buildSqlExpressionWithPlaceholder
, collectPlaceholders
, columnNames
, applyFilter
, applyOrder
, applyMap
, applyNatJoin
, applyJoin
, toSql
, queryAsText
, expressionAsText
) where
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Text (Text)
import Control.Arrow (second)
import Control.Monad.Reader
import Database.Sql
import qualified Database.Schema as Schema
import qualified Database.PostgreSQL.Simple as PG
-- | Abstract syntax of SQL expressions.
data Expression
  = EField Text                       -- ^ unqualified column reference
  | EQualifiedField Text Text         -- ^ @table.column@ reference
  | EBinOp Text Expression Expression -- ^ binary operator application
  | EString Text                      -- ^ string literal
  | EInt Integer                      -- ^ integer literal
  | ECast Expression Text             -- ^ @(e)::type@ cast
  | EPlaceholder Integer              -- ^ numbered query parameter
  | EFn Text [Expression]             -- ^ function application
  | ERaw Text                         -- ^ verbatim SQL fragment
  deriving (Show)
-- | Where a query draws its rows from.
data Source
  = SourceTable Schema.TableName -- ^ a base table
  | SourceQuery Query            -- ^ a nested sub-query
  deriving (Show)
-- | One join clause attached to a 'Query'.
data Join
  = NatJoin Source           -- ^ @NATURAL JOIN@
  | JoinOn Expression Source -- ^ @INNER JOIN ... ON (condition)@
  deriving (Show)
-- | A single SELECT statement under construction.
data Query
  = Query
  { columns :: [(Schema.ColumnName, Expression)] -- ^ output columns, in SELECT order
  , querySource :: Source                        -- ^ FROM clause
  , queryJoins :: [Join]                         -- ^ join clauses, in order
  , queryFilter :: Maybe Expression              -- ^ WHERE condition, if any
  , queryOrder :: Maybe Expression               -- ^ ORDER BY expression, if any
  }
  deriving (Show)
-- | Environment for SQL generation.
data Settings
  = Settings { placeholderFormat :: Integer -> Sql } -- ^ renders a numbered placeholder
-- | SQL-rendering monad: a 'Reader' over the placeholder 'Settings'.
type Build a = Reader Settings a
-- | Render a query that is known to contain no 'EPlaceholder' nodes.
--   The placeholder formatter is 'undefined', so rendering a query that
--   does contain placeholders crashes at SQL-generation time.
buildSqlNoPlaceholder :: Query -> Sql
buildSqlNoPlaceholder = buildSqlWithPlaceholder undefined
-- | Render a query, formatting each placeholder with the given function.
buildSqlWithPlaceholder :: (Integer -> Sql) -> Query -> Sql
buildSqlWithPlaceholder f = flip runReader (Settings f) . toSql
-- | Render a stand-alone, placeholder-free expression as a @SELECT@;
--   see 'buildSqlNoPlaceholder' for the 'undefined'-formatter caveat.
buildSqlExpressionNoPlaceholder :: Expression -> Sql
buildSqlExpressionNoPlaceholder = buildSqlExpressionWithPlaceholder undefined
-- | Render a stand-alone expression as a one-row @SELECT@ statement.
buildSqlExpressionWithPlaceholder :: (Integer -> Sql) -> Expression -> Sql
buildSqlExpressionWithPlaceholder f e = "SELECT " <> runReader (expressionToSql e) (Settings f)
-- | Collect every placeholder index appearing anywhere in the query, in
--   syntactic order (columns, source, joins, filter, order).
--   Fixes two omissions in the original: the arguments of 'EFn' nodes
--   and the 'queryOrder' expression were never traversed, so
--   placeholders hiding there were silently dropped.
collectPlaceholders :: Query -> [Integer]
collectPlaceholders (Query { columns, querySource, queryJoins, queryFilter, queryOrder }) =
  concatMap colPlaceholders columns ++
  sourcePlaceholders querySource ++
  concatMap joinPlaceholders queryJoins ++
  maybe [] expressionPlaceholders queryFilter ++
  maybe [] expressionPlaceholders queryOrder
  where
    colPlaceholders (_, e) = expressionPlaceholders e
    sourcePlaceholders = \case
      SourceTable _ -> []
      SourceQuery query -> collectPlaceholders query
    joinPlaceholders = \case
      NatJoin source -> sourcePlaceholders source
      JoinOn e source -> sourcePlaceholders source ++ expressionPlaceholders e
    expressionPlaceholders =
      \case
        EBinOp _ e1 e2 -> expressionPlaceholders e1 ++ expressionPlaceholders e2
        ECast e _ -> expressionPlaceholders e
        EPlaceholder i -> [i]
        EFn _ args -> concatMap expressionPlaceholders args
        _ -> []
-- | The output column names of a query, in SELECT order.
columnNames :: Query -> [Schema.ColumnName]
columnNames = map fst . columns
-- | AND a condition onto the query's WHERE clause; the new condition
--   becomes the left operand of the conjunction.
applyFilter :: Expression -> Query -> Query
applyFilter expression query@(Query { queryFilter }) =
  query { queryFilter = maybe (Just expression) (Just . EBinOp "AND" expression) queryFilter }
-- | Set the ORDER BY expression, replacing any previous one.
applyOrder :: Expression -> Query -> Query
applyOrder expression query =
  query { queryOrder = Just expression }
-- | Project the query through a fresh column map, wrapping the original
--   query as a sub-select.
applyMap :: M.Map Text Expression -> Query -> Query
applyMap newColumns query =
  Query { columns     = [ (Schema.ColumnName name, e) | (name, e) <- M.toList newColumns ]
        , querySource = SourceQuery query
        , queryJoins  = []
        , queryFilter = Nothing
        , queryOrder  = Nothing
        }
-- | Natural-join two queries; the result selects the set union of both
--   queries' column names, each referenced as a bare field.
applyNatJoin :: Query -> Query -> Query
applyNatJoin a b =
  Query { columns = map selectColumn (S.toList (S.fromList (columnNames a) `S.union` S.fromList (columnNames b)))
        , querySource = SourceQuery a
        , queryJoins = [NatJoin (SourceQuery b)]
        , queryFilter = Nothing
        , queryOrder = Nothing
        }
  where
    selectColumn (Schema.ColumnName name) = (Schema.ColumnName name, EField name)
-- | Inner-join @b@ onto @a@ with the given ON condition; the output
--   columns are exactly those of the @merge@ map.
applyJoin :: Expression -> M.Map Text Expression -> Query -> Query -> Query
applyJoin cond merge a b =
  Query { columns     = [ (Schema.ColumnName name, e) | (name, e) <- M.toList merge ]
        , querySource = SourceQuery a
        , queryJoins  = [JoinOn cond (SourceQuery b)]
        , queryFilter = Nothing
        , queryOrder  = Nothing
        }
-- | Render one SELECT item.  The @AS@ alias is omitted when the
--   expression is already the identically-named bare column.
fieldExpressionToSql :: (Schema.ColumnName, Expression) -> Build Sql
fieldExpressionToSql (Schema.ColumnName columnName, EField f)
  | columnName == f =
      return (quoteName columnName)
fieldExpressionToSql (Schema.ColumnName columnName, e) = do
  e' <- expressionToSql e
  return $ e' <+> "AS" <+> quoteName columnName
-- | Render an expression to SQL.  Both operands of a binary operator are
--   parenthesised unconditionally, so operator precedence never matters.
expressionToSql :: Expression -> Build Sql
expressionToSql = \case
  EField name ->
    return (quoteName name)
  EQualifiedField table name ->
    return $ quoteName table <> "." <> quoteName name
  EBinOp op e1 e2 -> do
    e1' <- expressionToSql e1
    e2' <- expressionToSql e2
    return $ parens e1' <+> Sql op <+> parens e2'
  EString s ->
    return (quoteString s)
  EInt i ->
    return $ Sql $ T.pack $ show i
  ECast e t -> do
    e' <- expressionToSql e
    return $ "(" <> e' <> ")::" <> Sql t
  EPlaceholder i -> do
    -- Placeholder syntax is supplied by the environment.
    f <- asks placeholderFormat
    return (f i)
  EFn name args -> do
    args' <- mapM expressionToSql args
    return $ Sql name <> "(" <> intercalate ", " args' <> ")"
  ERaw x ->
    return (Sql x)
-- | Render a FROM/JOIN source; sub-queries are parenthesised.
sourceToSql :: Source -> Build Sql
sourceToSql (SourceTable name) = return $ quoteName $ Schema.tableName name
sourceToSql (SourceQuery query) = parens <$> toSql query
-- | Render the i-th join clause; the joined source is aliased @_jI@.
joinToSql :: Int -> Join -> Build Sql
joinToSql i = \case
  NatJoin source -> do
    source' <- sourceToSql source
    return $ " NATURAL JOIN" <+> source' <+> name
  JoinOn e source -> do
    e' <- expressionToSql e
    source' <- sourceToSql source
    return $ " INNER JOIN" <+> source' <+> name <+> "ON (" <> e' <> ")"
  where
    name = "AS _j" <> Sql (T.pack (show i))
-- | Wrap a SQL fragment in parentheses.
parens :: Sql -> Sql
parens e = "(" <> e <> ")"
-- | Render a complete SELECT statement.  The source is aliased @_t@ and
--   each joined source @_j0@, @_j1@, ... (see 'joinToSql').
toSql :: Query -> Build Sql
toSql (Query { columns, querySource, queryJoins, queryFilter, queryOrder }) = do
  columns' <- mapM fieldExpressionToSql columns
  querySource' <- sourceToSql querySource
  queryJoins' <- joinSql
  queryFilter' <- filterSql
  queryOrder' <- orderSql
  return $
    "SELECT" <+> intercalate "," columns' <+>
    "FROM" <+> querySource' <> " AS _t" <> queryJoins' <> queryFilter' <> queryOrder'
  where
    joinSql =
      mconcat <$> mapM (uncurry joinToSql) (zip [(0::Int)..] queryJoins)
    filterSql =
      case queryFilter of
        Nothing -> return ""
        Just e -> (" WHERE" <+>) <$> expressionToSql e
    orderSql =
      case queryOrder of
        Nothing -> return ""
        Just e -> (" ORDER BY" <+>) <$> expressionToSql e
-- | Cast every output column to @text@ (used for reading results back
--   uniformly as strings).
castTextColumns :: Query -> Query
castTextColumns query@(Query { columns }) =
  query { columns = [ (name, ECast e "text") | (name, e) <- columns ] }
-- | Run the query, reading every column back as 'Text' (each output
--   column is cast to @text@ first).  The query must be placeholder-free.
queryAsText :: PG.Connection -> Query -> IO [[Text]]
queryAsText conn query =
  PG.query_ conn $ toPGQuery $ buildSqlNoPlaceholder $ castTextColumns query
-- | Evaluate a scalar, placeholder-free expression on the server and
--   return it as 'Text'.  Partial: the pattern match fails unless the
--   result is exactly one row with one column.
expressionAsText :: PG.Connection -> Expression -> IO Text
expressionAsText conn expression = do
  [[result]] <- PG.query_ conn $ toPGQuery $ buildSqlExpressionNoPlaceholder (ECast expression "text")
  return result
| null | https://raw.githubusercontent.com/KMahoney/squee/b2ebad2ec6f3c866398600793d0a231edc8c23e4/src/Squee/QueryBuilder.hs | haskell | module Squee.QueryBuilder
( Expression(..)
, Source(..)
, Query(..)
, Settings(..)
, buildSqlNoPlaceholder
, buildSqlWithPlaceholder
, buildSqlExpressionNoPlaceholder
, buildSqlExpressionWithPlaceholder
, collectPlaceholders
, columnNames
, applyFilter
, applyOrder
, applyMap
, applyNatJoin
, applyJoin
, toSql
, queryAsText
, expressionAsText
) where
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Text (Text)
import Control.Arrow (second)
import Control.Monad.Reader
import Database.Sql
import qualified Database.Schema as Schema
import qualified Database.PostgreSQL.Simple as PG
data Expression
= EField Text
| EQualifiedField Text Text
| EBinOp Text Expression Expression
| EString Text
| EInt Integer
| ECast Expression Text
| EPlaceholder Integer
| EFn Text [Expression]
| ERaw Text
deriving (Show)
data Source
= SourceTable Schema.TableName
| SourceQuery Query
deriving (Show)
data Join
= NatJoin Source
| JoinOn Expression Source
deriving (Show)
data Query
= Query
{ columns :: [(Schema.ColumnName, Expression)]
, querySource :: Source
, queryJoins :: [Join]
, queryFilter :: Maybe Expression
, queryOrder :: Maybe Expression
}
deriving (Show)
data Settings
= Settings { placeholderFormat :: Integer -> Sql }
type Build a = Reader Settings a
buildSqlNoPlaceholder :: Query -> Sql
buildSqlNoPlaceholder = buildSqlWithPlaceholder undefined
buildSqlWithPlaceholder :: (Integer -> Sql) -> Query -> Sql
buildSqlWithPlaceholder f query = runReader (toSql query) (Settings f)
buildSqlExpressionNoPlaceholder :: Expression -> Sql
buildSqlExpressionNoPlaceholder = buildSqlExpressionWithPlaceholder undefined
buildSqlExpressionWithPlaceholder :: (Integer -> Sql) -> Expression -> Sql
buildSqlExpressionWithPlaceholder f e = "SELECT " <> runReader (expressionToSql e) (Settings f)
-- | Collect every placeholder index appearing anywhere in the query, in
--   syntactic order (columns, source, joins, filter, order).
--   Fixes two omissions in the original: the arguments of 'EFn' nodes
--   and the 'queryOrder' expression were never traversed, so
--   placeholders hiding there were silently dropped.
collectPlaceholders :: Query -> [Integer]
collectPlaceholders (Query { columns, querySource, queryJoins, queryFilter, queryOrder }) =
  concatMap colPlaceholders columns ++
  sourcePlaceholders querySource ++
  concatMap joinPlaceholders queryJoins ++
  maybe [] expressionPlaceholders queryFilter ++
  maybe [] expressionPlaceholders queryOrder
  where
    colPlaceholders (_, e) = expressionPlaceholders e
    sourcePlaceholders = \case
      SourceTable _ -> []
      SourceQuery query -> collectPlaceholders query
    joinPlaceholders = \case
      NatJoin source -> sourcePlaceholders source
      JoinOn e source -> sourcePlaceholders source ++ expressionPlaceholders e
    expressionPlaceholders =
      \case
        EBinOp _ e1 e2 -> expressionPlaceholders e1 ++ expressionPlaceholders e2
        ECast e _ -> expressionPlaceholders e
        EPlaceholder i -> [i]
        EFn _ args -> concatMap expressionPlaceholders args
        _ -> []
columnNames :: Query -> [Schema.ColumnName]
columnNames = map fst . columns
applyFilter :: Expression -> Query -> Query
applyFilter expression query@(Query { queryFilter }) =
query { queryFilter = maybe (Just expression) (Just . EBinOp "AND" expression) queryFilter }
applyOrder :: Expression -> Query -> Query
applyOrder expression query =
query { queryOrder = Just expression }
applyMap :: M.Map Text Expression -> Query -> Query
applyMap newColumns query =
Query { columns = map (\(c, e) -> (Schema.ColumnName c, e)) (M.toList newColumns)
, querySource = SourceQuery query
, queryJoins = []
, queryFilter = Nothing
, queryOrder = Nothing
}
applyNatJoin :: Query -> Query -> Query
applyNatJoin a b =
Query { columns = map selectColumn (S.toList (S.fromList (columnNames a) `S.union` S.fromList (columnNames b)))
, querySource = SourceQuery a
, queryJoins = [NatJoin (SourceQuery b)]
, queryFilter = Nothing
, queryOrder = Nothing
}
where
selectColumn (Schema.ColumnName name) = (Schema.ColumnName name, EField name)
applyJoin :: Expression -> M.Map Text Expression -> Query -> Query -> Query
applyJoin cond merge a b =
Query { columns = map (\(c, e) -> (Schema.ColumnName c, e)) (M.toList merge)
, querySource = SourceQuery a
, queryJoins = [JoinOn cond (SourceQuery b)]
, queryFilter = Nothing
, queryOrder = Nothing
}
fieldExpressionToSql :: (Schema.ColumnName, Expression) -> Build Sql
fieldExpressionToSql (Schema.ColumnName columnName, EField f)
| columnName == f =
return (quoteName columnName)
fieldExpressionToSql (Schema.ColumnName columnName, e) = do
e' <- expressionToSql e
return $ e' <+> "AS" <+> quoteName columnName
expressionToSql :: Expression -> Build Sql
expressionToSql = \case
EField name ->
return (quoteName name)
EQualifiedField table name ->
return $ quoteName table <> "." <> quoteName name
EBinOp op e1 e2 -> do
e1' <- expressionToSql e1
e2' <- expressionToSql e2
return $ parens e1' <+> Sql op <+> parens e2'
EString s ->
return (quoteString s)
EInt i ->
return $ Sql $ T.pack $ show i
ECast e t -> do
e' <- expressionToSql e
return $ "(" <> e' <> ")::" <> Sql t
EPlaceholder i -> do
f <- asks placeholderFormat
return (f i)
EFn name args -> do
args' <- mapM expressionToSql args
return $ Sql name <> "(" <> intercalate ", " args' <> ")"
ERaw x ->
return (Sql x)
sourceToSql :: Source -> Build Sql
sourceToSql (SourceTable name) = return $ quoteName $ Schema.tableName name
sourceToSql (SourceQuery query) = parens <$> toSql query
joinToSql :: Int -> Join -> Build Sql
joinToSql i = \case
NatJoin source -> do
source' <- sourceToSql source
return $ " NATURAL JOIN" <+> source' <+> name
JoinOn e source -> do
e' <- expressionToSql e
source' <- sourceToSql source
return $ " INNER JOIN" <+> source' <+> name <+> "ON (" <> e' <> ")"
where
name = "AS _j" <> Sql (T.pack (show i))
parens :: Sql -> Sql
parens e = "(" <> e <> ")"
toSql :: Query -> Build Sql
toSql (Query { columns, querySource, queryJoins, queryFilter, queryOrder }) = do
columns' <- mapM fieldExpressionToSql columns
querySource' <- sourceToSql querySource
queryJoins' <- joinSql
queryFilter' <- filterSql
queryOrder' <- orderSql
return $
"SELECT" <+> intercalate "," columns' <+>
"FROM" <+> querySource' <> " AS _t" <> queryJoins' <> queryFilter' <> queryOrder'
where
joinSql =
mconcat <$> mapM (uncurry joinToSql) (zip [(0::Int)..] queryJoins)
filterSql =
case queryFilter of
Nothing -> return ""
Just e -> (" WHERE" <+>) <$> expressionToSql e
orderSql =
case queryOrder of
Nothing -> return ""
Just e -> (" ORDER BY" <+>) <$> expressionToSql e
castTextColumns :: Query -> Query
castTextColumns query@(Query { columns }) =
query { columns = fmap (second (flip ECast "text")) columns }
queryAsText :: PG.Connection -> Query -> IO [[Text]]
queryAsText conn query =
PG.query_ conn $ toPGQuery $ buildSqlNoPlaceholder $ castTextColumns query
expressionAsText :: PG.Connection -> Expression -> IO Text
expressionAsText conn expression = do
[[result]] <- PG.query_ conn $ toPGQuery $ buildSqlExpressionNoPlaceholder (ECast expression "text")
return result
| |
fedb279e310da2fecf68bafdd794ce2278b3d5ae757e01218d28abe1c5c55721 | mariari/Misc-Lisp-Scripts | HW7.lisp |
;; Scratch: one sample of an (approximately) Uniform(0,1) rational.
(/ (random 100000000000) 100000000000)
(defun v ()
  "Sample uniformly from {1/5, 2/5, 3/5, 4/5}."
  (/ (1+ (random 4)) 5))
;; Monte-Carlo estimate of E[max(V, U)], U ~ Uniform(0,1): average of
;; 1000 runs, each the mean of 10,000 samples.
;; NOTE(review): relies on a project-local RANGE utility defined
;; elsewhere in this repository -- confirm its signature.
(let ((acc 0))
  (dotimes (i 1000)
    (incf acc (/ (reduce #'+ (mapcar (lambda (x) (max (v) (/ (random 100000000000) 100000000000))) (range 1e4 1))) 1e4)))
  (/ acc 1000))
| null | https://raw.githubusercontent.com/mariari/Misc-Lisp-Scripts/acecadc75fcbe15e6b97e084d179aacdbbde06a8/Class/probability/HW7.lisp | lisp |
(/ (random 100000000000) 100000000000)
(defun v ()
  "Sample uniformly from {1/5, 2/5, 3/5, 4/5}."
  (/ (1+ (random 4)) 5))
;; Monte-Carlo estimate of E[max(V, U)], U ~ Uniform(0,1); see the note
;; on the RANGE utility: it is defined elsewhere in this repository.
(let ((acc 0))
  (dotimes (i 1000)
    (incf acc (/ (reduce #'+ (mapcar (lambda (x) (max (v) (/ (random 100000000000) 100000000000))) (range 1e4 1))) 1e4)))
  (/ acc 1000))
| |
1965c1ab5643e7a3c091797a63336ff50798ffec79c852564e5fd517cdeb2df0 | rainoftime/PFAD | ch5.erl |
%% @doc Chapter 5: Sorting pairwise sums
-module(ch5).
-export([
sortsums/2,
lambert_sortsums/2
]).
%% @doc O(N^2 * log(N)): N = length(Xs) = length(Ys)
%% @doc All pairwise sums of Xs and Ys in ascending order.
%% Naive version: materialise all N^2 sums, then sort.
-spec sortsums([term()], [term()]) -> [term()].
sortsums(Xs, Ys) ->
    Sums = [X + Y || X <- Xs, Y <- Ys],
    lists:sort(Sums).
%% @doc Pairwise sums computed via subtraction: x + y == x - (-y), so the
%% sorted pairwise differences against the negated Ys give the sums.
-spec lambert_sortsums([term()], [term()]) -> [term()].
lambert_sortsums(Xs, Ys) ->
    [V || {V, _} <- sortsubs(Xs, lists:map(fun negate/1, Ys))].
%% @doc All pairwise differences W_i - W_j of Ws, sorted, each tagged with
%% the zero-based (I, J) index pair that produced it.  Divide and conquer:
%% split Ws in half, recurse within each half and across halves, re-index,
%% then merge the four sorted lists.
-spec sortsubs([term()]) -> [{term(), {integer(), integer()}}].
sortsubs([]) -> [];
sortsubs([W]) -> [{W - W, {0, 0}}];
sortsubs(Ws) ->
    M = length(Ws) div 2,
    {Xs, Ys} = lists:split(M, Ws),
    Xxs = sortsubs(Xs),
    Xys = sortsubs(Xs, Ys),
    %% ys-xs differences are the negated, reversed xs-ys differences.
    Yxs = lists:map(fun ({X, {I, J}}) -> {-X, {J, I}} end, lists:reverse(Xys)),
    Yys = sortsubs(Ys),
    %% Translate half-local indices back to indices in the full list.
    Incl = fun ({X, {I, J}}) -> {X, {M + I, J}} end,
    Incr = fun ({X, {I, J}}) -> {X, {I, M + J}} end,
    Incb = fun ({X, {I, J}}) -> {X, {M + I, M + J}} end,
    lists:foldr(fun lists:merge/2, [], [Xxs, lists:map(Incl, Xys), lists:map(Incr, Yxs), lists:map(Incb, Yys)]).
%% @doc All pairwise differences X - Y tagged with their index pair,
%% sorted by comparing precomputed ranks rather than the values.
-spec sortsubs([term()], [term()]) -> [{term(), {integer(), integer()}}].
sortsubs(Xs, Ys) ->
    Table0 = table(Xs, Ys),
    XsLen = length(Xs),
    YsLen = length(Ys),
    %% Flatten a {Tag, I, J} triple into one array index.
    ToIndex = fun ({A, B, C}) -> A * XsLen * YsLen + B * YsLen + C end,
    Table = list_to_tuple([Rank || {_, Rank} <- lists:sort(lists:zip(lists:map(ToIndex, Table0), lists:seq(1,length(Table0))))]),
    GetRank = fun ({A, B, C}) -> element(ToIndex({A, B, C}) + 1, Table) end,
    lists:sort(fun ({_X, {I, J}}, {_Y, {K, L}}) -> GetRank({0, I, J}) < GetRank({1, K, L}) end,
               subs(Xs, Ys)).
%% @doc Every difference X - Y paired with its zero-based index pair.
-spec subs([term()], [term()]) -> [{term(), {integer(), integer()}}].
subs(Xs, Ys) ->
    [{sub(X, Y), {I, J}} || {X, I} <- lists:zip(Xs, lists:seq(0, length(Xs) - 1)),
                            {Y, J} <- lists:zip(Ys, lists:seq(0, length(Ys) - 1))].
%% @doc Rank table for sortsubs/2: merge the sorted within-Xs and
%% within-Ys difference lists (tagged 0 and 1 respectively) and keep
%% each entry's tagged index triple in merged order.
-spec table([term()], [term()]) -> [{integer(), integer(), integer()}].
table(Xs, Ys) ->
    Xxs = sortsubs(Xs),
    Yys = sortsubs(Ys),
    Tag = fun (I) -> fun ({X, {J, K}}) -> {X, {I, J, K}} end end,
    [Index || {_, Index} <- lists:merge(lists:map(Tag(0), Xxs), lists:map(Tag(1), Yys))].
%% @doc Sum of two numbers.
-spec sum(number(), number()) -> number().
sum(X, Y) -> X + Y.
%% @doc Difference of two numbers.
-spec sub(number(), number()) -> number().
sub(X, Y) -> X - Y.
-spec negate(number()) -> number().
negate(X) -> -X. | null | https://raw.githubusercontent.com/rainoftime/PFAD/e594c027fc49da6f546701f6a550271b99442e0a/ch5/ch5.erl | erlang |
@doc Chapter 5 : Sorting pairwize sums
-module(ch5).
-export([
sortsums/2,
lambert_sortsums/2
]).
@doc O(N^2 * log(N ) ): N = ) = length(Ys )
-spec sortsums([term()], [term()]) -> [term()].
sortsums(Xs, Ys) ->
lists:sort([sum(X, Y) || X <- Xs, Y <- Ys]).
-spec lambert_sortsums([term()], [term()]) -> [term()].
lambert_sortsums(Xs, Ys) ->
[V || {V, _} <- sortsubs(Xs, lists:map(fun negate/1, Ys))].
-spec sortsubs([term()]) -> [{term(), {integer(), integer()}}].
sortsubs([]) -> [];
sortsubs([W]) -> [{W - W, {0, 0}}];
sortsubs(Ws) ->
M = length(Ws) div 2,
{Xs, Ys} = lists:split(M, Ws),
Xxs = sortsubs(Xs),
Xys = sortsubs(Xs, Ys),
Yxs = lists:map(fun ({X, {I, J}}) -> {-X, {J, I}} end, lists:reverse(Xys)),
Yys = sortsubs(Ys),
Incl = fun ({X, {I, J}}) -> {X, {M + I, J}} end,
Incr = fun ({X, {I, J}}) -> {X, {I, M + J}} end,
Incb = fun ({X, {I, J}}) -> {X, {M + I, M + J}} end,
lists:foldr(fun lists:merge/2, [], [Xxs, lists:map(Incl, Xys), lists:map(Incr, Yxs), lists:map(Incb, Yys)]).
-spec sortsubs([term()], [term()]) -> [{term(), {integer(), integer()}}].
sortsubs(Xs, Ys) ->
Table0 = table(Xs, Ys),
XsLen = length(Xs),
YsLen = length(Ys),
ToIndex = fun ({A, B, C}) -> A * XsLen * YsLen + B * YsLen + C end,
Table = list_to_tuple([Rank || {_, Rank} <- lists:sort(lists:zip(lists:map(ToIndex, Table0), lists:seq(1,length(Table0))))]),
GetRank = fun ({A, B, C}) -> element(ToIndex({A, B, C}) + 1, Table) end,
lists:sort(fun ({_X, {I, J}}, {_Y, {K, L}}) -> GetRank({0, I, J}) < GetRank({1, K, L}) end,
subs(Xs, Ys)).
-spec subs([term()], [term()]) -> [{term(), {integer(), integer()}}].
subs(Xs, Ys) ->
[{sub(X, Y), {I, J}} || {X, I} <- lists:zip(Xs, lists:seq(0, length(Xs) - 1)),
{Y, J} <- lists:zip(Ys, lists:seq(0, length(Ys) - 1))].
-spec table([term()], [term()]) -> [{integer(), integer(), integer()}].
table(Xs, Ys) ->
Xxs = sortsubs(Xs),
Yys = sortsubs(Ys),
Tag = fun (I) -> fun ({X, {J, K}}) -> {X, {I, J, K}} end end,
[Index || {_, Index} <- lists:merge(lists:map(Tag(0), Xxs), lists:map(Tag(1), Yys))].
-spec sum(number(), number()) -> number().
sum(X, Y) -> X + Y.
-spec sub(number(), number()) -> number().
sub(X, Y) -> X - Y.
-spec negate(number()) -> number().
negate(X) -> -X. | |
f0a614e49e819a49d38e5cde8251385db42e2fdaa46b1d990d7806296ec9d866 | hgoes/smtlib2 | Container.hs | # LANGUAGE PolyKinds #
module Language.SMTLib2.Composite.Container (
-- * Container class
Container(..),
-- * Paths
Path(..),
pathGet,
pathSet,
withPath,
-- * Accessors
Acc(..),
Accessor(..),
AccessorFork(..),
withAccessor,
access,
accessorPaths,
-- ** Construction
Access,(|*>),idAcc,field,
at,
*
Muxer(..),
Paths(..),
Muxed(..),
withMuxer,
mux,
-- ** Construction
(<|*>),idMux,
-- * Helper functions
update,updateTrack,updateList
) where
import Language.SMTLib2
import Language.SMTLib2.Composite.Class
import Language.SMTLib2.Composite.Null (NoComp(..))
import qualified Language.SMTLib2.Internals.Type.List as L
import Language.SMTLib2.Internals.Type.Nat
import Data.GADT.Compare
import Data.GADT.Show
import Data.Foldable
import Data.Functor.Identity
import Prelude hiding (read)
import Text.Show
import Control.Monad (unless)
data Acc a = Id
| Seq a (Acc a)
| Br [Acc a]
class Composite c => Container c where
data Index c :: ((Type -> *) -> *) -> (Type -> *) -> *
elementGet :: (Embed m e,Monad m,Composite el)
=> c e
-> Index c el e
-> m (el e)
elementsGet :: (Embed m e,Monad m,Composite el)
=> c e
-> [Index c el e]
-> m [el e]
elementsGet x = mapM (elementGet x)
elementSet :: (Embed m e,Monad m,Composite el)
=> c e
-> Index c el e
-> el e
-> m (c e)
elementsSet :: (Embed m e,Monad m,Composite el)
=> c e
-> [(Index c el e,el e)]
-> m (c e)
elementsSet c upd = foldlM (\cc (idx,el) -> elementSet cc idx el
) c upd
withElement :: (Embed m e,Monad m,Composite el)
=> c e
-> Index c el e
-> (el e -> m (el e))
-> m (c e)
withElement c idx f = do
el <- elementGet c idx
nel <- f el
elementSet c idx nel
showIndex :: GShow e => Int -> Index c el e -> ShowS
update :: (Composite a,Embed m e,Monad m,GCompare e)
=> a e -> [e BoolType] -> a e -> m (a e)
update x [] _ = return x
update x cs y = do
cond <- case cs of
[c] -> return c
_ -> and' cs
res <- compITE cond x y
case res of
Nothing -> error $ "Container.update: Incompatible element written."
Just res' -> return res'
updateTrack :: (Composite a,Embed m e,Monad m,GCompare e)
=> (forall tp. e tp -> e tp -> Bool)
-> m () -> a e -> [e BoolType] -> a e -> m (a e)
updateTrack f act x [] oldx = do
unless (compIsSubsetOf f x oldx) act
return x
updateTrack f act x cs y = do
cond <- case cs of
[c] -> return c
_ -> and' cs
res <- compITE cond x y
case res of
Nothing -> error $ "Container.updateTrack: Incompatible element written."
Just res' -> do
unless (compIsSubsetOf f x y) act
return res'
data Path a (idx :: Acc ((Type -> *) -> *)) b e where
PathId :: Path a 'Id a e
PathSeq :: (Container a,Composite b)
=> Index a b e
-> Path b idx c e
-> Path a ('Seq b idx) c e
PathBr :: Composite b
=> Natural n
-> Path a (L.Index idxs n) b e
-> Path a (Br idxs) b e
PathFun :: (a e -> b e)
-> (b e -> a e)
-> Path b idx c e
-> Path a idx c e
data AccessorFork a (idxs :: [Acc ((Type -> *) -> *)]) b e where
NilFork :: AccessorFork a '[] b e
Fork :: Maybe ([e BoolType],Accessor a idx b e)
-> AccessorFork a idxs b e
-> AccessorFork a (idx ': idxs) b e
data Accessor a (idx :: Acc ((Type -> *) -> *)) b e where
AccId :: Accessor a 'Id a e
AccSeq :: (Container a,Composite b)
=> [(Index a b e,[e BoolType],Accessor b idx c e)]
-> Accessor a ('Seq b idx) c e
AccFork :: Composite b
=> AccessorFork a idxs b e
-> Accessor a ('Br idxs) b e
AccFun :: (a e -> b e)
-> (b e -> a e)
-> Accessor b idx c e
-> Accessor a idx c e
withAccessor :: (Embed m e,Monad m)
=> Accessor a idx b e
-> a e
-> (Path a idx b e -> [e BoolType] -> b e -> m (b e))
-> m (a e)
withAccessor AccId x f = f PathId [] x
withAccessor (AccSeq acc) x f = do
els <- elementsGet x (fmap (\(idx,_,_) -> idx) acc)
nels <- mapM (\((idx,cond,acc),el) -> do
nel <- withAccessor acc el
(\path cond' -> f (PathSeq idx path) (cond++cond'))
return (idx,nel)
) (zip acc els)
elementsSet x nels
withAccessor (AccFork fork) x f
= withAccessorFork fork x (\n path -> f (PathBr n path))
where
withAccessorFork :: (Embed m e,Monad m)
=> AccessorFork a idxs b e
-> a e
-> (forall n. Natural n ->
Path a (L.Index idxs n) b e ->
[e BoolType] -> b e -> m (b e))
-> m (a e)
withAccessorFork NilFork x _ = return x
withAccessorFork (Fork Nothing rest) x f
= withAccessorFork rest x (\n -> f (Succ n))
withAccessorFork (Fork (Just (cond,acc)) rest) x f = do
nx <- withAccessor acc x (\path cond'
-> f Zero path (cond++cond'))
withAccessorFork rest nx (\n -> f (Succ n))
withAccessor (AccFun g h acc) x f
= fmap h $ withAccessor acc (g x) (\path -> f (PathFun g h path))
accessorPaths :: Accessor a idx b e -> [([e BoolType],Path a idx b e)]
accessorPaths AccId = [([],PathId)]
accessorPaths (AccSeq lst)
= [ (cond++cond',PathSeq idx path)
| (idx,cond,acc) <- lst
, (cond',path) <- accessorPaths acc ]
accessorPaths (AccFork fork)
= forkPaths fork
where
forkPaths :: (Composite b)
=> AccessorFork a idxs b e
-> [([e BoolType],Path a ('Br idxs) b e)]
forkPaths NilFork = []
forkPaths (Fork f fs)
= (case f of
Nothing -> []
Just (cond,acc) -> [ (cond++cond',PathBr Zero path)
| (cond',path) <- accessorPaths acc ])++
[ (cond,PathBr (Succ n) path)
| (cond,PathBr n path) <- forkPaths fs ]
accessorPaths (AccFun f g acc)
= [ (cond,PathFun f g path)
| (cond,path) <- accessorPaths acc ]
data Paths a (idxs :: [Acc ((Type -> *) -> *)]) bs e where
NilPaths :: Paths a '[] '[] e
Paths :: Path a idx b e
-> Paths a idxs bs e
-> Paths a (idx ': idxs) (b ': bs) e
data Muxer a (idxs :: [Acc ((Type -> *) -> *)]) bs e where
NoMux :: Muxer a '[] '[] e
Mux :: Accessor a idx b e
-> Muxer a idxs bs e
-> Muxer a (idx ': idxs) (b ': bs) e
data Muxed els (e :: Type -> *) where
NoMuxed :: Muxed '[] e
Muxed :: a e -> Muxed as e -> Muxed (a ': as) e
accessorHead :: Accessor a idx b e
-> Maybe (Path a idx b e,
[e BoolType],
Accessor a idx b e)
accessorHead AccId = Nothing
accessorHead (AccSeq []) = Nothing
accessorHead (AccSeq ((idx,cond,AccId):accs))
= Just (PathSeq idx PathId,cond,AccSeq accs)
accessorHead (AccSeq ((idx,cond,acc):accs))
= case accessorHead acc of
Just (path,cond',acc') -> Just (PathSeq idx path,cond++cond'
,AccSeq ((idx,cond,acc'):accs))
Nothing -> accessorHead (AccSeq accs)
accessorHead (AccFork NilFork) = Nothing
accessorHead (AccFork (Fork Nothing rest)) = do
(PathBr n path,cond,AccFork nrest) <- accessorHead (AccFork rest)
return (PathBr (Succ n) path,cond,AccFork (Fork Nothing nrest))
accessorHead (AccFork (Fork (Just (cond,acc)) rest))
= case accessorHead acc of
Nothing -> accessorHead (AccFork (Fork Nothing rest))
Just (path,cond',acc') -> Just (PathBr Zero path,cond++cond',
AccFork (Fork (Just (cond,acc')) rest))
accessorHead (AccFun f g acc) = do
(path,cond,acc') <- accessorHead acc
return (PathFun f g path,cond,AccFun f g acc')
pathGet :: (Embed m e,Monad m)
=> Path a idx b e
-> a e
-> m (b e)
pathGet PathId x = return x
pathGet (PathSeq idx path) x = do
nx <- elementGet x idx
pathGet path nx
pathGet (PathBr _ path) x = pathGet path x
pathGet (PathFun f g path) x = pathGet path (f x)
pathSet :: (Embed m e,Monad m)
=> Path a idx b e
-> a e
-> b e
-> m (a e)
pathSet PathId _ x = return x
pathSet (PathSeq idx path) x y = do
el <- elementGet x idx
nel <- pathSet path el y
elementSet x idx nel
pathSet (PathBr _ path) x y = pathSet path x y
pathSet (PathFun f g path) x y = fmap g $ pathSet path (f x) y
withPath :: (Embed m e,Monad m)
=> Path a idx b e
-> a e
-> (b e -> m (b e))
-> m (a e)
withPath PathId x f = f x
withPath (PathSeq idx path) x f = withElement x idx $
\el -> withPath path el f
withPath (PathBr _ path) x f = withPath path x f
withPath (PathFun g h path) x f = fmap h $ withPath path (g x) f
withMuxer' :: (Embed m e,Monad m)
=> Muxer a idxs bs e
-> a e
-> st
-> (Paths a idxs bs e -> [e BoolType] ->
Muxed bs e -> st -> m (Muxed bs e,st))
-> m (a e,st)
withMuxer' NoMux x st f = do
(NoMuxed,nst) <- f NilPaths [] NoMuxed st
return (x,nst)
withMuxer' (Mux acc accs) x st f = case accessorHead acc of
Nothing -> return (x,st)
Just (path,cond,acc') -> do
el <- pathGet path x
(x1,(nel,nst)) <- withMuxer' accs x (el,st) $
\paths cond' muxed (el,st) -> do
(Muxed nel nmuxed,nst) <- f (Paths path paths) (cond++cond')
(Muxed el muxed) st
return (nmuxed,(nel,nst))
x2 <- pathSet path x1 nel
withMuxer' (Mux acc' accs) x2 nst f
withMuxer :: (Embed m e,Monad m)
=> Muxer a idxs bs e
-> a e
-> (Paths a idxs bs e -> [e BoolType] ->
Muxed bs e -> m (Muxed bs e))
-> m (a e)
withMuxer mux x f = do
(nx,()) <- withMuxer' mux x () $
\paths cond muxed () -> do
nmuxed <- f paths cond muxed
return (nmuxed,())
return nx
access :: (Container a,Embed m e,Monad m)
=> Access a idx b m e
-> a e
-> (Path a idx b e -> [e BoolType] -> b e -> m (b e))
-> m (a e)
access getAcc x f = do
acc <- getAcc x
withAccessor acc x f
type Access a idx b m e = a e -> m (Accessor a idx b e)
type family PathConcats (p1 :: [Acc a]) (p2 :: Acc a) :: [Acc a] where
PathConcats '[] ys = '[]
PathConcats (x ': xs) ys = PathConcat x ys ': PathConcats xs ys
type family PathConcat (p1 :: Acc a) (p2 :: Acc a) :: Acc a where
PathConcat 'Id acc = acc
PathConcat ('Seq x xs) ys = 'Seq x (PathConcat xs ys)
PathConcat ('Br xs) ys = 'Br (PathConcats xs ys)
(|*>) :: (Embed m e,Monad m,Composite c)
=> Access a idx1 b m e
-> Access b idx2 c m e
-> Access a (PathConcat idx1 idx2) c m e
(|*>) f g x = do
acc <- f x
concatAcc acc x g
where
concatAcc :: (Embed m e,Monad m,Composite c)
=> Accessor a idx1 b e
-> a e
-> (b e -> m (Accessor b idx2 c e))
-> m (Accessor a (PathConcat idx1 idx2) c e)
concatAcc AccId x f = f x
concatAcc (AccSeq lst) x f = do
nlst <- mapM (\(idx,cond,acc) -> do
nx <- elementGet x idx
nacc <- concatAcc acc nx f
return (idx,cond,nacc)
) lst
return (AccSeq nlst)
concatAcc (AccFork fork) x f = do
nfork <- concatFork fork x f
return (AccFork nfork)
concatAcc (AccFun g h acc) x f = do
nacc <- concatAcc acc (g x) f
return (AccFun g h nacc)
concatFork :: (Embed m e,Monad m,Composite c)
=> AccessorFork a idx1 b e
-> a e
-> (b e -> m (Accessor b idx2 c e))
-> m (AccessorFork a (PathConcats idx1 idx2) c e)
concatFork NilFork _ _ = return NilFork
concatFork (Fork Nothing xs) obj f = do
xs' <- concatFork xs obj f
return (Fork Nothing xs')
concatFork (Fork (Just (cond,acc)) xs) obj f = do
nacc <- concatAcc acc obj f
nxs <- concatFork xs obj f
return (Fork (Just (cond,nacc)) nxs)
infixr 5 |*>
idAcc :: Monad m => Access a 'Id a m e
idAcc _ = return AccId
at :: (Container c,Composite el,Monad m,Embed m e)
=> Index c el e -> Access c ('Seq el 'Id) el m e
at idx x = do
el <- elementGet x idx
return $ AccSeq [(idx,[],AccId)]
field :: Monad m => (a e -> b e) -> (b e -> a e) -> Access a 'Id b m e
field f g _ = return $ AccFun f g AccId
(<|*>) :: (Embed m e,Monad m)
=> Access a idx b m e
-> (a e -> m (Muxer a idxs bs e))
-> a e
-> m (Muxer a (idx ': idxs) (b ': bs) e)
(<|*>) f g x = do
acc <- f x
accs <- g x
return (Mux acc accs)
infixr 4 <|*>
idMux :: Monad m => a e -> m (Muxer a '[] '[] e)
idMux _ = return NoMux
mux :: (Embed m e,Monad m)
=> (a e -> m (Muxer a idxs bs e))
-> a e
-> (Paths a idxs bs e -> [e BoolType] ->
Muxed bs e -> m (Muxed bs e))
-> m (a e)
mux f x g = do
muxer <- f x
withMuxer muxer x g
updateList' :: (Embed m e,Monad m)
=> Accessor a idx b e
-> a e
-> m [(Path a idx b e,[e BoolType],b e,a e -> b e -> m (a e))]
updateList' AccId x = return [(PathId,[],x,const return)]
updateList' (AccSeq lst) x
= fmap concat $
mapM (\(idx,cond,acc) -> do
el <- elementGet x idx
nlst <- updateList' acc el
return (fmap (\(path,cond',el',wr)
-> (PathSeq idx path,cond++cond',el',
\cx nel' -> withElement cx idx
(\cel -> wr cel nel'))
) nlst)
) lst
updateList' (AccFork fork) x = fromFork fork x
where
fromFork :: (Embed m e,Monad m,Composite b)
=> AccessorFork a idxs b e
-> a e
-> m [((Path a ('Br idxs) b e),[e BoolType],b e,a e -> b e -> m (a e))]
fromFork NilFork _ = return []
fromFork (Fork Nothing rest) x = do
lst <- fromFork rest x
return $ fmap (\(PathBr n path,cond,el,up)
-> (PathBr (Succ n) path,cond,el,up)
) lst
fromFork (Fork (Just (cond,acc)) rest) x = do
lst1 <- updateList' acc x
let lst1' = fmap (\(path,cond',el,up)
-> (PathBr Zero path,cond++cond',el,up)
) lst1
lst2 <- fromFork rest x
let lst2' = fmap (\(PathBr n path,cond',el,up)
-> (PathBr (Succ n) path,cond',el,up)
) lst2
return $ lst1'++lst2'
updateList' (AccFun f g acc) x = do
lst <- updateList' acc (f x)
return $ fmap (\(path,cond,el,upd)
-> (PathFun f g path,cond,el,
\cx nel -> do
ny <- upd (f cx) nel
return $ g ny)
) lst
updateList :: (Embed m e,Monad m)
=> Access a idx b m e
-> a e
-> m [(Path a idx b e,[e BoolType],b e,a e -> b e -> m (a e))]
updateList f x = do
acc <- f x
updateList' acc x
instance GShow e => Show (Accessor a idx b e) where
showsPrec _ AccId = showString "id"
showsPrec _ (AccSeq lst)
= showListWith (\(idx,cond,acc)
-> showIndex 5 idx .
showString ": " .
showListWith (gshowsPrec 0) cond .
showString " -> " .
showsPrec 5 acc) lst
showsPrec p (AccFork fork) = showsPrec p fork
showsPrec p (AccFun _ _ acc) = showsPrec p acc
instance GShow e => Show (AccessorFork a idxs b e) where
showsPrec _ NilFork = showString "empty"
showsPrec p (Fork acc fork)
= (case acc of
Nothing -> showString "empty"
Just (cond,acc') -> showListWith (gshowsPrec 0) cond .
showString " -> " .
showsPrec 5 acc'
) .
(case fork of
NilFork -> id
_ -> showString " | " .
showsPrec 5 fork)
| null | https://raw.githubusercontent.com/hgoes/smtlib2/c35747f2a5a9ec88dc7b1db41a5aab6e98c0458d/extras/composite/Language/SMTLib2/Composite/Container.hs | haskell | * Container class
* Paths
* Accessors
** Construction
** Construction
* Helper functions | # LANGUAGE PolyKinds #
module Language.SMTLib2.Composite.Container (
Container(..),
Path(..),
pathGet,
pathSet,
withPath,
Acc(..),
Accessor(..),
AccessorFork(..),
withAccessor,
access,
accessorPaths,
Access,(|*>),idAcc,field,
at,
*
Muxer(..),
Paths(..),
Muxed(..),
withMuxer,
mux,
(<|*>),idMux,
update,updateTrack,updateList
) where
import Language.SMTLib2
import Language.SMTLib2.Composite.Class
import Language.SMTLib2.Composite.Null (NoComp(..))
import qualified Language.SMTLib2.Internals.Type.List as L
import Language.SMTLib2.Internals.Type.Nat
import Data.GADT.Compare
import Data.GADT.Show
import Data.Foldable
import Data.Functor.Identity
import Prelude hiding (read)
import Text.Show
import Control.Monad (unless)
data Acc a = Id
| Seq a (Acc a)
| Br [Acc a]
class Composite c => Container c where
data Index c :: ((Type -> *) -> *) -> (Type -> *) -> *
elementGet :: (Embed m e,Monad m,Composite el)
=> c e
-> Index c el e
-> m (el e)
elementsGet :: (Embed m e,Monad m,Composite el)
=> c e
-> [Index c el e]
-> m [el e]
elementsGet x = mapM (elementGet x)
elementSet :: (Embed m e,Monad m,Composite el)
=> c e
-> Index c el e
-> el e
-> m (c e)
elementsSet :: (Embed m e,Monad m,Composite el)
=> c e
-> [(Index c el e,el e)]
-> m (c e)
elementsSet c upd = foldlM (\cc (idx,el) -> elementSet cc idx el
) c upd
withElement :: (Embed m e,Monad m,Composite el)
=> c e
-> Index c el e
-> (el e -> m (el e))
-> m (c e)
withElement c idx f = do
el <- elementGet c idx
nel <- f el
elementSet c idx nel
showIndex :: GShow e => Int -> Index c el e -> ShowS
update :: (Composite a,Embed m e,Monad m,GCompare e)
=> a e -> [e BoolType] -> a e -> m (a e)
update x [] _ = return x
update x cs y = do
cond <- case cs of
[c] -> return c
_ -> and' cs
res <- compITE cond x y
case res of
Nothing -> error $ "Container.update: Incompatible element written."
Just res' -> return res'
updateTrack :: (Composite a,Embed m e,Monad m,GCompare e)
=> (forall tp. e tp -> e tp -> Bool)
-> m () -> a e -> [e BoolType] -> a e -> m (a e)
updateTrack f act x [] oldx = do
unless (compIsSubsetOf f x oldx) act
return x
updateTrack f act x cs y = do
cond <- case cs of
[c] -> return c
_ -> and' cs
res <- compITE cond x y
case res of
Nothing -> error $ "Container.updateTrack: Incompatible element written."
Just res' -> do
unless (compIsSubsetOf f x y) act
return res'
data Path a (idx :: Acc ((Type -> *) -> *)) b e where
PathId :: Path a 'Id a e
PathSeq :: (Container a,Composite b)
=> Index a b e
-> Path b idx c e
-> Path a ('Seq b idx) c e
PathBr :: Composite b
=> Natural n
-> Path a (L.Index idxs n) b e
-> Path a (Br idxs) b e
PathFun :: (a e -> b e)
-> (b e -> a e)
-> Path b idx c e
-> Path a idx c e
data AccessorFork a (idxs :: [Acc ((Type -> *) -> *)]) b e where
NilFork :: AccessorFork a '[] b e
Fork :: Maybe ([e BoolType],Accessor a idx b e)
-> AccessorFork a idxs b e
-> AccessorFork a (idx ': idxs) b e
data Accessor a (idx :: Acc ((Type -> *) -> *)) b e where
AccId :: Accessor a 'Id a e
AccSeq :: (Container a,Composite b)
=> [(Index a b e,[e BoolType],Accessor b idx c e)]
-> Accessor a ('Seq b idx) c e
AccFork :: Composite b
=> AccessorFork a idxs b e
-> Accessor a ('Br idxs) b e
AccFun :: (a e -> b e)
-> (b e -> a e)
-> Accessor b idx c e
-> Accessor a idx c e
withAccessor :: (Embed m e,Monad m)
=> Accessor a idx b e
-> a e
-> (Path a idx b e -> [e BoolType] -> b e -> m (b e))
-> m (a e)
withAccessor AccId x f = f PathId [] x
withAccessor (AccSeq acc) x f = do
els <- elementsGet x (fmap (\(idx,_,_) -> idx) acc)
nels <- mapM (\((idx,cond,acc),el) -> do
nel <- withAccessor acc el
(\path cond' -> f (PathSeq idx path) (cond++cond'))
return (idx,nel)
) (zip acc els)
elementsSet x nels
withAccessor (AccFork fork) x f
= withAccessorFork fork x (\n path -> f (PathBr n path))
where
withAccessorFork :: (Embed m e,Monad m)
=> AccessorFork a idxs b e
-> a e
-> (forall n. Natural n ->
Path a (L.Index idxs n) b e ->
[e BoolType] -> b e -> m (b e))
-> m (a e)
withAccessorFork NilFork x _ = return x
withAccessorFork (Fork Nothing rest) x f
= withAccessorFork rest x (\n -> f (Succ n))
withAccessorFork (Fork (Just (cond,acc)) rest) x f = do
nx <- withAccessor acc x (\path cond'
-> f Zero path (cond++cond'))
withAccessorFork rest nx (\n -> f (Succ n))
withAccessor (AccFun g h acc) x f
= fmap h $ withAccessor acc (g x) (\path -> f (PathFun g h path))
accessorPaths :: Accessor a idx b e -> [([e BoolType],Path a idx b e)]
accessorPaths AccId = [([],PathId)]
accessorPaths (AccSeq lst)
= [ (cond++cond',PathSeq idx path)
| (idx,cond,acc) <- lst
, (cond',path) <- accessorPaths acc ]
accessorPaths (AccFork fork)
= forkPaths fork
where
forkPaths :: (Composite b)
=> AccessorFork a idxs b e
-> [([e BoolType],Path a ('Br idxs) b e)]
forkPaths NilFork = []
forkPaths (Fork f fs)
= (case f of
Nothing -> []
Just (cond,acc) -> [ (cond++cond',PathBr Zero path)
| (cond',path) <- accessorPaths acc ])++
[ (cond,PathBr (Succ n) path)
| (cond,PathBr n path) <- forkPaths fs ]
accessorPaths (AccFun f g acc)
= [ (cond,PathFun f g path)
| (cond,path) <- accessorPaths acc ]
data Paths a (idxs :: [Acc ((Type -> *) -> *)]) bs e where
NilPaths :: Paths a '[] '[] e
Paths :: Path a idx b e
-> Paths a idxs bs e
-> Paths a (idx ': idxs) (b ': bs) e
data Muxer a (idxs :: [Acc ((Type -> *) -> *)]) bs e where
NoMux :: Muxer a '[] '[] e
Mux :: Accessor a idx b e
-> Muxer a idxs bs e
-> Muxer a (idx ': idxs) (b ': bs) e
data Muxed els (e :: Type -> *) where
NoMuxed :: Muxed '[] e
Muxed :: a e -> Muxed as e -> Muxed (a ': as) e
accessorHead :: Accessor a idx b e
-> Maybe (Path a idx b e,
[e BoolType],
Accessor a idx b e)
accessorHead AccId = Nothing
accessorHead (AccSeq []) = Nothing
accessorHead (AccSeq ((idx,cond,AccId):accs))
= Just (PathSeq idx PathId,cond,AccSeq accs)
accessorHead (AccSeq ((idx,cond,acc):accs))
= case accessorHead acc of
Just (path,cond',acc') -> Just (PathSeq idx path,cond++cond'
,AccSeq ((idx,cond,acc'):accs))
Nothing -> accessorHead (AccSeq accs)
accessorHead (AccFork NilFork) = Nothing
accessorHead (AccFork (Fork Nothing rest)) = do
(PathBr n path,cond,AccFork nrest) <- accessorHead (AccFork rest)
return (PathBr (Succ n) path,cond,AccFork (Fork Nothing nrest))
accessorHead (AccFork (Fork (Just (cond,acc)) rest))
= case accessorHead acc of
Nothing -> accessorHead (AccFork (Fork Nothing rest))
Just (path,cond',acc') -> Just (PathBr Zero path,cond++cond',
AccFork (Fork (Just (cond,acc')) rest))
accessorHead (AccFun f g acc) = do
(path,cond,acc') <- accessorHead acc
return (PathFun f g path,cond,AccFun f g acc')
pathGet :: (Embed m e,Monad m)
=> Path a idx b e
-> a e
-> m (b e)
pathGet PathId x = return x
pathGet (PathSeq idx path) x = do
nx <- elementGet x idx
pathGet path nx
pathGet (PathBr _ path) x = pathGet path x
pathGet (PathFun f g path) x = pathGet path (f x)
pathSet :: (Embed m e,Monad m)
=> Path a idx b e
-> a e
-> b e
-> m (a e)
pathSet PathId _ x = return x
pathSet (PathSeq idx path) x y = do
el <- elementGet x idx
nel <- pathSet path el y
elementSet x idx nel
pathSet (PathBr _ path) x y = pathSet path x y
pathSet (PathFun f g path) x y = fmap g $ pathSet path (f x) y
withPath :: (Embed m e,Monad m)
=> Path a idx b e
-> a e
-> (b e -> m (b e))
-> m (a e)
withPath PathId x f = f x
withPath (PathSeq idx path) x f = withElement x idx $
\el -> withPath path el f
withPath (PathBr _ path) x f = withPath path x f
withPath (PathFun g h path) x f = fmap h $ withPath path (g x) f
withMuxer' :: (Embed m e,Monad m)
=> Muxer a idxs bs e
-> a e
-> st
-> (Paths a idxs bs e -> [e BoolType] ->
Muxed bs e -> st -> m (Muxed bs e,st))
-> m (a e,st)
withMuxer' NoMux x st f = do
(NoMuxed,nst) <- f NilPaths [] NoMuxed st
return (x,nst)
withMuxer' (Mux acc accs) x st f = case accessorHead acc of
Nothing -> return (x,st)
Just (path,cond,acc') -> do
el <- pathGet path x
(x1,(nel,nst)) <- withMuxer' accs x (el,st) $
\paths cond' muxed (el,st) -> do
(Muxed nel nmuxed,nst) <- f (Paths path paths) (cond++cond')
(Muxed el muxed) st
return (nmuxed,(nel,nst))
x2 <- pathSet path x1 nel
withMuxer' (Mux acc' accs) x2 nst f
withMuxer :: (Embed m e,Monad m)
=> Muxer a idxs bs e
-> a e
-> (Paths a idxs bs e -> [e BoolType] ->
Muxed bs e -> m (Muxed bs e))
-> m (a e)
withMuxer mux x f = do
(nx,()) <- withMuxer' mux x () $
\paths cond muxed () -> do
nmuxed <- f paths cond muxed
return (nmuxed,())
return nx
access :: (Container a,Embed m e,Monad m)
=> Access a idx b m e
-> a e
-> (Path a idx b e -> [e BoolType] -> b e -> m (b e))
-> m (a e)
access getAcc x f = do
acc <- getAcc x
withAccessor acc x f
type Access a idx b m e = a e -> m (Accessor a idx b e)
type family PathConcats (p1 :: [Acc a]) (p2 :: Acc a) :: [Acc a] where
PathConcats '[] ys = '[]
PathConcats (x ': xs) ys = PathConcat x ys ': PathConcats xs ys
type family PathConcat (p1 :: Acc a) (p2 :: Acc a) :: Acc a where
PathConcat 'Id acc = acc
PathConcat ('Seq x xs) ys = 'Seq x (PathConcat xs ys)
PathConcat ('Br xs) ys = 'Br (PathConcats xs ys)
(|*>) :: (Embed m e,Monad m,Composite c)
=> Access a idx1 b m e
-> Access b idx2 c m e
-> Access a (PathConcat idx1 idx2) c m e
(|*>) f g x = do
acc <- f x
concatAcc acc x g
where
concatAcc :: (Embed m e,Monad m,Composite c)
=> Accessor a idx1 b e
-> a e
-> (b e -> m (Accessor b idx2 c e))
-> m (Accessor a (PathConcat idx1 idx2) c e)
concatAcc AccId x f = f x
concatAcc (AccSeq lst) x f = do
nlst <- mapM (\(idx,cond,acc) -> do
nx <- elementGet x idx
nacc <- concatAcc acc nx f
return (idx,cond,nacc)
) lst
return (AccSeq nlst)
concatAcc (AccFork fork) x f = do
nfork <- concatFork fork x f
return (AccFork nfork)
concatAcc (AccFun g h acc) x f = do
nacc <- concatAcc acc (g x) f
return (AccFun g h nacc)
concatFork :: (Embed m e,Monad m,Composite c)
=> AccessorFork a idx1 b e
-> a e
-> (b e -> m (Accessor b idx2 c e))
-> m (AccessorFork a (PathConcats idx1 idx2) c e)
concatFork NilFork _ _ = return NilFork
concatFork (Fork Nothing xs) obj f = do
xs' <- concatFork xs obj f
return (Fork Nothing xs')
concatFork (Fork (Just (cond,acc)) xs) obj f = do
nacc <- concatAcc acc obj f
nxs <- concatFork xs obj f
return (Fork (Just (cond,nacc)) nxs)
infixr 5 |*>
idAcc :: Monad m => Access a 'Id a m e
idAcc _ = return AccId
at :: (Container c,Composite el,Monad m,Embed m e)
=> Index c el e -> Access c ('Seq el 'Id) el m e
at idx x = do
el <- elementGet x idx
return $ AccSeq [(idx,[],AccId)]
field :: Monad m => (a e -> b e) -> (b e -> a e) -> Access a 'Id b m e
field f g _ = return $ AccFun f g AccId
(<|*>) :: (Embed m e,Monad m)
=> Access a idx b m e
-> (a e -> m (Muxer a idxs bs e))
-> a e
-> m (Muxer a (idx ': idxs) (b ': bs) e)
(<|*>) f g x = do
acc <- f x
accs <- g x
return (Mux acc accs)
infixr 4 <|*>
idMux :: Monad m => a e -> m (Muxer a '[] '[] e)
idMux _ = return NoMux
mux :: (Embed m e,Monad m)
=> (a e -> m (Muxer a idxs bs e))
-> a e
-> (Paths a idxs bs e -> [e BoolType] ->
Muxed bs e -> m (Muxed bs e))
-> m (a e)
mux f x g = do
muxer <- f x
withMuxer muxer x g
updateList' :: (Embed m e,Monad m)
=> Accessor a idx b e
-> a e
-> m [(Path a idx b e,[e BoolType],b e,a e -> b e -> m (a e))]
updateList' AccId x = return [(PathId,[],x,const return)]
updateList' (AccSeq lst) x
= fmap concat $
mapM (\(idx,cond,acc) -> do
el <- elementGet x idx
nlst <- updateList' acc el
return (fmap (\(path,cond',el',wr)
-> (PathSeq idx path,cond++cond',el',
\cx nel' -> withElement cx idx
(\cel -> wr cel nel'))
) nlst)
) lst
updateList' (AccFork fork) x = fromFork fork x
where
fromFork :: (Embed m e,Monad m,Composite b)
=> AccessorFork a idxs b e
-> a e
-> m [((Path a ('Br idxs) b e),[e BoolType],b e,a e -> b e -> m (a e))]
fromFork NilFork _ = return []
fromFork (Fork Nothing rest) x = do
lst <- fromFork rest x
return $ fmap (\(PathBr n path,cond,el,up)
-> (PathBr (Succ n) path,cond,el,up)
) lst
fromFork (Fork (Just (cond,acc)) rest) x = do
lst1 <- updateList' acc x
let lst1' = fmap (\(path,cond',el,up)
-> (PathBr Zero path,cond++cond',el,up)
) lst1
lst2 <- fromFork rest x
let lst2' = fmap (\(PathBr n path,cond',el,up)
-> (PathBr (Succ n) path,cond',el,up)
) lst2
return $ lst1'++lst2'
updateList' (AccFun f g acc) x = do
lst <- updateList' acc (f x)
return $ fmap (\(path,cond,el,upd)
-> (PathFun f g path,cond,el,
\cx nel -> do
ny <- upd (f cx) nel
return $ g ny)
) lst
updateList :: (Embed m e,Monad m)
=> Access a idx b m e
-> a e
-> m [(Path a idx b e,[e BoolType],b e,a e -> b e -> m (a e))]
updateList f x = do
acc <- f x
updateList' acc x
instance GShow e => Show (Accessor a idx b e) where
showsPrec _ AccId = showString "id"
showsPrec _ (AccSeq lst)
= showListWith (\(idx,cond,acc)
-> showIndex 5 idx .
showString ": " .
showListWith (gshowsPrec 0) cond .
showString " -> " .
showsPrec 5 acc) lst
showsPrec p (AccFork fork) = showsPrec p fork
showsPrec p (AccFun _ _ acc) = showsPrec p acc
instance GShow e => Show (AccessorFork a idxs b e) where
showsPrec _ NilFork = showString "empty"
showsPrec p (Fork acc fork)
= (case acc of
Nothing -> showString "empty"
Just (cond,acc') -> showListWith (gshowsPrec 0) cond .
showString " -> " .
showsPrec 5 acc'
) .
(case fork of
NilFork -> id
_ -> showString " | " .
showsPrec 5 fork)
|
9b91f31fd739b4b48b7dbee50725fc9be42d194d7e42c86355e0ea63ddfacf8f | eponai/sulolive | aleph.clj | (ns eponai.server.external.aleph
(:require [com.stuartsierra.component :as component]
[suspendable.core :as suspendable]
[aleph.http :as aleph]
[aleph.netty]
[taoensso.timbre :refer [debug error]])
(:import (io.netty.channel ChannelPipeline)
(io.netty.handler.codec.http HttpContentCompressor)
(io.netty.handler.stream ChunkedWriteHandler)
(java.net BindException)))
(defn close-quietly [server]
(try
(some-> server (.close))
(catch Throwable e)))
(defrecord Aleph [handler port netty-options]
component/Lifecycle
(start [this]
(if (:server this)
this
(let [gzip-pipeline (fn [^ChannelPipeline pipeline]
(doto pipeline
(.addBefore "request-handler" "deflater" (HttpContentCompressor.))
(.addBefore "request-handler" "streamer" (ChunkedWriteHandler.))))
server-options (-> {:port port}
(merge netty-options)
(update :pipeline-transform (fn [current-transform]
(fn [^ChannelPipeline pipeline]
(cond-> pipeline
(:enable-gzip netty-options true)
(gzip-pipeline)
(some? current-transform)
(current-transform))))))
server (try
(aleph/start-server (:handler handler) server-options)
(catch BindException e
(error "Unable to start aleph: " e)))]
(assoc this :server server))))
(stop [this]
(when-let [server (:server this)]
(debug "Stopping aleph..")
(close-quietly server)
;; (aleph.netty/wait-for-close server)
(debug "Stopped aleph!"))
(dissoc this :server))
suspendable/Suspendable
(suspend [this]
this)
(resume [this old-this]
(if-let [server (:server old-this)]
(assoc this :server server)
(do (component/stop old-this)
(component/start this)))))
| null | https://raw.githubusercontent.com/eponai/sulolive/7a70701bbd3df6bbb92682679dcedb53f8822c18/src/eponai/server/external/aleph.clj | clojure | (aleph.netty/wait-for-close server) | (ns eponai.server.external.aleph
(:require [com.stuartsierra.component :as component]
[suspendable.core :as suspendable]
[aleph.http :as aleph]
[aleph.netty]
[taoensso.timbre :refer [debug error]])
(:import (io.netty.channel ChannelPipeline)
(io.netty.handler.codec.http HttpContentCompressor)
(io.netty.handler.stream ChunkedWriteHandler)
(java.net BindException)))
(defn close-quietly [server]
(try
(some-> server (.close))
(catch Throwable e)))
(defrecord Aleph [handler port netty-options]
component/Lifecycle
(start [this]
(if (:server this)
this
(let [gzip-pipeline (fn [^ChannelPipeline pipeline]
(doto pipeline
(.addBefore "request-handler" "deflater" (HttpContentCompressor.))
(.addBefore "request-handler" "streamer" (ChunkedWriteHandler.))))
server-options (-> {:port port}
(merge netty-options)
(update :pipeline-transform (fn [current-transform]
(fn [^ChannelPipeline pipeline]
(cond-> pipeline
(:enable-gzip netty-options true)
(gzip-pipeline)
(some? current-transform)
(current-transform))))))
server (try
(aleph/start-server (:handler handler) server-options)
(catch BindException e
(error "Unable to start aleph: " e)))]
(assoc this :server server))))
(stop [this]
(when-let [server (:server this)]
(debug "Stopping aleph..")
(close-quietly server)
(debug "Stopped aleph!"))
(dissoc this :server))
suspendable/Suspendable
(suspend [this]
this)
(resume [this old-this]
(if-let [server (:server old-this)]
(assoc this :server server)
(do (component/stop old-this)
(component/start this)))))
|
ce9b048eb8f4738a38e5ee12cfffdb4a3d8c399d4c3c9ff0690a0167999f0a0d | input-output-hk/cardano-wallet | Metadata.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE NumericUnderscores #
{-# LANGUAGE Rank2Types #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
-- |
Copyright : © 2018 - 2020 IOHK
-- License: Apache-2.0
--
-- HTTP-client(s) for fetching stake pool metadata from remote servers (directly
-- from pool operators, or from smash).
module Cardano.Pool.Metadata
(
-- * Fetch
fetchFromRemote
, StakePoolMetadataFetchLog (..)
, fetchDelistedPools
, HealthCheckSMASH(..)
, healthCheck
, isHealthyStatus
, toHealthCheckSMASH
, HealthStatusSMASH (..)
-- * Construct URLs
, UrlBuilder
, identityUrlBuilder
, registryUrlBuilder
-- * re-exports
, Manager
, newManager
, defaultManagerSettings
-- * Types
, SMASHPoolId (..)
) where
import Prelude
import Cardano.BM.Data.Severity
( Severity (..) )
import Cardano.BM.Data.Tracer
( HasPrivacyAnnotation (..), HasSeverityAnnotation (..) )
import Cardano.Pool.Metadata.Types
( StakePoolMetadata
, StakePoolMetadataHash (..)
, StakePoolMetadataUrl (..)
, UrlBuilder
)
import Cardano.Pool.Types
( PoolId, decodePoolIdBech32 )
import Cardano.Wallet.Primitive.AddressDerivation
( hex )
import Control.Error
( note )
import Control.Monad
( forM, when )
import Control.Monad.IO.Class
( MonadIO (..) )
import Control.Monad.Trans.Except
( ExceptT (..), except, runExceptT, throwE, withExceptT )
import Control.Tracer
( Tracer, traceWith )
import Crypto.Hash.Utils
( blake2b256 )
import Data.Aeson
( FromJSON
, Options (..)
, camelTo2
, eitherDecodeStrict
, genericParseJSON
, parseJSON
)
import Data.Bifunctor
( first )
import Data.ByteArray.Encoding
( Base (..), convertToBase )
import Data.ByteString
( ByteString )
import Data.Coerce
( coerce )
import Data.List
( intercalate )
import Data.Text
( Text )
import Data.Text.Class
( TextDecodingError (..), ToText (..), fromText )
import Fmt
( pretty )
import GHC.Generics
( Generic )
import Network.HTTP.Client
( HttpException (..)
, Manager
, ManagerSettings
, brConsume
, brReadSome
, managerResponseTimeout
, requestFromURI
, responseBody
, responseStatus
, responseTimeoutMicro
, withResponse
)
import Network.HTTP.Types.Status
( status200, status404 )
import Network.URI
( URI (..), parseURI )
import UnliftIO.Exception
( Exception (displayException), IOException, handle )
import qualified Data.Aeson as Aeson
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Network.HTTP.Client.TLS as HTTPS
-- | Build the SMASH metadata fetch endpoint for a single pool. Does not
-- contain leading '/'.
metadaFetchEp :: PoolId -> StakePoolMetadataHash -> String
metadaFetchEp pid (StakePoolMetadataHash bytes)
= intercalate "/" (["api", "v1", "metadata"] ++ [pidStr, hashStr])
where
hashStr = T.unpack $ T.decodeUtf8 $ convertToBase Base16 bytes
pidStr = T.unpack $ toText pid
-- TODO: use SMASH servant types
healthCheckEP :: String
healthCheckEP = T.unpack $ T.intercalate "/" ["api", "v1", "status"]
delistedEP :: String
delistedEP = T.unpack $ T.intercalate "/" ["api", "v1", "delisted"]
toPoolId :: SMASHPoolId -> Either TextDecodingError PoolId
toPoolId (SMASHPoolId pid) =
either (\_ -> decodePoolIdBech32 pid) Right (fromText @PoolId pid)
-- | Some default settings, overriding some of the library's default with
-- stricter values.
defaultManagerSettings :: ManagerSettings
defaultManagerSettings =
HTTPS.tlsManagerSettings
{ managerResponseTimeout = responseTimeoutMicro tenSeconds }
where
tenSeconds = 10_000_000 -- in μs
| Create a connection manager that supports TLS connections .
newManager :: MonadIO m => ManagerSettings -> m Manager
newManager = HTTPS.newTlsManagerWith
-- | Simply return a pool metadata url, unchanged
identityUrlBuilder :: UrlBuilder
identityUrlBuilder _poolId (StakePoolMetadataUrl urlText) _metadataHash =
note (InvalidUrlException url "Invalid URL") (parseURI url)
where
url = T.unpack urlText
-- | Build a URL from a metadata hash compatible with an aggregation registry
registryUrlBuilder :: URI -> UrlBuilder
registryUrlBuilder baseUrl pid _metadataUrl hash =
pure baseUrl{ uriPath = "/" <> metadaFetchEp pid hash }
-- | A smash GET request that reads the result at once into memory.
smashRequest
:: Tracer IO StakePoolMetadataFetchLog
-> URI
-> Manager
-> ExceptT String IO ByteString
smashRequest tr uri manager = getPayload
where
getPayload :: ExceptT String IO ByteString
getPayload = do
req <- withExceptT show $ except $ requestFromURI uri
liftIO $ traceWith tr $ MsgFetchSMASH uri
ExceptT
$ handle fromIOException
$ handle fromHttpException
$ withResponse req manager handleResponseStatus
handleResponseStatus response = case responseStatus response of
s | s == status200 -> do
let body = responseBody response
Right . BS.concat <$> brConsume body
s ->
pure $ Left $ mconcat
[ "The server replied with something unexpected: "
, show s
]
fromHttpException :: Monad m => HttpException -> m (Either String a)
fromHttpException = return . Left . ("HTTP exception: " <>) . show
| Gets the health status from the SMASH server . Returns
-- @Nothing@ if the server is unreachable.
healthCheck
:: Tracer IO StakePoolMetadataFetchLog
-> URI
-> Manager
-> IO (Maybe HealthStatusSMASH)
healthCheck tr uri manager = runExceptTLog $ do
pl <- smashRequest tr (
uri { uriPath = "/" <> healthCheckEP
, uriQuery = ""
, uriFragment = ""
}
)
manager
except . eitherDecodeStrict @HealthStatusSMASH $ pl
where
runExceptTLog
:: ExceptT String IO HealthStatusSMASH
-> IO (Maybe HealthStatusSMASH)
runExceptTLog action = runExceptT action >>= \case
Left msg ->
Nothing <$ traceWith tr (MsgFetchHealthCheckFailure msg)
Right health -> do
traceWith tr (MsgFetchHealthCheckSuccess health)
pure $ Just health
-- | Convert the result of @healthCheck@, which represents the
-- server response to our own @HealthCheckSMASH@ type, which is a
-- superset of it.
toHealthCheckSMASH :: Maybe HealthStatusSMASH -> HealthCheckSMASH
toHealthCheckSMASH = \case
(Just health)
| isHealthyStatus health -> Available
| otherwise -> Unavailable
_ -> Unreachable
isHealthyStatus :: HealthStatusSMASH -> Bool
isHealthyStatus (HealthStatusSMASH {..}) = T.toLower status == "ok"
fetchDelistedPools
:: Tracer IO StakePoolMetadataFetchLog
-> URI
-> Manager
-> IO (Maybe [PoolId])
fetchDelistedPools tr uri manager = runExceptTLog $ do
pl <- smashRequest tr
(uri { uriPath = "/" <> delistedEP , uriQuery = "", uriFragment = "" })
manager
smashPids <- except $ eitherDecodeStrict @[SMASHPoolId] pl
forM smashPids $ except . first getTextDecodingError . toPoolId
where
runExceptTLog :: ExceptT String IO [PoolId] -> IO (Maybe [PoolId])
runExceptTLog action = runExceptT action >>= \case
Left msg -> Nothing <$ traceWith tr (MsgFetchDelistedPoolsFailure msg)
Right meta ->
Just meta <$ traceWith tr (MsgFetchDelistedPoolsSuccess meta)
-- TODO: refactor/simplify this
fetchFromRemote
:: Tracer IO StakePoolMetadataFetchLog
-> [UrlBuilder]
-> Manager
-> PoolId
-> StakePoolMetadataUrl
-> StakePoolMetadataHash
-> IO (Maybe StakePoolMetadata)
fetchFromRemote tr builders manager pid url hash = runExceptTLog $ do
chunk <- getChunk `fromFirst` builders
when (BS.length chunk > 512) $ throwE
"Metadata exceeds max length of 512 bytes"
when (blake2b256 chunk /= coerce hash) $ throwE $ mconcat
[ "Metadata hash mismatch. Saw: "
, B8.unpack $ hex $ blake2b256 chunk
, ", but expected: "
, B8.unpack $ hex $ coerce @_ @ByteString hash
]
except $ eitherDecodeStrict chunk
where
runExceptTLog
:: ExceptT String IO StakePoolMetadata
-> IO (Maybe StakePoolMetadata)
runExceptTLog action = runExceptT action >>= \case
Left msg ->
Nothing <$ traceWith tr (MsgFetchPoolMetadataFailure hash msg)
Right meta ->
Just meta <$ traceWith tr (MsgFetchPoolMetadataSuccess hash meta)
-- Try each builder in order, but only if the previous builder led to an
-- IO exception. Other exceptions like HTTP exceptions are treated as
-- 'normal' responses from the an aggregation server and do not cause a
-- retry.
fromFirst _ [] =
throwE "Metadata server(s) didn't reply in a timely manner."
fromFirst action (builder:rest) = do
uri <- withExceptT show $ except $ builder pid url hash
action uri >>= \case
Nothing -> do
liftIO $ traceWith tr $ MsgFetchPoolMetadataFallback uri (null rest)
fromFirst action rest
Just chunk ->
pure chunk
getChunk :: URI -> ExceptT String IO (Maybe ByteString)
getChunk uri = do
req <- withExceptT show $ except $ requestFromURI uri
liftIO $ traceWith tr $ MsgFetchPoolMetadata hash uri
ExceptT
$ handle fromIOException
$ handle fromHttpException
$ withResponse req manager $ \res -> do
-- NOTE
-- Metadata are _supposed to_ be made of:
--
-- - A name (at most 50 UTF-8 bytes)
- An optional description ( at most 255 UTF-8 bytes )
- A ticker ( between 3 and 5 UTF-8 bytes )
--
-- So, the total, including a pretty JSON encoding with newlines ought
to be less than or equal to 512 bytes . For security reasons , we only
download the first 513 bytes and check the length at the
-- call-site.
case responseStatus res of
s | s == status200 -> do
let body = responseBody res
Right . Just . BL.toStrict <$> brReadSome body 513
s | s == status404 ->
pure $ Left "There's no known metadata for this pool."
s ->
pure $ Left $ mconcat
[ "The server replied with something unexpected: "
, show s
]
fromHttpException :: HttpException -> IO (Either String (Maybe a))
fromHttpException exception = do
traceWith tr $ MsgFetchPoolMetadataHttpException exception
pure $ Right Nothing
fromIOException :: Monad m => IOException -> m (Either String a)
fromIOException = pure . Left . ("IO exception: " <>) . show
--------------------------------------------------------------------------------
-- Types
--------------------------------------------------------------------------------
newtype SMASHPoolId = SMASHPoolId { poolId :: T.Text }
deriving newtype (Eq, Show, Ord)
deriving stock (Generic)
instance FromJSON SMASHPoolId where
parseJSON = genericParseJSON smashRecordTypeOptions{fieldLabelModifier=id}
| Parses the SMASH HealthCheck type from the SMASH API .
data HealthStatusSMASH = HealthStatusSMASH { status :: Text, version :: Text }
deriving stock (Generic, Show, Eq, Ord)
instance FromJSON HealthStatusSMASH where
parseJSON = genericParseJSON smashRecordTypeOptions
smashRecordTypeOptions :: Aeson.Options
smashRecordTypeOptions = Aeson.defaultOptions
{ fieldLabelModifier = camelTo2 '_' . dropWhile (== '_')
, omitNothingFields = True
}
| the health status of the SMASH server .
data HealthCheckSMASH
= Available -- server available
| Unavailable -- server reachable, but unavailable
could not get a response from the SMASH server
| NoSmashConfigured -- no SMASH server has been configured
deriving stock (Generic, Show, Eq, Ord)
--------------------------------------------------------------------------------
-- Logging
--------------------------------------------------------------------------------
data StakePoolMetadataFetchLog
= MsgFetchPoolMetadata StakePoolMetadataHash URI
| MsgFetchPoolMetadataSuccess StakePoolMetadataHash StakePoolMetadata
| MsgFetchPoolMetadataHttpException HttpException
| MsgFetchPoolMetadataFailure StakePoolMetadataHash String
| MsgFetchPoolMetadataFallback URI Bool
| MsgFetchSMASH URI
| MsgFetchDelistedPoolsFailure String
| MsgFetchDelistedPoolsSuccess [PoolId]
| MsgFetchHealthCheckFailure String
| MsgFetchHealthCheckSuccess HealthStatusSMASH
instance HasPrivacyAnnotation StakePoolMetadataFetchLog
instance HasSeverityAnnotation StakePoolMetadataFetchLog where
getSeverityAnnotation = \case
MsgFetchPoolMetadata{} -> Info
MsgFetchPoolMetadataSuccess{} -> Info
MsgFetchPoolMetadataHttpException{} -> Debug
MsgFetchPoolMetadataFailure{} -> Warning
MsgFetchPoolMetadataFallback{} -> Warning
MsgFetchSMASH{} -> Debug
MsgFetchDelistedPoolsFailure{} -> Warning
MsgFetchDelistedPoolsSuccess{} -> Info
MsgFetchHealthCheckFailure{} -> Warning
MsgFetchHealthCheckSuccess{} -> Info
instance ToText StakePoolMetadataFetchLog where
toText = \case
MsgFetchPoolMetadata hash uri -> mconcat
[ "Fetching metadata with hash ", pretty hash
, " from ", T.pack (show uri)
]
MsgFetchPoolMetadataSuccess hash meta -> mconcat
[ "Successfully fetched metadata with hash ", pretty hash
, ": ", T.pack (show meta)
]
MsgFetchPoolMetadataHttpException exception -> mconcat
[ "Exception occurred while fetching a pool metadata: "
, T.pack (displayException exception)
]
MsgFetchPoolMetadataFailure hash msg -> mconcat
[ "Failed to fetch metadata with hash "
, pretty hash, ": ", T.pack msg
]
MsgFetchPoolMetadataFallback uri noMoreUrls -> mconcat
[ "Couldn't reach server at ", T.pack (show uri), "."
, if noMoreUrls
then ""
else " Falling back using a different strategy."
]
MsgFetchSMASH uri -> mconcat
[ "Making a SMASH request to ", T.pack (show uri)
]
MsgFetchDelistedPoolsSuccess poolIds -> mconcat
[ "Successfully fetched delisted "
, T.pack (show . length $ poolIds)
, " pools."
]
MsgFetchDelistedPoolsFailure err -> mconcat
[ "Failed to fetch delisted pools: ", T.pack err
]
MsgFetchHealthCheckSuccess health -> mconcat
[ "Successfully checked health "
, T.pack (show health)
]
MsgFetchHealthCheckFailure err -> mconcat
[ "Failed to check health: ", T.pack err
]
| null | https://raw.githubusercontent.com/input-output-hk/cardano-wallet/c4af01b952268b57d3fcd1a0cb476d77dba4f166/lib/wallet/src/Cardano/Pool/Metadata.hs | haskell | # LANGUAGE Rank2Types #
|
License: Apache-2.0
HTTP-client(s) for fetching stake pool metadata from remote servers (directly
from pool operators, or from smash).
* Fetch
* Construct URLs
* re-exports
* Types
| Build the SMASH metadata fetch endpoint for a single pool. Does not
contain leading '/'.
TODO: use SMASH servant types
| Some default settings, overriding some of the library's default with
stricter values.
in μs
| Simply return a pool metadata url, unchanged
| Build a URL from a metadata hash compatible with an aggregation registry
| A smash GET request that reads the result at once into memory.
@Nothing@ if the server is unreachable.
| Convert the result of @healthCheck@, which represents the
server response to our own @HealthCheckSMASH@ type, which is a
superset of it.
TODO: refactor/simplify this
Try each builder in order, but only if the previous builder led to an
IO exception. Other exceptions like HTTP exceptions are treated as
'normal' responses from the an aggregation server and do not cause a
retry.
NOTE
Metadata are _supposed to_ be made of:
- A name (at most 50 UTF-8 bytes)
So, the total, including a pretty JSON encoding with newlines ought
call-site.
------------------------------------------------------------------------------
Types
------------------------------------------------------------------------------
server available
server reachable, but unavailable
no SMASH server has been configured
------------------------------------------------------------------------------
Logging
------------------------------------------------------------------------------ | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE NumericUnderscores #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
Copyright : © 2018 - 2020 IOHK
module Cardano.Pool.Metadata
(
fetchFromRemote
, StakePoolMetadataFetchLog (..)
, fetchDelistedPools
, HealthCheckSMASH(..)
, healthCheck
, isHealthyStatus
, toHealthCheckSMASH
, HealthStatusSMASH (..)
, UrlBuilder
, identityUrlBuilder
, registryUrlBuilder
, Manager
, newManager
, defaultManagerSettings
, SMASHPoolId (..)
) where
import Prelude
import Cardano.BM.Data.Severity
( Severity (..) )
import Cardano.BM.Data.Tracer
( HasPrivacyAnnotation (..), HasSeverityAnnotation (..) )
import Cardano.Pool.Metadata.Types
( StakePoolMetadata
, StakePoolMetadataHash (..)
, StakePoolMetadataUrl (..)
, UrlBuilder
)
import Cardano.Pool.Types
( PoolId, decodePoolIdBech32 )
import Cardano.Wallet.Primitive.AddressDerivation
( hex )
import Control.Error
( note )
import Control.Monad
( forM, when )
import Control.Monad.IO.Class
( MonadIO (..) )
import Control.Monad.Trans.Except
( ExceptT (..), except, runExceptT, throwE, withExceptT )
import Control.Tracer
( Tracer, traceWith )
import Crypto.Hash.Utils
( blake2b256 )
import Data.Aeson
( FromJSON
, Options (..)
, camelTo2
, eitherDecodeStrict
, genericParseJSON
, parseJSON
)
import Data.Bifunctor
( first )
import Data.ByteArray.Encoding
( Base (..), convertToBase )
import Data.ByteString
( ByteString )
import Data.Coerce
( coerce )
import Data.List
( intercalate )
import Data.Text
( Text )
import Data.Text.Class
( TextDecodingError (..), ToText (..), fromText )
import Fmt
( pretty )
import GHC.Generics
( Generic )
import Network.HTTP.Client
( HttpException (..)
, Manager
, ManagerSettings
, brConsume
, brReadSome
, managerResponseTimeout
, requestFromURI
, responseBody
, responseStatus
, responseTimeoutMicro
, withResponse
)
import Network.HTTP.Types.Status
( status200, status404 )
import Network.URI
( URI (..), parseURI )
import UnliftIO.Exception
( Exception (displayException), IOException, handle )
import qualified Data.Aeson as Aeson
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Network.HTTP.Client.TLS as HTTPS
metadaFetchEp :: PoolId -> StakePoolMetadataHash -> String
metadaFetchEp pid (StakePoolMetadataHash bytes)
= intercalate "/" (["api", "v1", "metadata"] ++ [pidStr, hashStr])
where
hashStr = T.unpack $ T.decodeUtf8 $ convertToBase Base16 bytes
pidStr = T.unpack $ toText pid
healthCheckEP :: String
healthCheckEP = T.unpack $ T.intercalate "/" ["api", "v1", "status"]
delistedEP :: String
delistedEP = T.unpack $ T.intercalate "/" ["api", "v1", "delisted"]
toPoolId :: SMASHPoolId -> Either TextDecodingError PoolId
toPoolId (SMASHPoolId pid) =
either (\_ -> decodePoolIdBech32 pid) Right (fromText @PoolId pid)
defaultManagerSettings :: ManagerSettings
defaultManagerSettings =
HTTPS.tlsManagerSettings
{ managerResponseTimeout = responseTimeoutMicro tenSeconds }
where
| Create a connection manager that supports TLS connections .
newManager :: MonadIO m => ManagerSettings -> m Manager
newManager = HTTPS.newTlsManagerWith
identityUrlBuilder :: UrlBuilder
identityUrlBuilder _poolId (StakePoolMetadataUrl urlText) _metadataHash =
note (InvalidUrlException url "Invalid URL") (parseURI url)
where
url = T.unpack urlText
registryUrlBuilder :: URI -> UrlBuilder
registryUrlBuilder baseUrl pid _metadataUrl hash =
pure baseUrl{ uriPath = "/" <> metadaFetchEp pid hash }
smashRequest
:: Tracer IO StakePoolMetadataFetchLog
-> URI
-> Manager
-> ExceptT String IO ByteString
smashRequest tr uri manager = getPayload
where
getPayload :: ExceptT String IO ByteString
getPayload = do
req <- withExceptT show $ except $ requestFromURI uri
liftIO $ traceWith tr $ MsgFetchSMASH uri
ExceptT
$ handle fromIOException
$ handle fromHttpException
$ withResponse req manager handleResponseStatus
handleResponseStatus response = case responseStatus response of
s | s == status200 -> do
let body = responseBody response
Right . BS.concat <$> brConsume body
s ->
pure $ Left $ mconcat
[ "The server replied with something unexpected: "
, show s
]
fromHttpException :: Monad m => HttpException -> m (Either String a)
fromHttpException = return . Left . ("HTTP exception: " <>) . show
| Gets the health status from the SMASH server . Returns
healthCheck
:: Tracer IO StakePoolMetadataFetchLog
-> URI
-> Manager
-> IO (Maybe HealthStatusSMASH)
healthCheck tr uri manager = runExceptTLog $ do
pl <- smashRequest tr (
uri { uriPath = "/" <> healthCheckEP
, uriQuery = ""
, uriFragment = ""
}
)
manager
except . eitherDecodeStrict @HealthStatusSMASH $ pl
where
runExceptTLog
:: ExceptT String IO HealthStatusSMASH
-> IO (Maybe HealthStatusSMASH)
runExceptTLog action = runExceptT action >>= \case
Left msg ->
Nothing <$ traceWith tr (MsgFetchHealthCheckFailure msg)
Right health -> do
traceWith tr (MsgFetchHealthCheckSuccess health)
pure $ Just health
toHealthCheckSMASH :: Maybe HealthStatusSMASH -> HealthCheckSMASH
toHealthCheckSMASH = \case
(Just health)
| isHealthyStatus health -> Available
| otherwise -> Unavailable
_ -> Unreachable
isHealthyStatus :: HealthStatusSMASH -> Bool
isHealthyStatus (HealthStatusSMASH {..}) = T.toLower status == "ok"
fetchDelistedPools
:: Tracer IO StakePoolMetadataFetchLog
-> URI
-> Manager
-> IO (Maybe [PoolId])
fetchDelistedPools tr uri manager = runExceptTLog $ do
pl <- smashRequest tr
(uri { uriPath = "/" <> delistedEP , uriQuery = "", uriFragment = "" })
manager
smashPids <- except $ eitherDecodeStrict @[SMASHPoolId] pl
forM smashPids $ except . first getTextDecodingError . toPoolId
where
runExceptTLog :: ExceptT String IO [PoolId] -> IO (Maybe [PoolId])
runExceptTLog action = runExceptT action >>= \case
Left msg -> Nothing <$ traceWith tr (MsgFetchDelistedPoolsFailure msg)
Right meta ->
Just meta <$ traceWith tr (MsgFetchDelistedPoolsSuccess meta)
fetchFromRemote
:: Tracer IO StakePoolMetadataFetchLog
-> [UrlBuilder]
-> Manager
-> PoolId
-> StakePoolMetadataUrl
-> StakePoolMetadataHash
-> IO (Maybe StakePoolMetadata)
fetchFromRemote tr builders manager pid url hash = runExceptTLog $ do
chunk <- getChunk `fromFirst` builders
when (BS.length chunk > 512) $ throwE
"Metadata exceeds max length of 512 bytes"
when (blake2b256 chunk /= coerce hash) $ throwE $ mconcat
[ "Metadata hash mismatch. Saw: "
, B8.unpack $ hex $ blake2b256 chunk
, ", but expected: "
, B8.unpack $ hex $ coerce @_ @ByteString hash
]
except $ eitherDecodeStrict chunk
where
runExceptTLog
:: ExceptT String IO StakePoolMetadata
-> IO (Maybe StakePoolMetadata)
runExceptTLog action = runExceptT action >>= \case
Left msg ->
Nothing <$ traceWith tr (MsgFetchPoolMetadataFailure hash msg)
Right meta ->
Just meta <$ traceWith tr (MsgFetchPoolMetadataSuccess hash meta)
fromFirst _ [] =
throwE "Metadata server(s) didn't reply in a timely manner."
fromFirst action (builder:rest) = do
uri <- withExceptT show $ except $ builder pid url hash
action uri >>= \case
Nothing -> do
liftIO $ traceWith tr $ MsgFetchPoolMetadataFallback uri (null rest)
fromFirst action rest
Just chunk ->
pure chunk
getChunk :: URI -> ExceptT String IO (Maybe ByteString)
getChunk uri = do
req <- withExceptT show $ except $ requestFromURI uri
liftIO $ traceWith tr $ MsgFetchPoolMetadata hash uri
ExceptT
$ handle fromIOException
$ handle fromHttpException
$ withResponse req manager $ \res -> do
- An optional description ( at most 255 UTF-8 bytes )
- A ticker ( between 3 and 5 UTF-8 bytes )
to be less than or equal to 512 bytes . For security reasons , we only
download the first 513 bytes and check the length at the
case responseStatus res of
s | s == status200 -> do
let body = responseBody res
Right . Just . BL.toStrict <$> brReadSome body 513
s | s == status404 ->
pure $ Left "There's no known metadata for this pool."
s ->
pure $ Left $ mconcat
[ "The server replied with something unexpected: "
, show s
]
fromHttpException :: HttpException -> IO (Either String (Maybe a))
fromHttpException exception = do
traceWith tr $ MsgFetchPoolMetadataHttpException exception
pure $ Right Nothing
fromIOException :: Monad m => IOException -> m (Either String a)
fromIOException = pure . Left . ("IO exception: " <>) . show
newtype SMASHPoolId = SMASHPoolId { poolId :: T.Text }
deriving newtype (Eq, Show, Ord)
deriving stock (Generic)
instance FromJSON SMASHPoolId where
parseJSON = genericParseJSON smashRecordTypeOptions{fieldLabelModifier=id}
| Parses the SMASH HealthCheck type from the SMASH API .
data HealthStatusSMASH = HealthStatusSMASH { status :: Text, version :: Text }
deriving stock (Generic, Show, Eq, Ord)
instance FromJSON HealthStatusSMASH where
parseJSON = genericParseJSON smashRecordTypeOptions
smashRecordTypeOptions :: Aeson.Options
smashRecordTypeOptions = Aeson.defaultOptions
{ fieldLabelModifier = camelTo2 '_' . dropWhile (== '_')
, omitNothingFields = True
}
| the health status of the SMASH server .
data HealthCheckSMASH
could not get a response from the SMASH server
deriving stock (Generic, Show, Eq, Ord)
data StakePoolMetadataFetchLog
= MsgFetchPoolMetadata StakePoolMetadataHash URI
| MsgFetchPoolMetadataSuccess StakePoolMetadataHash StakePoolMetadata
| MsgFetchPoolMetadataHttpException HttpException
| MsgFetchPoolMetadataFailure StakePoolMetadataHash String
| MsgFetchPoolMetadataFallback URI Bool
| MsgFetchSMASH URI
| MsgFetchDelistedPoolsFailure String
| MsgFetchDelistedPoolsSuccess [PoolId]
| MsgFetchHealthCheckFailure String
| MsgFetchHealthCheckSuccess HealthStatusSMASH
instance HasPrivacyAnnotation StakePoolMetadataFetchLog
instance HasSeverityAnnotation StakePoolMetadataFetchLog where
getSeverityAnnotation = \case
MsgFetchPoolMetadata{} -> Info
MsgFetchPoolMetadataSuccess{} -> Info
MsgFetchPoolMetadataHttpException{} -> Debug
MsgFetchPoolMetadataFailure{} -> Warning
MsgFetchPoolMetadataFallback{} -> Warning
MsgFetchSMASH{} -> Debug
MsgFetchDelistedPoolsFailure{} -> Warning
MsgFetchDelistedPoolsSuccess{} -> Info
MsgFetchHealthCheckFailure{} -> Warning
MsgFetchHealthCheckSuccess{} -> Info
instance ToText StakePoolMetadataFetchLog where
toText = \case
MsgFetchPoolMetadata hash uri -> mconcat
[ "Fetching metadata with hash ", pretty hash
, " from ", T.pack (show uri)
]
MsgFetchPoolMetadataSuccess hash meta -> mconcat
[ "Successfully fetched metadata with hash ", pretty hash
, ": ", T.pack (show meta)
]
MsgFetchPoolMetadataHttpException exception -> mconcat
[ "Exception occurred while fetching a pool metadata: "
, T.pack (displayException exception)
]
MsgFetchPoolMetadataFailure hash msg -> mconcat
[ "Failed to fetch metadata with hash "
, pretty hash, ": ", T.pack msg
]
MsgFetchPoolMetadataFallback uri noMoreUrls -> mconcat
[ "Couldn't reach server at ", T.pack (show uri), "."
, if noMoreUrls
then ""
else " Falling back using a different strategy."
]
MsgFetchSMASH uri -> mconcat
[ "Making a SMASH request to ", T.pack (show uri)
]
MsgFetchDelistedPoolsSuccess poolIds -> mconcat
[ "Successfully fetched delisted "
, T.pack (show . length $ poolIds)
, " pools."
]
MsgFetchDelistedPoolsFailure err -> mconcat
[ "Failed to fetch delisted pools: ", T.pack err
]
MsgFetchHealthCheckSuccess health -> mconcat
[ "Successfully checked health "
, T.pack (show health)
]
MsgFetchHealthCheckFailure err -> mconcat
[ "Failed to check health: ", T.pack err
]
|
06292a6e566cd85a901115387fd4b3c52c2a8c63ab1d538b425b7f9d51fcf858 | well-typed/cborg | Issue67.hs | Avoid some warnings in case the backend is n't being used
# OPTIONS_GHC -fno - warn - unused - binds -fno - warn - unused - imports #
# LANGUAGE CPP #
Issue # 67 : Invalid compilation with backend .
--
Reported in the wild , and cribbed from -bench
module Tests.Regress.Issue67
( testTree -- :: TestTree
) where
import Data.Int
import Data.Monoid ((<>))
import Data.Word
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>), (<*>))
#endif
import qualified Data.ByteString.Lazy as L
import qualified Data.Vector as V
import Codec.Serialise
import Test.QuickCheck
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.Tasty.HUnit
--------------------------------------------------------------------------------
-- Tests and properties
data SomeData = SomeData !Int64 !Word8 !Double
deriving (Eq, Show)
instance Serialise SomeData where
decode = SomeData <$> decode <*> decode <*> decode
# INLINE decode #
encode (SomeData a b c) = encode a <> encode b <> encode c
# INLINE encode #
newtype ArbSomeData = ArbSomeData { toSomeData :: SomeData }
deriving (Show, Eq)
instance Arbitrary ArbSomeData where
arbitrary = fmap ArbSomeData $ SomeData
<$> arbitrary
<*> arbitrary
<*> arbitrary
--------------------------------------------------------------------------------
-- TestTree API
to :: V.Vector SomeData -> L.ByteString
to = serialise
from :: L.ByteString -> Maybe (V.Vector SomeData)
from = Just . deserialise
repro1 :: Bool
repro1 =
let v = V.fromList [SomeData 53169 70 55.3817683321392]
in from (to v) == Just v
prop_vectorRoundtrip :: [ArbSomeData] -> Bool
prop_vectorRoundtrip list =
let v = V.fromList (map toSomeData list)
in from (to v) == Just v
testTree :: TestTree
testTree =
#if defined(__GLASGOW_HASKELL_LLVM__)
testGroup "Issue 67 - LLVM bogons"
[ testCase "simple reproduction case" (True @=? repro1)
, testProperty "vector roundtrip works" prop_vectorRoundtrip
]
#else
testGroup "Issue 67 - LLVM bogons (NO LLVM - SKIPPING)"
[ testCase "simple reproduction case (SKIPPED)" (True @=? True)
, testCase "vector roundtrip works (SKIPPED)" (True @=? True)
]
#endif
| null | https://raw.githubusercontent.com/well-typed/cborg/9be3fd5437f9d2ec1df784d5d939efb9a85fd1fb/serialise/tests/Tests/Regress/Issue67.hs | haskell |
:: TestTree
------------------------------------------------------------------------------
Tests and properties
------------------------------------------------------------------------------
TestTree API | Avoid some warnings in case the backend is n't being used
# OPTIONS_GHC -fno - warn - unused - binds -fno - warn - unused - imports #
# LANGUAGE CPP #
Issue # 67 : Invalid compilation with backend .
Reported in the wild , and cribbed from -bench
module Tests.Regress.Issue67
) where
import Data.Int
import Data.Monoid ((<>))
import Data.Word
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>), (<*>))
#endif
import qualified Data.ByteString.Lazy as L
import qualified Data.Vector as V
import Codec.Serialise
import Test.QuickCheck
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.Tasty.HUnit
data SomeData = SomeData !Int64 !Word8 !Double
deriving (Eq, Show)
instance Serialise SomeData where
decode = SomeData <$> decode <*> decode <*> decode
# INLINE decode #
encode (SomeData a b c) = encode a <> encode b <> encode c
# INLINE encode #
newtype ArbSomeData = ArbSomeData { toSomeData :: SomeData }
deriving (Show, Eq)
instance Arbitrary ArbSomeData where
arbitrary = fmap ArbSomeData $ SomeData
<$> arbitrary
<*> arbitrary
<*> arbitrary
to :: V.Vector SomeData -> L.ByteString
to = serialise
from :: L.ByteString -> Maybe (V.Vector SomeData)
from = Just . deserialise
repro1 :: Bool
repro1 =
let v = V.fromList [SomeData 53169 70 55.3817683321392]
in from (to v) == Just v
prop_vectorRoundtrip :: [ArbSomeData] -> Bool
prop_vectorRoundtrip list =
let v = V.fromList (map toSomeData list)
in from (to v) == Just v
testTree :: TestTree
testTree =
#if defined(__GLASGOW_HASKELL_LLVM__)
testGroup "Issue 67 - LLVM bogons"
[ testCase "simple reproduction case" (True @=? repro1)
, testProperty "vector roundtrip works" prop_vectorRoundtrip
]
#else
testGroup "Issue 67 - LLVM bogons (NO LLVM - SKIPPING)"
[ testCase "simple reproduction case (SKIPPED)" (True @=? True)
, testCase "vector roundtrip works (SKIPPED)" (True @=? True)
]
#endif
|
7206fce5b7db65c7a78abe2a78e91adcadb679b536b407af1994f4c0b5b6a440 | uw-unsat/serval | objdump.rkt | #lang rosette
(require
"base.rkt"
"interp.rkt"
"decode.rkt"
(prefix-in core: "../lib/core.rkt")
racket/list
racket/match
racket/port
racket/string
syntax/strip-context)
(provide
(rename-out [literal-read read]
[literal-read-syntax read-syntax])
compile-objdump-program
interpret-objdump-program
(all-from-out "base.rkt")
(all-from-out "interp.rkt"))
(define (literal-read in)
(syntax->datum
(literal-read-syntax #f in)))
(define (is-branch code)
(match code
; only use "core" branch instrs
[(or "beq" "bne" "blt" "bge" "bltu" "bgeu") #t]
[_ #f]))
(define (is-jump code)
(match code
["jal" #t]
; jalr is not relative: can be handled like every other instr
[_ #f]))
(define (parse-operand code idx str)
(match str
0xXXXX is always hex
[(pregexp #px"^0x([0-9a-f]+)$" (list _ num))
(with-syntax ([n (string->number num 16)])
#'n)]
The third operand of branches is in hex
[(pregexp #px"^[0-9a-f]+$" (list num))
#:when (or (and (= idx 2) (is-branch code))
(and (= idx 1) (is-jump code)))
(with-syntax ([n (string->number num 16)])
#'n)]
Immediate using only 0 - 9 : probably a base-10 value ,
; double check that this isn't a branch instr.
[(pregexp #px"^-?[0-9]+$" (list num))
(when (or (is-jump code) (is-branch code))
(error "Base 10 immediate in branch instr"))
(with-syntax ([n (string->number num 10)])
#'n)]
Memory offset e.g. , " 4(a5 ) "
[(pregexp #px"^(-?[0-9]+)\\((.+)\\)$" (list _ off reg))
(with-syntax ([n (string->number off)]
[r (string->symbol reg)])
#'(offset n r))]
Match amo reg address , e.g. " ( a5 ) "
[(pregexp #px"^\\((.+)\\)$" (list _ reg))
(with-syntax ([r (string->symbol reg)])
#'(r))]
; Anything else we don't recognize is a register.
; Have to be careful here otherwise registers like "a5"
might look like numbers .
[else (with-syntax ([s (string->symbol str)]) #'s)]))
(define (read-instructions in)
(datum->syntax #f
(for/list ([line (port->lines in)])
(match line
[(pregexp #px"([0-9a-f]+) <.+> ([a-f0-9]+)[ \t]+(\\S+)[ \t]*(\\S*)" (list _ addr raw code ops))
(with-syntax ([a (string->number addr 16)]
[os (for/list ([o (string-split ops ",")]
[idx (range 3)])
(parse-operand code idx o))]
[size (/ (string-length raw) 2)]
[o (string->symbol code)]
[r raw])
(strip-context
#'(a (o . os) #:size size #:raw r)))]))))
(define (read-header in)
(define line (read-line in))
(match line
[(pregexp #px"^architecture: (.+), flags 0x(.+):$" (list _ arch flags))
(with-syntax ([arch (string->symbol arch)]
[fl (string->number flags 16)])
(append (list #'(define flags 'fl)
#'(define architecture 'arch))
(read-header in)))]
[(pregexp #px"Disassembly") null]
[_ (read-header in)]))
(define (literal-read-syntax src in)
(define header-stx (datum->syntax #f (read-header in)))
(strip-context
#`(module anything racket/base
(provide (all-defined-out))
#,@header-stx
(define instructions '#,(read-instructions in)))))
(define (parse-objdump-instr i #:addr i-addr #:size size #:raw [raw #f])
(define rawbv (bv (string->number raw 16) (* 8 size)))
(decode rawbv))
(define (compile-objdump-program instructions)
(core:bug-on (! (core:concrete?)))
(core:bug-on (null? instructions) #:msg "Cannot have empty objdump program.")
(define base (bv (car (list-ref instructions 0)) (XLEN)))
(define insn-hash
(for/hash ([insn instructions])
(define addr (bv (car insn) (XLEN)))
(define instr
(match (cdr insn)
[(list (list i ...) '#:size size '#:raw raw)
(parse-objdump-instr i #:addr addr #:size size #:raw raw)]
[(list i ...) #:when (riscv-default-size)
(parse-objdump-instr i #:addr addr #:size (riscv-default-size))]
[_
(core:bug #:msg (format "Bad objdump ~e" instr))]))
(values addr instr)))
(program base insn-hash))
(define (interpret-objdump-program cpu instructions)
(define program (compile-objdump-program instructions))
(interpret-program cpu program)) | null | https://raw.githubusercontent.com/uw-unsat/serval/be11ecccf03f81b8bd0557acf8385a6a5d4f51ed/serval/riscv/objdump.rkt | racket | only use "core" branch instrs
jalr is not relative: can be handled like every other instr
double check that this isn't a branch instr.
Anything else we don't recognize is a register.
Have to be careful here otherwise registers like "a5" | #lang rosette
(require
"base.rkt"
"interp.rkt"
"decode.rkt"
(prefix-in core: "../lib/core.rkt")
racket/list
racket/match
racket/port
racket/string
syntax/strip-context)
(provide
(rename-out [literal-read read]
[literal-read-syntax read-syntax])
compile-objdump-program
interpret-objdump-program
(all-from-out "base.rkt")
(all-from-out "interp.rkt"))
(define (literal-read in)
(syntax->datum
(literal-read-syntax #f in)))
(define (is-branch code)
(match code
[(or "beq" "bne" "blt" "bge" "bltu" "bgeu") #t]
[_ #f]))
(define (is-jump code)
(match code
["jal" #t]
[_ #f]))
(define (parse-operand code idx str)
(match str
0xXXXX is always hex
[(pregexp #px"^0x([0-9a-f]+)$" (list _ num))
(with-syntax ([n (string->number num 16)])
#'n)]
The third operand of branches is in hex
[(pregexp #px"^[0-9a-f]+$" (list num))
#:when (or (and (= idx 2) (is-branch code))
(and (= idx 1) (is-jump code)))
(with-syntax ([n (string->number num 16)])
#'n)]
Immediate using only 0 - 9 : probably a base-10 value ,
[(pregexp #px"^-?[0-9]+$" (list num))
(when (or (is-jump code) (is-branch code))
(error "Base 10 immediate in branch instr"))
(with-syntax ([n (string->number num 10)])
#'n)]
Memory offset e.g. , " 4(a5 ) "
[(pregexp #px"^(-?[0-9]+)\\((.+)\\)$" (list _ off reg))
(with-syntax ([n (string->number off)]
[r (string->symbol reg)])
#'(offset n r))]
Match amo reg address , e.g. " ( a5 ) "
[(pregexp #px"^\\((.+)\\)$" (list _ reg))
(with-syntax ([r (string->symbol reg)])
#'(r))]
might look like numbers .
[else (with-syntax ([s (string->symbol str)]) #'s)]))
(define (read-instructions in)
(datum->syntax #f
(for/list ([line (port->lines in)])
(match line
[(pregexp #px"([0-9a-f]+) <.+> ([a-f0-9]+)[ \t]+(\\S+)[ \t]*(\\S*)" (list _ addr raw code ops))
(with-syntax ([a (string->number addr 16)]
[os (for/list ([o (string-split ops ",")]
[idx (range 3)])
(parse-operand code idx o))]
[size (/ (string-length raw) 2)]
[o (string->symbol code)]
[r raw])
(strip-context
#'(a (o . os) #:size size #:raw r)))]))))
(define (read-header in)
(define line (read-line in))
(match line
[(pregexp #px"^architecture: (.+), flags 0x(.+):$" (list _ arch flags))
(with-syntax ([arch (string->symbol arch)]
[fl (string->number flags 16)])
(append (list #'(define flags 'fl)
#'(define architecture 'arch))
(read-header in)))]
[(pregexp #px"Disassembly") null]
[_ (read-header in)]))
(define (literal-read-syntax src in)
(define header-stx (datum->syntax #f (read-header in)))
(strip-context
#`(module anything racket/base
(provide (all-defined-out))
#,@header-stx
(define instructions '#,(read-instructions in)))))
(define (parse-objdump-instr i #:addr i-addr #:size size #:raw [raw #f])
(define rawbv (bv (string->number raw 16) (* 8 size)))
(decode rawbv))
(define (compile-objdump-program instructions)
(core:bug-on (! (core:concrete?)))
(core:bug-on (null? instructions) #:msg "Cannot have empty objdump program.")
(define base (bv (car (list-ref instructions 0)) (XLEN)))
(define insn-hash
(for/hash ([insn instructions])
(define addr (bv (car insn) (XLEN)))
(define instr
(match (cdr insn)
[(list (list i ...) '#:size size '#:raw raw)
(parse-objdump-instr i #:addr addr #:size size #:raw raw)]
[(list i ...) #:when (riscv-default-size)
(parse-objdump-instr i #:addr addr #:size (riscv-default-size))]
[_
(core:bug #:msg (format "Bad objdump ~e" instr))]))
(values addr instr)))
(program base insn-hash))
(define (interpret-objdump-program cpu instructions)
(define program (compile-objdump-program instructions))
(interpret-program cpu program)) |
486f8eba15f5afe6d2b5c74563d638ddec2d7b956f3c8a48045a48fc8ccebbba | windorg/app-old | Show.hs | module Web.View.Inbox.Show where
import Web.Helper.View
import Web.View.Prelude
import Web.ViewTypes
data InboxView = InboxView {unreadReplies :: [ReplyV]}
instance View InboxView where
html InboxView{..} =
[hsx|
<nav>
<ol class="breadcrumb">
<li class="breadcrumb-item"><a href={BoardsAction}>Boards</a></li>
<li class="breadcrumb-item active">Inbox</li>
</ol>
</nav>
<h1 class="mb-4">Inbox</h1>
<div class="woc-inbox">
{forEach unreadReplies renderReply}
</div>
|]
renderReply :: ReplyV -> Html
renderReply replyV =
[hsx|
<div class="reply media">
{gravatar}
<div class="media-body ml-2 mt-n2">
<div class="mb-1">
<span class="text-muted small">
{authorName}
<span>
<a href={pathTo (ShowCardAction (get #cardId replyV)) <> "#reply-" <> show (get #id reply)}>
{renderTimestamp createdAt}
</a>
</span>
<!-- We won't render the "delete" button to not confuse people into thinking delete = mark as read -->
<div class="ml-2 d-inline">
{renderReplyMarkAsReadButton reply}
</div>
</span>
</div>
<div class="rendered-content small">{renderMarkdown content}</div>
</div>
</div>
|]
where
reply@Reply{..} = get #reply replyV
authorName = case get #author replyV of
Nothing -> [hsx|<span class="mr-2 font-weight-bold">[deleted]</span>|]
Just author ->
[hsx|
<span class="mr-2 font-weight-bold">
<a href={ShowUserAction (get #id author)}>{get #displayName author}</a>
</span>
|]
gravatar = case get #author replyV of
Nothing -> [hsx|<span>{gravatarSmall ""}</span>|]
Just author -> [hsx|<a href={ShowUserAction (get #id author)}>{gravatarSmall (get #email author)}</a>|]
renderReplyMarkAsReadButton :: Reply -> Html
renderReplyMarkAsReadButton reply =
[hsx|
<form class="d-inline" method="POST" action={UpdateMarkReplyAsReadAction (get #id reply) (show replySource)}>
<button class="btn btn-tiny btn-outline-info">Mark as read</button>
</form>
|]
where
replySource = ReplySourceInbox
| null | https://raw.githubusercontent.com/windorg/app-old/ed9c5322c8ab8a0275bdcd479be12a3f230da8c9/Web/View/Inbox/Show.hs | haskell | We won't render the "delete" button to not confuse people into thinking delete = mark as read --> | module Web.View.Inbox.Show where
import Web.Helper.View
import Web.View.Prelude
import Web.ViewTypes
data InboxView = InboxView {unreadReplies :: [ReplyV]}
instance View InboxView where
html InboxView{..} =
[hsx|
<nav>
<ol class="breadcrumb">
<li class="breadcrumb-item"><a href={BoardsAction}>Boards</a></li>
<li class="breadcrumb-item active">Inbox</li>
</ol>
</nav>
<h1 class="mb-4">Inbox</h1>
<div class="woc-inbox">
{forEach unreadReplies renderReply}
</div>
|]
renderReply :: ReplyV -> Html
renderReply replyV =
[hsx|
<div class="reply media">
{gravatar}
<div class="media-body ml-2 mt-n2">
<div class="mb-1">
<span class="text-muted small">
{authorName}
<span>
<a href={pathTo (ShowCardAction (get #cardId replyV)) <> "#reply-" <> show (get #id reply)}>
{renderTimestamp createdAt}
</a>
</span>
<div class="ml-2 d-inline">
{renderReplyMarkAsReadButton reply}
</div>
</span>
</div>
<div class="rendered-content small">{renderMarkdown content}</div>
</div>
</div>
|]
where
reply@Reply{..} = get #reply replyV
authorName = case get #author replyV of
Nothing -> [hsx|<span class="mr-2 font-weight-bold">[deleted]</span>|]
Just author ->
[hsx|
<span class="mr-2 font-weight-bold">
<a href={ShowUserAction (get #id author)}>{get #displayName author}</a>
</span>
|]
gravatar = case get #author replyV of
Nothing -> [hsx|<span>{gravatarSmall ""}</span>|]
Just author -> [hsx|<a href={ShowUserAction (get #id author)}>{gravatarSmall (get #email author)}</a>|]
renderReplyMarkAsReadButton :: Reply -> Html
renderReplyMarkAsReadButton reply =
[hsx|
<form class="d-inline" method="POST" action={UpdateMarkReplyAsReadAction (get #id reply) (show replySource)}>
<button class="btn btn-tiny btn-outline-info">Mark as read</button>
</form>
|]
where
replySource = ReplySourceInbox
|
cc7e02784698a69440738fdfdac35d1341be70ffd2c9d4f4e59c805a99bd4f3d | Haskell-OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator | Test6.hs | CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiWayIf #-}
| Contains the types generated from the schema Test6
module OpenAPI.Types.Test6 where
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified OpenAPI.Common
import OpenAPI.TypeAlias
| Defines the enum schema located at @components.schemas . Test6@ in the specification .
--
--
data Test6 =
Test6Other Data.Aeson.Types.Internal.Value -- ^ This case is used if the value encountered during decoding does not match any of the provided cases in the specification.
| Test6Typed Data.Text.Internal.Text -- ^ This constructor can be used to send values to the server which are not present in the specification yet.
| Test6EnumHello -- ^ Represents the JSON value @"hello"@
| Test6EnumThere -- ^ Represents the JSON value @"there"@
deriving (GHC.Show.Show, GHC.Classes.Eq)
instance Data.Aeson.Types.ToJSON.ToJSON Test6
where toJSON (Test6Other val) = val
toJSON (Test6Typed val) = Data.Aeson.Types.ToJSON.toJSON val
toJSON (Test6EnumHello) = "hello"
toJSON (Test6EnumThere) = "there"
instance Data.Aeson.Types.FromJSON.FromJSON Test6
where parseJSON val = GHC.Base.pure (if | val GHC.Classes.== "hello" -> Test6EnumHello
| val GHC.Classes.== "there" -> Test6EnumThere
| GHC.Base.otherwise -> Test6Other val)
| null | https://raw.githubusercontent.com/Haskell-OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator/5d8f5da48273d006dd0e2cdb6d2425fb6b52f0c5/testing/golden-output/src/OpenAPI/Types/Test6.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE MultiWayIf #
^ This case is used if the value encountered during decoding does not match any of the provided cases in the specification.
^ This constructor can be used to send values to the server which are not present in the specification yet.
^ Represents the JSON value @"hello"@
^ Represents the JSON value @"there"@ | CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
| Contains the types generated from the schema Test6
module OpenAPI.Types.Test6 where
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified OpenAPI.Common
import OpenAPI.TypeAlias
| Defines the enum schema located at @components.schemas . Test6@ in the specification .
data Test6 =
deriving (GHC.Show.Show, GHC.Classes.Eq)
instance Data.Aeson.Types.ToJSON.ToJSON Test6
where toJSON (Test6Other val) = val
toJSON (Test6Typed val) = Data.Aeson.Types.ToJSON.toJSON val
toJSON (Test6EnumHello) = "hello"
toJSON (Test6EnumThere) = "there"
instance Data.Aeson.Types.FromJSON.FromJSON Test6
where parseJSON val = GHC.Base.pure (if | val GHC.Classes.== "hello" -> Test6EnumHello
| val GHC.Classes.== "there" -> Test6EnumThere
| GHC.Base.otherwise -> Test6Other val)
|
c5d2d4aaf84ecacd711b4d29cb93f3ddac68dcc7d77e837da366dc4ac15447f3 | mwunsch/overscan | factories.rkt | #lang racket/base
(require ffi/unsafe/introspection
racket/class
racket/contract
(only-in racket/list first last)
"private/core.rkt"
gstreamer/clock
gstreamer/element
gstreamer/bin
gstreamer/pipeline
gstreamer/device)
(provide (contract-out [element-factory%-find
(-> string? (or/c false/c
(is-a?/c element-factory%)))]
[element-factory%-make
(->* (string?)
((or/c string? false/c)
#:class (subclass?/c element%))
(or/c false/c
(is-a?/c element%)))]
[pad%-new-from-template
(->* (pad-template?)
((or/c string? false/c))
(or/c (is-a?/c pad%)
false/c))]
[ghost-pad%-new
(-> (or/c string? false/c) (is-a?/c pad%)
(or/c (is-a?/c ghost-pad%)
false/c))]
[ghost-pad%-new-no-target
(-> (or/c string? false/c) pad-direction?
(or/c (is-a?/c ghost-pad%)
false/c))]
[bin%-new
(->* ()
((or/c string? false/c))
(is-a?/c bin%))]
[bin%-compose
(-> (or/c string? false/c)
(is-a?/c element%) (is-a?/c element%) ...
(or/c (is-a?/c bin%) false/c))]
[pipeline%-new
(->* ()
((or/c string? false/c))
(is-a?/c pipeline%))]
[pipeline%-compose
(-> (or/c string? false/c)
(is-a?/c element%) ...
(or/c (is-a?/c pipeline%) false/c))]
[obtain-system-clock
(-> (is-a?/c clock%))]
[parse/launch
(-> string? (or/c (is-a?/c element%) false/c))]
[device-monitor%-new
(-> (is-a?/c device-monitor%))]))
(define (element-factory%-find name)
(let ([factory (gst-element-factory 'find name)])
(and factory
(new element-factory% [pointer factory]))))
(define (element-factory%-make factory-name [name #f]
#:class [factory% element%])
(let ([el (gst-element-factory 'make factory-name name)])
(and el
(new factory% [pointer el]))))
(define (pad%-new-from-template templ [name #f])
(let ([pad (gst-pad 'new_from_template templ name)])
(and pad
(new pad% [pointer pad]))))
(define (ghost-pad%-new name target)
(let ([ghost (gst-ghost-pad 'new name target)])
(and ghost
(new ghost-pad% [pointer ghost]))))
(define (ghost-pad%-new-no-target name dir)
(let ([ghost (gst-ghost-pad 'new_no_target name dir)])
(and ghost
(new ghost-pad% [pointer ghost]))))
(define (bin%-new [name #f])
(new bin% [pointer (gst-bin 'new name)]))
(define (bin%-compose name el . els)
(let* ([bin (bin%-new name)]
[sink el]
[source (if (null? els) el (last els))])
(and (send/apply bin add-many el els)
(when (pair? els)
(send/apply el link-many els))
(let ([sink-pad (send sink get-static-pad "sink")])
(when sink-pad
(send bin add-pad (ghost-pad%-new "sink" sink-pad))))
(let ([source-pad (send source get-static-pad "src")])
(when source-pad
(send bin add-pad (ghost-pad%-new "src" source-pad))))
bin)))
(define (pipeline%-new [name #f])
(new pipeline% [pointer (gst-pipeline 'new name)]))
(define (pipeline%-compose name . els)
(let* ([pl (pipeline%-new name)]
[bin (apply bin%-compose #f els)])
(and bin
(send pl add bin)
pl)))
(define (obtain-system-clock)
(new clock% [pointer ((gst 'SystemClock) 'obtain)]))
(define (parse/launch description)
(let ([parsed ((gst 'parse_launch) description)])
(and parsed
(new element% [pointer parsed]))))
(define (device-monitor%-new)
(let ([monitor ((gst 'DeviceMonitor) 'new)])
(new device-monitor% [pointer monitor])))
| null | https://raw.githubusercontent.com/mwunsch/overscan/f198e6b4c1f64cf5720e66ab5ad27fdc4b9e67e9/gstreamer/factories.rkt | racket | #lang racket/base
(require ffi/unsafe/introspection
racket/class
racket/contract
(only-in racket/list first last)
"private/core.rkt"
gstreamer/clock
gstreamer/element
gstreamer/bin
gstreamer/pipeline
gstreamer/device)
(provide (contract-out [element-factory%-find
(-> string? (or/c false/c
(is-a?/c element-factory%)))]
[element-factory%-make
(->* (string?)
((or/c string? false/c)
#:class (subclass?/c element%))
(or/c false/c
(is-a?/c element%)))]
[pad%-new-from-template
(->* (pad-template?)
((or/c string? false/c))
(or/c (is-a?/c pad%)
false/c))]
[ghost-pad%-new
(-> (or/c string? false/c) (is-a?/c pad%)
(or/c (is-a?/c ghost-pad%)
false/c))]
[ghost-pad%-new-no-target
(-> (or/c string? false/c) pad-direction?
(or/c (is-a?/c ghost-pad%)
false/c))]
[bin%-new
(->* ()
((or/c string? false/c))
(is-a?/c bin%))]
[bin%-compose
(-> (or/c string? false/c)
(is-a?/c element%) (is-a?/c element%) ...
(or/c (is-a?/c bin%) false/c))]
[pipeline%-new
(->* ()
((or/c string? false/c))
(is-a?/c pipeline%))]
[pipeline%-compose
(-> (or/c string? false/c)
(is-a?/c element%) ...
(or/c (is-a?/c pipeline%) false/c))]
[obtain-system-clock
(-> (is-a?/c clock%))]
[parse/launch
(-> string? (or/c (is-a?/c element%) false/c))]
[device-monitor%-new
(-> (is-a?/c device-monitor%))]))
(define (element-factory%-find name)
(let ([factory (gst-element-factory 'find name)])
(and factory
(new element-factory% [pointer factory]))))
(define (element-factory%-make factory-name [name #f]
#:class [factory% element%])
(let ([el (gst-element-factory 'make factory-name name)])
(and el
(new factory% [pointer el]))))
(define (pad%-new-from-template templ [name #f])
(let ([pad (gst-pad 'new_from_template templ name)])
(and pad
(new pad% [pointer pad]))))
(define (ghost-pad%-new name target)
(let ([ghost (gst-ghost-pad 'new name target)])
(and ghost
(new ghost-pad% [pointer ghost]))))
(define (ghost-pad%-new-no-target name dir)
(let ([ghost (gst-ghost-pad 'new_no_target name dir)])
(and ghost
(new ghost-pad% [pointer ghost]))))
(define (bin%-new [name #f])
(new bin% [pointer (gst-bin 'new name)]))
(define (bin%-compose name el . els)
(let* ([bin (bin%-new name)]
[sink el]
[source (if (null? els) el (last els))])
(and (send/apply bin add-many el els)
(when (pair? els)
(send/apply el link-many els))
(let ([sink-pad (send sink get-static-pad "sink")])
(when sink-pad
(send bin add-pad (ghost-pad%-new "sink" sink-pad))))
(let ([source-pad (send source get-static-pad "src")])
(when source-pad
(send bin add-pad (ghost-pad%-new "src" source-pad))))
bin)))
(define (pipeline%-new [name #f])
(new pipeline% [pointer (gst-pipeline 'new name)]))
(define (pipeline%-compose name . els)
(let* ([pl (pipeline%-new name)]
[bin (apply bin%-compose #f els)])
(and bin
(send pl add bin)
pl)))
(define (obtain-system-clock)
(new clock% [pointer ((gst 'SystemClock) 'obtain)]))
(define (parse/launch description)
(let ([parsed ((gst 'parse_launch) description)])
(and parsed
(new element% [pointer parsed]))))
(define (device-monitor%-new)
(let ([monitor ((gst 'DeviceMonitor) 'new)])
(new device-monitor% [pointer monitor])))
| |
fccac076b1bc649eb7d2e9448e233e4792fb4be555162c604cdb0ea72dd22841 | bobzhang/ocaml-book | mlastback.ml | -*- Mode : Tuareg ; -*-
Version : $ I d : template.ml , v 0.0 2012/02/19 01:23:57
Version: $Id: template.ml,v 0.0 2012/02/19 01:23:57 bobzhang1988 Exp $ *)
open Camlp4.PreCast
open Printf
module Mlast = struct
type int' = int
type ml_unop = Ml_fst | Ml_snd
and ml_binop =
Ml_add | Ml_sub | Ml_mult | Ml_eq | Ml_less | Ml_gt
and ml_exp =
| Ml_int_const of int'
| Ml_bool_const of bool
| Ml_pair of ml_exp * ml_exp
| Ml_unop of ml_unop * ml_exp
| Ml_binop of ml_binop * ml_exp * ml_exp
| Ml_var of string
| Ml_if of ml_exp * ml_exp * ml_exp
| Ml_fun of ml_patt * ml_exp
| Ml_app of ml_exp * ml_exp
| Ml_let of ml_patt * ml_exp * ml_exp
| Ml_letrec of ml_patt * ml_exp * ml_exp
| Ml_Ant of Loc.t * string (** meta filter special treatment *)
and ml_type =
| Int_type
| Bool_type
| Pair_type of ml_type * ml_type
| Arrow_type of ml_type * ml_type
| Var_type of string
| Type_Ant of Loc.t * string
and ml_patt =
| Ml_pat_id of string
| Ml_patAnt of Loc.t * string
end
include Mlast
module MetaExpr :sig
val meta_ml_exp: Loc.t -> ml_exp -> Ast.expr
val meta_ml_type: Loc.t -> ml_type -> Ast.expr
end = struct
(**FIX camlp4 int bug*)
let meta_int' _loc s = Ast.ExInt(_loc,string_of_int s)
include Camlp4Filters.MetaGeneratorExpr(Mlast)
end
module MetaPatt : sig
val meta_ml_exp : Loc.t -> ml_exp -> Ast.patt
val meta_ml_type : Loc.t -> ml_type -> Ast.patt
end = struct
let meta_int' _loc s = Ast.PaInt(_loc, string_of_int s)
include Camlp4Filters.MetaGeneratorPatt(Mlast)
end
| null | https://raw.githubusercontent.com/bobzhang/ocaml-book/09a575b0d1fedfce565ecb9a0ae9cf0df37fdc75/code/unify/mlastback.ml | ocaml | * meta filter special treatment
*FIX camlp4 int bug | -*- Mode : Tuareg ; -*-
Version : $ I d : template.ml , v 0.0 2012/02/19 01:23:57
Version: $Id: template.ml,v 0.0 2012/02/19 01:23:57 bobzhang1988 Exp $ *)
open Camlp4.PreCast
open Printf
module Mlast = struct
type int' = int
type ml_unop = Ml_fst | Ml_snd
and ml_binop =
Ml_add | Ml_sub | Ml_mult | Ml_eq | Ml_less | Ml_gt
and ml_exp =
| Ml_int_const of int'
| Ml_bool_const of bool
| Ml_pair of ml_exp * ml_exp
| Ml_unop of ml_unop * ml_exp
| Ml_binop of ml_binop * ml_exp * ml_exp
| Ml_var of string
| Ml_if of ml_exp * ml_exp * ml_exp
| Ml_fun of ml_patt * ml_exp
| Ml_app of ml_exp * ml_exp
| Ml_let of ml_patt * ml_exp * ml_exp
| Ml_letrec of ml_patt * ml_exp * ml_exp
and ml_type =
| Int_type
| Bool_type
| Pair_type of ml_type * ml_type
| Arrow_type of ml_type * ml_type
| Var_type of string
| Type_Ant of Loc.t * string
and ml_patt =
| Ml_pat_id of string
| Ml_patAnt of Loc.t * string
end
include Mlast
module MetaExpr :sig
val meta_ml_exp: Loc.t -> ml_exp -> Ast.expr
val meta_ml_type: Loc.t -> ml_type -> Ast.expr
end = struct
let meta_int' _loc s = Ast.ExInt(_loc,string_of_int s)
include Camlp4Filters.MetaGeneratorExpr(Mlast)
end
module MetaPatt : sig
val meta_ml_exp : Loc.t -> ml_exp -> Ast.patt
val meta_ml_type : Loc.t -> ml_type -> Ast.patt
end = struct
let meta_int' _loc s = Ast.PaInt(_loc, string_of_int s)
include Camlp4Filters.MetaGeneratorPatt(Mlast)
end
|
7764d4327fef42270e98cfce078f0ca3bd226dde687a06466091935d4cf71eb6 | WorksHub/client | db.cljc | (ns wh.blogs.blog.db
(:require [#?(:cljs cljs.spec.alpha :clj clojure.spec.alpha) :as s]))
(defn id
[db]
(get-in db [:wh.db/page-params :id]))
(s/def ::share-links-shown? boolean?)
(s/def ::author-info-visible? boolean?)
(s/def ::upvotes (s/map-of string? nat-int?))
(s/def ::sub-db (s/keys :req [::share-links-shown? ::author-info-visible? ::upvotes]))
(def default-db
{::share-links-shown? true
::author-info-visible? false
::upvotes {}})
(def page-size 4)
(defn params [db]
{:id (id db)
:page_size page-size
:vertical (:wh.db/vertical db)})
| null | https://raw.githubusercontent.com/WorksHub/client/a7293627d80b96ba2451542242ba263e179f82d8/common-pages/src/wh/blogs/blog/db.cljc | clojure | (ns wh.blogs.blog.db
(:require [#?(:cljs cljs.spec.alpha :clj clojure.spec.alpha) :as s]))
(defn id
[db]
(get-in db [:wh.db/page-params :id]))
(s/def ::share-links-shown? boolean?)
(s/def ::author-info-visible? boolean?)
(s/def ::upvotes (s/map-of string? nat-int?))
(s/def ::sub-db (s/keys :req [::share-links-shown? ::author-info-visible? ::upvotes]))
(def default-db
{::share-links-shown? true
::author-info-visible? false
::upvotes {}})
(def page-size 4)
(defn params [db]
{:id (id db)
:page_size page-size
:vertical (:wh.db/vertical db)})
| |
250652ab60acbf1dec8ef3cceb6c66074e0de4b9fa78fec1d1a8eef058d53771 | dpiponi/Moodler | seqq.hs | do
plane <- currentPlane
(x, y) <- fmap (quantise2 quantum) mouse
panel <- container' "panel_4x1.png" (x, y) (Inside plane)
lab <- label' "seqq" (x-25.0, y+75.0) (Inside plane)
parent panel lab
name <- new' "seqq"
inp <- plugin' (name ++ ".gate") (x-21, y+675) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".add") (x-21, y+625) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".slide_rate") (x-21, y+575) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".length") (x-21, y+525) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq1") (x-21, y+475) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq2") (x-21, y+425) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq3") (x-21, y+375) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq4") (x-21, y+325) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq5") (x-21, y+275) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq6") (x-21, y+225) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq7") (x-21, y+175) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq8") (x-21, y+125) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse1") (x-21, y+75) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse2") (x-21, y+25) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse3") (x-21, y-25) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse4") (x-21, y-75) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse5") (x-21, y-125) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse6") (x-21, y-175) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse7") (x-21, y-225) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse8") (x-21, y-275) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode1") (x-21, y-325) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode2") (x-21, y-375) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode3") (x-21, y-425) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode4") (x-21, y-475) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode5") (x-21, y-525) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode6") (x-21, y-575) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode7") (x-21, y-625) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode8") (x-21, y-675) (Inside plane)
setColour inp "#control"
parent panel inp
out <- plugout' (name ++ ".result") (x+20, y+25) (Inside plane)
setColour out "#control"
parent panel out
out <- plugout' (name ++ ".trigger") (x+20, y-25) (Inside plane)
setColour out "#control"
parent panel out
recompile
return ()
| null | https://raw.githubusercontent.com/dpiponi/Moodler/a0c984c36abae52668d00f25eb3749e97e8936d3/Moodler/scripts/seqq.hs | haskell | do
plane <- currentPlane
(x, y) <- fmap (quantise2 quantum) mouse
panel <- container' "panel_4x1.png" (x, y) (Inside plane)
lab <- label' "seqq" (x-25.0, y+75.0) (Inside plane)
parent panel lab
name <- new' "seqq"
inp <- plugin' (name ++ ".gate") (x-21, y+675) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".add") (x-21, y+625) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".slide_rate") (x-21, y+575) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".length") (x-21, y+525) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq1") (x-21, y+475) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq2") (x-21, y+425) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq3") (x-21, y+375) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq4") (x-21, y+325) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq5") (x-21, y+275) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq6") (x-21, y+225) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq7") (x-21, y+175) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".freq8") (x-21, y+125) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse1") (x-21, y+75) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse2") (x-21, y+25) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse3") (x-21, y-25) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse4") (x-21, y-75) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse5") (x-21, y-125) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse6") (x-21, y-175) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse7") (x-21, y-225) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".pulse8") (x-21, y-275) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode1") (x-21, y-325) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode2") (x-21, y-375) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode3") (x-21, y-425) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode4") (x-21, y-475) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode5") (x-21, y-525) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode6") (x-21, y-575) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode7") (x-21, y-625) (Inside plane)
setColour inp "#control"
parent panel inp
inp <- plugin' (name ++ ".mode8") (x-21, y-675) (Inside plane)
setColour inp "#control"
parent panel inp
out <- plugout' (name ++ ".result") (x+20, y+25) (Inside plane)
setColour out "#control"
parent panel out
out <- plugout' (name ++ ".trigger") (x+20, y-25) (Inside plane)
setColour out "#control"
parent panel out
recompile
return ()
| |
8764f8e023eff13731e5c386dd109f2f350b868ed3bde44f002c6733b52a63b3 | facebook/pyre-check | sanitizeTransform.mli |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
module Source : sig
type t = Named of string [@@deriving compare, eq, hash, sexp, show]
end
module Sink : sig
type t = Named of string [@@deriving compare, eq, hash, sexp, show]
end
type t =
| Source of Source.t
| Sink of Sink.t
module type S = sig
type elt
type set [@@deriving compare, eq, hash, sexp, show]
include Abstract.Domain.S with type t = set
type t = set [@@deriving compare, eq, hash, sexp, show]
val empty : t
val is_empty : t -> bool
val add : elt -> t -> t
val mem : elt -> t -> bool
val diff : t -> t -> t
val singleton : elt -> t
val all : t
val is_all : t -> bool
val of_list : elt list -> t
val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a
val to_json : t -> Yojson.Safe.t option
end
module SourceSet : S with type elt = Source.t
module SinkSet : S with type elt = Sink.t
| null | https://raw.githubusercontent.com/facebook/pyre-check/36dcce30c623068f0efece29c53abb6abfd729d9/source/interprocedural_analyses/taint/sanitizeTransform.mli | ocaml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
module Source : sig
type t = Named of string [@@deriving compare, eq, hash, sexp, show]
end
module Sink : sig
type t = Named of string [@@deriving compare, eq, hash, sexp, show]
end
type t =
| Source of Source.t
| Sink of Sink.t
module type S = sig
type elt
type set [@@deriving compare, eq, hash, sexp, show]
include Abstract.Domain.S with type t = set
type t = set [@@deriving compare, eq, hash, sexp, show]
val empty : t
val is_empty : t -> bool
val add : elt -> t -> t
val mem : elt -> t -> bool
val diff : t -> t -> t
val singleton : elt -> t
val all : t
val is_all : t -> bool
val of_list : elt list -> t
val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a
val to_json : t -> Yojson.Safe.t option
end
module SourceSet : S with type elt = Source.t
module SinkSet : S with type elt = Sink.t
| |
ea54c3917602d3641eff1e39811294e126dd97eacc8ac7c0ec31821fbb99fb2d | silky/myth | bool_impl.ml | (* CONTAINS CONTEXT FUNCTION *)
type bool =
| True
| False
(* NO BOOL OPERATIONS *)
type nat =
| O
| S of nat
let rec plus (n1:nat) (n2:nat) : nat =
match n1 with
| O -> n2
| S (n1) -> S (plus n1 n2)
;;
let rec div2 (n:nat) : nat =
match n with
| O -> O
| S (n1) -> match n1 with
| O -> O
| S (n2) -> S (div2 n2)
;;
type list =
| Nil
| Cons of nat * list
let rec append (l1:list) (l2:list) : list =
match l1 with
| Nil -> l2
| Cons (x, l1) -> Cons (x, append l1 l2)
;;
type cmp =
| LT
| EQ
| GT
let rec compare (n1 : nat) (n2 :nat) : cmp =
match n1 with
| O -> (match n2 with
| O -> EQ
| S (m) -> LT
)
| S (m1) ->
( match n2 with
| O -> GT
| S (m2) -> (compare m1 m2) )
;;
type btree =
| Leaf
| Node of tree * bool * tree
type ntree =
| Leaf
| Node of tree * nat * tree
let bool_impl : bool -> bool -> bool |>
{ True => True => True
; True => False => False
; False => True => True
; False => False => True } = ?
| null | https://raw.githubusercontent.com/silky/myth/b631edefb2a27a1d731daa6654517d91ca660b95/tests/pldi-2015-contexts/bool_impl.ml | ocaml | CONTAINS CONTEXT FUNCTION
NO BOOL OPERATIONS |
type bool =
| True
| False
type nat =
| O
| S of nat
let rec plus (n1:nat) (n2:nat) : nat =
match n1 with
| O -> n2
| S (n1) -> S (plus n1 n2)
;;
let rec div2 (n:nat) : nat =
match n with
| O -> O
| S (n1) -> match n1 with
| O -> O
| S (n2) -> S (div2 n2)
;;
type list =
| Nil
| Cons of nat * list
let rec append (l1:list) (l2:list) : list =
match l1 with
| Nil -> l2
| Cons (x, l1) -> Cons (x, append l1 l2)
;;
type cmp =
| LT
| EQ
| GT
let rec compare (n1 : nat) (n2 :nat) : cmp =
match n1 with
| O -> (match n2 with
| O -> EQ
| S (m) -> LT
)
| S (m1) ->
( match n2 with
| O -> GT
| S (m2) -> (compare m1 m2) )
;;
type btree =
| Leaf
| Node of tree * bool * tree
type ntree =
| Leaf
| Node of tree * nat * tree
let bool_impl : bool -> bool -> bool |>
{ True => True => True
; True => False => False
; False => True => True
; False => False => True } = ?
|
e6bd3dceb9d9b86e614c151a09e4c81deac12a8a59f9a002bfea7805c9e2f49f | hipsleek/hipsleek | cilint.ml | cilint : infinite - precision , 2 's complement arithmetic .
An infinite - precision 2 's complement number is a good way of
understanding the meaning of bitwise operations on infinite
precision numbers : positive numbers have the normal base-2
representation , while negative numbers are represented in
a 2 's complement representation with an infinite series of
leading 1 bits . I.e. ,
3 = ... 0000000000011
-3 = ... 1111111111101
We represent cilints using a big_int , except that we specialise the
case where the number fits in a regular int . This specialisation
has two benefits :
- more compact ( and potentially faster ops , though more would need to be
specialised for this to be really worth it )
- ability to see the value of small constants in ocamldebug
The implementation can be simplified once OCaml 3.11.1 shows up , with
bitwise operations on big_ints , and bug - fixed versions of int64_of_big_int
and big_int_of_int64 .
understanding the meaning of bitwise operations on infinite
precision numbers: positive numbers have the normal base-2
representation, while negative numbers are represented in
a 2's complement representation with an infinite series of
leading 1 bits. I.e.,
3 = ...0000000000011
-3 = ...1111111111101
We represent cilints using a big_int, except that we specialise the
case where the number fits in a regular int. This specialisation
has two benefits:
- more compact (and potentially faster ops, though more would need to be
specialised for this to be really worth it)
- ability to see the value of small constants in ocamldebug
The implementation can be simplified once OCaml 3.11.1 shows up, with
bitwise operations on big_ints, and bug-fixed versions of int64_of_big_int
and big_int_of_int64. *)
open Big_int
type cilint = Small of int | Big of big_int
type truncation = NoTruncation | ValueTruncation | BitTruncation
let zero_cilint = Small 0
let one_cilint = Small 1
(* Precompute useful big_ints *)
let b30 = power_int_positive_int 2 30
let m1 = minus_big_int unit_big_int
(* True if 'b' is all 0's or all 1's *)
let nobits (b:big_int) : bool =
sign_big_int b = 0 || compare_big_int b m1 = 0
let big_int_of_cilint (c:cilint) : big_int =
match c with
| Small i -> big_int_of_int i
| Big b -> b
let cilint_of_big_int (b:big_int) : cilint =
if is_int_big_int b then
Small (int_of_big_int b)
else
Big b
let neg_cilint c =
match c with
| Small i when i <> min_int -> Small (-i)
| _ -> Big (minus_big_int (big_int_of_cilint c))
Apply big_int ' op ' to two cilints , returning a cilint
let b op c1 c2 = cilint_of_big_int (op (big_int_of_cilint c1) (big_int_of_cilint c2))
let add_cilint = b add_big_int
let sub_cilint = b sub_big_int
let mul_cilint = b mult_big_int
let div_cilint = b div_big_int
let mod_cilint = b mod_big_int
let compare_cilint (c1:cilint) (c2:cilint) : int =
match c1, c2 with
| Small i1, Small i2 -> compare i1 i2
| _ -> compare_big_int (big_int_of_cilint c1) (big_int_of_cilint c2)
let is_zero_cilint (c:cilint) : bool =
match c with
| Small i -> i = 0
| Big b -> sign_big_int b = 0
let negative_cilint (c:cilint) : bool =
match c with
| Small i -> i < 0
| Big b -> sign_big_int b < 0
let cilint_of_int (i:int) : cilint = Small i
let int_of_cilint (c:cilint) : int =
match c with
| Small i -> i
| Big b -> int_of_big_int b
let rec cilint_of_int64 (i64:int64) : cilint =
if Int64.compare i64 (Int64.of_int min_int) >= 0 &&
Int64.compare i64 (Int64.of_int max_int) <= 0 then
Small (Int64.to_int i64)
else
We convert 30 bits at a time
let rec loop i mul acc =
if i = 0L then acc
else if i = -1L then sub_big_int acc mul
else
let lo30 = Int64.to_int (Int64.logand i 0x3fffffffL) in
loop (Int64.shift_right i 30) (mult_big_int mul b30)
(add_big_int acc (mult_big_int mul (big_int_of_int lo30)))
in Big (loop i64 unit_big_int zero_big_int)
Note that this never fails , instead it returns the low - order 64 - bits
of the cilint .
of the cilint. *)
let rec int64_of_cilint (c:cilint) : int64 =
match c with
| Small i -> Int64.of_int i
| Big b ->
let rec loop b mul acc =
if sign_big_int b = 0 then
acc
else if compare_big_int b m1 == 0 then
Int64.sub acc mul
else
let hi, lo = quomod_big_int b b30 in
loop hi (Int64.mul mul 0x40000000L)
(Int64.add acc (Int64.mul mul (Int64.of_int (int_of_big_int lo))))
in loop b 1L 0L
let cilint_of_string (s:string) : cilint =
cilint_of_big_int (big_int_of_string s)
let string_of_cilint (c:cilint) : string =
match c with
| Small i -> string_of_int i
| Big b -> string_of_big_int b
Divide rounding towards zero
let div0_cilint (c1:cilint) (c2:cilint) =
match c1, c2 with
| Small i1, Small i2 -> Small (i1 / i2)
| _ ->
let b1 = big_int_of_cilint c1 in
let b2 = big_int_of_cilint c2 in
let q, r = quomod_big_int b1 b2 in
if lt_big_int b1 zero_big_int && (not (eq_big_int r zero_big_int)) then
if gt_big_int b2 zero_big_int then
Big (succ_big_int q)
else
Big (pred_big_int q)
else
Big q
(* And the corresponding remainder *)
let rem_cilint (c1:cilint) (c2:cilint) =
(sub_cilint c1 (mul_cilint c2 (div0_cilint c1 c2)))
Perform logical op ' op ' over ' int ' on two cilints . Does it work
30 - bits at a time as that is guaranteed to fit in an ' int ' .
30-bits at a time as that is guaranteed to fit in an 'int'. *)
let logop op c1 c2 =
match c1, c2 with
| Small i1, Small i2 -> Small (op i1 i2)
| _ ->
let b1 = big_int_of_cilint c1 in
let b2 = big_int_of_cilint c2 in
let rec loop b1 b2 mul acc =
if nobits b1 && nobits b2 then
Once we only have all-0 / all-1 values left , we can find whether
the infinite high - order bits are all-0 or all-1 by checking the
behaviour of op on b1 and b2 .
the infinite high-order bits are all-0 or all-1 by checking the
behaviour of op on b1 and b2. *)
if op (int_of_big_int b1) (int_of_big_int b2) = 0 then
acc
else
sub_big_int acc mul
else
let hi1, lo1 = quomod_big_int b1 b30 in
let hi2, lo2 = quomod_big_int b2 b30 in
let lo = op (int_of_big_int lo1) (int_of_big_int lo2) in
loop hi1 hi2 (mult_big_int mul b30)
(add_big_int acc (mult_big_int mul (big_int_of_int lo)))
in cilint_of_big_int (loop b1 b2 unit_big_int zero_big_int)
let logand_cilint = logop (land)
let logor_cilint = logop (lor)
let logxor_cilint = logop (lxor)
let shift_right_cilint (c1:cilint) (n:int) : cilint =
match c1 with
| Small i -> Small (i asr n)
| Big b -> cilint_of_big_int (div_big_int b (power_int_positive_int 2 n))
let shift_left_cilint (c1:cilint) (n:int) : cilint =
cilint_of_big_int (mult_big_int (big_int_of_cilint c1) (power_int_positive_int 2 n))
let lognot_cilint (c1:cilint) : cilint =
match c1 with
| Small i -> Small (lnot i)
| Big b -> Big (pred_big_int (minus_big_int b))
let truncate_signed_cilint (c:cilint) (n:int) : cilint * truncation =
match c with
| Small i when n >= Nativeint.size - 1 -> Small i, NoTruncation
| Small i when n < Nativeint.size - 2 ->
let max = 1 lsl (n - 1) in
let truncmax = 1 lsl n in
let bits = i land (truncmax - 1) in
let tval =
check if the n - th bit is 1 ...
if bits < max then
bits
else
and fill with 1 bits on the left if it is
bits - truncmax
in
let trunc =
if i >= max || i < -max then
if i >= truncmax then
BitTruncation
else
ValueTruncation
else
NoTruncation
in Small tval, trunc
| _ ->
let b = big_int_of_cilint c in
let max = power_int_positive_int 2 (n - 1) in
let truncmax = power_int_positive_int 2 n in
let bits = mod_big_int b truncmax in
let tval =
if lt_big_int bits max then
bits
else
sub_big_int bits truncmax
in
let trunc =
if ge_big_int b max || lt_big_int b (minus_big_int max) then
if ge_big_int b truncmax then
BitTruncation
else
ValueTruncation
else
NoTruncation
in cilint_of_big_int tval, trunc
let truncate_unsigned_cilint (c:cilint) (n:int) : cilint * truncation =
match c with
| Small i when i > 0 && n >= Nativeint.size - 2 -> Small i, NoTruncation
| Small i when n < Nativeint.size - 2 ->
let max = 1 lsl (n - 1) in
let truncmax = 1 lsl n in
let bits = i land (truncmax - 1) in
let trunc =
if i >= truncmax || i < 0 then
if i < -max then
BitTruncation
else
ValueTruncation
else
NoTruncation
in Small bits, trunc
| _ ->
let b = big_int_of_cilint c in
let max = power_int_positive_int 2 (n - 1) in
let truncmax = power_int_positive_int 2 n in
let bits = mod_big_int b truncmax in
let trunc =
if ge_big_int b truncmax || lt_big_int b zero_big_int then
if lt_big_int b (minus_big_int max) then
BitTruncation
else
ValueTruncation
else
NoTruncation
in cilint_of_big_int bits, trunc
let is_int_cilint (c:cilint) : bool =
match c with
| Small _ -> true
| Big b -> is_int_big_int b
| null | https://raw.githubusercontent.com/hipsleek/hipsleek/596f7fa7f67444c8309da2ca86ba4c47d376618c/cil/src/cilint.ml | ocaml | Precompute useful big_ints
True if 'b' is all 0's or all 1's
And the corresponding remainder | cilint : infinite - precision , 2 's complement arithmetic .
An infinite - precision 2 's complement number is a good way of
understanding the meaning of bitwise operations on infinite
precision numbers : positive numbers have the normal base-2
representation , while negative numbers are represented in
a 2 's complement representation with an infinite series of
leading 1 bits . I.e. ,
3 = ... 0000000000011
-3 = ... 1111111111101
We represent cilints using a big_int , except that we specialise the
case where the number fits in a regular int . This specialisation
has two benefits :
- more compact ( and potentially faster ops , though more would need to be
specialised for this to be really worth it )
- ability to see the value of small constants in ocamldebug
The implementation can be simplified once OCaml 3.11.1 shows up , with
bitwise operations on big_ints , and bug - fixed versions of int64_of_big_int
and big_int_of_int64 .
understanding the meaning of bitwise operations on infinite
precision numbers: positive numbers have the normal base-2
representation, while negative numbers are represented in
a 2's complement representation with an infinite series of
leading 1 bits. I.e.,
3 = ...0000000000011
-3 = ...1111111111101
We represent cilints using a big_int, except that we specialise the
case where the number fits in a regular int. This specialisation
has two benefits:
- more compact (and potentially faster ops, though more would need to be
specialised for this to be really worth it)
- ability to see the value of small constants in ocamldebug
The implementation can be simplified once OCaml 3.11.1 shows up, with
bitwise operations on big_ints, and bug-fixed versions of int64_of_big_int
and big_int_of_int64. *)
open Big_int
type cilint = Small of int | Big of big_int
type truncation = NoTruncation | ValueTruncation | BitTruncation
let zero_cilint = Small 0
let one_cilint = Small 1
let b30 = power_int_positive_int 2 30
let m1 = minus_big_int unit_big_int
let nobits (b:big_int) : bool =
sign_big_int b = 0 || compare_big_int b m1 = 0
let big_int_of_cilint (c:cilint) : big_int =
match c with
| Small i -> big_int_of_int i
| Big b -> b
let cilint_of_big_int (b:big_int) : cilint =
if is_int_big_int b then
Small (int_of_big_int b)
else
Big b
let neg_cilint c =
match c with
| Small i when i <> min_int -> Small (-i)
| _ -> Big (minus_big_int (big_int_of_cilint c))
Apply big_int ' op ' to two cilints , returning a cilint
let b op c1 c2 = cilint_of_big_int (op (big_int_of_cilint c1) (big_int_of_cilint c2))
let add_cilint = b add_big_int
let sub_cilint = b sub_big_int
let mul_cilint = b mult_big_int
let div_cilint = b div_big_int
let mod_cilint = b mod_big_int
let compare_cilint (c1:cilint) (c2:cilint) : int =
match c1, c2 with
| Small i1, Small i2 -> compare i1 i2
| _ -> compare_big_int (big_int_of_cilint c1) (big_int_of_cilint c2)
let is_zero_cilint (c:cilint) : bool =
match c with
| Small i -> i = 0
| Big b -> sign_big_int b = 0
let negative_cilint (c:cilint) : bool =
match c with
| Small i -> i < 0
| Big b -> sign_big_int b < 0
let cilint_of_int (i:int) : cilint = Small i
let int_of_cilint (c:cilint) : int =
match c with
| Small i -> i
| Big b -> int_of_big_int b
let rec cilint_of_int64 (i64:int64) : cilint =
if Int64.compare i64 (Int64.of_int min_int) >= 0 &&
Int64.compare i64 (Int64.of_int max_int) <= 0 then
Small (Int64.to_int i64)
else
We convert 30 bits at a time
let rec loop i mul acc =
if i = 0L then acc
else if i = -1L then sub_big_int acc mul
else
let lo30 = Int64.to_int (Int64.logand i 0x3fffffffL) in
loop (Int64.shift_right i 30) (mult_big_int mul b30)
(add_big_int acc (mult_big_int mul (big_int_of_int lo30)))
in Big (loop i64 unit_big_int zero_big_int)
Note that this never fails , instead it returns the low - order 64 - bits
of the cilint .
of the cilint. *)
let rec int64_of_cilint (c:cilint) : int64 =
match c with
| Small i -> Int64.of_int i
| Big b ->
let rec loop b mul acc =
if sign_big_int b = 0 then
acc
else if compare_big_int b m1 == 0 then
Int64.sub acc mul
else
let hi, lo = quomod_big_int b b30 in
loop hi (Int64.mul mul 0x40000000L)
(Int64.add acc (Int64.mul mul (Int64.of_int (int_of_big_int lo))))
in loop b 1L 0L
let cilint_of_string (s:string) : cilint =
cilint_of_big_int (big_int_of_string s)
let string_of_cilint (c:cilint) : string =
match c with
| Small i -> string_of_int i
| Big b -> string_of_big_int b
Divide rounding towards zero
let div0_cilint (c1:cilint) (c2:cilint) =
match c1, c2 with
| Small i1, Small i2 -> Small (i1 / i2)
| _ ->
let b1 = big_int_of_cilint c1 in
let b2 = big_int_of_cilint c2 in
let q, r = quomod_big_int b1 b2 in
if lt_big_int b1 zero_big_int && (not (eq_big_int r zero_big_int)) then
if gt_big_int b2 zero_big_int then
Big (succ_big_int q)
else
Big (pred_big_int q)
else
Big q
let rem_cilint (c1:cilint) (c2:cilint) =
(sub_cilint c1 (mul_cilint c2 (div0_cilint c1 c2)))
Perform logical op ' op ' over ' int ' on two cilints . Does it work
30 - bits at a time as that is guaranteed to fit in an ' int ' .
30-bits at a time as that is guaranteed to fit in an 'int'. *)
let logop op c1 c2 =
match c1, c2 with
| Small i1, Small i2 -> Small (op i1 i2)
| _ ->
let b1 = big_int_of_cilint c1 in
let b2 = big_int_of_cilint c2 in
let rec loop b1 b2 mul acc =
if nobits b1 && nobits b2 then
Once we only have all-0 / all-1 values left , we can find whether
the infinite high - order bits are all-0 or all-1 by checking the
behaviour of op on b1 and b2 .
the infinite high-order bits are all-0 or all-1 by checking the
behaviour of op on b1 and b2. *)
if op (int_of_big_int b1) (int_of_big_int b2) = 0 then
acc
else
sub_big_int acc mul
else
let hi1, lo1 = quomod_big_int b1 b30 in
let hi2, lo2 = quomod_big_int b2 b30 in
let lo = op (int_of_big_int lo1) (int_of_big_int lo2) in
loop hi1 hi2 (mult_big_int mul b30)
(add_big_int acc (mult_big_int mul (big_int_of_int lo)))
in cilint_of_big_int (loop b1 b2 unit_big_int zero_big_int)
let logand_cilint = logop (land)
let logor_cilint = logop (lor)
let logxor_cilint = logop (lxor)
let shift_right_cilint (c1:cilint) (n:int) : cilint =
match c1 with
| Small i -> Small (i asr n)
| Big b -> cilint_of_big_int (div_big_int b (power_int_positive_int 2 n))
let shift_left_cilint (c1:cilint) (n:int) : cilint =
cilint_of_big_int (mult_big_int (big_int_of_cilint c1) (power_int_positive_int 2 n))
let lognot_cilint (c1:cilint) : cilint =
match c1 with
| Small i -> Small (lnot i)
| Big b -> Big (pred_big_int (minus_big_int b))
let truncate_signed_cilint (c:cilint) (n:int) : cilint * truncation =
match c with
| Small i when n >= Nativeint.size - 1 -> Small i, NoTruncation
| Small i when n < Nativeint.size - 2 ->
let max = 1 lsl (n - 1) in
let truncmax = 1 lsl n in
let bits = i land (truncmax - 1) in
let tval =
check if the n - th bit is 1 ...
if bits < max then
bits
else
and fill with 1 bits on the left if it is
bits - truncmax
in
let trunc =
if i >= max || i < -max then
if i >= truncmax then
BitTruncation
else
ValueTruncation
else
NoTruncation
in Small tval, trunc
| _ ->
let b = big_int_of_cilint c in
let max = power_int_positive_int 2 (n - 1) in
let truncmax = power_int_positive_int 2 n in
let bits = mod_big_int b truncmax in
let tval =
if lt_big_int bits max then
bits
else
sub_big_int bits truncmax
in
let trunc =
if ge_big_int b max || lt_big_int b (minus_big_int max) then
if ge_big_int b truncmax then
BitTruncation
else
ValueTruncation
else
NoTruncation
in cilint_of_big_int tval, trunc
let truncate_unsigned_cilint (c:cilint) (n:int) : cilint * truncation =
match c with
| Small i when i > 0 && n >= Nativeint.size - 2 -> Small i, NoTruncation
| Small i when n < Nativeint.size - 2 ->
let max = 1 lsl (n - 1) in
let truncmax = 1 lsl n in
let bits = i land (truncmax - 1) in
let trunc =
if i >= truncmax || i < 0 then
if i < -max then
BitTruncation
else
ValueTruncation
else
NoTruncation
in Small bits, trunc
| _ ->
let b = big_int_of_cilint c in
let max = power_int_positive_int 2 (n - 1) in
let truncmax = power_int_positive_int 2 n in
let bits = mod_big_int b truncmax in
let trunc =
if ge_big_int b truncmax || lt_big_int b zero_big_int then
if lt_big_int b (minus_big_int max) then
BitTruncation
else
ValueTruncation
else
NoTruncation
in cilint_of_big_int bits, trunc
let is_int_cilint (c:cilint) : bool =
match c with
| Small _ -> true
| Big b -> is_int_big_int b
|
86dccc3f00116f262f3f8db0d04833ce2b7f6019fe694039feeef276786319dc | qfpl/reflex-dom-canvas | CanvasTest3D.hs | {-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE RecordWildCards #
--
module CanvasTest3D where
import Control.Lens ((^.))
import qualified Reflex.Dom.Canvas.WebGL as Gl
import GHCJS.DOM.Types (JSString, MonadJSM)
import qualified GHCJS.DOM.Types as Dom
import qualified Language.Javascript.JSaddle.Object as JSO
import qualified GHCJS.DOM.WebGLRenderingContextBase as Gl
import qualified Reflex.Dom.CanvasBuilder.Types as Canvas
import qualified Reflex.Dom.CanvasDyn as CDyn
import qualified Reflex as R
import Reflex.Dom (MonadWidget)
import qualified Reflex.Dom as RD
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Time (UTCTime, getCurrentTime)
import Control.Monad.Except (ExceptT (..), lift,
runExceptT)
import Data.Either (Either)
import qualified Data.Map as Map
#ifndef ghcjs_HOST_OS
import Language.Javascript.JSaddle.Warp (run)
import Reflex.Dom.Core (mainWidget)
#endif
vertShader
:: Text
vertShader = Text.unlines
[ "attribute vec4 a_position;"
, "void main() {"
, " gl_Position = a_position;"
, "}"
]
fragShader
:: Text
fragShader = Text.unlines
[ "precision mediump float;"
, "void main() {"
, " gl_FragColor = vec4(1, 0, 0.5, 1);"
, "}"
]
positions
:: [Double]
positions =
[ 0.0, 0.0
, 0.0, 0.5
, 0.7, 0.0
]
makeArrayBuffer
:: MonadJSM m
=> [Double]
-> m Dom.ArrayBuffer
makeArrayBuffer ds = Dom.liftJSM $
JSO.new (JSO.jsg ("Float32Array" :: Text)) [ds]
>>= (JSO.! ("buffer" :: Text))
>>= Dom.unsafeCastTo Dom.ArrayBuffer
data RenderMeh = R
{ _rGLProgram :: Dom.WebGLProgram
, _rPosAttrLoc :: Dom.GLint
, _rPosBuffer :: Dom.WebGLBuffer
}
glProgramInit
:: Text
-> Text
-> Gl.WebGLM (Either JSString RenderMeh)
glProgramInit vertSrc fragSrc = runExceptT $ do
-- Begin initialisation
vS <- ExceptT $ Gl.buildShader vertSrc Gl.VERTEX_SHADER
fS <- ExceptT $ Gl.buildShader fragSrc Gl.FRAGMENT_SHADER
glProg <- ExceptT $ Gl.buildProgram vS fS
-- Buffer Setup and Loading
posAttrLoc <- lift $ Gl.getAttribLocationF glProg ( "a_position" :: Text )
lift $ Gl.enableVertexAttribArrayF (fromIntegral posAttrLoc)
posBuffer <- lift Gl.createBufferF
pure $ R glProg posAttrLoc posBuffer
glDraw
:: Dom.ArrayBuffer
-> RenderMeh
-> Gl.WebGLM ()
glDraw arrBuff R {..} = do
-- Populate our buffer with some data
Gl.bindBufferF Gl.ARRAY_BUFFER _rPosBuffer
Gl.bufferDataF Gl.ARRAY_BUFFER arrBuff Gl.STATIC_DRAW
-- Clear canvas
Gl.clearColourF 0 0 0 0
Gl.clearF Gl.COLOR_BUFFER_BIT
Tell WebGL to use our prepared GLProgram
Gl.useProgramF _rGLProgram
let
2 components per iteration
the data is 32bit floats
normalise = False -- don't normalize the data
stride = 0 -- 0 for tightly packed array or move forward size * sizeof(type) each iteration to get the next position
offset = 0 -- start at the beginning of the buffer
Tell the attribute how to get data out of positionBuffer ( )
Gl.vertexAttribPointerF
(fromIntegral _rPosAttrLoc)
size
dataType
normalise
stride
offset
let
primitiveType = Gl.TRIANGLES
count = 3
offset' = 0
Gl.drawArraysF primitiveType offset' count
eDraw :: MonadWidget t m => UTCTime -> m ()
eDraw _aTime = do
let
canvasId = "canvas-three-dee"
canvasAttrs = pure $ Map.fromList
[ ("height", "400")
, ("width", "400")
]
eInit <- RD.button "Init"
eRender <- RD.button "Render"
arrBuffer <- makeArrayBuffer positions
-- Create the canvas element
canvasEl <- fst <$> RD.elDynAttr' "canvas"
(Map.insert "id" canvasId <$> canvasAttrs) RD.blank
dGLCX <- fmap (^. Canvas.canvasInfo_context)
<$> CDyn.dContextWebgl ( Canvas.CanvasConfig canvasEl [] )
let
dInitProg =
pure $ glProgramInit vertShader fragShader
(eInitFailed, eRenderMeh) <-
R.fanEither <$> CDyn.nextFrameWithCxFree dInitProg dGLCX eInit
dInstructions <- R.holdDyn Gl.noopF ( glDraw arrBuffer <$> eRenderMeh )
_ <- CDyn.nextFrameWithCxFree dInstructions dGLCX eRender
dStatus <- R.holdDyn "A little nothing..." eInitFailed
RD.divClass "errorz" $
RD.display dStatus
pure ()
mainish
:: IO ()
mainish = do
n <- getCurrentTime
#ifdef ghcjs_HOST_OS
RD.mainWidget ( eDraw n )
#else
run 8080 $ mainWidget ( eDraw n )
#endif
| null | https://raw.githubusercontent.com/qfpl/reflex-dom-canvas/b9131dffd521e4534eb2240e3fe0b7bfb70136a8/examples/src/CanvasTest3D.hs | haskell | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
Begin initialisation
Buffer Setup and Loading
Populate our buffer with some data
Clear canvas
don't normalize the data
0 for tightly packed array or move forward size * sizeof(type) each iteration to get the next position
start at the beginning of the buffer
Create the canvas element | # LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE RecordWildCards #
module CanvasTest3D where
import Control.Lens ((^.))
import qualified Reflex.Dom.Canvas.WebGL as Gl
import GHCJS.DOM.Types (JSString, MonadJSM)
import qualified GHCJS.DOM.Types as Dom
import qualified Language.Javascript.JSaddle.Object as JSO
import qualified GHCJS.DOM.WebGLRenderingContextBase as Gl
import qualified Reflex.Dom.CanvasBuilder.Types as Canvas
import qualified Reflex.Dom.CanvasDyn as CDyn
import qualified Reflex as R
import Reflex.Dom (MonadWidget)
import qualified Reflex.Dom as RD
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Time (UTCTime, getCurrentTime)
import Control.Monad.Except (ExceptT (..), lift,
runExceptT)
import Data.Either (Either)
import qualified Data.Map as Map
#ifndef ghcjs_HOST_OS
import Language.Javascript.JSaddle.Warp (run)
import Reflex.Dom.Core (mainWidget)
#endif
vertShader
:: Text
vertShader = Text.unlines
[ "attribute vec4 a_position;"
, "void main() {"
, " gl_Position = a_position;"
, "}"
]
fragShader
:: Text
fragShader = Text.unlines
[ "precision mediump float;"
, "void main() {"
, " gl_FragColor = vec4(1, 0, 0.5, 1);"
, "}"
]
positions
:: [Double]
positions =
[ 0.0, 0.0
, 0.0, 0.5
, 0.7, 0.0
]
makeArrayBuffer
:: MonadJSM m
=> [Double]
-> m Dom.ArrayBuffer
makeArrayBuffer ds = Dom.liftJSM $
JSO.new (JSO.jsg ("Float32Array" :: Text)) [ds]
>>= (JSO.! ("buffer" :: Text))
>>= Dom.unsafeCastTo Dom.ArrayBuffer
data RenderMeh = R
{ _rGLProgram :: Dom.WebGLProgram
, _rPosAttrLoc :: Dom.GLint
, _rPosBuffer :: Dom.WebGLBuffer
}
glProgramInit
:: Text
-> Text
-> Gl.WebGLM (Either JSString RenderMeh)
glProgramInit vertSrc fragSrc = runExceptT $ do
vS <- ExceptT $ Gl.buildShader vertSrc Gl.VERTEX_SHADER
fS <- ExceptT $ Gl.buildShader fragSrc Gl.FRAGMENT_SHADER
glProg <- ExceptT $ Gl.buildProgram vS fS
posAttrLoc <- lift $ Gl.getAttribLocationF glProg ( "a_position" :: Text )
lift $ Gl.enableVertexAttribArrayF (fromIntegral posAttrLoc)
posBuffer <- lift Gl.createBufferF
pure $ R glProg posAttrLoc posBuffer
glDraw
:: Dom.ArrayBuffer
-> RenderMeh
-> Gl.WebGLM ()
glDraw arrBuff R {..} = do
Gl.bindBufferF Gl.ARRAY_BUFFER _rPosBuffer
Gl.bufferDataF Gl.ARRAY_BUFFER arrBuff Gl.STATIC_DRAW
Gl.clearColourF 0 0 0 0
Gl.clearF Gl.COLOR_BUFFER_BIT
Tell WebGL to use our prepared GLProgram
Gl.useProgramF _rGLProgram
let
2 components per iteration
the data is 32bit floats
Tell the attribute how to get data out of positionBuffer ( )
Gl.vertexAttribPointerF
(fromIntegral _rPosAttrLoc)
size
dataType
normalise
stride
offset
let
primitiveType = Gl.TRIANGLES
count = 3
offset' = 0
Gl.drawArraysF primitiveType offset' count
eDraw :: MonadWidget t m => UTCTime -> m ()
eDraw _aTime = do
let
canvasId = "canvas-three-dee"
canvasAttrs = pure $ Map.fromList
[ ("height", "400")
, ("width", "400")
]
eInit <- RD.button "Init"
eRender <- RD.button "Render"
arrBuffer <- makeArrayBuffer positions
canvasEl <- fst <$> RD.elDynAttr' "canvas"
(Map.insert "id" canvasId <$> canvasAttrs) RD.blank
dGLCX <- fmap (^. Canvas.canvasInfo_context)
<$> CDyn.dContextWebgl ( Canvas.CanvasConfig canvasEl [] )
let
dInitProg =
pure $ glProgramInit vertShader fragShader
(eInitFailed, eRenderMeh) <-
R.fanEither <$> CDyn.nextFrameWithCxFree dInitProg dGLCX eInit
dInstructions <- R.holdDyn Gl.noopF ( glDraw arrBuffer <$> eRenderMeh )
_ <- CDyn.nextFrameWithCxFree dInstructions dGLCX eRender
dStatus <- R.holdDyn "A little nothing..." eInitFailed
RD.divClass "errorz" $
RD.display dStatus
pure ()
mainish
:: IO ()
mainish = do
n <- getCurrentTime
#ifdef ghcjs_HOST_OS
RD.mainWidget ( eDraw n )
#else
run 8080 $ mainWidget ( eDraw n )
#endif
|
0a690a1ae105a6600110a7984ea16749acaf75d59f6aa4c5e479c22960f174e0 | clojure-interop/aws-api | AmazonRoute53ClientBuilder.clj | (ns com.amazonaws.services.route53.AmazonRoute53ClientBuilder
"Fluent builder for AmazonRoute53. Use of the builder is preferred over using
constructors of the client class."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.route53 AmazonRoute53ClientBuilder]))
(defn *standard
"returns: Create new instance of builder with all defaults set. - `com.amazonaws.services.route53.AmazonRoute53ClientBuilder`"
(^com.amazonaws.services.route53.AmazonRoute53ClientBuilder []
(AmazonRoute53ClientBuilder/standard )))
(defn *default-client
"returns: Default client using the DefaultAWSCredentialsProviderChain and
DefaultAwsRegionProviderChain chain - `com.amazonaws.services.route53.AmazonRoute53`"
(^com.amazonaws.services.route53.AmazonRoute53 []
(AmazonRoute53ClientBuilder/defaultClient )))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.route53/src/com/amazonaws/services/route53/AmazonRoute53ClientBuilder.clj | clojure | (ns com.amazonaws.services.route53.AmazonRoute53ClientBuilder
"Fluent builder for AmazonRoute53. Use of the builder is preferred over using
constructors of the client class."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.route53 AmazonRoute53ClientBuilder]))
(defn *standard
"returns: Create new instance of builder with all defaults set. - `com.amazonaws.services.route53.AmazonRoute53ClientBuilder`"
(^com.amazonaws.services.route53.AmazonRoute53ClientBuilder []
(AmazonRoute53ClientBuilder/standard )))
(defn *default-client
"returns: Default client using the DefaultAWSCredentialsProviderChain and
DefaultAwsRegionProviderChain chain - `com.amazonaws.services.route53.AmazonRoute53`"
(^com.amazonaws.services.route53.AmazonRoute53 []
(AmazonRoute53ClientBuilder/defaultClient )))
| |
b08fbeed58df2028232fb0f58691bd3019d90abe265889c424d175d9e0813b8e | ahrefs/atd | ob_mapping.mli | * decorated ATD AST .
type ob_mapping =
(Ocaml.Repr.t, Biniou.biniou_repr) Mapping.mapping
val defs_of_atd_modules
: ('a * Atd.Ast.module_body) list
-> ('a * (Ocaml.Repr.t, Biniou.biniou_repr) Mapping.def list) list
| null | https://raw.githubusercontent.com/ahrefs/atd/9a3cb984a695563c04b41cdd7a1ce9454eb40e1c/atdgen/src/ob_mapping.mli | ocaml | * decorated ATD AST .
type ob_mapping =
(Ocaml.Repr.t, Biniou.biniou_repr) Mapping.mapping
val defs_of_atd_modules
: ('a * Atd.Ast.module_body) list
-> ('a * (Ocaml.Repr.t, Biniou.biniou_repr) Mapping.def list) list
| |
80cba4533797194c32ed3c71f77a419edf899664458aef5a3b1906a9f9324954 | screenshotbot/screenshotbot-oss | test-user.lisp | ;;;; Copyright 2018-Present Modern Interpreters Inc.
;;;;
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(defpackage :screenshotbot/model/test-user
(:use #:cl
#:fiveam)
(:import-from #:bknr.datastore
#:delete-object)
(:import-from #:screenshotbot/user-api
#:user-email
#:user
#:user-companies)
(:import-from #:screenshotbot/model/company
#:get-singleton-company
#:prepare-singleton-company
#:personalp
#:company
#:company-admins
#:company-owner)
(:import-from #:screenshotbot/installation
#:multi-org-feature
#:installation
#:*installation*)
(:import-from #:bknr.indices
#:object-destroyed-p)
(:import-from #:screenshotbot/model/user
#:user-email-exists
#:*lowercase-email-map*
#:user-with-email
#:make-user
#:default-company)
(:import-from #:util/store
#:with-test-store)
(:import-from #:bknr.datastore
#:with-transaction)
(:local-nicknames (#:a #:alexandria)))
(in-package :screenshotbot/model/test-user)
(util/fiveam:def-suite)
(defclass pro-installation (installation multi-org-feature)
())
(def-fixture state ()
(with-test-store ()
(let ((*installation* (make-instance 'pro-installation)))
(&body))))
(test make-user
(with-fixture state ()
(let ((user (make-user)))
(unwind-protect
(let ((companies (user-companies user)))
(is (equal 1 (length companies)))
(let ((company (car companies)))
(is-true (personalp company))
(is (equal (list user)
(company-admins company))))
(pass))
(let ((companies (user-companies user)))
(delete-object user)
(loop for company in companies
do (delete-object company)))))))
(test remove-reference-from-companies-for-testing
(with-fixture state ()
(let ((user (make-user)))
(let ((company (car (user-companies user))))
(unwind-protect
(is-true (company-owner company))
(delete-object user))
(unwind-protect
(progn
(is-false (company-owner company)))
(delete-object company))))))
(test but-with-regular-installation-singleton-company-is-not-deleted
(with-test-store ()
(let ((*installation* (make-instance 'installation)))
(prepare-singleton-company)
(let* ((user (make-user))
(companies (user-companies user)))
(is (equal (list
(get-singleton-company *installation*))
companies))
(loop for company in (bknr.datastore:store-objects-with-class 'company)
do
(is (not (member user (ignore-errors (company-admins company)))))
(is (not (eql user (ignore-errors (company-owner company))))))
(delete-object user)
(pass)))))
(test default-company
(with-test-store ()
(let ((*installation* (make-instance 'pro-installation)))
(let* ((user (make-user)))
(is (eql
(default-company user)
(car (user-companies user))))))))
(test default-company-for-non-pro
(with-test-store ()
(let* ((company (make-instance 'company))
(*installation* (make-instance 'installation
:singleton-company company)))
(let* ((user (make-user)))
(is (eql
(default-company user)
company))))))
(test default-company-removed-from-user-companies
(with-fixture state ()
(let* ((company (make-instance 'company))
(user-company (make-instance 'company))
(user (make-user
:default-company company
:companies (list user-company))))
(is (eql user-company
(default-company user))))))
(test user-with-email-is-case-insensitive
(with-fixture state ()
(let ((user (make-user :email "")))
(is (eql user (user-with-email "")))
(is (eql user (user-with-email ""))))))
(test user-with-email-is-case-insensitive-the-other-way-around
(with-fixture state ()
(let ((user (make-user :email "")))
(is (eql user (user-with-email "")))
(is (eql user (user-with-email ""))))))
(test user-with-email-is-case-insentivie-even-after-setting-email
(with-fixture state ()
(let ((user (make-user :email "")))
(is (eql user (user-with-email "")))
(with-transaction ()
(setf (user-email user) ""))
(is (equal user (user-with-email "")))
(is (equal nil (user-with-email ""))))))
(test |don't allow me to add a new user with same email|
(with-fixture state ()
(make-user :email "")
(signals user-email-exists
(make-user :email ""))
;; check that our store is still valid though
(make-user :email "")
(signals user-email-exists
(make-user :email ""))))
| null | https://raw.githubusercontent.com/screenshotbot/screenshotbot-oss/5c96657315b3ddb009358e821549e653817ff1e9/src/screenshotbot/model/test-user.lisp | lisp | Copyright 2018-Present Modern Interpreters Inc.
check that our store is still valid though | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(defpackage :screenshotbot/model/test-user
(:use #:cl
#:fiveam)
(:import-from #:bknr.datastore
#:delete-object)
(:import-from #:screenshotbot/user-api
#:user-email
#:user
#:user-companies)
(:import-from #:screenshotbot/model/company
#:get-singleton-company
#:prepare-singleton-company
#:personalp
#:company
#:company-admins
#:company-owner)
(:import-from #:screenshotbot/installation
#:multi-org-feature
#:installation
#:*installation*)
(:import-from #:bknr.indices
#:object-destroyed-p)
(:import-from #:screenshotbot/model/user
#:user-email-exists
#:*lowercase-email-map*
#:user-with-email
#:make-user
#:default-company)
(:import-from #:util/store
#:with-test-store)
(:import-from #:bknr.datastore
#:with-transaction)
(:local-nicknames (#:a #:alexandria)))
(in-package :screenshotbot/model/test-user)
(util/fiveam:def-suite)
(defclass pro-installation (installation multi-org-feature)
())
(def-fixture state ()
(with-test-store ()
(let ((*installation* (make-instance 'pro-installation)))
(&body))))
(test make-user
(with-fixture state ()
(let ((user (make-user)))
(unwind-protect
(let ((companies (user-companies user)))
(is (equal 1 (length companies)))
(let ((company (car companies)))
(is-true (personalp company))
(is (equal (list user)
(company-admins company))))
(pass))
(let ((companies (user-companies user)))
(delete-object user)
(loop for company in companies
do (delete-object company)))))))
(test remove-reference-from-companies-for-testing
(with-fixture state ()
(let ((user (make-user)))
(let ((company (car (user-companies user))))
(unwind-protect
(is-true (company-owner company))
(delete-object user))
(unwind-protect
(progn
(is-false (company-owner company)))
(delete-object company))))))
(test but-with-regular-installation-singleton-company-is-not-deleted
(with-test-store ()
(let ((*installation* (make-instance 'installation)))
(prepare-singleton-company)
(let* ((user (make-user))
(companies (user-companies user)))
(is (equal (list
(get-singleton-company *installation*))
companies))
(loop for company in (bknr.datastore:store-objects-with-class 'company)
do
(is (not (member user (ignore-errors (company-admins company)))))
(is (not (eql user (ignore-errors (company-owner company))))))
(delete-object user)
(pass)))))
(test default-company
(with-test-store ()
(let ((*installation* (make-instance 'pro-installation)))
(let* ((user (make-user)))
(is (eql
(default-company user)
(car (user-companies user))))))))
(test default-company-for-non-pro
(with-test-store ()
(let* ((company (make-instance 'company))
(*installation* (make-instance 'installation
:singleton-company company)))
(let* ((user (make-user)))
(is (eql
(default-company user)
company))))))
(test default-company-removed-from-user-companies
(with-fixture state ()
(let* ((company (make-instance 'company))
(user-company (make-instance 'company))
(user (make-user
:default-company company
:companies (list user-company))))
(is (eql user-company
(default-company user))))))
(test user-with-email-is-case-insensitive
(with-fixture state ()
(let ((user (make-user :email "")))
(is (eql user (user-with-email "")))
(is (eql user (user-with-email ""))))))
(test user-with-email-is-case-insensitive-the-other-way-around
(with-fixture state ()
(let ((user (make-user :email "")))
(is (eql user (user-with-email "")))
(is (eql user (user-with-email ""))))))
(test user-with-email-is-case-insentivie-even-after-setting-email
(with-fixture state ()
(let ((user (make-user :email "")))
(is (eql user (user-with-email "")))
(with-transaction ()
(setf (user-email user) ""))
(is (equal user (user-with-email "")))
(is (equal nil (user-with-email ""))))))
(test |don't allow me to add a new user with same email|
(with-fixture state ()
(make-user :email "")
(signals user-email-exists
(make-user :email ""))
(make-user :email "")
(signals user-email-exists
(make-user :email ""))))
|
1218de45b053decf95844dd74068b00fb4d32ed0f416760b1386fcd41c38482f | flyingmachine/datomic-booties | tasks_test.clj | (ns com.flyingmachine.datomic-booties.tasks-test
(:require [com.flyingmachine.datomic-booties.tasks :as tasks]
[com.flyingmachine.datomic-booties.core :as core]
[boot.core :refer [boot deftask]]
[datomic.api :as d]
[clojure.test :refer [deftest is use-fixtures]]))
(def uri "datomic:mem-booties-test")
(defn with-db [f]
(boot (tasks/create-db :uri uri))
(f)
(boot (tasks/delete-db :uri uri)))
(use-fixtures :each with-db)
(deftest migrate
(boot (tasks/migrate-db :uri uri))
(is
(= (d/q '[:find (pull ?e [*])
:where (or [?e :user/username]
[?e :post/content])]
(d/db (d/connect uri)))
[[{:db/id 17592186045422
:user/username "billy"}]
[{:db/id 17592186045423
:post/content "post content"
:content/author {:db/id 17592186045422}}]]))
(boot (tasks/delete-db :uri uri)))
(defn seed-transform
[x]
(pr x (type x))
(assoc x :db/id (d/tempid :db.part/user)))
(deftest custom-schema-and-seed
(boot (tasks/bootstrap-db :uri uri
:schema ["db/custom-schema.edn" "db/custom-schema-2.edn"]
:data ["db/custom-seed.edn"]
:transform 'com.flyingmachine.datomic-booties.tasks-test/seed-transform))
(let [attrs (into #{} (core/attributes (d/connect uri)))]
(is (every? attrs [:custom/attr :custom/mixy-matchy]))
(is (not-any? attrs [:user/username :post/content]))
(is (= (d/q '[:find (pull ?e [*])
:where [?e :custom/attr]]
(d/db (d/connect uri)))
[[{:db/id 17592186045423
:custom/attr "billy"}]]))))
| null | https://raw.githubusercontent.com/flyingmachine/datomic-booties/f2d04f21a3599a3a41db5cc00d30ffd4d9abbee0/test/com/flyingmachine/datomic_booties/tasks_test.clj | clojure | (ns com.flyingmachine.datomic-booties.tasks-test
(:require [com.flyingmachine.datomic-booties.tasks :as tasks]
[com.flyingmachine.datomic-booties.core :as core]
[boot.core :refer [boot deftask]]
[datomic.api :as d]
[clojure.test :refer [deftest is use-fixtures]]))
(def uri "datomic:mem-booties-test")
(defn with-db [f]
(boot (tasks/create-db :uri uri))
(f)
(boot (tasks/delete-db :uri uri)))
(use-fixtures :each with-db)
(deftest migrate
(boot (tasks/migrate-db :uri uri))
(is
(= (d/q '[:find (pull ?e [*])
:where (or [?e :user/username]
[?e :post/content])]
(d/db (d/connect uri)))
[[{:db/id 17592186045422
:user/username "billy"}]
[{:db/id 17592186045423
:post/content "post content"
:content/author {:db/id 17592186045422}}]]))
(boot (tasks/delete-db :uri uri)))
(defn seed-transform
[x]
(pr x (type x))
(assoc x :db/id (d/tempid :db.part/user)))
(deftest custom-schema-and-seed
(boot (tasks/bootstrap-db :uri uri
:schema ["db/custom-schema.edn" "db/custom-schema-2.edn"]
:data ["db/custom-seed.edn"]
:transform 'com.flyingmachine.datomic-booties.tasks-test/seed-transform))
(let [attrs (into #{} (core/attributes (d/connect uri)))]
(is (every? attrs [:custom/attr :custom/mixy-matchy]))
(is (not-any? attrs [:user/username :post/content]))
(is (= (d/q '[:find (pull ?e [*])
:where [?e :custom/attr]]
(d/db (d/connect uri)))
[[{:db/id 17592186045423
:custom/attr "billy"}]]))))
| |
f7d368cd0e45b33f09375427807a464322848f1f086a5320c2a569cf1a963dd3 | kadena-io/pact | Persistence.hs | # LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE OverloadedStrings #-}
# OPTIONS_GHC -fno - warn - orphans #
module Pact.Server.History.Persistence
( createDB
, insertCompletedCommand
, queryForExisting
, selectCompletedCommands
, selectAllCommands
, closeDB
) where
import Control.Monad
import qualified Data.Text as T
import qualified Data.Aeson as A
import Data.Text.Encoding (encodeUtf8)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as BSL
import Data.List (sortBy)
import Data.HashSet (HashSet)
import qualified Data.HashSet as HashSet
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import Data.Maybe
import Database.SQLite3.Direct
import Pact.Types.Command
import Pact.Types.Runtime
import Pact.Types.SQLite
import Pact.Server.History.Types
hashToField :: Hash -> SType
hashToField h = SText $ Utf8 $ BSL.toStrict $ A.encode h
hashFromField :: ByteString -> Hash
hashFromField h = case A.eitherDecodeStrict' h of
Left err -> error $ "hashFromField: unable to decode Hash from database! " ++ show err ++ " => " ++ show h
Right v -> v
crToField :: CommandResult Hash -> SType
crToField r = SText $ Utf8 $ BSL.toStrict $ A.encode $ A.toJSON r
crFromField :: ByteString -> CommandResult Hash
crFromField cr = case A.eitherDecodeStrict' cr of
Left err -> error $ "crFromField: unable to decode CommandResult from database! " ++ show err ++ "\n" ++ show cr
Right v' -> v'
userSigsToField :: [UserSig] -> SType
userSigsToField us = SText $ Utf8 $ BSL.toStrict $ A.encode us
userSigsFromField :: ByteString -> [UserSig]
userSigsFromField us = case A.eitherDecodeStrict' us of
Left err -> error $ "userSigsFromField: unable to decode [UserSigs] from database! " ++ show err ++ "\n" ++ show us
Right v -> v
gasToField :: Gas -> SType
gasToField (Gas g) = SInt g
sqlDbSchema :: Utf8
sqlDbSchema =
"CREATE TABLE IF NOT EXISTS 'main'.'pactCommands' \
\( 'hash' TEXT PRIMARY KEY NOT NULL UNIQUE\
\, 'txid' INTEGER NOT NULL\
\, 'command' TEXT NOT NULL\
\, 'result' TEXT NOT NULL\
\, 'userSigs' TEXT NOT NULL\
\, 'gas' INTEGER NOT NULL\
\)"
eitherToError :: Show e => String -> Either e a -> a
eitherToError _ (Right v) = v
eitherToError s (Left e) = error $ "SQLite Error in History exec: " ++ s ++ "\nWith Error: "++ show e
createDB :: FilePath -> IO DbEnv
createDB f = do
conn' <- eitherToError "OpenDB" <$> open (Utf8 $ encodeUtf8 $ T.pack f)
eitherToError "CreateTable" <$> exec conn' sqlDbSchema
eitherToError " pragmas " < $ > exec = EXCLUSIVE "
DbEnv <$> pure conn'
<*> prepStmt conn' sqlInsertHistoryRow
<*> prepStmt conn' sqlQueryForExisting
<*> prepStmt conn' sqlSelectCompletedCommands
<*> prepStmt conn' sqlSelectAllCommands
closeDB :: DbEnv -> IO ()
closeDB DbEnv{..} = do
liftEither $ closeStmt _insertStatement
liftEither $ closeStmt _qryExistingStmt
liftEither $ closeStmt _qryCompletedStmt
liftEither $ closeStmt _qrySelectAllCmds
liftEither $ close _conn
sqlInsertHistoryRow :: Utf8
sqlInsertHistoryRow =
"INSERT INTO 'main'.'pactCommands' \
\( 'hash'\
\, 'txid' \
\, 'command'\
\, 'result'\
\, 'userSigs'\
\, 'gas'\
\) VALUES (?,?,?,?,?,?)"
insertRow :: Statement -> (Command ByteString, CommandResult Hash) -> IO ()
insertRow s (Command{..},cr@CommandResult {..}) =
execs s [hashToField (toUntypedHash _cmdHash)
,SInt $ fromIntegral (fromMaybe (-1) _crTxId)
,SText $ Utf8 _cmdPayload
,crToField cr
,userSigsToField _cmdSigs
,gasToField _crGas]
insertCompletedCommand :: DbEnv -> [(Command ByteString, CommandResult Hash)] -> IO ()
insertCompletedCommand DbEnv{..} v = do
let sortCmds (_,cr1) (_,cr2) = compare (_crTxId cr1) (_crTxId cr2)
eitherToError "start insert transaction" <$> exec _conn "BEGIN TRANSACTION"
mapM_ (insertRow _insertStatement) $ sortBy sortCmds v
eitherToError "end insert transaction" <$> exec _conn "END TRANSACTION"
sqlQueryForExisting :: Utf8
sqlQueryForExisting = "SELECT EXISTS(SELECT 1 FROM 'main'.'pactCommands' WHERE hash=:hash LIMIT 1)"
queryForExisting :: DbEnv -> HashSet RequestKey -> IO (HashSet RequestKey)
queryForExisting e v = foldM f v v
where
f s rk = do
r <- qrys (_qryExistingStmt e) [hashToField $ unRequestKey rk] [RInt]
case r of
[[SInt 1]] -> return s
_ -> return $ HashSet.delete rk s
sqlSelectCompletedCommands :: Utf8
sqlSelectCompletedCommands =
"SELECT result,txid FROM 'main'.'pactCommands' WHERE hash=:hash LIMIT 1"
selectCompletedCommands :: DbEnv -> HashSet RequestKey -> IO (HashMap RequestKey (CommandResult Hash))
selectCompletedCommands e v = foldM f HashMap.empty v
where
f m rk = do
rs <- qrys (_qryCompletedStmt e) [hashToField $ unRequestKey rk] [RText,RInt,RInt]
if null rs
then return m
else case head rs of
[SText (Utf8 cr),SInt _, SInt _] ->
return $ HashMap.insert rk (crFromField cr) m
r -> dbError $ "Invalid result from query: " ++ show r
sqlSelectAllCommands :: Utf8
sqlSelectAllCommands = "SELECT hash,command,userSigs FROM 'main'.'pactCommands' ORDER BY txid ASC"
selectAllCommands :: DbEnv -> IO [Command ByteString]
selectAllCommands e = do
let rowToCmd [SText (Utf8 hash'),SText (Utf8 cmd'),SText (Utf8 userSigs')] =
Command { _cmdPayload = cmd'
, _cmdSigs = userSigsFromField userSigs'
, _cmdHash = fromUntypedHash $ hashFromField hash'}
rowToCmd err = error $ "selectAllCommands: unexpected result schema: " ++ show err
fmap rowToCmd <$> qrys_ (_qrySelectAllCmds e) [RText,RText,RText]
| null | https://raw.githubusercontent.com/kadena-io/pact/4971ab6078b75eb612d83d56f1e7cd139a5a2ba8/src-ghc/Pact/Server/History/Persistence.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# OPTIONS_GHC -fno - warn - orphans #
module Pact.Server.History.Persistence
( createDB
, insertCompletedCommand
, queryForExisting
, selectCompletedCommands
, selectAllCommands
, closeDB
) where
import Control.Monad
import qualified Data.Text as T
import qualified Data.Aeson as A
import Data.Text.Encoding (encodeUtf8)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as BSL
import Data.List (sortBy)
import Data.HashSet (HashSet)
import qualified Data.HashSet as HashSet
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import Data.Maybe
import Database.SQLite3.Direct
import Pact.Types.Command
import Pact.Types.Runtime
import Pact.Types.SQLite
import Pact.Server.History.Types
hashToField :: Hash -> SType
hashToField h = SText $ Utf8 $ BSL.toStrict $ A.encode h
hashFromField :: ByteString -> Hash
hashFromField h = case A.eitherDecodeStrict' h of
Left err -> error $ "hashFromField: unable to decode Hash from database! " ++ show err ++ " => " ++ show h
Right v -> v
crToField :: CommandResult Hash -> SType
crToField r = SText $ Utf8 $ BSL.toStrict $ A.encode $ A.toJSON r
crFromField :: ByteString -> CommandResult Hash
crFromField cr = case A.eitherDecodeStrict' cr of
Left err -> error $ "crFromField: unable to decode CommandResult from database! " ++ show err ++ "\n" ++ show cr
Right v' -> v'
userSigsToField :: [UserSig] -> SType
userSigsToField us = SText $ Utf8 $ BSL.toStrict $ A.encode us
userSigsFromField :: ByteString -> [UserSig]
userSigsFromField us = case A.eitherDecodeStrict' us of
Left err -> error $ "userSigsFromField: unable to decode [UserSigs] from database! " ++ show err ++ "\n" ++ show us
Right v -> v
gasToField :: Gas -> SType
gasToField (Gas g) = SInt g
sqlDbSchema :: Utf8
sqlDbSchema =
"CREATE TABLE IF NOT EXISTS 'main'.'pactCommands' \
\( 'hash' TEXT PRIMARY KEY NOT NULL UNIQUE\
\, 'txid' INTEGER NOT NULL\
\, 'command' TEXT NOT NULL\
\, 'result' TEXT NOT NULL\
\, 'userSigs' TEXT NOT NULL\
\, 'gas' INTEGER NOT NULL\
\)"
eitherToError :: Show e => String -> Either e a -> a
eitherToError _ (Right v) = v
eitherToError s (Left e) = error $ "SQLite Error in History exec: " ++ s ++ "\nWith Error: "++ show e
createDB :: FilePath -> IO DbEnv
createDB f = do
conn' <- eitherToError "OpenDB" <$> open (Utf8 $ encodeUtf8 $ T.pack f)
eitherToError "CreateTable" <$> exec conn' sqlDbSchema
eitherToError " pragmas " < $ > exec = EXCLUSIVE "
DbEnv <$> pure conn'
<*> prepStmt conn' sqlInsertHistoryRow
<*> prepStmt conn' sqlQueryForExisting
<*> prepStmt conn' sqlSelectCompletedCommands
<*> prepStmt conn' sqlSelectAllCommands
closeDB :: DbEnv -> IO ()
closeDB DbEnv{..} = do
liftEither $ closeStmt _insertStatement
liftEither $ closeStmt _qryExistingStmt
liftEither $ closeStmt _qryCompletedStmt
liftEither $ closeStmt _qrySelectAllCmds
liftEither $ close _conn
sqlInsertHistoryRow :: Utf8
sqlInsertHistoryRow =
"INSERT INTO 'main'.'pactCommands' \
\( 'hash'\
\, 'txid' \
\, 'command'\
\, 'result'\
\, 'userSigs'\
\, 'gas'\
\) VALUES (?,?,?,?,?,?)"
insertRow :: Statement -> (Command ByteString, CommandResult Hash) -> IO ()
insertRow s (Command{..},cr@CommandResult {..}) =
execs s [hashToField (toUntypedHash _cmdHash)
,SInt $ fromIntegral (fromMaybe (-1) _crTxId)
,SText $ Utf8 _cmdPayload
,crToField cr
,userSigsToField _cmdSigs
,gasToField _crGas]
insertCompletedCommand :: DbEnv -> [(Command ByteString, CommandResult Hash)] -> IO ()
insertCompletedCommand DbEnv{..} v = do
let sortCmds (_,cr1) (_,cr2) = compare (_crTxId cr1) (_crTxId cr2)
eitherToError "start insert transaction" <$> exec _conn "BEGIN TRANSACTION"
mapM_ (insertRow _insertStatement) $ sortBy sortCmds v
eitherToError "end insert transaction" <$> exec _conn "END TRANSACTION"
sqlQueryForExisting :: Utf8
sqlQueryForExisting = "SELECT EXISTS(SELECT 1 FROM 'main'.'pactCommands' WHERE hash=:hash LIMIT 1)"
queryForExisting :: DbEnv -> HashSet RequestKey -> IO (HashSet RequestKey)
queryForExisting e v = foldM f v v
where
f s rk = do
r <- qrys (_qryExistingStmt e) [hashToField $ unRequestKey rk] [RInt]
case r of
[[SInt 1]] -> return s
_ -> return $ HashSet.delete rk s
sqlSelectCompletedCommands :: Utf8
sqlSelectCompletedCommands =
"SELECT result,txid FROM 'main'.'pactCommands' WHERE hash=:hash LIMIT 1"
selectCompletedCommands :: DbEnv -> HashSet RequestKey -> IO (HashMap RequestKey (CommandResult Hash))
selectCompletedCommands e v = foldM f HashMap.empty v
where
f m rk = do
rs <- qrys (_qryCompletedStmt e) [hashToField $ unRequestKey rk] [RText,RInt,RInt]
if null rs
then return m
else case head rs of
[SText (Utf8 cr),SInt _, SInt _] ->
return $ HashMap.insert rk (crFromField cr) m
r -> dbError $ "Invalid result from query: " ++ show r
sqlSelectAllCommands :: Utf8
sqlSelectAllCommands = "SELECT hash,command,userSigs FROM 'main'.'pactCommands' ORDER BY txid ASC"
selectAllCommands :: DbEnv -> IO [Command ByteString]
selectAllCommands e = do
let rowToCmd [SText (Utf8 hash'),SText (Utf8 cmd'),SText (Utf8 userSigs')] =
Command { _cmdPayload = cmd'
, _cmdSigs = userSigsFromField userSigs'
, _cmdHash = fromUntypedHash $ hashFromField hash'}
rowToCmd err = error $ "selectAllCommands: unexpected result schema: " ++ show err
fmap rowToCmd <$> qrys_ (_qrySelectAllCmds e) [RText,RText,RText]
|
4a79cb6e3bb9df5145bc759963f8641e83d7d6dfa22659ee4e21fb931db78b79 | Cipherwraith/Rokka | Errors.hs | module Errors where
import qualified Data.ByteString.Lazy as BL hiding (pack, unpack)
import qualified Data.ByteString.Lazy.Char8 as BL
- -- Alternative error messages
inputError = BL.pack " 404 Not Found - Please check input and try again "
authenticationError = BL.pack " 401 Unauthorized - Authentication Error "
urlError = BL.pack " 400 Bad Request - Check URL and try again "
timeLimitError = BL.pack " 401 Unauthorized - Request Limit Reached , please wait and try again "
-
inputError = BL.pack "404 Not Found - Please check input and try again"
authenticationError = BL.pack "401 Unauthorized - Authentication Error"
urlError = BL.pack "400 Bad Request - Check URL and try again"
timeLimitError = BL.pack "401 Unauthorized - Request Limit Reached, please wait and try again"
--}
-- Errors.
inputError :: (BL.ByteString, Int, String)
inputError = (BL.pack "Error 8008135", 404, "")
authenticationError :: (BL.ByteString, Int, String)
authenticationError = (BL.pack "Error 69", 401, "")
urlError :: (BL.ByteString, Int, String)
urlError = (BL.pack "Error 666", 400, "")
timeLimitError :: (BL.ByteString, Int, String)
timeLimitError = (BL.pack "Error 420", 401, "")
pageDoesNotExistError :: (BL.ByteString, Int, String)
pageDoesNotExistError = (BL.pack "Error 13", 404, "")
-- This is a full output of an error code, header, and message
error405NotAllowed :: BL.ByteString
error405NotAllowed = BL.pack $ "HTTP/1.0 405 Method Not Allowed\r\nContent-Type: text/plain; charset=\"Shift_JIS\"\r\nContent-Length: 8\r\n\r\nError 42"
error404NotFound :: BL.ByteString
error404NotFound = BL.pack $ "HTTP/1.0 404 Not Found\r\nContent-Type: text/plain; charset=\"Shift_JIS\"\r\nContent-Length: 8\r\n\r\nError 13"
error400BadRequest :: BL.ByteString
error400BadRequest = BL.pack $ "HTTP/1.0 400 Bad Request\r\nContent-Type: text/plain; charset=\"Shift_JIS\"\r\nContent-Length: 8\r\n\r\nError 42"
error501NotImplemented :: BL.ByteString
error501NotImplemented = BL.pack $ "HTTP/1.0 501 Not Implemented\r\nContent-Type: text/plain; charset=\"Shift_JIS\"\r\nContent-Length: 8\r\n\r\nError 42"
| null | https://raw.githubusercontent.com/Cipherwraith/Rokka/46f55dcc5f0c00f2c806670330234b0e7afea5ee/Errors.hs | haskell | Alternative error messages
}
Errors.
This is a full output of an error code, header, and message | module Errors where
import qualified Data.ByteString.Lazy as BL hiding (pack, unpack)
import qualified Data.ByteString.Lazy.Char8 as BL
inputError = BL.pack " 404 Not Found - Please check input and try again "
authenticationError = BL.pack " 401 Unauthorized - Authentication Error "
urlError = BL.pack " 400 Bad Request - Check URL and try again "
timeLimitError = BL.pack " 401 Unauthorized - Request Limit Reached , please wait and try again "
-
inputError = BL.pack "404 Not Found - Please check input and try again"
authenticationError = BL.pack "401 Unauthorized - Authentication Error"
urlError = BL.pack "400 Bad Request - Check URL and try again"
timeLimitError = BL.pack "401 Unauthorized - Request Limit Reached, please wait and try again"
inputError :: (BL.ByteString, Int, String)
inputError = (BL.pack "Error 8008135", 404, "")
authenticationError :: (BL.ByteString, Int, String)
authenticationError = (BL.pack "Error 69", 401, "")
urlError :: (BL.ByteString, Int, String)
urlError = (BL.pack "Error 666", 400, "")
timeLimitError :: (BL.ByteString, Int, String)
timeLimitError = (BL.pack "Error 420", 401, "")
pageDoesNotExistError :: (BL.ByteString, Int, String)
pageDoesNotExistError = (BL.pack "Error 13", 404, "")
error405NotAllowed :: BL.ByteString
error405NotAllowed = BL.pack $ "HTTP/1.0 405 Method Not Allowed\r\nContent-Type: text/plain; charset=\"Shift_JIS\"\r\nContent-Length: 8\r\n\r\nError 42"
error404NotFound :: BL.ByteString
error404NotFound = BL.pack $ "HTTP/1.0 404 Not Found\r\nContent-Type: text/plain; charset=\"Shift_JIS\"\r\nContent-Length: 8\r\n\r\nError 13"
error400BadRequest :: BL.ByteString
error400BadRequest = BL.pack $ "HTTP/1.0 400 Bad Request\r\nContent-Type: text/plain; charset=\"Shift_JIS\"\r\nContent-Length: 8\r\n\r\nError 42"
error501NotImplemented :: BL.ByteString
error501NotImplemented = BL.pack $ "HTTP/1.0 501 Not Implemented\r\nContent-Type: text/plain; charset=\"Shift_JIS\"\r\nContent-Length: 8\r\n\r\nError 42"
|
48fc367151eee15d300158d5a3903a080f8775844c7897fc825712526c21e994 | nick8325/jukebox | Options.hs | -- Command-line option parsing using applicative functors.
Parsers are represented as values of type OptionParser a ,
-- and run using the function
parseCommandLine : : String - > OptionParser a - > IO a.
OptionParsers are built from ArgParsers , which parse a single
option ( e.g. --verbosity 3 ) .
# LANGUAGE FlexibleContexts , CPP #
module Jukebox.Options where
import Data.Char
import Data.List
import System.Environment
import System.Exit
import System.IO
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
import Data.Monoid
#endif
import Data.Semigroup(Semigroup(..))
import Control.Monad
----------------------------------------------------------------------
-- A parser of some kind annotated with a help text of some kind
data Annotated d p a = Annotated
{ descr :: d,
parser :: p a }
instance Functor p => Functor (Annotated d p) where
fmap f (Annotated d x) = Annotated d (fmap f x)
instance (Monoid d, Applicative p) => Applicative (Annotated d p) where
pure = Annotated mempty . pure
Annotated d f <*> Annotated d' x =
Annotated (d `mappend` d') (f <*> x)
instance (Semigroup d, Monoid d, Semigroup (p a), Monoid (p a)) => Monoid (Annotated d p a) where
mempty = Annotated mempty mempty
mappend = (<>)
instance (Semigroup d, Semigroup (p a)) => Semigroup (Annotated d p a) where
Annotated d p <> Annotated d' p' =
Annotated (d <> d') (p <> p')
----------------------------------------------------------------------
The ArgParser type : parsing of single flags .
type ArgParser = Annotated [String] SeqParser
-- annotated with a description, e.g. "<number>"
Called SeqParser because < * > is sequential composition .
data SeqParser a = SeqParser
{ args :: Int, -- How many arguments will be consumed
consume :: [String] -> Either Error a }
instance Functor SeqParser where
fmap f (SeqParser a c) = SeqParser a (fmap f . c)
instance Applicative SeqParser where
pure = SeqParser 0 . const . pure
SeqParser a c <*> SeqParser a' c' = SeqParser (a + a') f
where f xs = c xs <*> c' (drop a xs)
----------------------------------------------------------------------
Combinators for building ArgParsers .
arg :: String -> String -> (String -> Maybe a) -> ArgParser a
arg desc err f = Annotated [desc] (SeqParser 1 c)
where c [] = Left (Mistake err)
c (x:_) | "-" `isPrefixOf` x = Left (Mistake err)
c (x:_) =
case f x of
Nothing -> Left (Mistake err)
Just ok -> Right ok
argNum :: (Read a, Num a) => ArgParser a
argNum = arg "<num>" "expected a number" f
where f x =
case reads x of
[(y, "")] -> Just y
_ -> Nothing
argFile :: ArgParser FilePath
argFile = arg "<file>" "expected a file" Just
argFiles :: ArgParser [FilePath]
argFiles = arg "<files>" "expected a list of files" $ \x ->
Just $ elts $ x ++ ","
where
elts [] = []
elts s = w:elts r
where
w = takeWhile (/= ',') s
r = tail (dropWhile (/= ',') s)
argName :: ArgParser String
argName = arg "<name>" "expected a name" Just
argNums :: ArgParser [Int]
argNums = arg "<nums>" "expected a number list" $ \x ->
nums . groupBy (\x y -> isDigit x == isDigit y) $ x ++ ","
where
nums [] = Just []
nums (n:",":ns) = (read n :) `fmap` nums ns
nums (n:"..":m:",":ns) = ([read n .. read m] ++) `fmap` nums ns
nums _ = Nothing
argOption :: [(String, a)] -> ArgParser a
argOption as =
argOptionWith "one" "or" "" (map fst as) (`lookup` as)
argList :: [String] -> ArgParser [String]
argList as =
argOptionWith "several" "and" "*" as $ \x -> elts (x ++ ",")
where
elts [] = Just []
elts s | w `elem` as = (w:) `fmap` elts r
where
w = takeWhile (/= ',') s
r = tail (dropWhile (/= ',') s)
elts _ = Nothing
argOptionWith :: String -> String -> String -> [String] -> (String -> Maybe a) -> ArgParser a
argOptionWith one or suff opts p =
arg ("<" ++ intercalate " | " opts ++ ">" ++ suff)
("expected " ++ one ++ " of " ++ list) p
where
list =
case opts of
[] -> "<empty list>" -- ??
_ ->
intercalate ", " (init opts) ++ " " ++ or ++ " " ++ last opts
-- A parser that always fails but produces an error message (useful for --help etc.)
argUsage :: ExitCode -> [String] -> ArgParser a
argUsage code err = Annotated [] (SeqParser 0 (const (Left (Usage code err))))
----------------------------------------------------------------------
The OptionParser type : parsing of whole command lines .
type OptionParser = Annotated [Flag] ParParser
-- The help information for a flag.
data Flag = Flag
{ flagName :: String,
flagGroup :: String,
flagMode :: FlagMode,
flagHelp :: [String],
flagArgs :: String } deriving (Eq, Show)
data FlagMode = NormalMode | ExpertMode | HiddenMode deriving (Eq, Show)
flagExpert :: Flag -> Bool
flagExpert f = flagMode f == ExpertMode
-- Called ParParser because <*> is parallel composition.
-- In other words, in f <*> x, f and x both see the whole command line.
-- We want this when parsing command lines because
-- it doesn't matter what order we write the options in,
-- and because f and x might understand the same flags.
data ParParser a = ParParser
{ val :: Either Error (IO a), -- impure so we can put system information in our options records
peek :: [String] -> ParseResult a }
data ParseResult a
-- Yes n x: consumed n arguments, continue parsing with x
= Yes Int (ParParser a)
-- No x: didn't understand this flag, continue parsing with x
| No (ParParser a)
-- Error
| Error Error
data Error =
Mistake String
| Usage ExitCode [String]
instance Functor ParParser where
fmap f x = pure f <*> x
instance Applicative ParParser where
pure x = ParParser (Right (return x)) (const (pure x))
ParParser v p <*> ParParser v' p' =
ParParser (liftM2 (<*>) v v') (\xs -> p xs <*> p' xs)
instance Functor ParseResult where
fmap f x = pure f <*> x
instance Applicative ParseResult where
pure = No . pure
Yes n r <*> Yes n' r'
| n == n' = Yes n (r <*> r')
| otherwise = error "Options.ParseResult: inconsistent number of arguments"
Error s <*> _ = Error s
_ <*> Error s = Error s
Yes n r <*> No x = Yes n (r <*> x)
No x <*> Yes n r = Yes n (x <*> r)
No f <*> No x = No (f <*> x)
runPar :: ParParser a -> [String] -> Either Error (IO a)
runPar p [] = val p
runPar p xs@(x:_) =
case peek p xs of
Yes n p' -> runPar p' (drop n xs)
No _ -> Left (Mistake ("Didn't recognise option " ++ x))
Error err -> Left err
await :: (String -> Bool) -> Either Error a -> (String -> [String] -> ParseResult a) -> ParParser a
await p def par = ParParser (return <$> def) f
where f (x:xs) | p x =
case par x xs of
Yes n r -> Yes (n+1) r
No _ ->
error "Options.await: got No"
Error err -> Error err
f _ = No (await p def par)
----------------------------------------------------------------------
Low - level primitives for building OptionParsers .
-- Produce an OptionParser with maximum flexibility.
primFlag ::
-- Name and description of options (for documentation)
String -> [String] ->
-- Predicate which checks if this argument is our option
(String -> Bool) ->
-- Handle repeated occurrences of the same option
(a -> a -> Either Error a) ->
-- Default argument value and argument parser
-- The argument parser is given the option name.
a -> ArgParser (String -> a) -> OptionParser a
primFlag name help p combine def (Annotated desc (SeqParser args f)) =
Annotated [desc'] (await p (Right def) (g Right))
where desc' = Flag name "General options" NormalMode help (unwords desc)
g comb x xs =
case f xs >>= comb . ($ x) of
Left (Mistake err) -> Error (Mistake ("Error in option --" ++ name ++ ": " ++ err))
Left (Usage code err) -> Error (Usage code err)
Right y ->
Yes args (await p (Right y) (g (combine y)))
----------------------------------------------------------------------
Combinators for building OptionParsers .
From a flag name and description and argument parser , produce an OptionParser .
flag :: String -> [String] -> a -> ArgParser a -> OptionParser a
flag name help def p =
primFlag name help
(\x -> x == "--" ++ name)
take second occurrence of flag
def (const <$> p)
-- A variant of 'flag' that allows repeated flags.
manyFlags :: String -> [String] -> ArgParser a -> OptionParser [a]
manyFlags name help p =
primFlag name help
(\x -> x == "--" ++ name)
(\x y -> return (x ++ y))
[] (const <$> return <$> p)
-- A boolean flag.
bool :: String -> [String] -> Bool -> OptionParser Bool
bool name help def =
primFlag ("(no-)" ++ name) help
(\x -> x `elem` ["--" ++ name, "--no-" ++ name])
(\_ y -> return y)
def
(pure (\name' -> if "--" ++ name == name' then True else False))
-- A parser that reads all file names from the command line.
filenames :: OptionParser [String]
filenames = Annotated [] (await p (Left err) (f []))
where p x = not ("-" `isPrefixOf` x) || x == "-"
f xs y _ = Yes 0 (let ys = xs ++ [y] in await p (Right ys) (f ys))
err =
Usage (ExitFailure 1)
["No input files specified! Try --help.",
"You can use \"-\" to read from standard input."]
-- Take a value from the environment.
io :: IO a -> OptionParser a
io m = Annotated [] p
where p = ParParser (Right m) (const (No p))
-- Change the group associated with a set of flags.
inGroup :: String -> OptionParser a -> OptionParser a
inGroup x (Annotated fls f) = Annotated [fl{ flagGroup = x } | fl <- fls] f
-- Mark a flag as being for experts only.
expert :: OptionParser a -> OptionParser a
expert (Annotated fls f) = Annotated [fl{ flagMode = ExpertMode } | fl <- fls] f
-- Mark a flag as being hidden.
hidden :: OptionParser a -> OptionParser a
hidden (Annotated fls f) = Annotated [fl{ flagMode = HiddenMode } | fl <- fls] f
-- Add a --version flag.
version :: String -> OptionParser a -> OptionParser a
version x p =
p <*
inGroup "Miscellaneous options"
(flag "version" ["Show the version number."] ()
(argUsage ExitSuccess [x]))
----------------------------------------------------------------------
-- Help screens, error messages and so on.
printHelp :: ExitCode -> [String] -> IO a
printHelp code xs = do
mapM_ (hPutStrLn stderr) xs
exitWith code
printError :: String -> String -> IO a
printError name err =
printHelp (ExitFailure 1) $
[err ++ ".", "Try " ++ name ++ " --help."]
help :: String -> String -> OptionParser a -> OptionParser a
help name description p = p'
where
p' =
p <*
(inGroup "Miscellaneous options" $
flag "help" ["Show help text."] ()
(argUsage ExitSuccess (helpText False name description p')))
<*
(if any flagExpert (descr p) then
(inGroup "Miscellaneous options" $
flag "expert-help" ["Show help text for hidden options."] ()
(argUsage ExitSuccess (helpText True name description p')))
else pure ())
usageText :: String -> String -> [String]
usageText name descr =
[descr ++ ".",
"Usage: " ++ name ++ " <option>* <file>*, where <file> is in TPTP format."]
helpText :: Bool -> String -> String -> OptionParser a -> [String]
helpText expert name description p =
intercalate [""] $
[usageText name description] ++
[[flagGroup f0 ++ ":"] ++
concat [justify ("--" ++ flagName f ++ " " ++ flagArgs f) (flagHelp f) | f <- fs]
| fs@(f0:_) <- groups (filter ok (nub (descr p))) ] ++
[ ["To see hidden options too, try --expert-help."]
| any flagExpert (descr p), not expert ]
where
groups [] = []
groups (f:fs) =
(f:[f' | f' <- fs, flagGroup f == flagGroup f']):
groups [f' | f' <- fs, flagGroup f /= flagGroup f']
ok flag =
case flagMode flag of
NormalMode -> True
ExpertMode -> expert
HiddenMode -> False
justify :: String -> [String] -> [String]
justify name help = [" " ++ name] ++ map (" " ++) help
----------------------------------------------------------------------
-- Running the parser.
parseCommandLine :: String -> OptionParser a -> IO a
parseCommandLine description p =
parseCommandLineWithExtraArgs [] description p
parseCommandLineWithExtraArgs :: [String] -> String -> OptionParser a -> IO a
parseCommandLineWithExtraArgs args0 description p = do
name <- getProgName
args <- getArgs
parseCommandLineWithArgs name (args0 ++ args) description p
parseCommandLineWithArgs :: String -> [String] -> String -> OptionParser a -> IO a
parseCommandLineWithArgs name args description p = do
case runPar (parser (help name description p)) args of
Left (Mistake err) -> printError name err
Left (Usage code err) -> printHelp code err
Right x -> x
| null | https://raw.githubusercontent.com/nick8325/jukebox/52c3206d7def50c8ebcc0128c442fa6c2bd4e074/src/Jukebox/Options.hs | haskell | Command-line option parsing using applicative functors.
and run using the function
verbosity 3 ) .
--------------------------------------------------------------------
A parser of some kind annotated with a help text of some kind
--------------------------------------------------------------------
annotated with a description, e.g. "<number>"
How many arguments will be consumed
--------------------------------------------------------------------
??
A parser that always fails but produces an error message (useful for --help etc.)
--------------------------------------------------------------------
The help information for a flag.
Called ParParser because <*> is parallel composition.
In other words, in f <*> x, f and x both see the whole command line.
We want this when parsing command lines because
it doesn't matter what order we write the options in,
and because f and x might understand the same flags.
impure so we can put system information in our options records
Yes n x: consumed n arguments, continue parsing with x
No x: didn't understand this flag, continue parsing with x
Error
--------------------------------------------------------------------
Produce an OptionParser with maximum flexibility.
Name and description of options (for documentation)
Predicate which checks if this argument is our option
Handle repeated occurrences of the same option
Default argument value and argument parser
The argument parser is given the option name.
--------------------------------------------------------------------
A variant of 'flag' that allows repeated flags.
A boolean flag.
A parser that reads all file names from the command line.
Take a value from the environment.
Change the group associated with a set of flags.
Mark a flag as being for experts only.
Mark a flag as being hidden.
Add a --version flag.
--------------------------------------------------------------------
Help screens, error messages and so on.
--------------------------------------------------------------------
Running the parser. | Parsers are represented as values of type OptionParser a ,
parseCommandLine : : String - > OptionParser a - > IO a.
OptionParsers are built from ArgParsers , which parse a single
# LANGUAGE FlexibleContexts , CPP #
module Jukebox.Options where
import Data.Char
import Data.List
import System.Environment
import System.Exit
import System.IO
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
import Data.Monoid
#endif
import Data.Semigroup(Semigroup(..))
import Control.Monad
data Annotated d p a = Annotated
{ descr :: d,
parser :: p a }
instance Functor p => Functor (Annotated d p) where
fmap f (Annotated d x) = Annotated d (fmap f x)
instance (Monoid d, Applicative p) => Applicative (Annotated d p) where
pure = Annotated mempty . pure
Annotated d f <*> Annotated d' x =
Annotated (d `mappend` d') (f <*> x)
instance (Semigroup d, Monoid d, Semigroup (p a), Monoid (p a)) => Monoid (Annotated d p a) where
mempty = Annotated mempty mempty
mappend = (<>)
instance (Semigroup d, Semigroup (p a)) => Semigroup (Annotated d p a) where
Annotated d p <> Annotated d' p' =
Annotated (d <> d') (p <> p')
The ArgParser type : parsing of single flags .
type ArgParser = Annotated [String] SeqParser
Called SeqParser because < * > is sequential composition .
data SeqParser a = SeqParser
consume :: [String] -> Either Error a }
instance Functor SeqParser where
fmap f (SeqParser a c) = SeqParser a (fmap f . c)
instance Applicative SeqParser where
pure = SeqParser 0 . const . pure
SeqParser a c <*> SeqParser a' c' = SeqParser (a + a') f
where f xs = c xs <*> c' (drop a xs)
Combinators for building ArgParsers .
arg :: String -> String -> (String -> Maybe a) -> ArgParser a
arg desc err f = Annotated [desc] (SeqParser 1 c)
where c [] = Left (Mistake err)
c (x:_) | "-" `isPrefixOf` x = Left (Mistake err)
c (x:_) =
case f x of
Nothing -> Left (Mistake err)
Just ok -> Right ok
argNum :: (Read a, Num a) => ArgParser a
argNum = arg "<num>" "expected a number" f
where f x =
case reads x of
[(y, "")] -> Just y
_ -> Nothing
argFile :: ArgParser FilePath
argFile = arg "<file>" "expected a file" Just
argFiles :: ArgParser [FilePath]
argFiles = arg "<files>" "expected a list of files" $ \x ->
Just $ elts $ x ++ ","
where
elts [] = []
elts s = w:elts r
where
w = takeWhile (/= ',') s
r = tail (dropWhile (/= ',') s)
argName :: ArgParser String
argName = arg "<name>" "expected a name" Just
argNums :: ArgParser [Int]
argNums = arg "<nums>" "expected a number list" $ \x ->
nums . groupBy (\x y -> isDigit x == isDigit y) $ x ++ ","
where
nums [] = Just []
nums (n:",":ns) = (read n :) `fmap` nums ns
nums (n:"..":m:",":ns) = ([read n .. read m] ++) `fmap` nums ns
nums _ = Nothing
argOption :: [(String, a)] -> ArgParser a
argOption as =
argOptionWith "one" "or" "" (map fst as) (`lookup` as)
argList :: [String] -> ArgParser [String]
argList as =
argOptionWith "several" "and" "*" as $ \x -> elts (x ++ ",")
where
elts [] = Just []
elts s | w `elem` as = (w:) `fmap` elts r
where
w = takeWhile (/= ',') s
r = tail (dropWhile (/= ',') s)
elts _ = Nothing
argOptionWith :: String -> String -> String -> [String] -> (String -> Maybe a) -> ArgParser a
argOptionWith one or suff opts p =
arg ("<" ++ intercalate " | " opts ++ ">" ++ suff)
("expected " ++ one ++ " of " ++ list) p
where
list =
case opts of
_ ->
intercalate ", " (init opts) ++ " " ++ or ++ " " ++ last opts
argUsage :: ExitCode -> [String] -> ArgParser a
argUsage code err = Annotated [] (SeqParser 0 (const (Left (Usage code err))))
The OptionParser type : parsing of whole command lines .
type OptionParser = Annotated [Flag] ParParser
data Flag = Flag
{ flagName :: String,
flagGroup :: String,
flagMode :: FlagMode,
flagHelp :: [String],
flagArgs :: String } deriving (Eq, Show)
data FlagMode = NormalMode | ExpertMode | HiddenMode deriving (Eq, Show)
flagExpert :: Flag -> Bool
flagExpert f = flagMode f == ExpertMode
data ParParser a = ParParser
peek :: [String] -> ParseResult a }
data ParseResult a
= Yes Int (ParParser a)
| No (ParParser a)
| Error Error
data Error =
Mistake String
| Usage ExitCode [String]
instance Functor ParParser where
fmap f x = pure f <*> x
instance Applicative ParParser where
pure x = ParParser (Right (return x)) (const (pure x))
ParParser v p <*> ParParser v' p' =
ParParser (liftM2 (<*>) v v') (\xs -> p xs <*> p' xs)
instance Functor ParseResult where
fmap f x = pure f <*> x
instance Applicative ParseResult where
pure = No . pure
Yes n r <*> Yes n' r'
| n == n' = Yes n (r <*> r')
| otherwise = error "Options.ParseResult: inconsistent number of arguments"
Error s <*> _ = Error s
_ <*> Error s = Error s
Yes n r <*> No x = Yes n (r <*> x)
No x <*> Yes n r = Yes n (x <*> r)
No f <*> No x = No (f <*> x)
runPar :: ParParser a -> [String] -> Either Error (IO a)
runPar p [] = val p
runPar p xs@(x:_) =
case peek p xs of
Yes n p' -> runPar p' (drop n xs)
No _ -> Left (Mistake ("Didn't recognise option " ++ x))
Error err -> Left err
await :: (String -> Bool) -> Either Error a -> (String -> [String] -> ParseResult a) -> ParParser a
await p def par = ParParser (return <$> def) f
where f (x:xs) | p x =
case par x xs of
Yes n r -> Yes (n+1) r
No _ ->
error "Options.await: got No"
Error err -> Error err
f _ = No (await p def par)
Low - level primitives for building OptionParsers .
primFlag ::
String -> [String] ->
(String -> Bool) ->
(a -> a -> Either Error a) ->
a -> ArgParser (String -> a) -> OptionParser a
primFlag name help p combine def (Annotated desc (SeqParser args f)) =
Annotated [desc'] (await p (Right def) (g Right))
where desc' = Flag name "General options" NormalMode help (unwords desc)
g comb x xs =
case f xs >>= comb . ($ x) of
Left (Mistake err) -> Error (Mistake ("Error in option --" ++ name ++ ": " ++ err))
Left (Usage code err) -> Error (Usage code err)
Right y ->
Yes args (await p (Right y) (g (combine y)))
Combinators for building OptionParsers .
From a flag name and description and argument parser , produce an OptionParser .
flag :: String -> [String] -> a -> ArgParser a -> OptionParser a
flag name help def p =
primFlag name help
(\x -> x == "--" ++ name)
take second occurrence of flag
def (const <$> p)
manyFlags :: String -> [String] -> ArgParser a -> OptionParser [a]
manyFlags name help p =
primFlag name help
(\x -> x == "--" ++ name)
(\x y -> return (x ++ y))
[] (const <$> return <$> p)
bool :: String -> [String] -> Bool -> OptionParser Bool
bool name help def =
primFlag ("(no-)" ++ name) help
(\x -> x `elem` ["--" ++ name, "--no-" ++ name])
(\_ y -> return y)
def
(pure (\name' -> if "--" ++ name == name' then True else False))
filenames :: OptionParser [String]
filenames = Annotated [] (await p (Left err) (f []))
where p x = not ("-" `isPrefixOf` x) || x == "-"
f xs y _ = Yes 0 (let ys = xs ++ [y] in await p (Right ys) (f ys))
err =
Usage (ExitFailure 1)
["No input files specified! Try --help.",
"You can use \"-\" to read from standard input."]
io :: IO a -> OptionParser a
io m = Annotated [] p
where p = ParParser (Right m) (const (No p))
inGroup :: String -> OptionParser a -> OptionParser a
inGroup x (Annotated fls f) = Annotated [fl{ flagGroup = x } | fl <- fls] f
expert :: OptionParser a -> OptionParser a
expert (Annotated fls f) = Annotated [fl{ flagMode = ExpertMode } | fl <- fls] f
hidden :: OptionParser a -> OptionParser a
hidden (Annotated fls f) = Annotated [fl{ flagMode = HiddenMode } | fl <- fls] f
version :: String -> OptionParser a -> OptionParser a
version x p =
p <*
inGroup "Miscellaneous options"
(flag "version" ["Show the version number."] ()
(argUsage ExitSuccess [x]))
printHelp :: ExitCode -> [String] -> IO a
printHelp code xs = do
mapM_ (hPutStrLn stderr) xs
exitWith code
printError :: String -> String -> IO a
printError name err =
printHelp (ExitFailure 1) $
[err ++ ".", "Try " ++ name ++ " --help."]
help :: String -> String -> OptionParser a -> OptionParser a
help name description p = p'
where
p' =
p <*
(inGroup "Miscellaneous options" $
flag "help" ["Show help text."] ()
(argUsage ExitSuccess (helpText False name description p')))
<*
(if any flagExpert (descr p) then
(inGroup "Miscellaneous options" $
flag "expert-help" ["Show help text for hidden options."] ()
(argUsage ExitSuccess (helpText True name description p')))
else pure ())
usageText :: String -> String -> [String]
usageText name descr =
[descr ++ ".",
"Usage: " ++ name ++ " <option>* <file>*, where <file> is in TPTP format."]
helpText :: Bool -> String -> String -> OptionParser a -> [String]
helpText expert name description p =
intercalate [""] $
[usageText name description] ++
[[flagGroup f0 ++ ":"] ++
concat [justify ("--" ++ flagName f ++ " " ++ flagArgs f) (flagHelp f) | f <- fs]
| fs@(f0:_) <- groups (filter ok (nub (descr p))) ] ++
[ ["To see hidden options too, try --expert-help."]
| any flagExpert (descr p), not expert ]
where
groups [] = []
groups (f:fs) =
(f:[f' | f' <- fs, flagGroup f == flagGroup f']):
groups [f' | f' <- fs, flagGroup f /= flagGroup f']
ok flag =
case flagMode flag of
NormalMode -> True
ExpertMode -> expert
HiddenMode -> False
justify :: String -> [String] -> [String]
justify name help = [" " ++ name] ++ map (" " ++) help
parseCommandLine :: String -> OptionParser a -> IO a
parseCommandLine description p =
parseCommandLineWithExtraArgs [] description p
parseCommandLineWithExtraArgs :: [String] -> String -> OptionParser a -> IO a
parseCommandLineWithExtraArgs args0 description p = do
name <- getProgName
args <- getArgs
parseCommandLineWithArgs name (args0 ++ args) description p
parseCommandLineWithArgs :: String -> [String] -> String -> OptionParser a -> IO a
parseCommandLineWithArgs name args description p = do
case runPar (parser (help name description p)) args of
Left (Mistake err) -> printError name err
Left (Usage code err) -> printHelp code err
Right x -> x
|
85a6465c15ccf38e10c807754fff6f0601b06c0f392d6db2eca269c00cebc549 | onyx-platform/learn-onyx | challenge_6_3_test.clj | (ns workshop.jobs.challenge-6-3-test
(:require [clojure.test :refer [deftest is]]
[onyx.test-helper :refer [with-test-env feedback-exception!]]
[workshop.challenge-6-3 :as c]
[workshop.workshop-utils :as u]
[onyx.api]))
;; In practice, you'll frequently need to respond to events
;; other than N number of segments processed. Let's explore
the watermark trigger . Watermark 's represent the upper and
lower bounds of a window ( say , 1:00 - 1:59:59 ... ) . Use
a fixed window of range 1 hour with a Watermark trigger .
Use the aggregate to maintain all of the segments in
;; memory.
The idea with this challenge is that we pipe through the first
8 segments , all within the 2:00 window , and then drop a segment
with timestamp 3:05 . Onyx sees that this segment is beyond the
2:00 window , so it fires * only * the 2 hour window .
;;
;; Try it with:
;;
` lein test workshop.jobs.challenge-6 - 3 - test `
;;
(def input
[{:event-id 1 :event-time #inst "2015-11-20T02:59:00.000-00:00"}
{:event-id 2 :event-time #inst "2015-11-20T02:46:00.000-00:00"}
{:event-id 3 :event-time #inst "2015-11-20T02:31:00.000-00:00"}
{:event-id 4 :event-time #inst "2015-11-20T02:54:00.000-00:00"}
{:event-id 5 :event-time #inst "2015-11-20T02:00:00.000-00:00"}
{:event-id 6 :event-time #inst "2015-11-20T02:05:00.000-00:00"}
{:event-id 7 :event-time #inst "2015-11-20T02:11:00.000-00:00"}
{:event-id 8 :event-time #inst "2015-11-20T02:18:00.000-00:00"}
{:event-id 9 :event-time #inst "2015-11-20T03:05:00.000-00:00"}])
(def expected-output
{[#inst "2015-11-20T02:00:00.000-00:00"
#inst "2015-11-20T02:59:59.999-00:00"]
[{:event-id 1
:event-time #inst "2015-11-20T02:59:00.000-00:00"}
{:event-id 2
:event-time #inst "2015-11-20T02:46:00.000-00:00"}
{:event-id 3
:event-time #inst "2015-11-20T02:31:00.000-00:00"}
{:event-id 4
:event-time #inst "2015-11-20T02:54:00.000-00:00"}
{:event-id 5
:event-time #inst "2015-11-20T02:00:00.000-00:00"}
{:event-id 6
:event-time #inst "2015-11-20T02:05:00.000-00:00"}
{:event-id 7
:event-time #inst "2015-11-20T02:11:00.000-00:00"}
{:event-id 8
:event-time #inst "2015-11-20T02:18:00.000-00:00"}]})
(deftest test-level-6-challenge-3
(let [cluster-id (java.util.UUID/randomUUID)
env-config (u/load-env-config cluster-id)
peer-config (u/load-peer-config cluster-id)
catalog (c/build-catalog)
lifecycles (c/build-lifecycles)
n-peers (u/n-peers catalog c/workflow)
p (promise)]
(reset! c/fired-window-state {})
(with-test-env
[test-env [n-peers env-config peer-config]]
(add-watch c/fired-window-state :watcher
(fn [k r old new]
;; This trigger fires exactly once (for this data set)
;; when we exceed the watermark with the last
segment ( at 3:05 , outside of the 2:00 - 2:59:59 window ) .
;;
;; Triggers also fire on task completion, so technically
;; the last segment we send through will be synced. We
simply return the first value via the promise to avoid
;; a race condition of seeing the next state sync.
(deliver p new)))
(u/bind-inputs! lifecycles {:read-segments input})
(let [job {:workflow c/workflow
:catalog catalog
:lifecycles lifecycles
:windows c/windows
:triggers c/triggers
:task-scheduler :onyx.task-scheduler/balanced}
job-id (:job-id (onyx.api/submit-job peer-config job))]
(assert job-id "Job was not successfully submitted")
(feedback-exception! peer-config job-id)
(is (= expected-output @p))))))
| null | https://raw.githubusercontent.com/onyx-platform/learn-onyx/6bf1936f35d26e3c8cf171b5971e1bc95e82b3c8/test/workshop/jobs/challenge_6_3_test.clj | clojure | In practice, you'll frequently need to respond to events
other than N number of segments processed. Let's explore
memory.
Try it with:
This trigger fires exactly once (for this data set)
when we exceed the watermark with the last
Triggers also fire on task completion, so technically
the last segment we send through will be synced. We
a race condition of seeing the next state sync. | (ns workshop.jobs.challenge-6-3-test
(:require [clojure.test :refer [deftest is]]
[onyx.test-helper :refer [with-test-env feedback-exception!]]
[workshop.challenge-6-3 :as c]
[workshop.workshop-utils :as u]
[onyx.api]))
the watermark trigger . Watermark 's represent the upper and
lower bounds of a window ( say , 1:00 - 1:59:59 ... ) . Use
a fixed window of range 1 hour with a Watermark trigger .
Use the aggregate to maintain all of the segments in
The idea with this challenge is that we pipe through the first
8 segments , all within the 2:00 window , and then drop a segment
with timestamp 3:05 . Onyx sees that this segment is beyond the
2:00 window , so it fires * only * the 2 hour window .
` lein test workshop.jobs.challenge-6 - 3 - test `
(def input
[{:event-id 1 :event-time #inst "2015-11-20T02:59:00.000-00:00"}
{:event-id 2 :event-time #inst "2015-11-20T02:46:00.000-00:00"}
{:event-id 3 :event-time #inst "2015-11-20T02:31:00.000-00:00"}
{:event-id 4 :event-time #inst "2015-11-20T02:54:00.000-00:00"}
{:event-id 5 :event-time #inst "2015-11-20T02:00:00.000-00:00"}
{:event-id 6 :event-time #inst "2015-11-20T02:05:00.000-00:00"}
{:event-id 7 :event-time #inst "2015-11-20T02:11:00.000-00:00"}
{:event-id 8 :event-time #inst "2015-11-20T02:18:00.000-00:00"}
{:event-id 9 :event-time #inst "2015-11-20T03:05:00.000-00:00"}])
(def expected-output
{[#inst "2015-11-20T02:00:00.000-00:00"
#inst "2015-11-20T02:59:59.999-00:00"]
[{:event-id 1
:event-time #inst "2015-11-20T02:59:00.000-00:00"}
{:event-id 2
:event-time #inst "2015-11-20T02:46:00.000-00:00"}
{:event-id 3
:event-time #inst "2015-11-20T02:31:00.000-00:00"}
{:event-id 4
:event-time #inst "2015-11-20T02:54:00.000-00:00"}
{:event-id 5
:event-time #inst "2015-11-20T02:00:00.000-00:00"}
{:event-id 6
:event-time #inst "2015-11-20T02:05:00.000-00:00"}
{:event-id 7
:event-time #inst "2015-11-20T02:11:00.000-00:00"}
{:event-id 8
:event-time #inst "2015-11-20T02:18:00.000-00:00"}]})
(deftest test-level-6-challenge-3
(let [cluster-id (java.util.UUID/randomUUID)
env-config (u/load-env-config cluster-id)
peer-config (u/load-peer-config cluster-id)
catalog (c/build-catalog)
lifecycles (c/build-lifecycles)
n-peers (u/n-peers catalog c/workflow)
p (promise)]
(reset! c/fired-window-state {})
(with-test-env
[test-env [n-peers env-config peer-config]]
(add-watch c/fired-window-state :watcher
(fn [k r old new]
segment ( at 3:05 , outside of the 2:00 - 2:59:59 window ) .
simply return the first value via the promise to avoid
(deliver p new)))
(u/bind-inputs! lifecycles {:read-segments input})
(let [job {:workflow c/workflow
:catalog catalog
:lifecycles lifecycles
:windows c/windows
:triggers c/triggers
:task-scheduler :onyx.task-scheduler/balanced}
job-id (:job-id (onyx.api/submit-job peer-config job))]
(assert job-id "Job was not successfully submitted")
(feedback-exception! peer-config job-id)
(is (= expected-output @p))))))
|
8c6d60ef59d152ac01c9a0bbe36a6b55f463d0328ee81b8cf0218ad1ea33b5fb | rescript-lang/rescript-compiler | js_of_lam_array.ml | Copyright ( C ) 2015 - 2016 Bloomberg Finance L.P.
* Copyright ( C ) 2017 - , Authors of
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
(*
construct array,
set array,
ref array,
Also make sure, don't call any primitive array method, i.e [E.array_index_by_int]
We also need check primitive [caml_make_vect], i.e,
[Caml_primitive['caml_make_vect']] see if it's correct
[caml_make_vect]
[caml_array_sub]
[caml_array_append]
[caml_array_concat]
[caml_make_float_vect]
[caml_array_blit]
research: -US/docs/Web/JavaScript/Typed_arrays
*)
module E = Js_exp_make
Parrayref(u|s )
let make_array mt args = E.array mt args
let set_array e e0 e1 = E.assign (E.array_index e e0) e1
let ref_array e e0 = E.array_index e e0
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/e60482c6f6a69994907b9bd56e58ce87052e3659/jscomp/core/js_of_lam_array.ml | ocaml |
construct array,
set array,
ref array,
Also make sure, don't call any primitive array method, i.e [E.array_index_by_int]
We also need check primitive [caml_make_vect], i.e,
[Caml_primitive['caml_make_vect']] see if it's correct
[caml_make_vect]
[caml_array_sub]
[caml_array_append]
[caml_array_concat]
[caml_make_float_vect]
[caml_array_blit]
research: -US/docs/Web/JavaScript/Typed_arrays
| Copyright ( C ) 2015 - 2016 Bloomberg Finance L.P.
* Copyright ( C ) 2017 - , Authors of
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
module E = Js_exp_make

(* Parrayref(u|s) *)
let make_array mt args = E.array mt args

let set_array e e0 e1 = E.assign (E.array_index e e0) e1

let ref_array e e0 = E.array_index e e0
|
86027de10fb71e1b71bdfb72c520c14335fdc9f58d171443fe8f1a895c811f06 | ocaml-flambda/flambda-backend | lambda.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* The "lambda" intermediate code *)
open Asttypes
(* Overriding Asttypes.mutable_flag *)
type mutable_flag = Immutable | Immutable_unique | Mutable
type compile_time_constant =
| Big_endian
| Word_size
| Int_size
| Max_wosize
| Ostype_unix
| Ostype_win32
| Ostype_cygwin
| Backend_type
type immediate_or_pointer =
| Immediate
| Pointer
(* Where a block/closure is allocated. Private: values are only obtained
   through [alloc_heap] and [alloc_local] below. *)
type alloc_mode = private
| Alloc_heap
| Alloc_local
(* Mode of the block being mutated. Private: values are only obtained
   through [modify_heap] and [modify_maybe_stack] below. *)
type modify_mode = private
| Modify_heap
| Modify_maybe_stack
val alloc_heap : alloc_mode
(* Actually [Alloc_heap] if [Config.stack_allocation] is [false] *)
val alloc_local : alloc_mode
val modify_heap : modify_mode
val modify_maybe_stack : modify_mode
type initialization_or_assignment =
  (* [Assignment Alloc_local] is a mutation of a block that may be heap or local.
     [Assignment Alloc_heap] is a mutation of a block that's definitely heap. *)
  | Assignment of modify_mode
  (* Initialization of in heap values, like [caml_initialize] C primitive. The
     field should not have been read before and initialization should happen
     only once. *)
  | Heap_initialization
  (* Initialization of roots only. Compiles to a simple store.
     No checks are done to preserve GC invariants. *)
  | Root_initialization
type is_safe =
| Safe
| Unsafe
type field_read_semantics =
| Reads_agree
| Reads_vary
(* Tail calls can close their enclosing region early *)
type region_close =
| Rc_normal (* do not close region, may TCO if in tail position *)
| Rc_nontail (* do not close region, must not TCO *)
| Rc_close_at_apply (* close region and tail call *)
(* Primitive operations of the Lambda intermediate language. *)
type primitive =
  | Pbytes_to_string
  | Pbytes_of_string
  | Pignore
  (* Globals *)
  | Pgetglobal of Compilation_unit.t
  | Psetglobal of Compilation_unit.t
  | Pgetpredef of Ident.t
  (* Operations on heap blocks *)
  | Pmakeblock of int * mutable_flag * block_shape * alloc_mode
  | Pmakefloatblock of mutable_flag * alloc_mode
  | Pfield of int * field_read_semantics
  | Pfield_computed of field_read_semantics
  | Psetfield of int * immediate_or_pointer * initialization_or_assignment
  | Psetfield_computed of immediate_or_pointer * initialization_or_assignment
  | Pfloatfield of int * field_read_semantics * alloc_mode
  | Psetfloatfield of int * initialization_or_assignment
  | Pduprecord of Types.record_representation * int
  (* External call *)
  | Pccall of Primitive.description
  (* Exceptions *)
  | Praise of raise_kind
  (* Boolean operations *)
  | Psequand | Psequor | Pnot
  (* Integer operations *)
  | Pnegint | Paddint | Psubint | Pmulint
  | Pdivint of is_safe | Pmodint of is_safe
  | Pandint | Porint | Pxorint
  | Plslint | Plsrint | Pasrint
  | Pintcomp of integer_comparison
  (* Comparisons that return int (not bool like above) for ordering *)
  | Pcompare_ints | Pcompare_floats | Pcompare_bints of boxed_integer
  | Poffsetint of int
  | Poffsetref of int
  (* Float operations *)
  | Pintoffloat | Pfloatofint of alloc_mode
  | Pnegfloat of alloc_mode | Pabsfloat of alloc_mode
  | Paddfloat of alloc_mode | Psubfloat of alloc_mode
  | Pmulfloat of alloc_mode | Pdivfloat of alloc_mode
  | Pfloatcomp of float_comparison
  (* String operations *)
  | Pstringlength | Pstringrefu | Pstringrefs
  | Pbyteslength | Pbytesrefu | Pbytessetu | Pbytesrefs | Pbytessets
  (* Array operations *)
  | Pmakearray of array_kind * mutable_flag * alloc_mode
  | Pduparray of array_kind * mutable_flag
  (** For [Pduparray], the argument must be an immutable array.
      The arguments of [Pduparray] give the kind and mutability of the
      array being *produced* by the duplication. *)
  | Parraylength of array_kind
  | Parrayrefu of array_kind
  | Parraysetu of array_kind
  | Parrayrefs of array_kind
  | Parraysets of array_kind
  (* Test if the argument is a block or an immediate integer *)
  | Pisint of { variant_only : bool }
  (* Test if the (integer) argument is outside an interval *)
  | Pisout
  (* Operations on boxed integers (Nativeint.t, Int32.t, Int64.t) *)
  | Pbintofint of boxed_integer * alloc_mode
  | Pintofbint of boxed_integer
  | Pcvtbint of boxed_integer (*source*) * boxed_integer (*destination*)
                * alloc_mode
  | Pnegbint of boxed_integer * alloc_mode
  | Paddbint of boxed_integer * alloc_mode
  | Psubbint of boxed_integer * alloc_mode
  | Pmulbint of boxed_integer * alloc_mode
  | Pdivbint of { size : boxed_integer; is_safe : is_safe; mode: alloc_mode }
  | Pmodbint of { size : boxed_integer; is_safe : is_safe; mode: alloc_mode }
  | Pandbint of boxed_integer * alloc_mode
  | Porbint of boxed_integer * alloc_mode
  | Pxorbint of boxed_integer * alloc_mode
  | Plslbint of boxed_integer * alloc_mode
  | Plsrbint of boxed_integer * alloc_mode
  | Pasrbint of boxed_integer * alloc_mode
  | Pbintcomp of boxed_integer * integer_comparison
  (* Operations on Bigarrays: (unsafe, #dimensions, kind, layout) *)
  | Pbigarrayref of bool * int * bigarray_kind * bigarray_layout
  | Pbigarrayset of bool * int * bigarray_kind * bigarray_layout
  (* size of the nth dimension of a Bigarray *)
  | Pbigarraydim of int
  (* load/set 16,32,64 bits from a string: (unsafe) *)
  | Pstring_load_16 of bool
  | Pstring_load_32 of bool * alloc_mode
  | Pstring_load_64 of bool * alloc_mode
  | Pbytes_load_16 of bool
  | Pbytes_load_32 of bool * alloc_mode
  | Pbytes_load_64 of bool * alloc_mode
  | Pbytes_set_16 of bool
  | Pbytes_set_32 of bool
  | Pbytes_set_64 of bool
  (* load/set 16,32,64 bits from a
     (char, int8_unsigned_elt, c_layout) Bigarray.Array1.t : (unsafe) *)
  | Pbigstring_load_16 of bool
  | Pbigstring_load_32 of bool * alloc_mode
  | Pbigstring_load_64 of bool * alloc_mode
  | Pbigstring_set_16 of bool
  | Pbigstring_set_32 of bool
  | Pbigstring_set_64 of bool
  (* Compile time constants *)
  | Pctconst of compile_time_constant
  (* byte swap *)
  | Pbswap16
  | Pbbswap of boxed_integer * alloc_mode
  (* Integer to external pointer *)
  | Pint_as_pointer
  (* Inhibition of optimisation *)
  | Popaque of layout
  (* Statically-defined probes *)
  | Pprobe_is_enabled of { name: string }
  (* Primitives for [Obj] *)
  | Pobj_dup
  | Pobj_magic of layout

and integer_comparison =
    Ceq | Cne | Clt | Cgt | Cle | Cge

and float_comparison =
    CFeq | CFneq | CFlt | CFnlt | CFgt | CFngt | CFle | CFnle | CFge | CFnge

and array_kind =
    Pgenarray | Paddrarray | Pintarray | Pfloatarray

and value_kind =
    Pgenval | Pfloatval | Pboxedintval of boxed_integer | Pintval
  | Pvariant of {
      consts : int list;
      non_consts : (int * value_kind list) list;
      (** [non_consts] must be non-empty. For constant variants [Pintval]
          must be used. This causes a small loss of precision but it is not
          expected to be significant. *)
    }
  | Parrayval of array_kind

and layout =
  | Pvalue of value_kind

and block_shape =
  value_kind list option

and boxed_integer = Primitive.boxed_integer =
    Pnativeint | Pint32 | Pint64

and bigarray_kind =
    Pbigarray_unknown
  | Pbigarray_float32 | Pbigarray_float64
  | Pbigarray_sint8 | Pbigarray_uint8
  | Pbigarray_sint16 | Pbigarray_uint16
  | Pbigarray_int32 | Pbigarray_int64
  | Pbigarray_caml_int | Pbigarray_native_int
  | Pbigarray_complex32 | Pbigarray_complex64

and bigarray_layout =
    Pbigarray_unknown_layout
  | Pbigarray_c_layout
  | Pbigarray_fortran_layout

and raise_kind =
  | Raise_regular
  | Raise_reraise
  | Raise_notrace
val equal_primitive : primitive -> primitive -> bool
val equal_value_kind : value_kind -> value_kind -> bool
val equal_layout : layout -> layout -> bool
(* Whether a value of the first layout may flow where the second is
   expected — NOTE(review): presumed from the name; confirm in lambda.ml *)
val compatible_layout : layout -> layout -> bool
val equal_boxed_integer : boxed_integer -> boxed_integer -> bool
(* Extract the [value_kind] from a [Pvalue] layout; other layouts are not
   representable here *)
val must_be_value : layout -> value_kind
(* Compile-time constant values embedded in lambda code *)
type structured_constant =
Const_base of constant
| Const_block of int * structured_constant list
| Const_float_array of string list
| Const_immstring of string
| Const_float_block of string list
(* Source-level [@tailcall] annotation on a call site *)
type tailcall_attribute =
| Tailcall_expectation of bool
(* [@tailcall] and [@tailcall true] have [true],
[@tailcall false] has [false] *)
| Default_tailcall (* no [@tailcall] attribute *)
(* Function declaration inlining annotations *)
type inline_attribute =
  | Always_inline (* [@inline] or [@inline always] *)
  | Never_inline (* [@inline never] *)
  | Available_inline (* [@inline available] *)
  | Unroll of int (* [@unroll x] *)
  | Default_inline (* no [@inline] attribute *)
(* Call site inlining annotations *)
type inlined_attribute =
  | Always_inlined (* [@inlined] or [@inlined always] *)
  | Never_inlined (* [@inlined never] *)
  | Hint_inlined (* [@inlined hint] *)
  | Unroll of int (* [@unroll x] *)
  | Default_inlined (* no [@inlined] attribute *)
val equal_inline_attribute : inline_attribute -> inline_attribute -> bool
val equal_inlined_attribute : inlined_attribute -> inlined_attribute -> bool
type probe_desc = { name: string }
type probe = probe_desc option
type specialise_attribute =
| Always_specialise (* [@specialise] or [@specialise always] *)
| Never_specialise (* [@specialise never] *)
| Default_specialise (* no [@specialise] attribute *)
val equal_specialise_attribute
: specialise_attribute
-> specialise_attribute
-> bool
type local_attribute =
| Always_local (* [@local] or [@local always] *)
| Never_local (* [@local never] *)
| Default_local (* [@local maybe] or no [@local] attribute *)
type property =
| Noalloc
type poll_attribute =
  | Error_poll (* [@poll error] *)
  | Default_poll (* no [@poll] attribute *)
type check_attribute =
| Default_check
| Assert of property
| Assume of property
type loop_attribute =
| Always_loop (* [@loop] or [@loop always] *)
| Never_loop (* [@loop never] *)
| Default_loop (* no [@loop] attribute *)
type function_kind = Curried of {nlocal: int} | Tupled
(* [nlocal] determines how many arguments may be partially applied
before the resulting closure must be locally allocated.
See [lfunction] for details *)
type let_kind = Strict | Alias | StrictOpt
(* Meaning of kinds for let x = e in e':
    Strict: e may have side-effects; always evaluate e first
      (If e is a simple expression, e.g. a variable or constant,
       we may still substitute e'[x/e].)
    Alias: e is pure, we can substitute e'[x/e] if x has 0 or 1 occurrences
      in e'
    StrictOpt: e does not have side-effects, but depend on the store;
      we can discard e if x does not appear in e'
*)
(* Kind of a method dispatch site *)
type meth_kind = Self | Public | Cached
val equal_meth_kind : meth_kind -> meth_kind -> bool
type shared_code = (int * int) list (* stack size -> code label *)
(* Per-function compilation attributes, mostly carried over from source
   annotations such as [@inline], [@specialise], [@local], [@poll], [@loop] *)
type function_attribute = {
inline : inline_attribute;
specialise : specialise_attribute;
local: local_attribute;
check : check_attribute;
poll: poll_attribute;
loop: loop_attribute;
is_a_functor: bool;
stub: bool;
tmc_candidate: bool;
}
(* Alias for debug-info source locations attached to lambda terms *)
type scoped_location = Debuginfo.Scoped_location.t
(* The lambda intermediate language terms. *)
type lambda =
    Lvar of Ident.t
  | Lmutvar of Ident.t
  | Lconst of structured_constant
  | Lapply of lambda_apply
  | Lfunction of lfunction
  | Llet of let_kind * layout * Ident.t * lambda * lambda
  | Lmutlet of layout * Ident.t * lambda * lambda
  | Lletrec of (Ident.t * lambda) list * lambda
  | Lprim of primitive * lambda list * scoped_location
  | Lswitch of lambda * lambda_switch * scoped_location * layout
  (* switch on strings, clauses are sorted by string order,
     strings are pairwise distinct *)
  | Lstringswitch of
      lambda * (string * lambda) list * lambda option * scoped_location * layout
  | Lstaticraise of int * lambda list
  | Lstaticcatch of lambda * (int * (Ident.t * layout) list) * lambda * layout
  | Ltrywith of lambda * Ident.t * lambda * layout
  (* Lifthenelse (e, t, f, layout) evaluates t if e evaluates to 0, and
     evaluates f if e evaluates to any other value; layout must be the
     layout of [t] and [f] *)
  | Lifthenelse of lambda * lambda * lambda * layout
  | Lsequence of lambda * lambda
  | Lwhile of lambda_while
  | Lfor of lambda_for
  | Lassign of Ident.t * lambda
  | Lsend of meth_kind * lambda * lambda * lambda list
             * region_close * alloc_mode * scoped_location * layout
  | Levent of lambda * lambda_event
  | Lifused of Ident.t * lambda
  | Lregion of lambda * layout

and lfunction = private
  { kind: function_kind;
    params: (Ident.t * layout) list;
    return: layout;
    body: lambda;
    attr: function_attribute; (* specified with [@inline] attribute *)
    loc : scoped_location;
    mode : alloc_mode;        (* alloc mode of the closure itself *)
    region : bool;            (* false if this function may locally
                                 allocate in the caller's region *)
  }

and lambda_while =
  { wh_cond : lambda;
    wh_cond_region : bool;    (* false if the condition may locally allocate in
                                 the region containing the loop *)
    wh_body : lambda;
    wh_body_region : bool     (* false if the body may locally allocate in
                                 the region containing the loop *)
  }

and lambda_for =
  { for_id : Ident.t;
    for_from : lambda;
    for_to : lambda;
    for_dir : direction_flag;
    for_body : lambda;
    for_region : bool;        (* false if the body may locally allocate in the
                                 region containing the loop *)
  }

and lambda_apply =
  { ap_func : lambda;
    ap_args : lambda list;
    ap_result_layout : layout;
    ap_region_close : region_close;
    ap_mode : alloc_mode;
    ap_loc : scoped_location;
    ap_tailcall : tailcall_attribute;
    ap_inlined : inlined_attribute;     (* [@inlined] attribute in code *)
    ap_specialised : specialise_attribute;
    ap_probe : probe;
  }

and lambda_switch =
  { sw_numconsts: int;                  (* Number of integer cases *)
    sw_consts: (int * lambda) list;     (* Integer cases *)
    sw_numblocks: int;                  (* Number of tag block cases *)
    sw_blocks: (int * lambda) list;     (* Tag block cases *)
    sw_failaction : lambda option}      (* Action to take if failure *)

and lambda_event =
  { lev_loc: scoped_location;
    lev_kind: lambda_event_kind;
    lev_repr: int ref option;
    lev_env: Env.t }

and lambda_event_kind =
    Lev_before
  | Lev_after of Types.type_expr
  | Lev_function
  | Lev_pseudo
  | Lev_module_definition of Ident.t
type program =
  { compilation_unit : Compilation_unit.t;
    main_module_block_size : int;
    required_globals : Compilation_unit.Set.t;
                                        (* Modules whose initializer side effects
                                           must occur before [code]. *)
    code : lambda }
(* Lambda code for the middle-end.
   * In the closure case the code is a sequence of assignments to a
     preallocated block of size [main_module_block_size] using
     (Setfield(Getpredef(compilation_unit))). The size is used to preallocate
     the block.
   * In the flambda case the code is an expression returning a block
     value of size [main_module_block_size]. The size is used to build
     the module root as an initialize_symbol
     Initialize_symbol(module_name, 0,
       [getfield 0; ...; getfield (main_module_block_size - 1)])
*)
(* Sharing key *)
val make_key: lambda -> lambda option

val const_unit: structured_constant
val const_int : int -> structured_constant
val lambda_unit: lambda

(* Commonly used layouts for well-known categories of values *)
val layout_unit : layout
val layout_int : layout
val layout_array : array_kind -> layout
val layout_block : layout
val layout_list : layout
val layout_exception : layout
val layout_function : layout
val layout_object : layout
val layout_class : layout
val layout_module : layout
val layout_functor : layout
val layout_module_field : layout
val layout_string : layout
val layout_float : layout
val layout_boxedint : boxed_integer -> layout
(* A layout that is Pgenval because it is the field of a block *)
val layout_field : layout
val layout_lazy : layout
val layout_lazy_contents : layout
(* A layout that is Pgenval because we are missing layout polymorphism *)
val layout_any_value : layout
(* A layout that is Pgenval because it is bound by a letrec *)
val layout_letrec : layout

val layout_top : layout
val layout_bottom : layout
val name_lambda: let_kind -> lambda -> layout -> (Ident.t -> lambda) -> lambda
val name_lambda_list: (lambda * layout) list -> (lambda list -> lambda) -> lambda

(* Smart constructor for [lfunction] (the record type is private) *)
val lfunction :
  kind:function_kind ->
  params:(Ident.t * layout) list ->
  return:layout ->
  body:lambda ->
  attr:function_attribute -> (* specified with [@inline] attribute *)
  loc:scoped_location ->
  mode:alloc_mode ->
  region:bool ->
  lambda

val iter_head_constructor: (lambda -> unit) -> lambda -> unit
(** [iter_head_constructor f lam] apply [f] to only the first level of
    sub expressions of [lam]. It does not recursively traverse the
    expression.
*)
val shallow_iter:
tail:(lambda -> unit) ->
non_tail:(lambda -> unit) ->
lambda -> unit
(** Same as [iter_head_constructor], but use a different callback for
sub-terms which are in tail position or not. *)
val transl_prim: string -> string -> lambda
(** Translate a value from a persistent module. For instance:
{[
transl_internal_value "CamlinternalLazy" "force"
]}
*)
val free_variables: lambda -> Ident.Set.t
val transl_module_path: scoped_location -> Env.t -> Path.t -> lambda
val transl_value_path: scoped_location -> Env.t -> Path.t -> lambda
val transl_extension_path: scoped_location -> Env.t -> Path.t -> lambda
val transl_class_path: scoped_location -> Env.t -> Path.t -> lambda
val make_sequence: ('a -> lambda) -> 'a list -> lambda
val subst:
(Ident.t -> Types.value_description -> Env.t -> Env.t) ->
?freshen_bound_variables:bool ->
lambda Ident.Map.t -> lambda -> lambda
(** [subst update_env ?freshen_bound_variables s lt]
applies a substitution [s] to the lambda-term [lt].
Assumes that the image of the substitution is out of reach
of the bound variables of the lambda-term (no capture).
[update_env] is used to refresh the environment contained in debug
events.
[freshen_bound_variables], which defaults to [false], freshens
the bound variables within [lt].
*)
val rename : Ident.t Ident.Map.t -> lambda -> lambda
(** A version of [subst] specialized for the case where we're just renaming
idents. *)
val duplicate : lambda -> lambda
(** Duplicate a term, freshening all locally-bound identifiers. *)
val map : (lambda -> lambda) -> lambda -> lambda
(** Bottom-up rewriting, applying the function on
each node from the leaves to the root. *)
val shallow_map :
tail:(lambda -> lambda) ->
non_tail:(lambda -> lambda) ->
lambda -> lambda
(** Rewrite each immediate sub-term with the function. *)
val bind_with_layout:
let_kind -> (Ident.t * layout) -> lambda -> lambda -> lambda
val negate_integer_comparison : integer_comparison -> integer_comparison
val swap_integer_comparison : integer_comparison -> integer_comparison
val negate_float_comparison : float_comparison -> float_comparison
val swap_float_comparison : float_comparison -> float_comparison
val default_function_attribute : function_attribute
val default_stub_attribute : function_attribute
val find_exact_application :
function_kind -> arity:int -> lambda list -> lambda list option
val max_arity : unit -> int
(** Maximal number of parameters for a function, or in other words,
    maximal length of the [params] list of a [lfunction] record.
    This is unlimited ([max_int]) for bytecode, but limited
    (currently to 126) for native code. *)
val join_mode : alloc_mode -> alloc_mode -> alloc_mode
val sub_mode : alloc_mode -> alloc_mode -> bool
val eq_mode : alloc_mode -> alloc_mode -> bool
val is_local_mode : alloc_mode -> bool
val is_heap_mode : alloc_mode -> bool
val primitive_may_allocate : primitive -> alloc_mode option
(** Whether and where a primitive may allocate.
    [Some Alloc_local] permits both options: that is, primitives that
    may allocate on both the GC heap and locally report this value. *)
(***********************)
(* For static failures *)
(***********************)
(* Get a new static failure ident *)
val next_raise_count : unit -> int
val staticfail : lambda (* Anticipated static failure *)
(* Check anticipated failure, substitute its final value *)
val is_guarded: lambda -> bool
val patch_guarded : lambda -> lambda -> lambda
val raise_kind: raise_kind -> string
val merge_inline_attributes
: inline_attribute
-> inline_attribute
-> inline_attribute option
val reset: unit -> unit
(** Helpers for module block accesses.
    Module accesses are always immutable, except in translobj where the
    method cache is stored in a mutable module field.
*)
val mod_field: ?read_semantics: field_read_semantics -> int -> primitive
val mod_setfield: int -> primitive
val structured_constant_layout : structured_constant -> layout
val primitive_result_layout : primitive -> layout
val compute_expr_layout : layout Ident.Map.t -> lambda -> layout
| null | https://raw.githubusercontent.com/ocaml-flambda/flambda-backend/9c5d6eecfc6fc1cf067e53e530700c85a0a7b1dd/ocaml/lambda/lambda.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
The "lambda" intermediate code
Overriding Asttypes.mutable_flag
Actually [Alloc_heap] if [Config.stack_allocation] is [false]
[Assignment Alloc_local] is a mutation of a block that may be heap or local.
[Assignment Alloc_heap] is a mutation of a block that's definitely heap.
Initialization of in heap values, like [caml_initialize] C primitive. The
field should not have been read before and initialization should happen
only once.
Tail calls can close their enclosing region early
do not close region, may TCO if in tail position
do not close region, must not TCO
close region and tail call
Globals
Operations on heap blocks
External call
Exceptions
Comparisons that return int (not bool like above) for ordering
Float operations
String operations
Array operations
Test if the argument is a block or an immediate integer
Test if the (integer) argument is outside an interval
source
destination
Operations on Bigarrays: (unsafe, #dimensions, kind, layout)
size of the nth dimension of a Bigarray
Compile time constants
byte swap
Inhibition of optimisation
Statically-defined probes
Primitives for [Obj]
* [non_consts] must be non-empty. For constant variants [Pintval]
must be used. This causes a small loss of precision but it is not
expected to be significant.
[@tailcall] and [@tailcall true] have [true],
[@tailcall false] has [false]
no [@tailcall] attribute
Function declaration inlining annotations
Call site inlining annotations
[@inlined] or [@inlined always]
[@inlined never]
[@inlined hint]
no [@inlined] attribute
[@specialise] or [@specialise always]
[@specialise never]
no [@specialise] attribute
[@local] or [@local always]
[@local never]
[@local maybe] or no [@local] attribute
[@loop] or [@loop always]
[@loop never]
no [@loop] attribute
[nlocal] determines how many arguments may be partially applied
before the resulting closure must be locally allocated.
See [lfunction] for details
stack size -> code label
switch on strings, clauses are sorted by string order,
strings are pairwise distinct
alloc mode of the closure itself
false if this function may locally
allocate in the caller's region
false if the condition may locally allocate in
the region containing the loop
false if the body may locally allocate in
the region containing the loop
false if the body may locally allocate in the
region containing the loop
[@inlined] attribute in code
Number of integer cases
Number of tag block cases
Tag block cases
Action to take if failure
Modules whose initializer side effects
must occur before [code].
Sharing key
* Same as [iter_head_constructor], but use a different callback for
sub-terms which are in tail position or not.
* Translate a value from a persistent module. For instance:
{[
transl_internal_value "CamlinternalLazy" "force"
]}
* [subst update_env ?freshen_bound_variables s lt]
applies a substitution [s] to the lambda-term [lt].
Assumes that the image of the substitution is out of reach
of the bound variables of the lambda-term (no capture).
[update_env] is used to refresh the environment contained in debug
events.
[freshen_bound_variables], which defaults to [false], freshens
the bound variables within [lt].
* A version of [subst] specialized for the case where we're just renaming
idents.
* Duplicate a term, freshening all locally-bound identifiers.
* Bottom-up rewriting, applying the function on
each node from the leaves to the root.
* Rewrite each immediate sub-term with the function.
*********************
For static failures
*********************
Get a new static failure ident
Anticipated static failure
Check anticipated failure, substitute its final value | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Asttypes
type mutable_flag = Immutable | Immutable_unique | Mutable
type compile_time_constant =
| Big_endian
| Word_size
| Int_size
| Max_wosize
| Ostype_unix
| Ostype_win32
| Ostype_cygwin
| Backend_type
type immediate_or_pointer =
| Immediate
| Pointer
type alloc_mode = private
| Alloc_heap
| Alloc_local
type modify_mode = private
| Modify_heap
| Modify_maybe_stack
val alloc_heap : alloc_mode
val alloc_local : alloc_mode
val modify_heap : modify_mode
val modify_maybe_stack : modify_mode
type initialization_or_assignment =
| Assignment of modify_mode
| Heap_initialization
Initialization of roots only . Compiles to a simple store .
No checks are done to preserve GC invariants .
No checks are done to preserve GC invariants. *)
| Root_initialization
type is_safe =
| Safe
| Unsafe
type field_read_semantics =
| Reads_agree
| Reads_vary
type region_close =
type primitive =
| Pbytes_to_string
| Pbytes_of_string
| Pignore
| Pgetglobal of Compilation_unit.t
| Psetglobal of Compilation_unit.t
| Pgetpredef of Ident.t
| Pmakeblock of int * mutable_flag * block_shape * alloc_mode
| Pmakefloatblock of mutable_flag * alloc_mode
| Pfield of int * field_read_semantics
| Pfield_computed of field_read_semantics
| Psetfield of int * immediate_or_pointer * initialization_or_assignment
| Psetfield_computed of immediate_or_pointer * initialization_or_assignment
| Pfloatfield of int * field_read_semantics * alloc_mode
| Psetfloatfield of int * initialization_or_assignment
| Pduprecord of Types.record_representation * int
| Pccall of Primitive.description
| Praise of raise_kind
Boolean operations
| Psequand | Psequor | Pnot
Integer operations
| Pnegint | Paddint | Psubint | Pmulint
| Pdivint of is_safe | Pmodint of is_safe
| Pandint | Porint | Pxorint
| Plslint | Plsrint | Pasrint
| Pintcomp of integer_comparison
| Pcompare_ints | Pcompare_floats | Pcompare_bints of boxed_integer
| Poffsetint of int
| Poffsetref of int
| Pintoffloat | Pfloatofint of alloc_mode
| Pnegfloat of alloc_mode | Pabsfloat of alloc_mode
| Paddfloat of alloc_mode | Psubfloat of alloc_mode
| Pmulfloat of alloc_mode | Pdivfloat of alloc_mode
| Pfloatcomp of float_comparison
| Pstringlength | Pstringrefu | Pstringrefs
| Pbyteslength | Pbytesrefu | Pbytessetu | Pbytesrefs | Pbytessets
| Pmakearray of array_kind * mutable_flag * alloc_mode
| Pduparray of array_kind * mutable_flag
* For [ ] , the argument must be an immutable array .
The arguments of [ ] give the kind and mutability of the
array being * produced * by the duplication .
The arguments of [Pduparray] give the kind and mutability of the
array being *produced* by the duplication. *)
| Parraylength of array_kind
| Parrayrefu of array_kind
| Parraysetu of array_kind
| Parrayrefs of array_kind
| Parraysets of array_kind
| Pisint of { variant_only : bool }
| Pisout
Operations on boxed integers ( Nativeint.t , Int32.t , Int64.t )
| Pbintofint of boxed_integer * alloc_mode
| Pintofbint of boxed_integer
* alloc_mode
| Pnegbint of boxed_integer * alloc_mode
| Paddbint of boxed_integer * alloc_mode
| Psubbint of boxed_integer * alloc_mode
| Pmulbint of boxed_integer * alloc_mode
| Pdivbint of { size : boxed_integer; is_safe : is_safe; mode: alloc_mode }
| Pmodbint of { size : boxed_integer; is_safe : is_safe; mode: alloc_mode }
| Pandbint of boxed_integer * alloc_mode
| Porbint of boxed_integer * alloc_mode
| Pxorbint of boxed_integer * alloc_mode
| Plslbint of boxed_integer * alloc_mode
| Plsrbint of boxed_integer * alloc_mode
| Pasrbint of boxed_integer * alloc_mode
| Pbintcomp of boxed_integer * integer_comparison
| Pbigarrayref of bool * int * bigarray_kind * bigarray_layout
| Pbigarrayset of bool * int * bigarray_kind * bigarray_layout
| Pbigarraydim of int
load / set 16,32,64 bits from a string : ( unsafe )
| Pstring_load_16 of bool
| Pstring_load_32 of bool * alloc_mode
| Pstring_load_64 of bool * alloc_mode
| Pbytes_load_16 of bool
| Pbytes_load_32 of bool * alloc_mode
| Pbytes_load_64 of bool * alloc_mode
| Pbytes_set_16 of bool
| Pbytes_set_32 of bool
| Pbytes_set_64 of bool
load / set 16,32,64 bits from a
( char , int8_unsigned_elt , c_layout ) Bigarray . Array1.t : ( unsafe )
(char, int8_unsigned_elt, c_layout) Bigarray.Array1.t : (unsafe) *)
| Pbigstring_load_16 of bool
| Pbigstring_load_32 of bool * alloc_mode
| Pbigstring_load_64 of bool * alloc_mode
| Pbigstring_set_16 of bool
| Pbigstring_set_32 of bool
| Pbigstring_set_64 of bool
| Pctconst of compile_time_constant
| Pbswap16
| Pbbswap of boxed_integer * alloc_mode
Integer to external pointer
| Pint_as_pointer
| Popaque of layout
| Pprobe_is_enabled of { name: string }
| Pobj_dup
| Pobj_magic of layout
and integer_comparison =
Ceq | Cne | Clt | Cgt | Cle | Cge
and float_comparison =
CFeq | CFneq | CFlt | CFnlt | CFgt | CFngt | CFle | CFnle | CFge | CFnge
and array_kind =
Pgenarray | Paddrarray | Pintarray | Pfloatarray
and value_kind =
Pgenval | Pfloatval | Pboxedintval of boxed_integer | Pintval
| Pvariant of {
consts : int list;
non_consts : (int * value_kind list) list;
}
| Parrayval of array_kind
and layout =
| Pvalue of value_kind
and block_shape =
value_kind list option
and boxed_integer = Primitive.boxed_integer =
Pnativeint | Pint32 | Pint64
and bigarray_kind =
Pbigarray_unknown
| Pbigarray_float32 | Pbigarray_float64
| Pbigarray_sint8 | Pbigarray_uint8
| Pbigarray_sint16 | Pbigarray_uint16
| Pbigarray_int32 | Pbigarray_int64
| Pbigarray_caml_int | Pbigarray_native_int
| Pbigarray_complex32 | Pbigarray_complex64
and bigarray_layout =
Pbigarray_unknown_layout
| Pbigarray_c_layout
| Pbigarray_fortran_layout
and raise_kind =
| Raise_regular
| Raise_reraise
| Raise_notrace
val equal_primitive : primitive -> primitive -> bool
val equal_value_kind : value_kind -> value_kind -> bool
val equal_layout : layout -> layout -> bool
val compatible_layout : layout -> layout -> bool
val equal_boxed_integer : boxed_integer -> boxed_integer -> bool
val must_be_value : layout -> value_kind
type structured_constant =
Const_base of constant
| Const_block of int * structured_constant list
| Const_float_array of string list
| Const_immstring of string
| Const_float_block of string list
type tailcall_attribute =
| Tailcall_expectation of bool
type inline_attribute =
[ @inline ] or [ @inline always ]
[ @inline never ]
[ @inline available ]
[ @unroll x ]
no [ @inline ] attribute
type inlined_attribute =
[ @unroll x ]
val equal_inline_attribute : inline_attribute -> inline_attribute -> bool
val equal_inlined_attribute : inlined_attribute -> inlined_attribute -> bool
type probe_desc = { name: string }
type probe = probe_desc option
type specialise_attribute =
val equal_specialise_attribute
: specialise_attribute
-> specialise_attribute
-> bool
type local_attribute =
type property =
| Noalloc
type poll_attribute =
[ @poll error ]
no [ @poll ] attribute
type check_attribute =
| Default_check
| Assert of property
| Assume of property
type loop_attribute =
type function_kind = Curried of {nlocal: int} | Tupled
type let_kind = Strict | Alias | StrictOpt
Meaning of kinds for let x = e in e ' :
Strict : e may have side - effects ; always evaluate e first
( If e is a simple expression , e.g. a variable or constant ,
we may still substitute e'[x / e ] . )
Alias : e is pure , we can substitute e'[x / e ] if x has 0 or 1 occurrences
in e '
StrictOpt : e does not have side - effects , but depend on the store ;
we can discard e if x does not appear in e '
Strict: e may have side-effects; always evaluate e first
(If e is a simple expression, e.g. a variable or constant,
we may still substitute e'[x/e].)
Alias: e is pure, we can substitute e'[x/e] if x has 0 or 1 occurrences
in e'
StrictOpt: e does not have side-effects, but depend on the store;
we can discard e if x does not appear in e'
*)
type meth_kind = Self | Public | Cached
val equal_meth_kind : meth_kind -> meth_kind -> bool
type function_attribute = {
inline : inline_attribute;
specialise : specialise_attribute;
local: local_attribute;
check : check_attribute;
poll: poll_attribute;
loop: loop_attribute;
is_a_functor: bool;
stub: bool;
tmc_candidate: bool;
}
type scoped_location = Debuginfo.Scoped_location.t
type lambda =
Lvar of Ident.t
| Lmutvar of Ident.t
| Lconst of structured_constant
| Lapply of lambda_apply
| Lfunction of lfunction
| Llet of let_kind * layout * Ident.t * lambda * lambda
| Lmutlet of layout * Ident.t * lambda * lambda
| Lletrec of (Ident.t * lambda) list * lambda
| Lprim of primitive * lambda list * scoped_location
| Lswitch of lambda * lambda_switch * scoped_location * layout
| Lstringswitch of
lambda * (string * lambda) list * lambda option * scoped_location * layout
| Lstaticraise of int * lambda list
| Lstaticcatch of lambda * (int * (Ident.t * layout) list) * lambda * layout
| Ltrywith of lambda * Ident.t * lambda * layout
Lifthenelse ( e , t , f , layout ) evaluates t if e evaluates to 0 , and evaluates f if
e evaluates to any other value ; layout must be the layout of [ t ] and [ f ]
e evaluates to any other value; layout must be the layout of [t] and [f] *)
| Lifthenelse of lambda * lambda * lambda * layout
| Lsequence of lambda * lambda
| Lwhile of lambda_while
| Lfor of lambda_for
| Lassign of Ident.t * lambda
| Lsend of meth_kind * lambda * lambda * lambda list
* region_close * alloc_mode * scoped_location * layout
| Levent of lambda * lambda_event
| Lifused of Ident.t * lambda
| Lregion of lambda * layout
and lfunction = private
{ kind: function_kind;
params: (Ident.t * layout) list;
return: layout;
body: lambda;
specified with [ @inline ] attribute
loc : scoped_location;
}
and lambda_while =
{ wh_cond : lambda;
wh_body : lambda;
}
and lambda_for =
{ for_id : Ident.t;
for_from : lambda;
for_to : lambda;
for_dir : direction_flag;
for_body : lambda;
}
and lambda_apply =
{ ap_func : lambda;
ap_args : lambda list;
ap_result_layout : layout;
ap_region_close : region_close;
ap_mode : alloc_mode;
ap_loc : scoped_location;
ap_tailcall : tailcall_attribute;
ap_specialised : specialise_attribute;
ap_probe : probe;
}
and lambda_switch =
Integer cases
and lambda_event =
{ lev_loc: scoped_location;
lev_kind: lambda_event_kind;
lev_repr: int ref option;
lev_env: Env.t }
and lambda_event_kind =
Lev_before
| Lev_after of Types.type_expr
| Lev_function
| Lev_pseudo
| Lev_module_definition of Ident.t
type program =
{ compilation_unit : Compilation_unit.t;
main_module_block_size : int;
required_globals : Compilation_unit.Set.t;
code : lambda }
Lambda code for the middle - end .
* In the closure case the code is a sequence of assignments to a
preallocated block of size [ main_module_block_size ] using
( Setfield(Getpredef(compilation_unit ) ) ) . The size is used to preallocate
the block .
* In the flambda case the code is an expression returning a block
value of size [ main_module_block_size ] . The size is used to build
the module root as an initialize_symbol
Initialize_symbol(module_name , 0 ,
[ 0 ; ... ; ( main_module_block_size - 1 ) ] )
* In the closure case the code is a sequence of assignments to a
preallocated block of size [main_module_block_size] using
(Setfield(Getpredef(compilation_unit))). The size is used to preallocate
the block.
* In the flambda case the code is an expression returning a block
value of size [main_module_block_size]. The size is used to build
the module root as an initialize_symbol
Initialize_symbol(module_name, 0,
[getfield 0; ...; getfield (main_module_block_size - 1)])
*)
val make_key: lambda -> lambda option
val const_unit: structured_constant
val const_int : int -> structured_constant
val lambda_unit: lambda
val layout_unit : layout
val layout_int : layout
val layout_array : array_kind -> layout
val layout_block : layout
val layout_list : layout
val layout_exception : layout
val layout_function : layout
val layout_object : layout
val layout_class : layout
val layout_module : layout
val layout_functor : layout
val layout_module_field : layout
val layout_string : layout
val layout_float : layout
val layout_boxedint : boxed_integer -> layout
A layout that is because it is the field of a block
val layout_field : layout
val layout_lazy : layout
val layout_lazy_contents : layout
A layout that is because we are missing layout polymorphism
val layout_any_value : layout
A layout that is because it is bound by a letrec
val layout_letrec : layout
val layout_top : layout
val layout_bottom : layout
val name_lambda: let_kind -> lambda -> layout -> (Ident.t -> lambda) -> lambda
val name_lambda_list: (lambda * layout) list -> (lambda list -> lambda) -> lambda
val lfunction :
kind:function_kind ->
params:(Ident.t * layout) list ->
return:layout ->
body:lambda ->
specified with [ @inline ] attribute
loc:scoped_location ->
mode:alloc_mode ->
region:bool ->
lambda
val iter_head_constructor: (lambda -> unit) -> lambda -> unit
* [ iter_head_constructor f lam ] apply [ f ] to only the first level of
sub expressions of [ lam ] . It does not recursively traverse the
expression .
sub expressions of [lam]. It does not recursively traverse the
expression.
*)
val shallow_iter:
tail:(lambda -> unit) ->
non_tail:(lambda -> unit) ->
lambda -> unit
val transl_prim: string -> string -> lambda
val free_variables: lambda -> Ident.Set.t
val transl_module_path: scoped_location -> Env.t -> Path.t -> lambda
val transl_value_path: scoped_location -> Env.t -> Path.t -> lambda
val transl_extension_path: scoped_location -> Env.t -> Path.t -> lambda
val transl_class_path: scoped_location -> Env.t -> Path.t -> lambda
val make_sequence: ('a -> lambda) -> 'a list -> lambda
val subst:
(Ident.t -> Types.value_description -> Env.t -> Env.t) ->
?freshen_bound_variables:bool ->
lambda Ident.Map.t -> lambda -> lambda
val rename : Ident.t Ident.Map.t -> lambda -> lambda
val duplicate : lambda -> lambda
val map : (lambda -> lambda) -> lambda -> lambda
val shallow_map :
tail:(lambda -> lambda) ->
non_tail:(lambda -> lambda) ->
lambda -> lambda
val bind_with_layout:
let_kind -> (Ident.t * layout) -> lambda -> lambda -> lambda
val negate_integer_comparison : integer_comparison -> integer_comparison
val swap_integer_comparison : integer_comparison -> integer_comparison
val negate_float_comparison : float_comparison -> float_comparison
val swap_float_comparison : float_comparison -> float_comparison
val default_function_attribute : function_attribute
val default_stub_attribute : function_attribute
val find_exact_application :
function_kind -> arity:int -> lambda list -> lambda list option
val max_arity : unit -> int
* Maximal number of parameters for a function , or in other words ,
maximal length of the [ params ] list of a [ lfunction ] record .
This is unlimited ( [ max_int ] ) for bytecode , but limited
( currently to 126 ) for native code .
maximal length of the [params] list of a [lfunction] record.
This is unlimited ([max_int]) for bytecode, but limited
(currently to 126) for native code. *)
val join_mode : alloc_mode -> alloc_mode -> alloc_mode
val sub_mode : alloc_mode -> alloc_mode -> bool
val eq_mode : alloc_mode -> alloc_mode -> bool
val is_local_mode : alloc_mode -> bool
val is_heap_mode : alloc_mode -> bool
val primitive_may_allocate : primitive -> alloc_mode option
* Whether and where a primitive may allocate .
[ Some Alloc_local ] permits both options : that is , primitives that
may allocate on both the GC heap and locally report this value .
[Some Alloc_local] permits both options: that is, primitives that
may allocate on both the GC heap and locally report this value. *)
val next_raise_count : unit -> int
val is_guarded: lambda -> bool
val patch_guarded : lambda -> lambda -> lambda
val raise_kind: raise_kind -> string
val merge_inline_attributes
: inline_attribute
-> inline_attribute
-> inline_attribute option
val reset: unit -> unit
* Helpers for module block accesses .
Module accesses are always immutable , except in translobj where the
method cache is stored in a mutable module field .
Module accesses are always immutable, except in translobj where the
method cache is stored in a mutable module field.
*)
val mod_field: ?read_semantics: field_read_semantics -> int -> primitive
val mod_setfield: int -> primitive
val structured_constant_layout : structured_constant -> layout
val primitive_result_layout : primitive -> layout
val compute_expr_layout : layout Ident.Map.t -> lambda -> layout
|
8d4e599a86d2273c5cadf9b64da2958a2421b176d9ecf427149314be3d3c41a3 | gergoerdi/tandoori | let-poly-mutual.hs | test x = let tick y = (x,y):tock y
tock y = (x,y):tick y
in (tick,tock)
test' x = let tick y = (x,y):tock y
tock y = (y,x):tick y
in (tick,tock)
| null | https://raw.githubusercontent.com/gergoerdi/tandoori/515142ce76b96efa75d7044c9077d85394585556/input/let-poly-mutual.hs | haskell | test x = let tick y = (x,y):tock y
tock y = (x,y):tick y
in (tick,tock)
test' x = let tick y = (x,y):tock y
tock y = (y,x):tick y
in (tick,tock)
| |
2d2b9a5369f3e5393db6b3f73e4e734f241416d7cd4ddf8a11e10f84ece25fba | let-def/lrgrep | dfa.ml | open Utils
open Misc
open Fix.Indexing
module Make(Regexp : Mid.Sigs.REGEXP)()
: Sigs.DFA with module Regexp = Regexp
=
struct
module Regexp = Regexp
open Regexp
open Info
module Redgraph = Mid.Redgraph.Make(Info)()
module Reduction = Mid.Reduction.Make(Redgraph)
module CachedKRESet = Reduction.Cache(struct
type t = KRESet.t * RE.var indexset
let compare = compare_pair KRESet.compare IndexSet.compare
let derive (k, _) = KRESet.derive_in_reduction k
let merge ts =
let ks, vs = List.split ts in
(List.fold_left KRESet.union KRESet.empty ks,
List.fold_left IndexSet.union IndexSet.empty vs)
let cmon (k, _) = KRESet.cmon k
end)
module Red = Reduction.Make(CachedKRESet)
include (struct
type thread = RE.var
let thread = RE.var
end : sig
type thread
val thread : int -> thread index
end)
module Kern = struct
type t = {
direct: KRE.t array;
reduce: Red.t array;
}
type direct_map = (thread, RE.var indexset) indexmap array
type reduce_map = thread indexset array
type transition = (direct_map * reduce_map * t)
let make direct =
{direct = Array.of_list (KRESet.elements direct); reduce = [||]}
let empty = {direct = [||]; reduce = [||]}
let cmon t =
let rs = Printf.sprintf "{%d reductions}" (Array.length t.reduce) in
Cmon.record [
"direct", Cmon.array_map KRE.cmon t.direct;
"reduce", Cmon.constant rs;
]
let compare t1 t2 =
let c = array_compare KRE.compare t1.direct t2.direct in
if c <> 0 then c else
array_compare Red.compare t1.reduce t2.reduce
let compare_transition (d1, r1, t1 : transition) (d2, r2, t2 : transition) =
let c = compare t1 t2 in
if c <> 0 then c else
let c = array_compare (IndexMap.compare IndexSet.compare) d1 d2 in
if c <> 0 then c else
array_compare IndexSet.compare r1 r2
let make_from_elts elts =
let direct, reduce = List.partition_map (fun x -> x) elts in
let direct =
let combine_vars vars (_, vars') =
let vars_union _ vs1 vs2 = Some (IndexSet.union vs1 vs2) in
IndexMap.union vars_union vars vars'
in
sort_and_merge
(compare_fst KRE.compare)
(fun (k, vars) rest -> (k, List.fold_left combine_vars vars rest))
direct
in
let reduce =
let combine_ix ix (_, ix') = IndexSet.union ix ix' in
sort_and_merge
(compare_fst Red.compare)
(fun (r, ix) rest -> (r, List.fold_left combine_ix ix rest))
reduce
in
let direct, direct_tr = array_split (Array.of_list direct) in
let reduce, reduce_tr = array_split (Array.of_list reduce) in
(direct_tr, reduce_tr, {direct; reduce})
let derive ~reduction_cache t =
let output = ref [] in
let push_direct sg ix k = push output (sg, Either.left (k, ix)) in
let make_varmap (ix : thread indexset) vars =
IndexSet.fold (fun i map -> IndexMap.add i vars map) ix IndexMap.empty
in
let output_reductions ix (ds, rs) =
List.iter (fun (sg, k) ->
let k, v = CachedKRESet.unlift k in
KRESet.iter (push_direct sg (make_varmap ix v)) k
) ds;
List.iter (fun (sg, r) ->
push output (sg, Either.right (r, ix))
) rs;
in
let accept, direct, reduce =
let accept = ref [] and direct = ref [] and reduce = ref [] in
let loop i k =
KRESet.derive_kre k (thread i)
~visited:(ref KRESet.empty) ~accept ~reduce ~direct
in
Array.iteri loop t.direct;
(!accept, !direct, !reduce)
in
List.iter (fun (kre, ix) ->
CachedKRESet.lift (KRESet.singleton kre, IndexSet.empty)
|> Red.compile reduction_cache
|> Red.initial
|> output_reductions ix
) (sort_and_merge
(compare_fst KRE.compare)
(fun (kre, i) is -> (kre, IndexSet.of_list (i :: List.map snd is)) )
reduce
);
Array.iteri (fun i r ->
output_reductions
(IndexSet.singleton (thread (Array.length t.direct + i)))
(Red.derive r)
) t.reduce;
List.iter (fun (sg, vars, k, i) ->
push_direct sg (IndexMap.singleton i vars) k
) direct;
let tr =
IndexRefine.annotated_partition !output
|> List.map (fun (sg, elts) -> (sg, make_from_elts elts))
|> Misc.sort_and_merge
(compare_snd compare_transition)
(fun (sg, e) rest ->
let union_sg sg (sg', _) = IndexSet.union sg sg' in
(List.fold_left union_sg sg rest, e))
in
(accept, tr)
let _interpret st ~stack =
let reduction_cache = Red.make_compilation_cache () in
let rec loop st stack =
Printf.eprintf "------------------------\n";
Printf.eprintf "Matcher state:\n%a\n" print_cmon (cmon st);
let accepted, transitions = derive ~reduction_cache st in
Printf.eprintf "Matching actions: [%s]\n"
(string_concat_map ";" (fun (x, _) -> string_of_index x) accepted);
match stack with
| [] -> Printf.eprintf "End of stack\n"
| lr1 :: stack' ->
Printf.eprintf "Parser in state %s\n" (Lr1.to_string lr1);
let match_transition (sg, st') =
if IndexSet.mem lr1 sg
then Some st'
else None
in
let targets = List.filter_map match_transition transitions in
let count = List.length targets in
Printf.eprintf "Transition: %d target%s\n" count
(match count with
| 0 -> " (ending analysis)"
| 1 -> " (deterministic)"
| _ -> "s (non-deterministic), error!");
match targets with
| [] -> loop empty stack'
| [_, _, st'] -> loop st' stack'
| _ -> assert false
in
loop st stack
end
module KernMap = Map.Make(Kern)
module Pre = struct
type t = {
expr: Kern.t;
id: int;
accepted: (KRE.clause index * thread index) list;
mutable transitions: transition list;
mutable visited: Lr1.set;
mutable scheduled: Lr1.set;
}
and transition = Lr1.set * Kern.direct_map * Kern.reduce_map * t lazy_t
let derive_dfa expr =
let next_id =
let k = ref 0 in
fun () ->
let id = !k in
incr k;
id
in
let reduction_cache = Red.make_compilation_cache () in
let dfa : t KernMap.t ref = ref KernMap.empty in
let rec find_state st =
match KernMap.find_opt st !dfa with
| Some state -> state
| None ->
let accepted, transitions = Kern.derive ~reduction_cache st in
let state = {
expr; id = next_id ();
visited = IndexSet.empty;
scheduled = IndexSet.empty;
accepted;
transitions = List.map make_transition transitions;
} in
dfa := KernMap.add st state !dfa;
state
and make_transition (sg, (direct_tr, reduce_tr, k)) =
(sg, direct_tr, reduce_tr, lazy (find_state k))
in
let todo = ref [] in
let schedule st sg =
if not (IndexSet.is_empty sg) then (
let lazy st = st in
let unvisited = IndexSet.diff sg st.visited in
if not (IndexSet.is_empty unvisited) then (
if IndexSet.is_empty st.scheduled then push todo st;
st.scheduled <- IndexSet.union st.scheduled unvisited;
)
)
in
let process st =
let sg = st.scheduled in
st.visited <- IndexSet.union sg st.visited;
st.scheduled <- IndexSet.empty;
List.iter (fun (sg', _, _, st') ->
schedule st' (Lr1.set_predecessors (IndexSet.inter sg' sg))
) st.transitions
in
let rec loop () =
match List.rev !todo with
| [] -> ()
| todo' ->
todo := [];
List.iter process todo';
loop ()
in
let initial = find_state expr in
schedule (lazy initial) Lr1.all;
loop ();
!dfa
end
type state_index = int
type transition = {
label: Lr1.set;
source: state_index;
target: state_index;
direct_map: Kern.direct_map;
reduce_map: Kern.reduce_map;
}
type state = {
index: state_index;
kern: Kern.t;
visited: Lr1.set;
accepted: (KRE.clause index * thread index) list;
forward: transition list;
mutable backward: transition list;
}
type dfa = state array
let threads st =
Array.length st.kern.direct + Array.length st.kern.reduce
let label tr = tr.label
let source tr = tr.source
let target tr = tr.target
let all_vars tr =
Array.fold_left (fun acc map -> IndexMap.fold (fun _ -> IndexSet.union) map acc)
IndexSet.empty tr.direct_map
let index st = st.index
let forward st = st.forward
let backward st = st.backward
let accepted st = st.accepted
let reverse_mapping tr ~target_thread =
let target_thread = Index.to_int target_thread in
let direct_count = Array.length tr.direct_map in
if target_thread < direct_count then
tr.direct_map.(target_thread)
else
IndexSet.fold
(fun thread acc -> IndexMap.add thread IndexSet.empty acc)
tr.reduce_map.(target_thread - direct_count) IndexMap.empty
let derive_dfa expr =
let dfa = Pre.derive_dfa (Kern.make expr) in
let states =
Array.make (KernMap.cardinal dfa)
{index=0; visited=IndexSet.empty; kern=Kern.empty;
accepted=[]; forward=[]; backward=[]}
in
KernMap.iter (fun kern {Pre. id=index; accepted; transitions; visited; _} ->
let forward = List.filter_map
(fun (label, direct_map, reduce_map, tgt) ->
if Lazy.is_val tgt then (
let target = (Lazy.force_val tgt).Pre.id in
Some {label; source=index; target; direct_map; reduce_map}
) else None
) transitions
in
states.(index) <- {index; kern; forward; visited; accepted; backward=[]}
) dfa;
Array.iter (fun src ->
List.iter (fun tr ->
let tgt = states.(tr.target) in
tgt.backward <- tr :: tgt.backward
) src.forward
) states;
states
let rec eval (dfa : dfa) (st : state_index) ~stack =
Printf.eprintf "------------------------\n";
Printf.eprintf "Matcher in state %d:\n%a\n"
st print_cmon (Kern.cmon dfa.(st).kern);
Printf.eprintf "Matching actions: [%s]\n"
(string_concat_map ";" (fun (x, _) -> string_of_index x) dfa.(st).accepted);
match stack with
| [] -> Printf.eprintf "End of stack\n"
| lr1 :: xs ->
Printf.eprintf "Parser in state %s\n" (Lr1.to_string lr1);
let filter_tr tr = IndexSet.mem lr1 tr.label in
begin match List.find_opt filter_tr dfa.(st).forward with
| Some tr -> eval dfa tr.target ~stack:xs
| None ->
Printf.eprintf "No transitions, ending analysis\n"
end
let variable_index liveness (thr, var : thread index * RE.var index) =
let before, elt, _ = IndexMap.split thr liveness in
let offset =
IndexMap.fold (fun _ set acc -> acc + IndexSet.cardinal set) before 0
in
match elt with
| None -> assert false
| Some vars ->
IndexSet.fold (fun var' acc -> if var < var' then acc + 1 else acc) vars offset
let compute_action dfa liveness tr =
let src_live = liveness.(source tr) in
let tgt_live = liveness.(target tr) in
let move = ref [] in
let store = ref [] in
for i = 0 to threads dfa.(target tr) - 1 do
let tgt_thr = thread i in
match IndexMap.find_opt tgt_thr tgt_live with
| None -> ()
| Some tgt_vars ->
IndexMap.iter (fun src_thr store_vars ->
IndexSet.iter (fun var ->
if IndexSet.mem var store_vars then
push store (variable_index tgt_live (tgt_thr, var))
else
push move (variable_index tgt_live (tgt_thr, var),
variable_index src_live (src_thr, var))
) tgt_vars;
) (reverse_mapping tr ~target_thread:tgt_thr)
done;
{ Lrgrep_support.
store = !store;
move = !move;
target = target tr;
}
type liveness = (thread, RE.var indexset) indexmap array
let gen_table dfa liveness =
let states = Array.map (fun (r : state) ->
let accept =
List.map (fun (clause, thread) ->
let clause = Index.to_int clause in
let live = liveness.(index r) in
match IndexMap.find_opt thread live with
| None -> assert false
| Some variables ->
match IndexSet.minimum variables with
| None -> (clause, 0, 0)
| Some var ->
(clause,
variable_index live (thread, var),
IndexSet.cardinal variables)
) r.accepted
in
let transitions = ref [] in
let halting = ref r.visited in
List.iter (fun tr ->
halting := IndexSet.diff !halting tr.label;
let action = compute_action dfa liveness tr in
push transitions ((tr.label :> IntSet.t), action);
) r.forward;
let halting = (!halting :> IntSet.t) in
let transitions = !transitions in
{Lrgrep_support. accept; halting; transitions}
) dfa
in
Lrgrep_support.compact states
end
| null | https://raw.githubusercontent.com/let-def/lrgrep/0b692a8c6aba8c1fe5d467e89d1bb14602fee9f2/src/back/dfa.ml | ocaml | open Utils
open Misc
open Fix.Indexing
module Make(Regexp : Mid.Sigs.REGEXP)()
: Sigs.DFA with module Regexp = Regexp
=
struct
module Regexp = Regexp
open Regexp
open Info
module Redgraph = Mid.Redgraph.Make(Info)()
module Reduction = Mid.Reduction.Make(Redgraph)
module CachedKRESet = Reduction.Cache(struct
type t = KRESet.t * RE.var indexset
let compare = compare_pair KRESet.compare IndexSet.compare
let derive (k, _) = KRESet.derive_in_reduction k
let merge ts =
let ks, vs = List.split ts in
(List.fold_left KRESet.union KRESet.empty ks,
List.fold_left IndexSet.union IndexSet.empty vs)
let cmon (k, _) = KRESet.cmon k
end)
module Red = Reduction.Make(CachedKRESet)
include (struct
type thread = RE.var
let thread = RE.var
end : sig
type thread
val thread : int -> thread index
end)
module Kern = struct
type t = {
direct: KRE.t array;
reduce: Red.t array;
}
type direct_map = (thread, RE.var indexset) indexmap array
type reduce_map = thread indexset array
type transition = (direct_map * reduce_map * t)
let make direct =
{direct = Array.of_list (KRESet.elements direct); reduce = [||]}
let empty = {direct = [||]; reduce = [||]}
let cmon t =
let rs = Printf.sprintf "{%d reductions}" (Array.length t.reduce) in
Cmon.record [
"direct", Cmon.array_map KRE.cmon t.direct;
"reduce", Cmon.constant rs;
]
let compare t1 t2 =
let c = array_compare KRE.compare t1.direct t2.direct in
if c <> 0 then c else
array_compare Red.compare t1.reduce t2.reduce
let compare_transition (d1, r1, t1 : transition) (d2, r2, t2 : transition) =
let c = compare t1 t2 in
if c <> 0 then c else
let c = array_compare (IndexMap.compare IndexSet.compare) d1 d2 in
if c <> 0 then c else
array_compare IndexSet.compare r1 r2
let make_from_elts elts =
let direct, reduce = List.partition_map (fun x -> x) elts in
let direct =
let combine_vars vars (_, vars') =
let vars_union _ vs1 vs2 = Some (IndexSet.union vs1 vs2) in
IndexMap.union vars_union vars vars'
in
sort_and_merge
(compare_fst KRE.compare)
(fun (k, vars) rest -> (k, List.fold_left combine_vars vars rest))
direct
in
let reduce =
let combine_ix ix (_, ix') = IndexSet.union ix ix' in
sort_and_merge
(compare_fst Red.compare)
(fun (r, ix) rest -> (r, List.fold_left combine_ix ix rest))
reduce
in
let direct, direct_tr = array_split (Array.of_list direct) in
let reduce, reduce_tr = array_split (Array.of_list reduce) in
(direct_tr, reduce_tr, {direct; reduce})
let derive ~reduction_cache t =
let output = ref [] in
let push_direct sg ix k = push output (sg, Either.left (k, ix)) in
let make_varmap (ix : thread indexset) vars =
IndexSet.fold (fun i map -> IndexMap.add i vars map) ix IndexMap.empty
in
let output_reductions ix (ds, rs) =
List.iter (fun (sg, k) ->
let k, v = CachedKRESet.unlift k in
KRESet.iter (push_direct sg (make_varmap ix v)) k
) ds;
List.iter (fun (sg, r) ->
push output (sg, Either.right (r, ix))
) rs;
in
let accept, direct, reduce =
let accept = ref [] and direct = ref [] and reduce = ref [] in
let loop i k =
KRESet.derive_kre k (thread i)
~visited:(ref KRESet.empty) ~accept ~reduce ~direct
in
Array.iteri loop t.direct;
(!accept, !direct, !reduce)
in
List.iter (fun (kre, ix) ->
CachedKRESet.lift (KRESet.singleton kre, IndexSet.empty)
|> Red.compile reduction_cache
|> Red.initial
|> output_reductions ix
) (sort_and_merge
(compare_fst KRE.compare)
(fun (kre, i) is -> (kre, IndexSet.of_list (i :: List.map snd is)) )
reduce
);
Array.iteri (fun i r ->
output_reductions
(IndexSet.singleton (thread (Array.length t.direct + i)))
(Red.derive r)
) t.reduce;
List.iter (fun (sg, vars, k, i) ->
push_direct sg (IndexMap.singleton i vars) k
) direct;
let tr =
IndexRefine.annotated_partition !output
|> List.map (fun (sg, elts) -> (sg, make_from_elts elts))
|> Misc.sort_and_merge
(compare_snd compare_transition)
(fun (sg, e) rest ->
let union_sg sg (sg', _) = IndexSet.union sg sg' in
(List.fold_left union_sg sg rest, e))
in
(accept, tr)
let _interpret st ~stack =
let reduction_cache = Red.make_compilation_cache () in
let rec loop st stack =
Printf.eprintf "------------------------\n";
Printf.eprintf "Matcher state:\n%a\n" print_cmon (cmon st);
let accepted, transitions = derive ~reduction_cache st in
Printf.eprintf "Matching actions: [%s]\n"
(string_concat_map ";" (fun (x, _) -> string_of_index x) accepted);
match stack with
| [] -> Printf.eprintf "End of stack\n"
| lr1 :: stack' ->
Printf.eprintf "Parser in state %s\n" (Lr1.to_string lr1);
let match_transition (sg, st') =
if IndexSet.mem lr1 sg
then Some st'
else None
in
let targets = List.filter_map match_transition transitions in
let count = List.length targets in
Printf.eprintf "Transition: %d target%s\n" count
(match count with
| 0 -> " (ending analysis)"
| 1 -> " (deterministic)"
| _ -> "s (non-deterministic), error!");
match targets with
| [] -> loop empty stack'
| [_, _, st'] -> loop st' stack'
| _ -> assert false
in
loop st stack
end
module KernMap = Map.Make(Kern)
module Pre = struct
type t = {
expr: Kern.t;
id: int;
accepted: (KRE.clause index * thread index) list;
mutable transitions: transition list;
mutable visited: Lr1.set;
mutable scheduled: Lr1.set;
}
and transition = Lr1.set * Kern.direct_map * Kern.reduce_map * t lazy_t
let derive_dfa expr =
let next_id =
let k = ref 0 in
fun () ->
let id = !k in
incr k;
id
in
let reduction_cache = Red.make_compilation_cache () in
let dfa : t KernMap.t ref = ref KernMap.empty in
let rec find_state st =
match KernMap.find_opt st !dfa with
| Some state -> state
| None ->
let accepted, transitions = Kern.derive ~reduction_cache st in
let state = {
expr; id = next_id ();
visited = IndexSet.empty;
scheduled = IndexSet.empty;
accepted;
transitions = List.map make_transition transitions;
} in
dfa := KernMap.add st state !dfa;
state
and make_transition (sg, (direct_tr, reduce_tr, k)) =
(sg, direct_tr, reduce_tr, lazy (find_state k))
in
let todo = ref [] in
let schedule st sg =
if not (IndexSet.is_empty sg) then (
let lazy st = st in
let unvisited = IndexSet.diff sg st.visited in
if not (IndexSet.is_empty unvisited) then (
if IndexSet.is_empty st.scheduled then push todo st;
st.scheduled <- IndexSet.union st.scheduled unvisited;
)
)
in
let process st =
let sg = st.scheduled in
st.visited <- IndexSet.union sg st.visited;
st.scheduled <- IndexSet.empty;
List.iter (fun (sg', _, _, st') ->
schedule st' (Lr1.set_predecessors (IndexSet.inter sg' sg))
) st.transitions
in
let rec loop () =
match List.rev !todo with
| [] -> ()
| todo' ->
todo := [];
List.iter process todo';
loop ()
in
let initial = find_state expr in
schedule (lazy initial) Lr1.all;
loop ();
!dfa
end
type state_index = int
type transition = {
label: Lr1.set;
source: state_index;
target: state_index;
direct_map: Kern.direct_map;
reduce_map: Kern.reduce_map;
}
type state = {
index: state_index;
kern: Kern.t;
visited: Lr1.set;
accepted: (KRE.clause index * thread index) list;
forward: transition list;
mutable backward: transition list;
}
type dfa = state array
let threads st =
Array.length st.kern.direct + Array.length st.kern.reduce
let label tr = tr.label
let source tr = tr.source
let target tr = tr.target
let all_vars tr =
Array.fold_left (fun acc map -> IndexMap.fold (fun _ -> IndexSet.union) map acc)
IndexSet.empty tr.direct_map
let index st = st.index
let forward st = st.forward
let backward st = st.backward
let accepted st = st.accepted
let reverse_mapping tr ~target_thread =
let target_thread = Index.to_int target_thread in
let direct_count = Array.length tr.direct_map in
if target_thread < direct_count then
tr.direct_map.(target_thread)
else
IndexSet.fold
(fun thread acc -> IndexMap.add thread IndexSet.empty acc)
tr.reduce_map.(target_thread - direct_count) IndexMap.empty
let derive_dfa expr =
let dfa = Pre.derive_dfa (Kern.make expr) in
let states =
Array.make (KernMap.cardinal dfa)
{index=0; visited=IndexSet.empty; kern=Kern.empty;
accepted=[]; forward=[]; backward=[]}
in
KernMap.iter (fun kern {Pre. id=index; accepted; transitions; visited; _} ->
let forward = List.filter_map
(fun (label, direct_map, reduce_map, tgt) ->
if Lazy.is_val tgt then (
let target = (Lazy.force_val tgt).Pre.id in
Some {label; source=index; target; direct_map; reduce_map}
) else None
) transitions
in
states.(index) <- {index; kern; forward; visited; accepted; backward=[]}
) dfa;
Array.iter (fun src ->
List.iter (fun tr ->
let tgt = states.(tr.target) in
tgt.backward <- tr :: tgt.backward
) src.forward
) states;
states
let rec eval (dfa : dfa) (st : state_index) ~stack =
Printf.eprintf "------------------------\n";
Printf.eprintf "Matcher in state %d:\n%a\n"
st print_cmon (Kern.cmon dfa.(st).kern);
Printf.eprintf "Matching actions: [%s]\n"
(string_concat_map ";" (fun (x, _) -> string_of_index x) dfa.(st).accepted);
match stack with
| [] -> Printf.eprintf "End of stack\n"
| lr1 :: xs ->
Printf.eprintf "Parser in state %s\n" (Lr1.to_string lr1);
let filter_tr tr = IndexSet.mem lr1 tr.label in
begin match List.find_opt filter_tr dfa.(st).forward with
| Some tr -> eval dfa tr.target ~stack:xs
| None ->
Printf.eprintf "No transitions, ending analysis\n"
end
let variable_index liveness (thr, var : thread index * RE.var index) =
let before, elt, _ = IndexMap.split thr liveness in
let offset =
IndexMap.fold (fun _ set acc -> acc + IndexSet.cardinal set) before 0
in
match elt with
| None -> assert false
| Some vars ->
IndexSet.fold (fun var' acc -> if var < var' then acc + 1 else acc) vars offset
let compute_action dfa liveness tr =
let src_live = liveness.(source tr) in
let tgt_live = liveness.(target tr) in
let move = ref [] in
let store = ref [] in
for i = 0 to threads dfa.(target tr) - 1 do
let tgt_thr = thread i in
match IndexMap.find_opt tgt_thr tgt_live with
| None -> ()
| Some tgt_vars ->
IndexMap.iter (fun src_thr store_vars ->
IndexSet.iter (fun var ->
if IndexSet.mem var store_vars then
push store (variable_index tgt_live (tgt_thr, var))
else
push move (variable_index tgt_live (tgt_thr, var),
variable_index src_live (src_thr, var))
) tgt_vars;
) (reverse_mapping tr ~target_thread:tgt_thr)
done;
{ Lrgrep_support.
store = !store;
move = !move;
target = target tr;
}
(* Per-state liveness information: for each DFA state (array index), a map
   from each live thread to the set of its variables that must be kept. *)
type liveness = (thread, RE.var indexset) indexmap array
(* Flatten the DFA into an [Lrgrep_support] description and compact it into
   the final matching table.  For every state we emit:
   - [accept]: for each accepted (clause, thread) pair, the clause number
     together with the base register index and the count of that thread's
     live variables (or [(clause, 0, 0)] when the thread captures nothing);
   - [transitions]: each outgoing transition's lr1 label paired with its
     register actions (see [compute_action]);
   - [halting]: the visited lr1 states not covered by any transition. *)
let gen_table dfa liveness =
  let states = Array.map (fun (r : state) ->
      let accept =
        List.map (fun (clause, thread) ->
            let clause = Index.to_int clause in
            let live = liveness.(index r) in
            match IndexMap.find_opt thread live with
            | None -> assert false
            | Some variables ->
              match IndexSet.minimum variables with
              | None -> (clause, 0, 0)
              | Some var ->
                (clause,
                 variable_index live (thread, var),
                 IndexSet.cardinal variables)
          ) r.accepted
      in
      let transitions = ref [] in
      (* Start from all visited lr1 states and remove those that have a
         transition; what remains halts the matcher. *)
      let halting = ref r.visited in
      List.iter (fun tr ->
          halting := IndexSet.diff !halting tr.label;
          let action = compute_action dfa liveness tr in
          push transitions ((tr.label :> IntSet.t), action);
        ) r.forward;
      let halting = (!halting :> IntSet.t) in
      let transitions = !transitions in
      {Lrgrep_support. accept; halting; transitions}
    ) dfa
  in
  Lrgrep_support.compact states
end
| |
f365954341617e260649b2ecd95cfbbe316f1f3420fc502d7e105fc31f1297f9 | alphaHeavy/llvm-general-typed | ValueJoin.hs | {-# LANGUAGE GADTs #-}
module LLVM.General.Typed.ValueJoin where
import LLVM.General.Typed.BasicBlock
import LLVM.General.Typed.Value
-- | Place a 'Value' into a 'BasicBlock': a 'ValueOperand' carries a
-- computation of its operand, which is run here and its result rewrapped
-- as a 'ValuePure'; every other constructor is already pure and is
-- returned unchanged.
vjoin :: Value const a -> BasicBlock (Value const a)
vjoin (ValueOperand a) = ValuePure <$> a
vjoin a = return a
| null | https://raw.githubusercontent.com/alphaHeavy/llvm-general-typed/75c39111f7fc685aacb3eaf1b2948451e7222e0b/llvm-general-typed-pure/src/LLVM/General/Typed/ValueJoin.hs | haskell | # LANGUAGE GADTs #
Place a 'Value' into a 'BasicBlock' |
module LLVM.General.Typed.ValueJoin where
import LLVM.General.Typed.BasicBlock
import LLVM.General.Typed.Value
vjoin :: Value const a -> BasicBlock (Value const a)
vjoin (ValueOperand a) = ValuePure <$> a
vjoin a = return a
|
0b467825abf4566bc44c770543dd45da39a177931d3dc051796dc71a8583175a | qnighy/ipc_solver | main.ml | open Term
open Format
(* Global flags set by command-line parsing (see [speclist] below). *)
let verbose = ref false
(* When [Some path], a LaTeX report is written to [path]. *)
let latex_output = ref None
(* When true, the LaTeX report embeds the extracted Haskell code instead of
   the NJ proof tree. *)
let haskell_in_latex = ref false
(* Help text displayed by [Arg.usage] and the -help option. *)
let usage_msg =
  String.concat "\n"
    [ "ipc_solver is a solver for intuitionistic propositional formulas.";
      "";
      "usage:" ]
(* Command-line option specification consumed by [Arg.parse]. *)
let speclist =
  let set_verbose = Arg.Set verbose in
  [ ("--latex",
     Arg.String (fun path -> latex_output := Some path),
     "Sets a path for latex output");
    ("--no-latex",
     Arg.Unit (fun _ -> latex_output := None),
     "Cancels latex output");
    ("--haskell-in-latex",
     Arg.Set haskell_in_latex,
     "Outputs Haskell source code in the latex output");
    (* Long and short spelling of the same switch. *)
    ("--verbose", set_verbose, "Enables verbose output");
    ("-v", set_verbose, "Enables verbose output") ]
(* Entry point: parse a propositional formula from stdin, decide its
   provability (intuitionistic via [Solver], classical / counter-models via
   [Kripke]) and print the verdict; optionally render a LaTeX report with
   the NJ proof tree (or Haskell extraction) and a Kripke counter-model. *)
let () =
  Arg.parse speclist (fun _ -> ()) usage_msg;
  let lexbuf = Lexing.from_channel stdin in
  begin try
    let tn = Parser.main Lexer.token lexbuf in
    if !verbose then eprintf "Term is %a@." pp_print_pnterm tn;
    (* Resolve variable names to indices; [env] records the variable count. *)
    let (t,env) = convert_name tn in
    if !verbose then eprintf "Term is %a@." (pp_print_pterm env) t;
    let solver_result = Solver.solve env.maxvar t in
    (* Intuitionistic provability implies classical provability, so the
       1-world (boolean) check only runs when the solver failed. *)
    let classical_result = begin match solver_result with
    | Some _ -> Kripke.Irrefutable
    | None -> Kripke.solve_n env 1 t end in
    let kripke_result = begin match solver_result, classical_result with
    | Some _, _ -> Kripke.Irrefutable
    | _, Kripke.Irrefutable -> Kripke.solve env t
    | _, r -> r end in
    let message =
      begin match solver_result, classical_result with
      | Some _, _ -> "Provable."
      | _, Kripke.Refutable _ -> "Not provable in intuitionistic logic; not provable in classical logic neither."
      | _, Kripke.Irrefutable -> "Not provable in intuitionistic logic; provable in classical logic however."
      | _, _ -> "Not provable in intuitionistic logic."
      end in
    Format.printf "%s@." message;
    begin match !latex_output with
    | Some latex_path ->
      let f = open_out latex_path in
      let ff = formatter_of_out_channel f in
      (* LaTeX preamble. *)
      fprintf ff "%s@." "\\documentclass[preview,varwidth=10000px,12pt]{standalone}";
      fprintf ff "%s@." "\\usepackage{bussproofs}";
      fprintf ff "%s@." "\\usepackage{color}";
      fprintf ff "%s@." "\\usepackage{latexsym}";
      fprintf ff "%s@." "\\usepackage{listings}";
      fprintf ff "%s@." "\\begin{document}";
      fprintf ff "%a:@.@." (pp_print_pterm_latex env 5) t;
      fprintf ff "%s@.@." message;
      (* If provable, render the proof (NJ tree or extracted Haskell). *)
      begin match Solver.solve env.maxvar t with
      | Some pr ->
        if !verbose then eprintf "proof(LF, plain): %a@."
          Lf_proof.pp_print_proofitem pr;
        if !verbose then eprintf "proof(LF):@,%a@."
          (Lf_proof.pp_print_proof env env.maxvar t) pr;
        let npr = Nj_proof.convert_lf t pr in
        if !verbose then eprintf "proof(NJ):@,%a@."
          (Nj_proof.pp_print_lambda env) npr;
        (* Type-check before and after post-processing as a sanity check. *)
        ignore (Nj_proof.nj_check_type [] npr);
        let npr = Nj_proof.postproc_proof npr in
        ignore (Nj_proof.nj_check_type [] npr);
        if !verbose then eprintf "proof(NJ):@,%a@."
          (Nj_proof.pp_print_lambda env) npr;
        if !haskell_in_latex then begin
          fprintf ff "Haskell code:@.@.";
          fprintf ff "\\begin{lstlisting}[language=Haskell]@.";
          fprintf ff "%s@." (PpHaskell.haskell_of_nj env npr);
          fprintf ff "\\end{lstlisting}@."
        end else begin
          fprintf ff "%s@.@." "Proof tree (intuitionistic):";
          fprintf ff "%a@." (Nj_proof.print_nj_latex env) npr
        end
      | None -> ()
      end;
      (* If refutable, render the (Kripke) counter-model. *)
      begin match kripke_result with
      | Kripke.Refutable (n, accessibility, term_asgn) ->
        if n = 1 then begin
          (* One world: a plain boolean counterexample. *)
          fprintf ff "%s" "Counterexample: ";
          for i = 0 to (env.maxvar-1) do
            if i <> 0 then fprintf ff ", ";
            fprintf ff "$%a = %d$"
              (pp_print_pterm_latex_internal env 5) (PVar i)
              (if (Hashtbl.find term_asgn (PVar i)).(0) then 1 else 0)
          done;
          fprintf ff "@.@."
        end else begin
          fprintf ff "%s" "Kripke counterexample: ";
          fprintf ff "$\\mathcal{W} = \\{";
          for j = 0 to (n-1) do
            if j <> 0 then fprintf ff ", ";
            fprintf ff "W_{%d}" j
          done;
          fprintf ff "\\}$, ";
          for i = 0 to (n-1) do
            for j = i+1 to (n-1) do
              if accessibility.(i).(j) then begin
                (* Only print edges of the transitive reduction: skip
                   i ~> j when some k with i < k < j gives i ~> k ~> j.
                   (Fixed: this test previously read
                   [accessibility.(i).(j) && accessibility.(j).(k)],
                   which never detects an intermediate world.) *)
                let ok = ref true in
                for k = i+1 to (j-1) do
                  if accessibility.(i).(k) && accessibility.(k).(j) then
                    ok := false
                done;
                if !ok then fprintf ff "$(W_{%d} \\leadsto W_{%d})$, " i j
              end
            done
          done;
          (* Valuation of each propositional variable as a set of worlds. *)
          for i = 0 to (env.maxvar-1) do
            if i <> 0 then fprintf ff ", ";
            fprintf ff "$%a = \\{"
              (pp_print_pterm_latex_internal env 5) (PVar i);
            let comma = ref false in
            for j = 0 to (n-1) do
              if (Hashtbl.find term_asgn (PVar i)).(j) then begin
                if !comma then fprintf ff ", ";
                fprintf ff "W_{%d}" j;
                comma := true
              end
            done;
            fprintf ff "\\}$"
          done;
          fprintf ff "@.@."
        end;
        (* Truth table: one row per subterm of [t] (post-order, each once). *)
        fprintf ff "\\begin{tabular}{|r|l|}@.";
        let visited = Hashtbl.create 771 in
        let rec visit t =
          if not (Hashtbl.mem visited t) then begin
            Hashtbl.add visited t ();
            begin match t with
            | PVar _ -> ()
            | PArrow (t0, t1) -> visit t0; visit t1
            | POr (t0, t1) -> visit t0; visit t1
            | PAnd (t0, t1) -> visit t0; visit t1
            | PTop -> ()
            | PBot -> ()
            end;
            fprintf ff "%a & " (pp_print_pterm_latex env 5) t;
            if n = 1 then begin
              fprintf ff "$%d$"
                (if (Hashtbl.find term_asgn t).(0) then 1 else 0)
            end else begin
              fprintf ff "$\\{";
              let comma = ref false in
              for j = 0 to (n-1) do
                if (Hashtbl.find term_asgn t).(j) then begin
                  if !comma then fprintf ff ", ";
                  fprintf ff "W_{%d}" j;
                  comma := true
                end
              done;
              fprintf ff "\\}$"
            end;
            fprintf ff " \\\\"
          end
        in
        for i = 0 to (env.maxvar-1) do
          visit (PVar i)
        done;
        visit t;
        fprintf ff "\\end{tabular}@.";
        fprintf ff "@.@."
      | _ -> ()
      end;
      fprintf ff "%s@." "\\end{document}";
      close_out f
    | None -> ()
    end
  with
  | Parsing.Parse_error ->
    (* Report the parse error both on stdout and in the LaTeX file. *)
    Format.printf "Parse Error@.";
    begin match !latex_output with
    | Some latex_path ->
      let f = open_out latex_path in
      let ff = formatter_of_out_channel f in
      fprintf ff "%s@." "%parse_error";
      fprintf ff "%s@." "\\documentclass[preview,varwidth=4000px]{standalone}";
      fprintf ff "%s@." "\\begin{document}";
      fprintf ff "%s@." "Parse Error";
      fprintf ff "%s@." "\\end{document}";
      close_out f
    | None -> ()
    end
  end
| null | https://raw.githubusercontent.com/qnighy/ipc_solver/41ef5367150f515580ba98aaf7ea3d8152d84b04/main.ml | ocaml | open Term
open Format
let verbose = ref false
let latex_output = ref None
let haskell_in_latex = ref false
let usage_msg =
"ipc_solver is a solver for intuitionistic propositional formulas.\n" ^
"\n" ^
"usage:"
let speclist = [
("--latex", Arg.String (fun path -> latex_output := Some path),
"Sets a path for latex output");
("--no-latex", Arg.Unit (fun _ -> latex_output := None),
"Cancels latex output");
("--haskell-in-latex", Arg.Set haskell_in_latex,
"Outputs Haskell source code in the latex output");
("--verbose", Arg.Set verbose, "Enables verbose output");
("-v", Arg.Set verbose, "Enables verbose output")
]
let () =
Arg.parse speclist (fun _ -> ()) usage_msg;
let lexbuf = Lexing.from_channel stdin in
begin try
let tn = Parser.main Lexer.token lexbuf in
if !verbose then eprintf "Term is %a@." pp_print_pnterm tn;
let (t,env) = convert_name tn in
if !verbose then eprintf "Term is %a@." (pp_print_pterm env) t;
let solver_result = Solver.solve env.maxvar t in
let classical_result = begin match solver_result with
| Some _ -> Kripke.Irrefutable
| None -> Kripke.solve_n env 1 t end in
let kripke_result = begin match solver_result, classical_result with
| Some _, _ -> Kripke.Irrefutable
| _, Kripke.Irrefutable -> Kripke.solve env t
| _, r -> r end in
let message =
begin match solver_result, classical_result with
| Some _, _ -> "Provable."
| _, Kripke.Refutable _ -> "Not provable in intuitionistic logic; not provable in classical logic neither."
| _, Kripke.Irrefutable -> "Not provable in intuitionistic logic; provable in classical logic however."
| _, _ -> "Not provable in intuitionistic logic."
end in
Format.printf "%s@." message;
begin match !latex_output with
| Some latex_path ->
let f = open_out latex_path in
let ff = formatter_of_out_channel f in
fprintf ff "%s@." "\\documentclass[preview,varwidth=10000px,12pt]{standalone}";
fprintf ff "%s@." "\\usepackage{bussproofs}";
fprintf ff "%s@." "\\usepackage{color}";
fprintf ff "%s@." "\\usepackage{latexsym}";
fprintf ff "%s@." "\\usepackage{listings}";
fprintf ff "%s@." "\\begin{document}";
fprintf ff "%a:@.@." (pp_print_pterm_latex env 5) t;
fprintf ff "%s@.@." message;
begin match Solver.solve env.maxvar t with
| Some pr ->
if !verbose then eprintf "proof(LF, plain): %a@."
Lf_proof.pp_print_proofitem pr;
if !verbose then eprintf "proof(LF):@,%a@."
(Lf_proof.pp_print_proof env env.maxvar t) pr;
let npr = Nj_proof.convert_lf t pr in
if !verbose then eprintf "proof(NJ):@,%a@."
(Nj_proof.pp_print_lambda env) npr;
ignore (Nj_proof.nj_check_type [] npr);
let npr = Nj_proof.postproc_proof npr in
ignore (Nj_proof.nj_check_type [] npr);
if !verbose then eprintf "proof(NJ):@,%a@."
(Nj_proof.pp_print_lambda env) npr;
if !haskell_in_latex then begin
fprintf ff "Haskell code:@.@.";
fprintf ff "\\begin{lstlisting}[language=Haskell]@.";
fprintf ff "%s@." (PpHaskell.haskell_of_nj env npr);
fprintf ff "\\end{lstlisting}@."
end else begin
fprintf ff "%s@.@." "Proof tree (intuitionistic):";
fprintf ff "%a@." (Nj_proof.print_nj_latex env) npr
end
| None -> ()
end;
begin match kripke_result with
| Kripke.Refutable (n, accessibility, term_asgn) ->
if n == 1 then begin
fprintf ff "%s" "Counterexample: ";
for i = 0 to (env.maxvar-1) do
if i <> 0 then fprintf ff ", ";
fprintf ff "$%a = %d$"
(pp_print_pterm_latex_internal env 5) (PVar i)
(if (Hashtbl.find term_asgn (PVar i)).(0) then 1 else 0)
done;
fprintf ff "@.@."
end else begin
fprintf ff "%s" "Kripke counterexample: ";
fprintf ff "$\\mathcal{W} = \\{";
for j = 0 to (n-1) do
if j <> 0 then fprintf ff ", ";
fprintf ff "W_{%d}" j
done;
fprintf ff "\\}$, ";
for i = 0 to (n-1) do
for j = i+1 to (n-1) do
if accessibility.(i).(j) then begin
let ok = ref true in
for k = i+1 to (j-1) do
if accessibility.(i).(j) && accessibility.(j).(k) then
ok := false
done;
if !ok then fprintf ff "$(W_{%d} \\leadsto W_{%d})$, " i j
end
done
done;
for i = 0 to (env.maxvar-1) do
if i <> 0 then fprintf ff ", ";
fprintf ff "$%a = \\{"
(pp_print_pterm_latex_internal env 5) (PVar i);
let comma = ref false in
for j = 0 to (n-1) do
if (Hashtbl.find term_asgn (PVar i)).(j) then begin
if !comma then fprintf ff ", ";
fprintf ff "W_{%d}" j;
comma := true
end
done;
fprintf ff "\\}$"
done;
fprintf ff "@.@."
end;
fprintf ff "\\begin{tabular}{|r|l|}@.";
let visited = Hashtbl.create 771 in
let rec visit t =
if not (Hashtbl.mem visited t) then begin
Hashtbl.add visited t ();
begin match t with
| PVar _ -> ()
| PArrow (t0, t1) -> visit t0; visit t1
| POr (t0, t1) -> visit t0; visit t1
| PAnd (t0, t1) -> visit t0; visit t1
| PTop -> ()
| PBot -> ()
end;
fprintf ff "%a & " (pp_print_pterm_latex env 5) t;
if n == 1 then begin
fprintf ff "$%d$"
(if (Hashtbl.find term_asgn t).(0) then 1 else 0)
end else begin
fprintf ff "$\\{";
let comma = ref false in
for j = 0 to (n-1) do
if (Hashtbl.find term_asgn t).(j) then begin
if !comma then fprintf ff ", ";
fprintf ff "W_{%d}" j;
comma := true
end
done;
fprintf ff "\\}$"
end;
fprintf ff " \\\\"
end
in
for i = 0 to (env.maxvar-1) do
visit (PVar i)
done;
visit t;
fprintf ff "\\end{tabular}@.";
fprintf ff "@.@."
| _ -> ()
end;
fprintf ff "%s@." "\\end{document}";
close_out f
| None -> ()
end
with
| Parsing.Parse_error ->
Format.printf "Parse Error@.";
begin match !latex_output with
| Some latex_path ->
let f = open_out latex_path in
let ff = formatter_of_out_channel f in
fprintf ff "%s@." "%parse_error";
fprintf ff "%s@." "\\documentclass[preview,varwidth=4000px]{standalone}";
fprintf ff "%s@." "\\begin{document}";
fprintf ff "%s@." "Parse Error";
fprintf ff "%s@." "\\end{document}";
close_out f
| None -> ()
end
end
| |
1ceeabb328efc3f4fb1a4ebbf222a21415ff2583f11fffab689e89903a4a7658 | poscat0x04/telegram-types | StopMessageLiveLocation.hs | module Web.Telegram.Types.Internal.API.StopMessageLiveLocation where
import Common
import Web.Telegram.Types.Internal.API.ChatId
import Web.Telegram.Types.Internal.ReplyMarkup
-- | Parameters of the Telegram Bot API @stopMessageLiveLocation@ method.
-- All fields are optional at the type level; presumably either 'chatId'
-- plus 'messageId' or 'inlineMessageId' identifies the target message
-- (per the Bot API convention -- confirm against the caller).
data StopMessageLiveLocation = StopMessageLiveLocation
  { -- | Target chat, used together with 'messageId'.
    chatId :: Maybe ChatId,
    -- | Identifier of the message in the chat.
    messageId :: Maybe Int,
    -- | Identifier of the inline message (alternative to the pair above).
    inlineMessageId :: Maybe Text,
    -- | Optional new reply markup for the message.
    replyMarkup :: Maybe ReplyMarkup
  }
  deriving stock (Show, Eq)
-- Template Haskell: generate field labels, snake_case JSON encoding and
-- the API method wrapper for this record.
mkLabel ''StopMessageLiveLocation
deriveToJSON snake ''StopMessageLiveLocation
makeMethod ''StopMessageLiveLocation
| null | https://raw.githubusercontent.com/poscat0x04/telegram-types/3de0710640f5303638a83e409001b0342299aeb8/src/Web/Telegram/Types/Internal/API/StopMessageLiveLocation.hs | haskell | module Web.Telegram.Types.Internal.API.StopMessageLiveLocation where
import Common
import Web.Telegram.Types.Internal.API.ChatId
import Web.Telegram.Types.Internal.ReplyMarkup
data StopMessageLiveLocation = StopMessageLiveLocation
{ chatId :: Maybe ChatId,
messageId :: Maybe Int,
inlineMessageId :: Maybe Text,
replyMarkup :: Maybe ReplyMarkup
}
deriving stock (Show, Eq)
mkLabel ''StopMessageLiveLocation
deriveToJSON snake ''StopMessageLiveLocation
makeMethod ''StopMessageLiveLocation
| |
cfe023d712b6a339d41a1461c8b4c3eb786973f4bb56d7cd10192490be18cb55 | oakes/play-cljc | play_cljc.clj | (ns leiningen.new.play-cljc
(:require [leiningen.new.templates :as t]
[clojure.string :as str]
[clojure.java.io :as io]))
(defn sanitize-name
  "Normalize s into a lowercase, hyphen-separated project name: trims,
  lowercases, strips apostrophes, and collapses every other run of
  non-alphanumeric characters into a single hyphen."
  [s]
  (let [cleaned (-> s
                    str/trim
                    str/lower-case
                    (str/replace "'" "")
                    (str/replace #"[^a-z0-9]" " "))
        words   (remove empty? (str/split cleaned #" "))]
    (str/join "-" words)))
(defn play-cljc-data
  "Build the template data map for name. Throws when name sanitizes to an
  empty string. The underscore variants are used for directory/file paths."
  [name]
  (let [project-name (sanitize-name name)]
    (when (empty? project-name)
      (throw (Exception. (str "Invalid name: " name))))
    (let [core-name "core"]
      {:name         project-name
       :core-name    core-name
       :project_name (str/replace project-name "-" "_")
       :core_name    (str/replace core-name "-" "_")})))
(defn play-cljc*
  "Return a map of output path -> content for the generated project.
  Text files are rendered from the play-cljc mustache templates with
  `data`; the PNG sprites are copied through as raw input streams."
  [{:keys [project_name core_name] :as data}]
  (let [render (t/renderer "play-cljc")]
    {"README.md" (render "README.md" data)
     ".gitignore" (render "gitignore" data)
     "deps.edn" (render "deps.edn" data)
     "figwheel-main.edn" (render "figwheel-main.edn" data)
     "dev.cljs.edn" (render "dev.cljs.edn" data)
     "dev.clj" (render "dev.clj" data)
     "prod.clj" (render "prod.clj" data)
     "project.clj" (render "project.clj" data)
     (str "src/" project_name "/" core_name ".cljc") (render "core.cljc" data)
     (str "src/" project_name "/utils.cljc") (render "utils.cljc" data)
     (str "src/" project_name "/move.cljc") (render "move.cljc" data)
     (str "src/" project_name "/start.clj") (render "start.clj" data)
     (str "src/" project_name "/start.cljs") (render "start.cljs" data)
     (str "src/" project_name "/start_dev.clj") (render "start_dev.clj" data)
     (str "src/" project_name "/start_dev.cljs") (render "start_dev.cljs" data)
     "resources/public/index.html" (render "index.html" data)
     "resources/public/player_walk1.png" (-> "player_walk1.png" io/resource io/input-stream)
     "resources/public/player_walk2.png" (-> "player_walk2.png" io/resource io/input-stream)
     "resources/public/player_walk3.png" (-> "player_walk3.png" io/resource io/input-stream)}))
(defn play-cljc
  "Leiningen template entry point: generate a new play-cljc project named
  name. Any extra command-line arguments are accepted and ignored."
  [name & _]
  (let [data (play-cljc-data name)]
    (apply t/->files data (vec (play-cljc* data)))))
| null | https://raw.githubusercontent.com/oakes/play-cljc/ccf9571c1dbc96c044e1d03057cb3b1533100ace/template/src/leiningen/new/play_cljc.clj | clojure | (ns leiningen.new.play-cljc
(:require [leiningen.new.templates :as t]
[clojure.string :as str]
[clojure.java.io :as io]))
(defn sanitize-name [s]
(as-> s $
(str/trim $)
(str/lower-case $)
(str/replace $ "'" "")
(str/replace $ #"[^a-z0-9]" " ")
(str/split $ #" ")
(remove empty? $)
(str/join "-" $)))
(defn play-cljc-data [name]
(let [project-name (sanitize-name name)
core-name "core"]
(when-not (seq project-name)
(throw (Exception. (str "Invalid name: " name))))
{:name project-name
:core-name core-name
:project_name (str/replace project-name "-" "_")
:core_name (str/replace core-name "-" "_")}))
(defn play-cljc*
[{:keys [project_name core_name] :as data}]
(let [render (t/renderer "play-cljc")]
{"README.md" (render "README.md" data)
".gitignore" (render "gitignore" data)
"deps.edn" (render "deps.edn" data)
"figwheel-main.edn" (render "figwheel-main.edn" data)
"dev.cljs.edn" (render "dev.cljs.edn" data)
"dev.clj" (render "dev.clj" data)
"prod.clj" (render "prod.clj" data)
"project.clj" (render "project.clj" data)
(str "src/" project_name "/" core_name ".cljc") (render "core.cljc" data)
(str "src/" project_name "/utils.cljc") (render "utils.cljc" data)
(str "src/" project_name "/move.cljc") (render "move.cljc" data)
(str "src/" project_name "/start.clj") (render "start.clj" data)
(str "src/" project_name "/start.cljs") (render "start.cljs" data)
(str "src/" project_name "/start_dev.clj") (render "start_dev.clj" data)
(str "src/" project_name "/start_dev.cljs") (render "start_dev.cljs" data)
"resources/public/index.html" (render "index.html" data)
"resources/public/player_walk1.png" (-> "player_walk1.png" io/resource io/input-stream)
"resources/public/player_walk2.png" (-> "player_walk2.png" io/resource io/input-stream)
"resources/public/player_walk3.png" (-> "player_walk3.png" io/resource io/input-stream)}))
(defn play-cljc
[name & _]
(let [data (play-cljc-data name)
path->content (play-cljc* data)]
(apply t/->files data (vec path->content))))
| |
a08047b7d1fe4cb4ecc4463c59b7777c4e0d9186deb347738870b3326092f9bd | NorfairKing/sydtest | RedisSpec.hs | {-# LANGUAGE OverloadedStrings #-}
module Test.Syd.RedisSpec (spec) where
import Database.Redis as Redis
import Test.Syd
import Test.Syd.Redis
-- | Integration tests for the sydtest Redis helpers.
spec :: Spec
spec = do
  -- Server lifecycle only: no connection is opened by the test body.
  describe "redisServerSpec" $
    redisServerSpec $
      it "sets up and tears down the redis server nicely" $ do
        pure () :: IO ()
  -- Server plus connection: each test body receives a live connection.
  describe "redisSpec" $
    redisSpec $ do
      it "sets up and tears down a redis connection nicely" $ \conn -> do
        runRedis conn $ do
          errOrStatus <- Redis.set "hello" "world"
          liftIO $ case errOrStatus of
            Left err -> expectationFailure $ show err
            Right status -> status `shouldBe` Ok
          errOrReply <- Redis.get "hello"
          liftIO $ case errOrReply of
            Left err -> expectationFailure $ show err
            Right val -> val `shouldBe` Just "world"
      -- Fixed execution order on purpose: the second test asserts that the
      -- key written by the first test is NOT visible, i.e. state is reset
      -- between tests.
      doNotRandomiseExecutionOrder $
        describe "shared state" $ do
          it "can write a message" $ \conn -> runRedis conn $ do
            errOrStatus <- Redis.set "hello" "world"
            liftIO $ case errOrStatus of
              Left err -> expectationFailure $ show err
              Right status -> status `shouldBe` Ok
          it "cannot read a message that has not been written yet" $ \conn -> runRedis conn $ do
            errOrReply <- Redis.get "hello"
            liftIO $ case errOrReply of
              Left err -> expectationFailure $ show err
              Right Nothing -> pure ()
              Right (Just val) -> expectationFailure $ "Should not have been able to read anything, but got: " <> show val
| null | https://raw.githubusercontent.com/NorfairKing/sydtest/0fad471cee677a4018acbe1983385dfc9a1b49d2/sydtest-hedis/test/Test/Syd/RedisSpec.hs | haskell | # LANGUAGE OverloadedStrings # |
module Test.Syd.RedisSpec (spec) where
import Database.Redis as Redis
import Test.Syd
import Test.Syd.Redis
spec :: Spec
spec = do
describe "redisServerSpec" $
redisServerSpec $
it "sets up and tears down the redis server nicely" $ do
pure () :: IO ()
describe "redisSpec" $
redisSpec $ do
it "sets up and tears down a redis connection nicely" $ \conn -> do
runRedis conn $ do
errOrStatus <- Redis.set "hello" "world"
liftIO $ case errOrStatus of
Left err -> expectationFailure $ show err
Right status -> status `shouldBe` Ok
errOrReply <- Redis.get "hello"
liftIO $ case errOrReply of
Left err -> expectationFailure $ show err
Right val -> val `shouldBe` Just "world"
doNotRandomiseExecutionOrder $
describe "shared state" $ do
it "can write a message" $ \conn -> runRedis conn $ do
errOrStatus <- Redis.set "hello" "world"
liftIO $ case errOrStatus of
Left err -> expectationFailure $ show err
Right status -> status `shouldBe` Ok
it "cannot read a message that has not been written yet" $ \conn -> runRedis conn $ do
errOrReply <- Redis.get "hello"
liftIO $ case errOrReply of
Left err -> expectationFailure $ show err
Right Nothing -> pure ()
Right (Just val) -> expectationFailure $ "Should not have been able to read anything, but got: " <> show val
|
003007ff0319ed6d0a2d66701dc79930b628716faa2d598c768c0269a515dad2 | IBM/wcs-ocaml | cnl_samples.ml |
* This file is part of the Watson Conversation Service OCaml API project .
*
* Copyright 2016 - 2017 IBM Corporation
*
* Licensed under the Apache License , Version 2.0 ( the " License " ) ;
* you may not use this file except in compliance with the License .
* You may obtain a copy of the License at
*
* -2.0
*
* Unless required by applicable law or agreed to in writing , software
* distributed under the License is distributed on an " AS IS " BASIS ,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
* See the License for the specific language governing permissions and
* limitations under the License .
* This file is part of the Watson Conversation Service OCaml API project.
*
* Copyright 2016-2017 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* -2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*)
open Cnl_t
open Cnl_util
open Cnl_builder
(** Empty rule: a rule skeleton whose event, condition and action list are
    all still undefined holes, with fresh node indices assigned by
    [index_rule]. *)
let empty_init () =
  let evnt = mk_evnt_undefined () in
  let cond = mk_cond_undefined () in
  let actns = mk_actns_undefined () in
  let r0 = mk_rule_f evnt cond actns in
  index_rule r0
(* Overridable initial rule: [set_rule_init] installs a specific rule to
   start from; [rule_init ()] returns it, falling back to the empty
   skeleton when none was installed. *)
let rule_init =
  ref None
let set_rule_init r =
  rule_init := Some r
let rule_init () =
  begin match !rule_init with
  | None -> empty_init ()
  | Some r -> r
  end
(** Partial ASTs *)
(* Hole-filled node builders: each returns a desc whose sub-expressions are
   still undefined, used as the starting point when a new condition or
   action of the given shape is introduced. *)
let cond_init () : cnl_cond_desc =
  C_condition (mk_expr_undefined ())
let actns_init () : cnl_actns_desc =
  { list_elems = [ mk_actn_undefined () ];
    list_closed = N_undefined None; }
let print_init () : cnl_actn_desc =
  A_print (mk_expr_undefined ())
let emit_init () : cnl_actn_desc =
  A_emit (mk_expr_undefined ())
(* [vname] is the name of the variable being defined. *)
let define_init vname : cnl_actn_desc =
  A_define (vname,mk_expr_undefined ())
(* [vname]/[fname] are the target variable and field of the assignment. *)
let set_init vname fname : cnl_actn_desc =
  A_set (vname,fname,mk_expr_undefined ())
(**
when a transaction occurs, called 'the transaction'
if
the balance of 'the account' is less than 'Average Risky Account Balance'
then
print "aggregate " + 'Average Risky Account Balance' ;
print "balance" + the balance of 'the account';
define 'the exception' as a new exception where
the reason is "The account for " + the email of the customer of 'the account' + " is risky and below the average balance for risky accounts." ,
the code is "R04" ;
emit a new authorization response where
the account is 'the account' ,
the message is "R04: the account for " + the email of the customer of 'the account' + " is risky and below the average balance for risky accounts.",
the exception is 'the exception' ,
the transaction is 'the transaction' ;
*)
(* Sample rule 1: the risky-account authorization rule spelled out in the
   comment above.  [define1] binds 'the exception'; [emit1] emits the
   authorization response event. *)
let define1 =
  mk_define_f "the exception"
    (mk_new_event_f "exception"
       [("reason",mk_concat_list_f
           [mk_string_f "The account for ";
            mk_get_f (mk_get_var_f "customer" "the account") "email";
            mk_string_f " is risky and below the average balance for risky accounts."]);
        ("code",mk_string_f "R04")])
let emit1 =
  mk_emit_f
    (mk_new_event_f "authorization response"
       [("account",mk_var_f "the account");
        ("message", mk_concat_list_f
           [mk_string_f "R04: the account for ";
            (* Fixed: the arguments to [mk_get_var_f] were swapped
               ("the account" "customer"); every other call site and the
               rule's specification use field name first, i.e. "the email
               of the customer of 'the account'" as in [define1]. *)
            mk_get_f (mk_get_var_f "customer" "the account") "email";
            mk_string_f " is risky and below the average balance for risky accounts."]);
        ("exception",mk_var_f "the exception");
        ("transaction",mk_var_f "the transaction")])
(* Event, condition and action-list parts of the rule. *)
let when1 = ("transaction",Some "the transaction")
let cond1 = C_condition (mk_lt_f (mk_get_var_f "balance" "the account") (mk_expr_f (E_var "Average Risky Account Balance")))
let then1 =
  { list_elems = [mk_print_f (mk_concat_f (mk_string_f "aggregate ") (mk_var_f "Average Risky Account Balance"));
                  mk_print_f (mk_concat_f (mk_string_f "balance") (mk_get_var_f "balance" "the account"));
                  define1;
                  emit1;];
    list_closed = N_filled (None, ()); }
let rule1 = mk_rule_init when1 cond1 then1
*
when an airplane event occurs
then
define rpmAverage as (
the average engine rpm of ' the airplane ' +
the rpm of the engine of this airplane event ) / 2 ;
define pressureAverage as (
the average engine pressure ratio of ' the airplane ' +
the pressure ratio of the engine of this airplane event ) / 2 ;
set the average engine rpm of ' the airplane ' to rpmAverage ;
set the average engine pressure ratio of ' the airplane ' to
pressureAverage ;
when an airplane event occurs
then
define rpmAverage as (
the average engine rpm of 'the airplane' +
the rpm of the engine of this airplane event) / 2;
define pressureAverage as (
the average engine pressure ratio of 'the airplane' +
the pressure ratio of the engine of this airplane event) / 2;
set the average engine rpm of 'the airplane' to rpmAverage;
set the average engine pressure ratio of 'the airplane' to
pressureAverage;
*)
(* Sample rule 2: the airplane running-average rule described in the
   comment above.  [define21]/[define22] compute the new averages. *)
let define21 =
  mk_define_f
    "rpmAverage"
    (mk_div_f
       (mk_plus_f
          (mk_get_var_f "average engine rpm" "the airplane")
          (mk_get_f (mk_get_f (mk_this_f "airplane") "engine") "rpm"))
       (mk_int_f 2))
let define22 =
  mk_define_f
    "pressureAverage"
    (mk_div_f
       (mk_plus_f
          (mk_get_var_f "average engine pressure ratio" "the airplane")
          (mk_get_f (mk_get_f (mk_this_f "airplane") "engine") "pressure ratio"))
       (mk_int_f 2))
(* Set-action descriptors and the corresponding set actions. *)
let setdesc21 = mk_set_desc_f "average engine rpm" "the airplane" (mk_var_f "rpmAverage")
let setdesc22 = mk_set_desc_f "average engine pressure ratio" "the airplane" (mk_var_f "pressureAverage")
let set21 = mk_set_f "average engine rpm" "the airplane" (mk_var_f "rpmAverage")
let set22 = mk_set_f "average engine pressure ratio" "the airplane" (mk_var_f "pressureAverage")
(* The full rule: no condition, four actions in order. *)
let rule2 =
  mk_rule_init
    ("airplane", None)
    C_no_condition
    { list_elems = [define21;
                    define22;
                    set21;
                    set22;];
      list_closed = N_filled (None, ()); }
(** Table of samples *)
(* Name -> rule associations used to look samples up by name.  [rule1] is
   stored both as freshly indexed ("rule1un1") and converted with
   [rule_f_to_a] ("rule1") -- presumably filled-to-accepted nodes; confirm
   against [Cnl_util]. *)
let cnl_samples =
  [ ("rule_init", rule_init ());
    ("rule1un1", index_rule rule1);
    ("rule1", rule_f_to_a rule1);
    ("rule2", rule2); ]
(* Sample expressions *)
(* Placeholder sample expression; the commented-out JSON below sketches the
   intended AST (the comparison "the balance of 'the account' is less than
   'Average Risky Account Balance'", cf. [cond1]). *)
let expr1 = mk_expr_undefined () (* XXX TODO XXX *)
(* "expr": { *)
(* "expr_desc": [ *)
(* "P_confirmed", *)
(* [ *)
(* "E_binop", *)
(* [ *)
(* "Op_lt" *)
(* ], *)
(* { *)
(* "expr_desc": [ *)
(* "P_confirmed", *)
(* [ *)
(* "E_prop", *)
(* { *)
(* "expr_desc": [ *)
(* "P_confirmed", *)
(* [ *)
(* "E_variable", *)
(* "balance" *)
(* ] *)
(* ] *)
(* }, *)
(* "the account" *)
(* ] *)
(* ] *)
(* }, *)
(* { *)
(* "expr_desc": [ *)
(* "P_confirmed", *)
(* [ *)
" E_variable " ,
" Average Risky Account Balance "
(* ] *)
(* ] *)
(* } *)
(* ] *)
(* ] *)
(* } *)
(* Placeholder sample expression; the commented-out JSON below sketches the
   intended AST (a string concatenation involving the balance of
   'the account'). *)
let expr2 = mk_expr_undefined () (* XXX TODO XXX *)
(* "expr": { *)
(* "expr_desc": [ *)
(* "P_filled", *)
(* 13, *)
(* [ *)
(* "E_binop", *)
(* [ *)
(* "Op_concat" *)
(* ], *)
(* { *)
(* "expr_desc": [ *)
(* "P_filled", *)
(* 14, *)
(* [ *)
(* "E_lit", *)
(* [ *)
" L_string " ,
(* "balance" *)
(* ] *)
(* ] *)
(* ] *)
(* }, *)
(* { *)
(* "expr_desc": [ *)
(* "P_filled", *)
(* 15, *)
(* [ *)
(* "E_prop", *)
(* { *)
(* "expr_desc": [ *)
(* "P_filled", *)
(* 16, *)
(* [ *)
(* "E_variable", *)
(* "balance" *)
(* ] *)
(* ] *)
(* }, *)
(* "the account" *)
(* ] *)
(* ] *)
(* } *)
(* ] *)
(* ] *)
(* } *)
| null | https://raw.githubusercontent.com/IBM/wcs-ocaml/b237b7057f44caa09d36e466be015e2bc3173dd5/examples/rulebot/src/cnl_samples.ml | ocaml | * Empty rule
* Partial ASTs
*
when a transaction occurs, called 'the transaction'
if
the balance of 'the account' is less than 'Average Risky Account Balance'
then
print "aggregate " + 'Average Risky Account Balance' ;
print "balance" + the balance of 'the account';
define 'the exception' as a new exception where
the reason is "The account for " + the email of the customer of 'the account' + " is risky and below the average balance for risky accounts." ,
the code is "R04" ;
emit a new authorization response where
the account is 'the account' ,
the message is "R04: the account for " + the email of the customer of 'the account' + " is risky and below the average balance for risky accounts.",
the exception is 'the exception' ,
the transaction is 'the transaction' ;
* Table of samples
Sample expressions
XXX TODO XXX
"expr": {
"expr_desc": [
"P_confirmed",
[
"E_binop",
[
"Op_lt"
],
{
"expr_desc": [
"P_confirmed",
[
"E_prop",
{
"expr_desc": [
"P_confirmed",
[
"E_variable",
"balance"
]
]
},
"the account"
]
]
},
{
"expr_desc": [
"P_confirmed",
[
]
]
}
]
]
}
XXX TODO XXX
"expr": {
"expr_desc": [
"P_filled",
13,
[
"E_binop",
[
"Op_concat"
],
{
"expr_desc": [
"P_filled",
14,
[
"E_lit",
[
"balance"
]
]
]
},
{
"expr_desc": [
"P_filled",
15,
[
"E_prop",
{
"expr_desc": [
"P_filled",
16,
[
"E_variable",
"balance"
]
]
},
"the account"
]
]
}
]
]
} |
* This file is part of the Watson Conversation Service OCaml API project .
*
* Copyright 2016 - 2017 IBM Corporation
*
* Licensed under the Apache License , Version 2.0 ( the " License " ) ;
* you may not use this file except in compliance with the License .
* You may obtain a copy of the License at
*
* -2.0
*
* Unless required by applicable law or agreed to in writing , software
* distributed under the License is distributed on an " AS IS " BASIS ,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
* See the License for the specific language governing permissions and
* limitations under the License .
* This file is part of the Watson Conversation Service OCaml API project.
*
* Copyright 2016-2017 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* -2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*)
open Cnl_t
open Cnl_util
open Cnl_builder
let empty_init () =
let evnt = mk_evnt_undefined () in
let cond = mk_cond_undefined () in
let actns = mk_actns_undefined () in
let r0 = mk_rule_f evnt cond actns in
index_rule r0
let rule_init =
ref None
let set_rule_init r =
rule_init := Some r
let rule_init () =
begin match !rule_init with
| None -> empty_init ()
| Some r -> r
end
(* Placeholder builders: each returns a node whose expression part is
   still an "undefined" hole to be filled in later. *)

(* A condition wrapping a single hole expression. *)
let cond_init () : cnl_cond_desc =
  C_condition (mk_expr_undefined ())
(* An action list with one hole action; the list is not yet closed. *)
let actns_init () : cnl_actns_desc =
  { list_elems = [ mk_actn_undefined () ];
    list_closed = N_undefined None; }
(* A print action with a hole expression. *)
let print_init () : cnl_actn_desc =
  A_print (mk_expr_undefined ())
(* An emit action with a hole expression. *)
let emit_init () : cnl_actn_desc =
  A_emit (mk_expr_undefined ())
(* Definition of variable [vname] with a hole right-hand side. *)
let define_init vname : cnl_actn_desc =
  A_define (vname,mk_expr_undefined ())
(* Set field [fname] of [vname] to a hole expression. *)
let set_init vname fname : cnl_actn_desc =
  A_set (vname,fname,mk_expr_undefined ())
(* ------------------------------------------------------------------ *)
(* Sample rule 1: risky-account authorization example.                *)
(* ------------------------------------------------------------------ *)

(* Define "the exception": a new "exception" event whose "reason" is a
   concatenated message and whose "code" is "R04". *)
let define1 =
  mk_define_f "the exception"
    (mk_new_event_f "exception"
       [("reason",mk_concat_list_f
           [mk_string_f "The account for ";
            mk_get_f (mk_get_var_f "customer" "the account") "email";
            mk_string_f " is risky and below the average balance for risky accounts."]);
        ("code",mk_string_f "R04")])
(* Emit an "authorization response" event referencing the account, a
   human-readable message, the exception defined above and the
   transaction. *)
let emit1 =
  mk_emit_f
    (mk_new_event_f "authorization response"
       [("account",mk_var_f "the account");
        ("message", mk_concat_list_f
           [mk_string_f "R04: the account for ";
            mk_get_f (mk_get_var_f "the account" "customer") "email";
            mk_string_f " is risky and below the average balance for risky accounts."]);
        ("exception",mk_var_f "the exception");
        ("transaction",mk_var_f "the transaction")])
(* Trigger: a "transaction" event, bound to "the transaction". *)
let when1 = ("transaction",Some "the transaction")
(* Guard: the account's balance compared (mk_lt_f) against the
   "Average Risky Account Balance" variable. *)
let cond1 = C_condition (mk_lt_f (mk_get_var_f "balance" "the account") (mk_expr_f (E_var "Average Risky Account Balance")))
(* Action list: two debug prints, then the define and the emit; the
   list is marked closed (N_filled). *)
let then1 =
  { list_elems = [mk_print_f (mk_concat_f (mk_string_f "aggregate ") (mk_var_f "Average Risky Account Balance"));
                  mk_print_f (mk_concat_f (mk_string_f "balance") (mk_get_var_f "balance" "the account"));
                  define1;
                  emit1;];
    list_closed = N_filled (None, ()); }
(* The assembled sample rule. *)
let rule1 = mk_rule_init when1 cond1 then1
(*
when an airplane event occurs
then
  define rpmAverage as (
    the average engine rpm of 'the airplane' +
    the rpm of the engine of this airplane event) / 2;
  define pressureAverage as (
    the average engine pressure ratio of 'the airplane' +
    the pressure ratio of the engine of this airplane event) / 2;
  set the average engine rpm of 'the airplane' to rpmAverage;
  set the average engine pressure ratio of 'the airplane' to
    pressureAverage;
when an airplane event occurs
then
define rpmAverage as (
the average engine rpm of 'the airplane' +
the rpm of the engine of this airplane event) / 2;
define pressureAverage as (
the average engine pressure ratio of 'the airplane' +
the pressure ratio of the engine of this airplane event) / 2;
set the average engine rpm of 'the airplane' to rpmAverage;
set the average engine pressure ratio of 'the airplane' to
pressureAverage;
*)
(* ------------------------------------------------------------------ *)
(* Sample rule 2: the airplane running-average example, transcribed   *)
(* from the CNL text in the comment above.                            *)
(* ------------------------------------------------------------------ *)

(* rpmAverage := (average engine rpm of 'the airplane'
                  + rpm of the engine of this airplane event) / 2 *)
let define21 =
  mk_define_f
    "rpmAverage"
    (mk_div_f
       (mk_plus_f
          (mk_get_var_f "average engine rpm" "the airplane")
          (mk_get_f (mk_get_f (mk_this_f "airplane") "engine") "rpm"))
       (mk_int_f 2))
(* pressureAverage := the analogous running average of the engine's
   pressure ratio. *)
let define22 =
  mk_define_f
    "pressureAverage"
    (mk_div_f
       (mk_plus_f
          (mk_get_var_f "average engine pressure ratio" "the airplane")
          (mk_get_f (mk_get_f (mk_this_f "airplane") "engine") "pressure ratio"))
       (mk_int_f 2))
(* Set-actions writing the averages back.  The *_desc variants build
   descriptor values and the others full actions (presumably -- the
   builders live in Cnl_builder; confirm there). *)
let setdesc21 = mk_set_desc_f "average engine rpm" "the airplane" (mk_var_f "rpmAverage")
let setdesc22 = mk_set_desc_f "average engine pressure ratio" "the airplane" (mk_var_f "pressureAverage")
let set21 = mk_set_f "average engine rpm" "the airplane" (mk_var_f "rpmAverage")
let set22 = mk_set_f "average engine pressure ratio" "the airplane" (mk_var_f "pressureAverage")
(* "when an airplane event occurs" with no condition; the action list
   is closed. *)
let rule2 =
  mk_rule_init
    ("airplane", None)
    C_no_condition
    { list_elems = [define21;
                    define22;
                    set21;
                    set22;];
      list_closed = N_filled (None, ()); }
(* Named samples.  "rule1un1" is rule1 re-indexed here; "rule1" is the
   same rule converted with [rule_f_to_a] (see Cnl_builder/Cnl_util). *)
let cnl_samples =
  [ ("rule_init", rule_init ());
    ("rule1un1", index_rule rule1);
    ("rule1", rule_f_to_a rule1);
    ("rule2", rule2); ]
" E_variable " ,
" Average Risky Account Balance "
" L_string " ,
|
1489732457fcb2052433f64ffc438646950e8e1e5deaad3b775225b273821ee4 | namin/blond | mit.scm | (define add1 (lambda (n) (+ n 1)))
;; Small compatibility helpers (MIT Scheme shims).
(define (sub1 n) (- n 1))                    ; predecessor
(define (atom? x) (not (pair? x)))           ; true for any non-pair
(define (flush-output-port) (flush-output))  ; forwards to flush-output
| null | https://raw.githubusercontent.com/namin/blond/7a92a1b005958b17b3436530ac90acb4726bdd79/mit.scm | scheme | (define add1 (lambda (n) (+ n 1)))
(define sub1 (lambda (n) (- n 1)))
(define atom? (lambda (x) (not (pair? x))))
(define (flush-output-port) (flush-output))
| |
37664296671caee0f45c9d7751802622069005f98d2eafefc8bcead389be75a1 | Palmik/wai-sockjs | Session.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE GADTs #-}
module Network.Sock.Types.Session
( Session(..)
, SessionStatus(..)
, SessionID
) where
------------------------------------------------------------------------------
import Control.Concurrent.Lifted (ThreadId)
import Control.Concurrent.MVar.Lifted (MVar)
import Control.Concurrent.STM.TMChan (TMChan)
import Control.Concurrent.Timer.Lifted (Timer)
import Control.Monad.Trans.Control (MonadBaseControl)
------------------------------------------------------------------------------
import qualified Data.ByteString.Lazy as BL (ByteString)
import qualified Data.Conduit as C (ResourceT)
import qualified Data.Text as TS (Text)
------------------------------------------------------------------------------
import Network.Sock.Types.Protocol
------------------------------------------------------------------------------
-- | Session
data Session where
Session ::
{ sessionID :: SessionID
^ The status can be " fresh " , " opened " , " closed " or in case the MVar is empty , it should be interpreted as " currently being used"/"waiting " .
, sessionIncomingBuffer :: TMChan BL.ByteString -- ^ This buffer is filled with incoming messages (parsed from request body or from WS' receive).
, sessionOutgoingBuffer :: TMChan Protocol -- ^ This buffer is filled with outgoing messages which are then sent (as a response or with WS' sendSink).
^ If the MVar is empty , some thread is already trying to fork application .
-- If it contains Nothing, noone is forking nor has anyone forked yet.
-- If it contains Just a value, application was already forked.
-- * Timers. They are restarted with every request made by the client to this session.
, sessionTimeoutTimer : : Timer ( C.ResourceT IO ) -- ^ Timer that every 5 seconds closes the session . If the session is already closed , it removes it from the session map .
, sessionHeartbeatTimer : : Timer ( C.ResourceT IO ) -- ^ Timer that every 25 seconds sends a heartbeat frame .
} -> Session
| SessionID
type SessionID = TS.Text
| SessionStatus
data SessionStatus
= SessionFresh -- ^ Right after creation, Session is "Fresh"
^ Right after we send opening frame , Session is " Opened " . We also start the timeout & heartbeat timer at this point .
| SessionClosed Int BL.ByteString -- ^ Right after we send closing frame, Session is "Closed".
| null | https://raw.githubusercontent.com/Palmik/wai-sockjs/d1037cb00450a362b7e593a76d6257d06ecb2405/src/Network/Sock/Types/Session.hs | haskell | # LANGUAGE GADTs #
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Session
^ This buffer is filled with incoming messages (parsed from request body or from WS' receive).
^ This buffer is filled with outgoing messages which are then sent (as a response or with WS' sendSink).
If it contains Nothing, noone is forking nor has anyone forked yet.
If it contains Just a value, application was already forked.
* Timers. They are restarted with every request made by the client to this session.
^ Timer that every 5 seconds closes the session . If the session is already closed , it removes it from the session map .
^ Timer that every 25 seconds sends a heartbeat frame .
^ Right after creation, Session is "Fresh"
^ Right after we send closing frame, Session is "Closed". | # LANGUAGE FlexibleContexts #
module Network.Sock.Types.Session
( Session(..)
, SessionStatus(..)
, SessionID
) where
import Control.Concurrent.Lifted (ThreadId)
import Control.Concurrent.MVar.Lifted (MVar)
import Control.Concurrent.STM.TMChan (TMChan)
import Control.Concurrent.Timer.Lifted (Timer)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.ByteString.Lazy as BL (ByteString)
import qualified Data.Conduit as C (ResourceT)
import qualified Data.Text as TS (Text)
import Network.Sock.Types.Protocol
data Session where
Session ::
{ sessionID :: SessionID
^ The status can be " fresh " , " opened " , " closed " or in case the MVar is empty , it should be interpreted as " currently being used"/"waiting " .
^ If the MVar is empty , some thread is already trying to fork application .
} -> Session
| SessionID
type SessionID = TS.Text
| SessionStatus
data SessionStatus
^ Right after we send opening frame , Session is " Opened " . We also start the timeout & heartbeat timer at this point .
|
bb9188df504da36b21799f7fb3135ad9817078b1928452d115ffcb3af533ddd7 | sweirich/trellys | SCC.hs |
module SCC where
import DepthFirstSearch
import Data.List(findIndex,union,(\\),nub,partition)
topSort :: (Show a,Eq a) => (d -> [a]) -> [(a,d)] -> [[(a,d)]]
topSort depends pairs = topSortP (==) depends pairs
topSortP :: Show a => (a -> p -> Bool) -> (d -> [a]) -> [(p,d)] -> [[(p,d)]]
topSortP exports depends pairs = map (map f) groups
where zs = zipWith g pairs [0..] -- [(("even",0,d1),(("odd"),1,d2),...]
g (p,d) n = (p,n,depends d)
getindex " odd " --- > 1
getindex [] s = []
getindex ((p,n,_):xs) s = if exports s p then n:getindex xs s else getindex xs s
edges1 = concat[ map (\ e -> (n,e)) es | (a,n,ws) <- zs, es <- map (getindex zs) ws ]
groups = scc2 (buildG (0,length pairs -1) edges1)
f n = pairs !! n -- f 1 ---> (("odd"),d2)
topSortQ :: (Eq a, Show a) => (b -> [a]) -> (b -> [a]) -> [b] -> [[b]]
topSortQ definef dependf items = map (map f) (topSortS definef dependf items)
where f (x,_,_) = x
topSortS :: Eq a => (t -> [a]) -> (t -> [a]) -> [t] -> [[(t, [a], [a])]]
topSortS exports depends pairs = map (map f) groups
where zs = zipWith g pairs [0..] -- [(("even",0,d1),(("odd"),1,d2),...]
g d n = (exports d,n,depends d)
getindex " odd " --- > [ 1 ]
getindex [] s = []
getindex ((exs,n,_):xs) s = if elem s exs then n:getindex xs s else getindex xs s
edges1 = concat[ map (\ e -> (n,e)) es | (a,n,ws) <- zs, es <- map (getindex zs) ws ]
groups = scc2 (buildG (0,length pairs -1) edges1)
-- f n = pairs !! n -- f 1 ---> (("odd"),d2)
f n = (pairs !! n,defines,depends) where (defines,_,depends) = zs !! n
topSortR :: (Eq a, Show a) => (b -> ([a],[a])) -> [b] -> ([[b]],[([a],[a])])
topSortR deps bs = (map (map f) groups,map project zs)
where zs = zipWith g bs [0..]
g d n = let (exports,depends) = deps d in (exports,n,depends)
getindex " odd " --- > 1
getindex [] s = []
getindex ( ( exs , n,_):xs ) s = if elem s exs then [ n ] else getindex xs s
getindex ((exs,n,_):xs) s = if elem s exs then n:getindex xs s else getindex xs s
edges1 = concat[ map (\ e -> (n,e)) es | (a,n,ws) <- zs, es <- map (getindex zs) ws ]
groups = scc2 (buildG (0,length bs -1) edges1)
f n = bs !! n -- f 1 ---> (("odd"),d2)
project (exs,n,deps) = (exs,deps)
pairs :: [(String,Int)]
pairs = [("odd",1),("even",2),("id",3),("map",4),("fold",5)]
depends :: Int -> [String]
depends 1 = ["even","lib"]
depends 2 = ["odd","id"]
depends 3 = []
depends 4 = ["lib"]
depends 5 = ["map"]
ans = topSort depends pairs
exports (s,n) = [s]
ans2 = topSortQ exports (depends . snd) pairs
ans3 = topSortS exports (depends . snd) pairs
| null | https://raw.githubusercontent.com/sweirich/trellys/63ea89d8fa09929c23504665c55a3d909fe047c5/nax/src/SCC.hs | haskell | [(("even",0,d1),(("odd"),1,d2),...]
- > 1
f 1 ---> (("odd"),d2)
[(("even",0,d1),(("odd"),1,d2),...]
- > [ 1 ]
f n = pairs !! n -- f 1 ---> (("odd"),d2)
- > 1
f 1 ---> (("odd"),d2)
|
module SCC where
import DepthFirstSearch
import Data.List(findIndex,union,(\\),nub,partition)
topSort :: (Show a,Eq a) => (d -> [a]) -> [(a,d)] -> [[(a,d)]]
topSort depends pairs = topSortP (==) depends pairs
topSortP :: Show a => (a -> p -> Bool) -> (d -> [a]) -> [(p,d)] -> [[(p,d)]]
topSortP exports depends pairs = map (map f) groups
g (p,d) n = (p,n,depends d)
getindex [] s = []
getindex ((p,n,_):xs) s = if exports s p then n:getindex xs s else getindex xs s
edges1 = concat[ map (\ e -> (n,e)) es | (a,n,ws) <- zs, es <- map (getindex zs) ws ]
groups = scc2 (buildG (0,length pairs -1) edges1)
topSortQ :: (Eq a, Show a) => (b -> [a]) -> (b -> [a]) -> [b] -> [[b]]
topSortQ definef dependf items = map (map f) (topSortS definef dependf items)
where f (x,_,_) = x
topSortS :: Eq a => (t -> [a]) -> (t -> [a]) -> [t] -> [[(t, [a], [a])]]
topSortS exports depends pairs = map (map f) groups
g d n = (exports d,n,depends d)
getindex [] s = []
getindex ((exs,n,_):xs) s = if elem s exs then n:getindex xs s else getindex xs s
edges1 = concat[ map (\ e -> (n,e)) es | (a,n,ws) <- zs, es <- map (getindex zs) ws ]
groups = scc2 (buildG (0,length pairs -1) edges1)
f n = (pairs !! n,defines,depends) where (defines,_,depends) = zs !! n
topSortR :: (Eq a, Show a) => (b -> ([a],[a])) -> [b] -> ([[b]],[([a],[a])])
topSortR deps bs = (map (map f) groups,map project zs)
where zs = zipWith g bs [0..]
g d n = let (exports,depends) = deps d in (exports,n,depends)
getindex [] s = []
getindex ( ( exs , n,_):xs ) s = if elem s exs then [ n ] else getindex xs s
getindex ((exs,n,_):xs) s = if elem s exs then n:getindex xs s else getindex xs s
edges1 = concat[ map (\ e -> (n,e)) es | (a,n,ws) <- zs, es <- map (getindex zs) ws ]
groups = scc2 (buildG (0,length bs -1) edges1)
project (exs,n,deps) = (exs,deps)
pairs :: [(String,Int)]
pairs = [("odd",1),("even",2),("id",3),("map",4),("fold",5)]
depends :: Int -> [String]
depends 1 = ["even","lib"]
depends 2 = ["odd","id"]
depends 3 = []
depends 4 = ["lib"]
depends 5 = ["map"]
ans = topSort depends pairs
exports (s,n) = [s]
ans2 = topSortQ exports (depends . snd) pairs
ans3 = topSortS exports (depends . snd) pairs
|
e3dea34ee9ceba31674c89c278891368b644469096da17cd9694bc9701f7aaab | ocaml-multicore/eio | linux_backend.enabled.ml | let run ~fallback fn = Eio_linux.run ~fallback (fun env -> fn (env :> Eio.Stdenv.t))
| null | https://raw.githubusercontent.com/ocaml-multicore/eio/554179071b7bd239f8ff8ba38bcfb8286c792bc8/lib_main/linux_backend.enabled.ml | ocaml | let run ~fallback fn = Eio_linux.run ~fallback (fun env -> fn (env :> Eio.Stdenv.t))
| |
e1aa79c03dfb830e7a0ebef3cc1bdaa6d24cf2257ef5170c14624ee236fecc1b | mbenke/zpf2013 | Mon.hs | module Mon where
infixl 5 <>
-- | A monoid-style class: 'm1' is the unit element and '(<>)' the
-- binary operation, subject to the laws listed below.
class Mon m where
  m1   :: m             -- ^ unit element
  (<>) :: m -> m -> m   -- ^ binary operation, left-associative (infixl 5)
  -- ** Properties:
  -- * leftUnit x = m1 <> x == x
  -- * rightUnit x = x <> m1 == x
  -- * assoc x y z = (x<>y)<>z == x<>(y<>z)
| null | https://raw.githubusercontent.com/mbenke/zpf2013/85f32747e17f07a74e1c3cb064b1d6acaca3f2f0/Labs/01/Reg/Mon.hs | haskell | ** Properties:
* leftUnit x = m1 <> x == x
* rightUnit x = x <> m1 == x
* assoc x y z = (x<>y)<>z == x<>(y<>z) | module Mon where
infixl 5 <>
class Mon m where
m1 :: m
(<>) :: m -> m -> m
|
b5cb8b3322bb9e0ee3868f261c6ad4e6ec78215f8a59b2e68ee54d2ac8952c7d | bhauman/advent-of-clojure | day20.clj | (ns advent-2015.day20)
;; happy to say that I did this without looking at the forums
;; spent a decent amount of time understanding this page:
;;
;; Sum of the geometric series p^0 + p^1 + ... + p^n -- the divisor-sum
;; contribution of a prime p that appears n times in a factorisation.
;; Uses exact integer arithmetic (Horner's rule) instead of the original
;; java.lang.Math/pow, which returned doubles and loses precision once
;; p^(n+1) exceeds 2^53.
(defn value* [p n]
  (reduce (fn [acc _] (inc (* acc p))) 1 (range n)))
;; Sum of all divisors of the number whose prime factorisation is given
;; as a flat list (e.g. [2 2 3] for 12): multiply together the
;; per-prime geometric sums computed by value*.
(defn sum-of-divisors [prime-factors]
  (reduce *
          (for [[p n] (frequencies prime-factors)]
            (value* p n))))
;; I searched prime combinations by hand maximizing the (/ sum-of-divisors house-number)
;; and getting a feel for the search space
;; Hand-driven search: evaluate one candidate prime factorisation d and
;; report its house number, divisor sum, their ratio, the distance from
;; the 3600000 target, and whether it improves on the best house found.
(let [d [2 2 2 2 3 3 3 5 5 7 11]
      house-number (apply * d)
      sum-of-presents (sum-of-divisors d)]
  [house-number
   sum-of-presents
   (/ sum-of-presents house-number)
   (java.lang.Math/abs (- sum-of-presents 3600000))
   (< house-number 900900)]) ;; 900900 is the last minimum I found
;; part 2
;; brute force: I know the bounds now and understand the
;; character of the space a bit better
;; Divisors of n relevant to the part-2 rule: n itself, plus every
;; divisor d in [16632, n/2] that also satisfies 50*d >= n.
(defn divisors [n]
  (cons n
        (for [d (range 16632 (inc (/ n 2)))
              :when (and (>= (* 50 d) n)
                         (zero? (rem n d)))]
          d)))
#_(first (filter #(>= (* 11 (apply + (divisors %)))
36000000)
(range 831600 900900 5)))
| null | https://raw.githubusercontent.com/bhauman/advent-of-clojure/856763baf45bf7bf452ffd304dc1b89f9bc879a6/src/advent-2015/day20.clj | clojure | happy to say that I did this without looking at the forums
spent a decent amount of time understanding this page:
I searched prime combinations by hand maximizing the (/ sum-of-divisors house-number)
and getting a feel for the search space
900900 is the last minimum I found
brute force: I know the bounds now and understand the
character of the space a bit better | (ns advent-2015.day20)
(defn value* [p n]
(/ (dec (java.lang.Math/pow p (inc n)))
(dec p)))
(defn sum-of-divisors [prime-factors]
(apply * (map #(apply value* %) (frequencies prime-factors))))
(let [d [2 2 2 2 3 3 3 5 5 7 11]
house-number (apply * d)
sum-of-presents (sum-of-divisors d)]
[house-number
sum-of-presents
(/ sum-of-presents house-number)
(java.lang.Math/abs (- sum-of-presents 3600000))
part 2
(defn divisors [n]
(cons n (filter #(and
(>= (* 50 %) n)
(zero? (rem n %)))
(range 16632 (inc (/ n 2))))))
#_(first (filter #(>= (* 11 (apply + (divisors %)))
36000000)
(range 831600 900900 5)))
|
c75a3763743ded0094613841ae0798b51a709a1f5d0eddecec8932929f17abbb | amnh/poy5 | utl.mli | POY 5.1.1 . A phylogenetic analysis program using Dynamic Homologies .
Copyright ( C ) 2014 , , , Ward Wheeler ,
and the American Museum of Natural History .
(* *)
(* This program is free software; you can redistribute it and/or modify *)
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
(* (at your option) any later version. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston ,
USA
(* Miscellaneous utilities shared across POY.  Comments below are
   reconstructed from names and types only; confirm details against
   utl.ml before relying on them. *)
val large_int : int
val break_code : int -> int array
val max_seq_len : int

(* Global counters handing out fresh reference codes. *)
val gen_chrom_ref_code : int ref
val gen_seq_ref_code : int ref
val gen_genome_ref_code : int ref
val get_new_chrom_ref_code : unit -> int
val get_new_genome_ref_code : unit -> int
val get_new_seq_ref_code : unit -> int

(** [deref o] extracts the payload of [o]; behaviour on [None] is
    defined in utl.ml (likely an exception) -- confirm before use. *)
val deref : 'a option -> 'a
val compare_non_dec_list : int list -> int list -> bool
val get_sum_arr : int array -> int -> int -> int
val invert_subarr : 'a array -> int -> int -> unit
val binary_index_search : int array -> int -> int
val find_index : 'a array -> 'b -> ('b -> 'a -> int) -> int

(* Common elements of two/three arrays under a caller-supplied
   comparison. *)
val get_common :
'a array -> 'a array -> ('a -> 'a -> int) -> 'a array * 'a array
val get_common3 :
'a array -> 'a array -> 'a array -> ('a -> 'a -> int) -> 'a array * 'a array * 'a array
val insert : 'a array -> int -> 'a -> 'a array
val swap_item : int -> int -> 'a array -> 'a array
val bigger_int : int -> int -> int
val trim : string -> string

(* Numeric conversions between array/matrix representations. *)
val float_to_int_mat : float array array -> int array array
val int_to_int32_arr : int array -> int32 array
val int_to_int32_mat : int array array -> int32 array array

(* Debug printers. *)
val printIntArr : int array -> unit
val printIntArrWithIdx : int array -> unit
val printIntMat : int array array -> unit
val printIntMatWithIdx : int array array -> unit
(** printIntList prints a list of int, with a newline. *)
val printIntList : int list -> unit
(** printIntList2 prints a list of int, without a newline. *)
val printIntList2 : int list -> unit
val printIntListList : int list list -> unit
val printIntListList2 : int list list -> unit
val printIntListListList : int list list list -> unit
val printIntListToFile : out_channel -> int list -> unit
val create_ls : int -> 'a -> 'a list
val get_neg_rev_intlst : int list -> int list
val get_abs_intlst : int list -> int list
val remove_nth : ?acc:'a list -> 'a list -> int -> 'a * 'a list
val insert_arr : int array -> int array -> int -> int array
val pairwisep : ('a -> 'a -> bool) -> 'a list -> bool
val get_k_random_elem : 'a list -> int -> 'a list
val isEqualArr : 'a array -> 'b array -> ('a -> 'b -> int) -> bool
val break_array : 'a array -> (int * int) list -> 'a array list
val max_arr : 'a array -> 'a
val min_arr : 'a array -> 'a
val get_dir : [> `Negative | `Positive ] -> string
val factorial :int -> int
val p_m_n : int -> int -> int

(* Aggregates over lists. *)
val get_avg_of_intlst : int list -> float
val get_min_of_lst : 'a list -> 'a
val get_avg_of_floatlst : float list -> float
| null | https://raw.githubusercontent.com/amnh/poy5/da563a2339d3fa9c0110ae86cc35fad576f728ab/src/utl.mli | ocaml |
This program is free software; you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
* printIntList print a list of int, with a newline.
* printIntList print a list of int, without a newline. | POY 5.1.1 . A phylogenetic analysis program using Dynamic Homologies .
Copyright ( C ) 2014 , , , Ward Wheeler ,
and the American Museum of Natural History .
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston ,
USA
val large_int : int
val break_code : int -> int array
val max_seq_len : int
val gen_chrom_ref_code : int ref
val gen_seq_ref_code : int ref
val gen_genome_ref_code : int ref
val get_new_chrom_ref_code : unit -> int
val get_new_genome_ref_code : unit -> int
val get_new_seq_ref_code : unit -> int
val deref : 'a option -> 'a
val compare_non_dec_list : int list -> int list -> bool
val get_sum_arr : int array -> int -> int -> int
val invert_subarr : 'a array -> int -> int -> unit
val binary_index_search : int array -> int -> int
val find_index : 'a array -> 'b -> ('b -> 'a -> int) -> int
val get_common :
'a array -> 'a array -> ('a -> 'a -> int) -> 'a array * 'a array
val get_common3 :
'a array -> 'a array -> 'a array -> ('a -> 'a -> int) -> 'a array * 'a array * 'a array
val insert : 'a array -> int -> 'a -> 'a array
val swap_item : int -> int -> 'a array -> 'a array
val bigger_int : int -> int -> int
val trim : string -> string
val float_to_int_mat : float array array -> int array array
val int_to_int32_arr : int array -> int32 array
val int_to_int32_mat : int array array -> int32 array array
val printIntArr : int array -> unit
val printIntArrWithIdx : int array -> unit
val printIntMat : int array array -> unit
val printIntMatWithIdx : int array array -> unit
val printIntList : int list -> unit
val printIntList2 : int list -> unit
val printIntListList : int list list -> unit
val printIntListList2 : int list list -> unit
val printIntListListList : int list list list -> unit
val printIntListToFile : out_channel -> int list -> unit
val create_ls : int -> 'a -> 'a list
val get_neg_rev_intlst : int list -> int list
val get_abs_intlst : int list -> int list
val remove_nth : ?acc:'a list -> 'a list -> int -> 'a * 'a list
val insert_arr : int array -> int array -> int -> int array
val pairwisep : ('a -> 'a -> bool) -> 'a list -> bool
val get_k_random_elem : 'a list -> int -> 'a list
val isEqualArr : 'a array -> 'b array -> ('a -> 'b -> int) -> bool
val break_array : 'a array -> (int * int) list -> 'a array list
val max_arr : 'a array -> 'a
val min_arr : 'a array -> 'a
val get_dir : [> `Negative | `Positive ] -> string
val factorial :int -> int
val p_m_n : int -> int -> int
val get_avg_of_intlst : int list -> float
val get_min_of_lst : 'a list -> 'a
val get_avg_of_floatlst : float list -> float
|
a905258aaf31798028eb41d3ac4b5e180ee1bc1520e448a34e4245db324b6366 | incoherentsoftware/defect-process | Behavior.hs | module Enemy.All.Wall.Behavior
( WallEnemyBehaviorInstr(..)
, HurtType(..)
, WallEnemyBehavior(..)
, isIdleBehavior
, isWalkBehavior
, isBackWalkBehavior
, isHurtBehavior
, isFallenBehavior
, isFallenHurtBehavior
, isWallSplatBehavior
, isWallHurtBehavior
, isLaunchedBehavior
) where
import Util
-- | One-step instructions emitted for the wall enemy; Start*
-- constructors begin a state, Update* constructors advance one in
-- progress.  'Secs' payloads carry a timer value whose exact meaning
-- (elapsed vs. remaining) is defined by the interpreting module.
data WallEnemyBehaviorInstr
    = StartIdleInstr
    | UpdateIdleInstr Secs
    | StartWalkInstr
    | UpdateWalkInstr Secs
    | StartBackWalkInstr
    | UpdateBackWalkInstr Secs
    | FacePlayerInstr
    | StartAttackInstr
    | CreateAttackProjInstr
    | UpdateHurtInstr Secs HurtType
    | StartLaunchedInstr Secs
    | LaunchedHangtimeInstr Secs
    | StartFallenInstr Secs
    | UpdateFallenInstr Secs
    | StartGetUpInstr
    | StartWallSplatInstr
    | UpdateWallSplatInstr Secs
    | UpdateSpawnInstr
    | StartDeathInstr
    | SetDeadInstr
-- | Which variety of hurt state the enemy is in while taking damage;
-- used as the payload of 'HurtBehavior' and 'UpdateHurtInstr'.
data HurtType
    = StandHurt
    | AirHurt
    | LaunchUpHurt
    | FallenHurt
    | KnockDownHurt
    | WallHurt
    deriving Eq
-- | Overall behavior state of the wall enemy.  Timed states carry a
-- 'Secs' counter; 'HurtBehavior' additionally records how the enemy
-- was hurt.
data WallEnemyBehavior
    = SpawnBehavior
    | IdleBehavior Secs
    | WalkBehavior Secs
    | BackWalkBehavior Secs
    | HurtBehavior Secs HurtType
    | LaunchedBehavior Secs
    | AttackBehavior
    | FallenBehavior Secs
    | GetUpBehavior
    | WallSplatBehavior Secs
    | DeathBehavior
    deriving Eq
-- | True iff the behavior is 'IdleBehavior' (any duration).
isIdleBehavior :: WallEnemyBehavior -> Bool
isIdleBehavior behavior = case behavior of
    IdleBehavior _ -> True
    _              -> False
-- | True iff the behavior is 'WalkBehavior' (any duration).
isWalkBehavior :: WallEnemyBehavior -> Bool
isWalkBehavior behavior = case behavior of
    WalkBehavior _ -> True
    _              -> False
-- | True iff the behavior is 'BackWalkBehavior' (any duration).
isBackWalkBehavior :: WallEnemyBehavior -> Bool
isBackWalkBehavior behavior = case behavior of
    BackWalkBehavior _ -> True
    _                  -> False
-- | True iff the behavior is 'HurtBehavior' of any hurt type.
isHurtBehavior :: WallEnemyBehavior -> Bool
isHurtBehavior behavior = case behavior of
    HurtBehavior _ _ -> True
    _                -> False
-- | True iff the behavior is 'FallenBehavior' (any duration).
isFallenBehavior :: WallEnemyBehavior -> Bool
isFallenBehavior behavior = case behavior of
    FallenBehavior _ -> True
    _                -> False
-- | True only for a hurt state whose hurt type is 'FallenHurt'.
isFallenHurtBehavior :: WallEnemyBehavior -> Bool
isFallenHurtBehavior behavior = case behavior of
    HurtBehavior _ FallenHurt -> True
    _                         -> False
-- | True iff the behavior is 'WallSplatBehavior' (any duration).
isWallSplatBehavior :: WallEnemyBehavior -> Bool
isWallSplatBehavior behavior = case behavior of
    WallSplatBehavior _ -> True
    _                   -> False
-- | True only for a hurt state whose hurt type is 'WallHurt'.
isWallHurtBehavior :: WallEnemyBehavior -> Bool
isWallHurtBehavior behavior = case behavior of
    HurtBehavior _ WallHurt -> True
    _                       -> False
-- | True iff the behavior is 'LaunchedBehavior' (any hangtime).
isLaunchedBehavior :: WallEnemyBehavior -> Bool
isLaunchedBehavior behavior = case behavior of
    LaunchedBehavior _ -> True
    _                  -> False
| null | https://raw.githubusercontent.com/incoherentsoftware/defect-process/8797aad1d93bff5aadd7226c39a48f45cf76746e/src/Enemy/All/Wall/Behavior.hs | haskell | module Enemy.All.Wall.Behavior
( WallEnemyBehaviorInstr(..)
, HurtType(..)
, WallEnemyBehavior(..)
, isIdleBehavior
, isWalkBehavior
, isBackWalkBehavior
, isHurtBehavior
, isFallenBehavior
, isFallenHurtBehavior
, isWallSplatBehavior
, isWallHurtBehavior
, isLaunchedBehavior
) where
import Util
data WallEnemyBehaviorInstr
= StartIdleInstr
| UpdateIdleInstr Secs
| StartWalkInstr
| UpdateWalkInstr Secs
| StartBackWalkInstr
| UpdateBackWalkInstr Secs
| FacePlayerInstr
| StartAttackInstr
| CreateAttackProjInstr
| UpdateHurtInstr Secs HurtType
| StartLaunchedInstr Secs
| LaunchedHangtimeInstr Secs
| StartFallenInstr Secs
| UpdateFallenInstr Secs
| StartGetUpInstr
| StartWallSplatInstr
| UpdateWallSplatInstr Secs
| UpdateSpawnInstr
| StartDeathInstr
| SetDeadInstr
data HurtType
= StandHurt
| AirHurt
| LaunchUpHurt
| FallenHurt
| KnockDownHurt
| WallHurt
deriving Eq
data WallEnemyBehavior
= SpawnBehavior
| IdleBehavior Secs
| WalkBehavior Secs
| BackWalkBehavior Secs
| HurtBehavior Secs HurtType
| LaunchedBehavior Secs
| AttackBehavior
| FallenBehavior Secs
| GetUpBehavior
| WallSplatBehavior Secs
| DeathBehavior
deriving Eq
isIdleBehavior :: WallEnemyBehavior -> Bool
isIdleBehavior = \case
IdleBehavior _ -> True
_ -> False
isWalkBehavior :: WallEnemyBehavior -> Bool
isWalkBehavior = \case
WalkBehavior _ -> True
_ -> False
isBackWalkBehavior :: WallEnemyBehavior -> Bool
isBackWalkBehavior = \case
BackWalkBehavior _ -> True
_ -> False
isHurtBehavior :: WallEnemyBehavior -> Bool
isHurtBehavior = \case
HurtBehavior _ _ -> True
_ -> False
isFallenBehavior :: WallEnemyBehavior -> Bool
isFallenBehavior = \case
FallenBehavior _ -> True
_ -> False
isFallenHurtBehavior :: WallEnemyBehavior -> Bool
isFallenHurtBehavior = \case
HurtBehavior _ FallenHurt -> True
_ -> False
isWallSplatBehavior :: WallEnemyBehavior -> Bool
isWallSplatBehavior = \case
WallSplatBehavior _ -> True
_ -> False
isWallHurtBehavior :: WallEnemyBehavior -> Bool
isWallHurtBehavior = \case
HurtBehavior _ WallHurt -> True
_ -> False
isLaunchedBehavior :: WallEnemyBehavior -> Bool
isLaunchedBehavior = \case
LaunchedBehavior _ -> True
_ -> False
| |
4cd3719d0ed3bea450578739ab200b57d9fb16caa2b5a3a1884e16dec7434168 | basho/riak_core | riak_core_cinfo_core.erl | %% -------------------------------------------------------------------
%%
%% Riak: A lightweight, decentralized key-value store.
%%
Copyright ( c ) 2007 - 2010 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(riak_core_cinfo_core).
-export([cluster_info_init/0, cluster_info_generator_funs/0]).
( ) - > term ( )
%% @doc Required callback function for cluster_info: initialization.
%%
%% This function doesn't have to do anything.
cluster_info_init() ->
ok.
( ) - > list({string ( ) , fun ( ) } )
%% @doc Required callback function for cluster_info: return list of
%% {NameForReport, FunOfArity_1} tuples to generate ASCII/UTF-8
%% formatted reports.
cluster_info_generator_funs() ->
[
{"Riak Core config files", fun config_files/1},
{"Riak Core vnode modules", fun vnode_modules/1},
{"Riak Core ring", fun get_my_ring/1},
{"Riak Core latest ring file", fun latest_ringfile/1},
{"Riak Core active partitions", fun active_partitions/1}
].
%% Dump the list of registered vnode modules into the report.
vnode_modules(CPid) -> % CPid is the data collector's pid.
    cluster_info:format(CPid, "~p\n", [riak_core:vnode_modules()]).
%% Dump this node's in-memory copy of the ring (from the ring manager)
%% into the report.
get_my_ring(CPid) ->
    {ok, Ring} = riak_core_ring_manager:get_my_ring(),
    cluster_info:format(CPid, "~p\n", [Ring]).
%% Locate the newest ring file on disk and dump its path plus its
%% decoded (term) contents into the report.
latest_ringfile(CPid) ->
    {ok, Path} = riak_core_ring_manager:find_latest_ringfile(),
    {ok, Contents} = file:read_file(Path),
    cluster_info:format(CPid, "Latest ringfile: ~s\n", [Path]),
    cluster_info:format(CPid, "File contents:\n~p\n", [binary_to_term(Contents)]).
%% Report the ordered set of partition indexes that currently have a
%% running vnode on this node.
active_partitions(CPid) ->
    Indexes = [Idx || {_Mod, Idx, _Pid} <- riak_core_vnode_manager:all_vnodes()],
    Partitions = ordsets:from_list(Indexes),
    cluster_info:format(CPid, "~p\n", [Partitions]).
%% Dump the node's app.config (the file passed via the -config runtime
%% argument) and the vm.args next to it: an `ls -l' line plus the full
%% contents of each.
config_files(C) ->
    {ok, [[AppPath]]} = init:get_argument(config),
    EtcDir = filename:dirname(AppPath),
    VmPath = filename:join(EtcDir, "vm.args"),
    [begin
         cluster_info:format(C, "File: ~s\n", [os:cmd("ls -l " ++ File)]),
         {ok, FileBin} = file:read_file(File),
         cluster_info:format(C, "File contents:\n~s\n", [FileBin])
     end || File <- [AppPath, VmPath]].
| null | https://raw.githubusercontent.com/basho/riak_core/762ec81ae9af9a278e853f1feca418b9dcf748a3/src/riak_core_cinfo_core.erl | erlang | -------------------------------------------------------------------
Riak: A lightweight, decentralized key-value store.
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
@doc Required callback function for cluster_info: initialization.
This function doesn't have to do anything.
@doc Required callback function for cluster_info: return list of
{NameForReport, FunOfArity_1} tuples to generate ASCII/UTF-8
formatted reports.
CPid is the data collector's pid. | Copyright ( c ) 2007 - 2010 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_core_cinfo_core).
-export([cluster_info_init/0, cluster_info_generator_funs/0]).
( ) - > term ( )
cluster_info_init() ->
ok.
( ) - > list({string ( ) , fun ( ) } )
cluster_info_generator_funs() ->
[
{"Riak Core config files", fun config_files/1},
{"Riak Core vnode modules", fun vnode_modules/1},
{"Riak Core ring", fun get_my_ring/1},
{"Riak Core latest ring file", fun latest_ringfile/1},
{"Riak Core active partitions", fun active_partitions/1}
].
cluster_info:format(CPid, "~p\n", [riak_core:vnode_modules()]).
get_my_ring(CPid) ->
{ok, Ring} = riak_core_ring_manager:get_my_ring(),
cluster_info:format(CPid, "~p\n", [Ring]).
latest_ringfile(CPid) ->
{ok, Path} = riak_core_ring_manager:find_latest_ringfile(),
{ok, Contents} = file:read_file(Path),
cluster_info:format(CPid, "Latest ringfile: ~s\n", [Path]),
cluster_info:format(CPid, "File contents:\n~p\n", [binary_to_term(Contents)]).
active_partitions(CPid) ->
Vnodes = [{Mod, Idx} || {Mod, Idx, _Pid} <- riak_core_vnode_manager:all_vnodes()],
Partitions = lists:foldl(fun({_,P}, Ps) ->
ordsets:add_element(P, Ps)
end, ordsets:new(), Vnodes),
cluster_info:format(CPid, "~p\n", [Partitions]).
config_files(C) ->
{ok, [[AppPath]]} = init:get_argument(config),
EtcDir = filename:dirname(AppPath),
VmPath = filename:join(EtcDir, "vm.args"),
[begin
cluster_info:format(C, "File: ~s\n", [os:cmd("ls -l " ++ File)]),
{ok, FileBin} = file:read_file(File),
cluster_info:format(C, "File contents:\n~s\n", [FileBin])
end || File <- [AppPath, VmPath]].
|
2fb00929f799d9b3119529187c7de43dae9e0d07055ea696fe7b3e3e76b407b7 | lingnand/VIMonad | LayoutScreens.hs | # LANGUAGE FlexibleContexts , FlexibleInstances , MultiParamTypeClasses #
-----------------------------------------------------------------------------
-- |
Module : XMonad . Layout . LayoutScreens
Copyright : ( c ) < >
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : none
-- Stability : unstable
-- Portability : unportable
--
-- Divide a single screen into multiple screens.
-----------------------------------------------------------------------------
module XMonad.Layout.LayoutScreens (
-- * Usage
-- $usage
layoutScreens, layoutSplitScreen, fixedLayout,
FixedLayout,
) where
import XMonad
import qualified XMonad.StackSet as W
-- $usage
This module allows you to pretend that you have more than one screen by
dividing a single screen into multiple screens that xmonad will treat as
-- separate screens. This should definitely be useful for testing the
behavior of xmonad under Xinerama , and it 's possible that it 'd also be
-- handy for use as an actual user interface, if you've got a very large
-- screen and long for greater flexibility (e.g. being able to see your
-- email window at all times, a crude mimic of sticky windows).
--
-- You can use this module with the following in your
-- @~\/.xmonad\/xmonad.hs@ file:
--
> import XMonad . Layout . LayoutScreens
> import XMonad . Layout . TwoPane
--
-- Then add some keybindings; for example:
--
> , ( ( modm .| . shiftMask , xK_space ) , layoutScreens 2 ( TwoPane 0.5 0.5 ) )
> , ( ( modm .| . controlMask .| . shiftMask , xK_space ) , rescreen )
--
-- Another example use would be to handle a scenario where xrandr didn't
-- work properly (e.g. a VNC X server in my case) and you want to be able
-- to resize your screen (e.g. to match the size of a remote VNC client):
--
> import XMonad . Layout . LayoutScreens
--
> , ( ( modm .| . shiftMask , xK_space ) ,
> layoutScreens 1 ( fixedLayout [ Rectangle 0 0 1024 768 ] ) )
> , ( ( modm .| . controlMask .| . shiftMask , xK_space ) , rescreen )
--
-- For detailed instructions on editing your key bindings, see
" XMonad . Doc . Extending#Editing_key_bindings " .
-- | Modify all screens.
layoutScreens :: LayoutClass l Int => Int -> l Int -> X ()
layoutScreens nscr _ | nscr < 1 = trace $ "Can't layoutScreens with only " ++ show nscr ++ " screens."
layoutScreens nscr l =
do rtrect <- asks theRoot >>= getWindowRectangle
(wss, _) <- runLayout (W.Workspace "" l (Just $ W.Stack { W.focus=1, W.up=[],W.down=[1..nscr-1] })) rtrect
windows $ \ws@(W.StackSet { W.current = v, W.visible = vs, W.hidden = hs }) ->
let (x:xs, ys) = splitAt nscr $ map W.workspace (v:vs) ++ hs
s:ss = map snd wss
in ws { W.current = W.Screen x 0 (SD s)
, W.visible = zipWith3 W.Screen xs [1 ..] $ map SD ss
, W.hidden = ys }
-- | Modify current screen.
layoutSplitScreen :: LayoutClass l Int => Int -> l Int -> X ()
layoutSplitScreen nscr _ | nscr < 1 = trace $ "Can't layoutSplitScreen with only " ++ show nscr ++ " screens."
layoutSplitScreen nscr l =
do rect <- gets $ screenRect . W.screenDetail . W.current . windowset
(wss, _) <- runLayout (W.Workspace "" l (Just $ W.Stack { W.focus=1, W.up=[],W.down=[1..nscr-1] })) rect
windows $ \ws@(W.StackSet { W.current = c, W.visible = vs, W.hidden = hs }) ->
let (x:xs, ys) = splitAt nscr $ W.workspace c : hs
s:ss = map snd wss
in ws { W.current = W.Screen x (W.screen c) (SD s)
, W.visible = (zipWith3 W.Screen xs [(W.screen c+1) ..] $ map SD ss) ++
map (\v -> if W.screen v>W.screen c then v{W.screen = W.screen v + fromIntegral (nscr-1)} else v) vs
, W.hidden = ys }
getWindowRectangle :: Window -> X Rectangle
getWindowRectangle w = withDisplay $ \d ->
do a <- io $ getWindowAttributes d w
return $ Rectangle (fromIntegral $ wa_x a) (fromIntegral $ wa_y a)
(fromIntegral $ wa_width a) (fromIntegral $ wa_height a)
data FixedLayout a = FixedLayout [Rectangle] deriving (Read,Show)
instance LayoutClass FixedLayout a where
doLayout (FixedLayout rs) _ s = return (zip (W.integrate s) rs, Nothing)
fixedLayout :: [Rectangle] -> FixedLayout a
fixedLayout = FixedLayout
| null | https://raw.githubusercontent.com/lingnand/VIMonad/048e419fc4ef57a5235dbaeef8890faf6956b574/XMonadContrib/XMonad/Layout/LayoutScreens.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD3-style (see LICENSE)
Maintainer : none
Stability : unstable
Portability : unportable
Divide a single screen into multiple screens.
---------------------------------------------------------------------------
* Usage
$usage
$usage
separate screens. This should definitely be useful for testing the
handy for use as an actual user interface, if you've got a very large
screen and long for greater flexibility (e.g. being able to see your
email window at all times, a crude mimic of sticky windows).
You can use this module with the following in your
@~\/.xmonad\/xmonad.hs@ file:
Then add some keybindings; for example:
Another example use would be to handle a scenario where xrandr didn't
work properly (e.g. a VNC X server in my case) and you want to be able
to resize your screen (e.g. to match the size of a remote VNC client):
For detailed instructions on editing your key bindings, see
| Modify all screens.
| Modify current screen. | # LANGUAGE FlexibleContexts , FlexibleInstances , MultiParamTypeClasses #
Module : XMonad . Layout . LayoutScreens
Copyright : ( c ) < >
module XMonad.Layout.LayoutScreens (
layoutScreens, layoutSplitScreen, fixedLayout,
FixedLayout,
) where
import XMonad
import qualified XMonad.StackSet as W
This module allows you to pretend that you have more than one screen by
dividing a single screen into multiple screens that xmonad will treat as
behavior of xmonad under Xinerama , and it 's possible that it 'd also be
> import XMonad . Layout . LayoutScreens
> import XMonad . Layout . TwoPane
> , ( ( modm .| . shiftMask , xK_space ) , layoutScreens 2 ( TwoPane 0.5 0.5 ) )
> , ( ( modm .| . controlMask .| . shiftMask , xK_space ) , rescreen )
> import XMonad . Layout . LayoutScreens
> , ( ( modm .| . shiftMask , xK_space ) ,
> layoutScreens 1 ( fixedLayout [ Rectangle 0 0 1024 768 ] ) )
> , ( ( modm .| . controlMask .| . shiftMask , xK_space ) , rescreen )
" XMonad . Doc . Extending#Editing_key_bindings " .
layoutScreens :: LayoutClass l Int => Int -> l Int -> X ()
layoutScreens nscr _ | nscr < 1 = trace $ "Can't layoutScreens with only " ++ show nscr ++ " screens."
layoutScreens nscr l =
do rtrect <- asks theRoot >>= getWindowRectangle
(wss, _) <- runLayout (W.Workspace "" l (Just $ W.Stack { W.focus=1, W.up=[],W.down=[1..nscr-1] })) rtrect
windows $ \ws@(W.StackSet { W.current = v, W.visible = vs, W.hidden = hs }) ->
let (x:xs, ys) = splitAt nscr $ map W.workspace (v:vs) ++ hs
s:ss = map snd wss
in ws { W.current = W.Screen x 0 (SD s)
, W.visible = zipWith3 W.Screen xs [1 ..] $ map SD ss
, W.hidden = ys }
layoutSplitScreen :: LayoutClass l Int => Int -> l Int -> X ()
layoutSplitScreen nscr _ | nscr < 1 = trace $ "Can't layoutSplitScreen with only " ++ show nscr ++ " screens."
layoutSplitScreen nscr l =
do rect <- gets $ screenRect . W.screenDetail . W.current . windowset
(wss, _) <- runLayout (W.Workspace "" l (Just $ W.Stack { W.focus=1, W.up=[],W.down=[1..nscr-1] })) rect
windows $ \ws@(W.StackSet { W.current = c, W.visible = vs, W.hidden = hs }) ->
let (x:xs, ys) = splitAt nscr $ W.workspace c : hs
s:ss = map snd wss
in ws { W.current = W.Screen x (W.screen c) (SD s)
, W.visible = (zipWith3 W.Screen xs [(W.screen c+1) ..] $ map SD ss) ++
map (\v -> if W.screen v>W.screen c then v{W.screen = W.screen v + fromIntegral (nscr-1)} else v) vs
, W.hidden = ys }
getWindowRectangle :: Window -> X Rectangle
getWindowRectangle w = withDisplay $ \d ->
do a <- io $ getWindowAttributes d w
return $ Rectangle (fromIntegral $ wa_x a) (fromIntegral $ wa_y a)
(fromIntegral $ wa_width a) (fromIntegral $ wa_height a)
data FixedLayout a = FixedLayout [Rectangle] deriving (Read,Show)
instance LayoutClass FixedLayout a where
doLayout (FixedLayout rs) _ s = return (zip (W.integrate s) rs, Nothing)
fixedLayout :: [Rectangle] -> FixedLayout a
fixedLayout = FixedLayout
|
fc6de8ff5623bdbc5ae47cd84bd25de231497fcc7b36543d35ccddb49a54a0dd | davexunit/guile-2d | scene.scm | ;;; guile-2d
Copyright ( C ) 2013 >
;;;
;;; Guile-2d is free software: you can redistribute it and/or modify it
;;; under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation , either version 3 of the
;;; License, or (at your option) any later version.
;;;
;;; Guile-2d is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;; Lesser General Public License for more details.
;;;
You should have received a copy of the GNU Lesser General Public
;;; License along with this program. If not, see
;;; </>.
;;; Commentary:
;;
;; Scenes describe the behavioral aspects of a game.
;;
;;; Code:
(define-module (2d scene)
#:use-module (srfi srfi-9)
#:use-module (2d observer)
#:export (<scene>
make-scene
scene?
scene-name
scene-init
scene-enter
scene-exit
scene-draw
scene-update
scene-observer
init-scene
enter-scene
exit-scene
draw-scene
update-scene
scene-trigger
default-events))
(define-record-type <scene>
(%make-scene name init enter exit draw update observer)
scene?
(name scene-name)
(init scene-init)
(enter scene-enter)
(exit scene-exit)
(draw scene-draw)
(update scene-update)
(observer scene-observer))
(define no-op (lambda args #f))
(define default-events (make-parameter '()))
(define* (make-scene name
#:optional #:key
(init no-op)
(enter no-op)
(exit no-op)
(draw no-op)
(update no-op)
(events (default-events)))
"Create a new scene object. All callbacks default to a no-op."
(%make-scene name init enter exit draw update
(alist->observer events)))
(define (init-scene scene)
"Return the value returned by the state constructor thunk for
SCENE."
((scene-init scene)))
(define (enter-scene scene state)
"Call enter callback for SCENE with STATE."
((scene-enter scene) state))
(define (exit-scene scene state)
"Call the exit callback for SCENE with STATE."
((scene-exit scene) state))
(define (draw-scene scene state)
"Call the draw callback for SCENE with STATE."
((scene-draw scene) state))
(define (update-scene scene state)
"Call the update callback for SCENE with STATE."
((scene-update scene) state))
(define (scene-trigger scene state event . args)
(apply observer-trigger (scene-observer scene) event state args))
| null | https://raw.githubusercontent.com/davexunit/guile-2d/83d9dfab5b04a337565cb2798847b15e4fbd7786/2d/scene.scm | scheme | guile-2d
Guile-2d is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as
License, or (at your option) any later version.
Guile-2d is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this program. If not, see
</>.
Commentary:
Scenes describe the behavioral aspects of a game.
Code: | Copyright ( C ) 2013 >
published by the Free Software Foundation , either version 3 of the
You should have received a copy of the GNU Lesser General Public
(define-module (2d scene)
#:use-module (srfi srfi-9)
#:use-module (2d observer)
#:export (<scene>
make-scene
scene?
scene-name
scene-init
scene-enter
scene-exit
scene-draw
scene-update
scene-observer
init-scene
enter-scene
exit-scene
draw-scene
update-scene
scene-trigger
default-events))
(define-record-type <scene>
(%make-scene name init enter exit draw update observer)
scene?
(name scene-name)
(init scene-init)
(enter scene-enter)
(exit scene-exit)
(draw scene-draw)
(update scene-update)
(observer scene-observer))
(define no-op (lambda args #f))
(define default-events (make-parameter '()))
(define* (make-scene name
#:optional #:key
(init no-op)
(enter no-op)
(exit no-op)
(draw no-op)
(update no-op)
(events (default-events)))
"Create a new scene object. All callbacks default to a no-op."
(%make-scene name init enter exit draw update
(alist->observer events)))
(define (init-scene scene)
"Return the value returned by the state constructor thunk for
SCENE."
((scene-init scene)))
(define (enter-scene scene state)
"Call enter callback for SCENE with STATE."
((scene-enter scene) state))
(define (exit-scene scene state)
"Call the exit callback for SCENE with STATE."
((scene-exit scene) state))
(define (draw-scene scene state)
"Call the draw callback for SCENE with STATE."
((scene-draw scene) state))
(define (update-scene scene state)
"Call the update callback for SCENE with STATE."
((scene-update scene) state))
(define (scene-trigger scene state event . args)
(apply observer-trigger (scene-observer scene) event state args))
|
6b5124c462135462b299f9a9abe776b3510999ca00561b6c351cddd60e732961 | AnthonySuper/jordan | Example.hs | # LANGUAGE DataKinds #
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE OverloadedLabels #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
# LANGUAGE TypeOperators #
module Jordan.Servant.Example where
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Data.Bifunctor
import Data.Maybe (mapMaybe)
import Data.Text (Text, isInfixOf)
import qualified Data.Text as T
import GHC.Generics
import Jordan
import Jordan.Servant
import Jordan.Servant.Example.ServerM
import Jordan.Servant.Server
import Jordan.Types.Internal.AccumE
import Jordan.Types.JSONError
import Optics
import Servant.API
import Servant.Server
import Servant.Server.UVerb
data CreatePerson = CreatePerson
{ firstName :: Text,
lastName :: Text
}
deriving (Show, Read, Eq, Ord, Generic)
deriving anyclass (ToJSON, FromJSON)
data CreatePersonErrors = CreatePersonErrors
{ firstNameErrors :: [Text],
lastNameErrors :: [Text]
}
deriving (Show, Read, Eq, Ord, Generic)
deriving anyclass (ToJSON, FromJSON)
instance Semigroup CreatePersonErrors where
lhs <> rhs =
CreatePersonErrors
{ firstNameErrors = firstNameErrors lhs <> firstNameErrors rhs,
lastNameErrors = lastNameErrors lhs <> lastNameErrors rhs
}
instance Monoid CreatePersonErrors where
mempty = CreatePersonErrors mempty mempty
data QueryFilter = MkQueryFilter
{ firstNameIncludes :: Maybe Text,
lastNameIncludes :: Maybe Text
}
deriving (Show, Read, Eq, Ord, Generic)
deriving anyclass (ToJSON, FromJSON)
parseOptional :: (JSONObjectParser f, FromJSON a) => Text -> f (Maybe a)
parseOptional f = parseFieldWithDefault f (Just <$> fromJSON) Nothing
type Create =
Summary "Create a person"
:> ReportingRequestBody CreatePerson
:> UVerb 'POST '[JSON] [WithStatus 200 (ViaJordan Person), WithStatus 400 (ViaJordan CreatePersonErrors)]
toFilter :: Maybe QueryFilter -> [Person] -> [Person]
toFilter Nothing = id
toFilter (Just MkQueryFilter {..}) =
mapMaybe $
maybe Just (toFilter personFirstName) firstNameIncludes
>=> maybe Just (toFilter personLastName) lastNameIncludes
where
toFilter get q p = if q `isInfixOf` get p then pure p else Nothing
type List = Summary "List people" :> OptionalJordanQuery "filter" QueryFilter :> Get '[JSON] (ViaJordan [Person])
type API =
"people" :> (Create :<|> List)
ensureNonEmpty t l
| T.length t == 0 = AccumEL (mempty @CreatePersonErrors & l %~ (<> ["cannot be empty"]))
| otherwise = pure t
handleCreate :: ServerT Create ServerM
handleCreate CreatePerson {..} =
let mp = MkPerson <$> ensureNonEmpty firstName #firstNameErrors <*> ensureNonEmpty lastName #lastNameErrors
in runToUnion $ do
person <- lowerEither $ first (WithStatus @400 . ViaJordan) $ getAccumE mp
lift $ addPerson person
pure $ WithStatus @200 $ ViaJordan person
handleList :: ServerT List ServerM
handleList qf = ViaJordan . toFilter qf <$> readPeople
handler :: ServerT API ServerM
handler = handleCreate :<|> handleList
| null | https://raw.githubusercontent.com/AnthonySuper/jordan/ec4e0f771fd54b38f53891e6ae996eb6bb0f1f51/servant-example/lib/Jordan/Servant/Example.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE OverloadedStrings # | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE OverloadedLabels #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
# LANGUAGE TypeOperators #
module Jordan.Servant.Example where
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Data.Bifunctor
import Data.Maybe (mapMaybe)
import Data.Text (Text, isInfixOf)
import qualified Data.Text as T
import GHC.Generics
import Jordan
import Jordan.Servant
import Jordan.Servant.Example.ServerM
import Jordan.Servant.Server
import Jordan.Types.Internal.AccumE
import Jordan.Types.JSONError
import Optics
import Servant.API
import Servant.Server
import Servant.Server.UVerb
data CreatePerson = CreatePerson
{ firstName :: Text,
lastName :: Text
}
deriving (Show, Read, Eq, Ord, Generic)
deriving anyclass (ToJSON, FromJSON)
data CreatePersonErrors = CreatePersonErrors
{ firstNameErrors :: [Text],
lastNameErrors :: [Text]
}
deriving (Show, Read, Eq, Ord, Generic)
deriving anyclass (ToJSON, FromJSON)
instance Semigroup CreatePersonErrors where
lhs <> rhs =
CreatePersonErrors
{ firstNameErrors = firstNameErrors lhs <> firstNameErrors rhs,
lastNameErrors = lastNameErrors lhs <> lastNameErrors rhs
}
instance Monoid CreatePersonErrors where
mempty = CreatePersonErrors mempty mempty
data QueryFilter = MkQueryFilter
{ firstNameIncludes :: Maybe Text,
lastNameIncludes :: Maybe Text
}
deriving (Show, Read, Eq, Ord, Generic)
deriving anyclass (ToJSON, FromJSON)
parseOptional :: (JSONObjectParser f, FromJSON a) => Text -> f (Maybe a)
parseOptional f = parseFieldWithDefault f (Just <$> fromJSON) Nothing
type Create =
Summary "Create a person"
:> ReportingRequestBody CreatePerson
:> UVerb 'POST '[JSON] [WithStatus 200 (ViaJordan Person), WithStatus 400 (ViaJordan CreatePersonErrors)]
toFilter :: Maybe QueryFilter -> [Person] -> [Person]
toFilter Nothing = id
toFilter (Just MkQueryFilter {..}) =
mapMaybe $
maybe Just (toFilter personFirstName) firstNameIncludes
>=> maybe Just (toFilter personLastName) lastNameIncludes
where
toFilter get q p = if q `isInfixOf` get p then pure p else Nothing
type List = Summary "List people" :> OptionalJordanQuery "filter" QueryFilter :> Get '[JSON] (ViaJordan [Person])
type API =
"people" :> (Create :<|> List)
ensureNonEmpty t l
| T.length t == 0 = AccumEL (mempty @CreatePersonErrors & l %~ (<> ["cannot be empty"]))
| otherwise = pure t
handleCreate :: ServerT Create ServerM
handleCreate CreatePerson {..} =
let mp = MkPerson <$> ensureNonEmpty firstName #firstNameErrors <*> ensureNonEmpty lastName #lastNameErrors
in runToUnion $ do
person <- lowerEither $ first (WithStatus @400 . ViaJordan) $ getAccumE mp
lift $ addPerson person
pure $ WithStatus @200 $ ViaJordan person
handleList :: ServerT List ServerM
handleList qf = ViaJordan . toFilter qf <$> readPeople
handler :: ServerT API ServerM
handler = handleCreate :<|> handleList
|
cf06c9d2148c7c5d1b26baf866cc2bbd7bfb289a11cf9beda7f4065c3da85da0 | nilenso/kulu-backend | bulk_upload_invoices.clj | (ns kulu-backend.scripts.bulk-upload-invoices
(:require [aws.sdk.s3 :as s3 :only [copy-object]]
[clojure.tools.logging :as log]
[clojure.java.io :as io]
[kulu-backend.invoices.model :as inv]
[kulu-backend.organizations.model :as org]
[kulu-backend.config :as cfg]
[clj-time.coerce :as time-c]))
(defn all-files [path]
(file-seq (io/file path)))
(defn only-files [files]
(filter #(and (not (.isDirectory %)) (.isFile %)) files))
(defn write-to-s3 [file name]
(s3/put-object (cfg/aws-creds) (cfg/invoice-bucket) name file))
(defn write-to-db [info storage-key]
(prn info storage-key)
(inv/store (assoc info :storage_key storage-key)))
(defn db-and-image [org-name user-email files]
(reduce (fn [res file]
(if-let [img (io/input-stream file)]
(if-not (= (.getName file) ".DS_Store")
(let [name (.getName file)
s3-token (write-to-s3 img name)
string-date (second (clojure.string/split name #"_"))
20150325
org (org/lookup-by-name org-name)
db (write-to-db {:email user-email
:expense-type "Company"
:remarks ""
:status "Submitted"
:amount 0.0
:currency "INR"
:date date
:organization_id (:id org)} name)]
(conj res name)))
res))
[]
files))
(defn db-only [org-name user-email file-names]
(let [org (org/lookup-by-name org-name)]
(reduce (fn [res file-name]
(let [string-date (second (clojure.string/split file-name #"_"))
date (java.sql.Date. (.getTime (.parse (java.text.SimpleDateFormat. "yyyyMMdd") string-date)))]
(write-to-db {:email user-email
:expense-type "Company"
:remarks ""
:status "Submitted"
:amount 0.0
:currency "INR"
:date date
:organization_id (:id org)} file-name))) [] (vec file-names))))
(defn -main [& args]
(System/setProperty "nomad.env" (first args))
(db-only (second args) (first (rest (rest args))) (rest (rest (rest args))))
(System/exit 0))
| null | https://raw.githubusercontent.com/nilenso/kulu-backend/0b404f76643e77219432dcbffa681172a61e591b/src/kulu_backend/scripts/bulk_upload_invoices.clj | clojure | (ns kulu-backend.scripts.bulk-upload-invoices
(:require [aws.sdk.s3 :as s3 :only [copy-object]]
[clojure.tools.logging :as log]
[clojure.java.io :as io]
[kulu-backend.invoices.model :as inv]
[kulu-backend.organizations.model :as org]
[kulu-backend.config :as cfg]
[clj-time.coerce :as time-c]))
(defn all-files [path]
(file-seq (io/file path)))
(defn only-files [files]
(filter #(and (not (.isDirectory %)) (.isFile %)) files))
(defn write-to-s3 [file name]
(s3/put-object (cfg/aws-creds) (cfg/invoice-bucket) name file))
(defn write-to-db [info storage-key]
(prn info storage-key)
(inv/store (assoc info :storage_key storage-key)))
(defn db-and-image [org-name user-email files]
(reduce (fn [res file]
(if-let [img (io/input-stream file)]
(if-not (= (.getName file) ".DS_Store")
(let [name (.getName file)
s3-token (write-to-s3 img name)
string-date (second (clojure.string/split name #"_"))
20150325
org (org/lookup-by-name org-name)
db (write-to-db {:email user-email
:expense-type "Company"
:remarks ""
:status "Submitted"
:amount 0.0
:currency "INR"
:date date
:organization_id (:id org)} name)]
(conj res name)))
res))
[]
files))
(defn db-only [org-name user-email file-names]
(let [org (org/lookup-by-name org-name)]
(reduce (fn [res file-name]
(let [string-date (second (clojure.string/split file-name #"_"))
date (java.sql.Date. (.getTime (.parse (java.text.SimpleDateFormat. "yyyyMMdd") string-date)))]
(write-to-db {:email user-email
:expense-type "Company"
:remarks ""
:status "Submitted"
:amount 0.0
:currency "INR"
:date date
:organization_id (:id org)} file-name))) [] (vec file-names))))
(defn -main [& args]
(System/setProperty "nomad.env" (first args))
(db-only (second args) (first (rest (rest args))) (rest (rest (rest args))))
(System/exit 0))
| |
73b678b3b2bd959df9c9b2d3b36ea119c4c76e8d3117c612fa6b6f19a0a5e751 | emaphis/HtDP2e-solutions | ex052.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex052) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
Ex . 52 .
Which integers are contained in the four intervals above ?
[ 3,5 ] = > 3,4,5
( 3,5 ] = > 4,5
[ 3,5 ) = > 3,4
( 3,5 ) = > 4
| null | https://raw.githubusercontent.com/emaphis/HtDP2e-solutions/ecb60b9a7bbf9b8999c0122b6ea152a3301f0a68/1-Fixed-Size-Data/04-Intervals/ex052.rkt | racket | about the language level of this file in a form that our tools can easily process. | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex052) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
Ex . 52 .
Which integers are contained in the four intervals above ?
[ 3,5 ] = > 3,4,5
( 3,5 ] = > 4,5
[ 3,5 ) = > 3,4
( 3,5 ) = > 4
|
34aad072d4285b9d7a89ced1fa37f244086475ae563721afad1f371e805ff9d7 | ygrek/mldonkey | indexer2.ml | Copyright 2001 , 2002 b8_bavard , b8_fee_carabine ,
This file is part of mldonkey .
mldonkey is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
mldonkey is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with mldonkey ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This file is part of mldonkey.
mldonkey is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
mldonkey is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mldonkey; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
open Printf2
module Make(Doc : sig
type t
val num : t -> int
val filtered : t -> bool
val filter : t -> bool -> unit
end) = struct
type doc = Doc.t
type filtered_node =
None
| Some of node
| Filtered of node
and node = {
mutable next_doc : int;
mutable docs : Doc.t array;
mutable fields : int array;
mutable ndocs : int;
mutable nodes : filtered_node array;
}
type index = {
mutable node : node;
}
let stats index =
let mem = ref 2 in
let rec iter node =
mem := !mem + 9 +
Array.length node.docs + Array.length node.fields +
Array.length node.nodes;
Array.iter (fun n ->
match n with None -> ()
| Some node ->
mem := !mem + 2;
iter node
| Filtered node ->
mem := !mem + 2;
iter node
) node.nodes
in
iter index.node;
!mem
let new_node () = {
next_doc = 0;
docs = [||];
fields = [||];
nodes = [||];
ndocs = 0;
}
let empty = new_node ()
let create () = { node = new_node () }
let convert_char c =
match c with
'a' .. 'z' -> int_of_char c - 97
| 'A' .. 'Z' -> int_of_char c - 65
| '0' .. '9' -> 26 + (int_of_char c - int_of_char '0')
| _ -> assert false
let exit_exn = Exit
let add_doc node doc fields =
lprintf " add_doc " ; ( ) ;
let len = Array.length node.docs in
let pos = node.next_doc in
try
for i = 0 to node.next_doc - 1 do
if node.docs.(i) == doc then begin
node.fields.(i) <- node.fields.(i) lor fields;
raise exit_exn;
end;
done;
if pos = len then begin
let new_docs = Array.make (len + len/2 + 2) doc in
let new_fields = Array.make (len + len/2 + 2) 0 in
Array.blit node.docs 0 new_docs 0 len;
Array.blit node.fields 0 new_fields 0 len;
node.docs <- new_docs;
node.fields <- new_fields
end;
node.docs.(pos) <- doc;
(* lprintf "Adding doc with field %d" fields; lprint_newline (); *)
node.fields.(pos) <- fields;
node.next_doc <- pos +1;
node.ndocs <- node.ndocs + 1;
true
with e ->
" exn % s " ( Printexc2.to_string e ) ; lprint_newline ( ) ;
false
; " done " ; ( )
let add_char node c =
" add_char " ; ( ) ;
let n = new_node () in
let len = Array.length node.nodes in
if len <= c then begin
let new_nodes = Array.make (c+1) None in
Array.blit node.nodes 0 new_nodes 0 len;
node.nodes <- new_nodes;
end;
lprintf " set % d % d % d " c len ( Array.length node.nodes ) ; lprint_newline ( ) ;
node.nodes.(c) <- Some n;
(* lprintf "done"; lprint_newline (); *)
n
let add index string doc fields =
(* lprintf "add (%s)" string; lprint_newline (); *)
try
(* lprintf "add"; lprint_newline (); *)
let len = String.length string in
let rec iter pos node =
" pos % d " pos ; ( ) ;
if pos = len then
if add_doc node doc fields then begin
node.ndocs <- node.ndocs + 1;
true
end else false
else
let c = string.[pos] in
let c = convert_char c in
let node =
if Array.length node.nodes > c then
match node.nodes.(c) with
None -> add_char node c
| Some node -> node
| Filtered _ ->
Doc.filter doc true;
lprintf_nl "doc filtered";
raise Not_found
else
add_char node c
in
iter (pos+1) node
in
ignore (iter 0 index.node)
with e ->
" Exc % s " ( Printexc2.to_string e);lprint_newline ( ) ;
()
; " done " ; ( )
let clear index = index.node <- new_node ()
let filter_node node bool =
(* lprintf "filter node"; lprint_newline (); *)
for i = 0 to node.next_doc - 1 do
(* lprintf "filter doc %s\n" (string_of_bool bool); *)
Doc.filter node.docs.(i) bool;
(* if Doc.filtered node.docs.(i) then
(lprintf "doc is filtered\n"); *)
done
let rec filter_nodes node bool =
lprintf " filter_nodes " ; ( ) ;
filter_node node bool;
let len = Array.length node.nodes in
for i = 0 to len - 1 do
match node.nodes.(i) with
None -> ()
| Some n -> filter_nodes n bool
| Filtered n -> filter_nodes n bool
done
let add_filter index s =
try
let len = String.length s in
let rec iter pos node =
let c = s.[pos] in
let c = convert_char c in
let n =
if Array.length node.nodes > c then
match node.nodes.(c) with
None -> add_char node c
| Some node -> node
| Filtered _ -> raise Not_found
else
add_char node c
in
if pos+1 = len then begin
filter_nodes n true;
node.nodes.(c) <- Filtered n
end else
iter (pos+1) n
in
iter 0 index.node
with _ -> ()
let filter_words index list =
(* lprintf "FILTER ALL"; lprint_newline (); *)
List.iter (fun s ->
(* lprintf "filter (%s)" s; lprint_newline (); *)
add_filter index s) list
let clear_filter index =
lprintf " CLEAR FILTER " ; lprint_newline ( ) ;
let rec iter node =
let len = Array.length node.nodes in
for i = 0 to len - 1 do
match node.nodes.(i) with
Filtered n ->
node.nodes.(i) <- Some n;
filter_node n false;
iter_in n
| Some n -> iter n
| _ -> ()
done
and iter_in node =
let len = Array.length node.nodes in
for i = 0 to len - 1 do
match node.nodes.(i) with
Filtered n ->
node.nodes.(i) <- Some n;
filter_node n false;
iter_in n
| Some n ->
filter_node n false;
iter_in n
| _ -> ()
done
in
iter index.node
let filtered doc = Doc.filtered doc
let find node s =
let len = String.length s in
let rec iter node pos =
if pos = len then node else
let c = s.[pos] in
let c = convert_char c in
if Array.length node.nodes > c then
match node.nodes.(c) with
None -> raise Not_found
| Some node -> iter node (pos+1)
| Filtered _ -> raise Not_found
else raise Not_found
in
try
iter node.node 0
with _ -> empty
let or_get_fields map node fields =
let rec iter node =
for i = 0 to node.next_doc - 1 do
if node.fields.(i) land fields <> 0 then
let doc = node.docs.(i) in
if not (Doc.filtered doc) &&
not (Intmap.mem (Doc.num doc) !map) then
map := Intmap.add (Doc.num doc) doc !map
done;
for i = 0 to Array.length node.nodes - 1 do
match node.nodes.(i) with
None -> ()
| Some node -> iter node
| Filtered _ -> ()
done;
in iter node;
!map
let and_get_fields node fields and_map =
let map = ref Intmap.empty in
let rec iter node =
for i = 0 to node.next_doc - 1 do
if node.fields.(i) land fields <> 0 then
let doc = node.docs.(i) in
if (Intmap.mem (Doc.num doc) and_map) &&
not (Intmap.mem (Doc.num doc) !map) then
map := Intmap.add (Doc.num doc) doc !map
done;
for i = 0 to Array.length node.nodes - 1 do
match node.nodes.(i) with
None -> ()
| Some node -> iter node
| Filtered _ -> ()
done;
in iter node;
!map
let size node = node.ndocs
end
module FullMake (Doc : Indexer.Doc) = Indexer.FullMake (Doc ) (Make)
| null | https://raw.githubusercontent.com/ygrek/mldonkey/333868a12bb6cd25fed49391dd2c3a767741cb51/src/utils/lib/indexer2.ml | ocaml | lprintf "Adding doc with field %d" fields; lprint_newline ();
lprintf "done"; lprint_newline ();
lprintf "add (%s)" string; lprint_newline ();
lprintf "add"; lprint_newline ();
lprintf "filter node"; lprint_newline ();
lprintf "filter doc %s\n" (string_of_bool bool);
if Doc.filtered node.docs.(i) then
(lprintf "doc is filtered\n");
lprintf "FILTER ALL"; lprint_newline ();
lprintf "filter (%s)" s; lprint_newline (); | Copyright 2001 , 2002 b8_bavard , b8_fee_carabine ,
This file is part of mldonkey .
mldonkey is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
mldonkey is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with mldonkey ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This file is part of mldonkey.
mldonkey is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
mldonkey is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mldonkey; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
open Printf2
module Make(Doc : sig
type t
val num : t -> int
val filtered : t -> bool
val filter : t -> bool -> unit
end) = struct
type doc = Doc.t
type filtered_node =
None
| Some of node
| Filtered of node
and node = {
mutable next_doc : int;
mutable docs : Doc.t array;
mutable fields : int array;
mutable ndocs : int;
mutable nodes : filtered_node array;
}
type index = {
mutable node : node;
}
let stats index =
let mem = ref 2 in
let rec iter node =
mem := !mem + 9 +
Array.length node.docs + Array.length node.fields +
Array.length node.nodes;
Array.iter (fun n ->
match n with None -> ()
| Some node ->
mem := !mem + 2;
iter node
| Filtered node ->
mem := !mem + 2;
iter node
) node.nodes
in
iter index.node;
!mem
let new_node () = {
next_doc = 0;
docs = [||];
fields = [||];
nodes = [||];
ndocs = 0;
}
let empty = new_node ()
let create () = { node = new_node () }
let convert_char c =
match c with
'a' .. 'z' -> int_of_char c - 97
| 'A' .. 'Z' -> int_of_char c - 65
| '0' .. '9' -> 26 + (int_of_char c - int_of_char '0')
| _ -> assert false
let exit_exn = Exit
let add_doc node doc fields =
lprintf " add_doc " ; ( ) ;
let len = Array.length node.docs in
let pos = node.next_doc in
try
for i = 0 to node.next_doc - 1 do
if node.docs.(i) == doc then begin
node.fields.(i) <- node.fields.(i) lor fields;
raise exit_exn;
end;
done;
if pos = len then begin
let new_docs = Array.make (len + len/2 + 2) doc in
let new_fields = Array.make (len + len/2 + 2) 0 in
Array.blit node.docs 0 new_docs 0 len;
Array.blit node.fields 0 new_fields 0 len;
node.docs <- new_docs;
node.fields <- new_fields
end;
node.docs.(pos) <- doc;
node.fields.(pos) <- fields;
node.next_doc <- pos +1;
node.ndocs <- node.ndocs + 1;
true
with e ->
" exn % s " ( Printexc2.to_string e ) ; lprint_newline ( ) ;
false
; " done " ; ( )
let add_char node c =
" add_char " ; ( ) ;
let n = new_node () in
let len = Array.length node.nodes in
if len <= c then begin
let new_nodes = Array.make (c+1) None in
Array.blit node.nodes 0 new_nodes 0 len;
node.nodes <- new_nodes;
end;
lprintf " set % d % d % d " c len ( Array.length node.nodes ) ; lprint_newline ( ) ;
node.nodes.(c) <- Some n;
n
let add index string doc fields =
try
let len = String.length string in
let rec iter pos node =
" pos % d " pos ; ( ) ;
if pos = len then
if add_doc node doc fields then begin
node.ndocs <- node.ndocs + 1;
true
end else false
else
let c = string.[pos] in
let c = convert_char c in
let node =
if Array.length node.nodes > c then
match node.nodes.(c) with
None -> add_char node c
| Some node -> node
| Filtered _ ->
Doc.filter doc true;
lprintf_nl "doc filtered";
raise Not_found
else
add_char node c
in
iter (pos+1) node
in
ignore (iter 0 index.node)
with e ->
" Exc % s " ( Printexc2.to_string e);lprint_newline ( ) ;
()
; " done " ; ( )
let clear index = index.node <- new_node ()
let filter_node node bool =
for i = 0 to node.next_doc - 1 do
Doc.filter node.docs.(i) bool;
done
let rec filter_nodes node bool =
lprintf " filter_nodes " ; ( ) ;
filter_node node bool;
let len = Array.length node.nodes in
for i = 0 to len - 1 do
match node.nodes.(i) with
None -> ()
| Some n -> filter_nodes n bool
| Filtered n -> filter_nodes n bool
done
let add_filter index s =
try
let len = String.length s in
let rec iter pos node =
let c = s.[pos] in
let c = convert_char c in
let n =
if Array.length node.nodes > c then
match node.nodes.(c) with
None -> add_char node c
| Some node -> node
| Filtered _ -> raise Not_found
else
add_char node c
in
if pos+1 = len then begin
filter_nodes n true;
node.nodes.(c) <- Filtered n
end else
iter (pos+1) n
in
iter 0 index.node
with _ -> ()
let filter_words index list =
List.iter (fun s ->
add_filter index s) list
let clear_filter index =
lprintf " CLEAR FILTER " ; lprint_newline ( ) ;
let rec iter node =
let len = Array.length node.nodes in
for i = 0 to len - 1 do
match node.nodes.(i) with
Filtered n ->
node.nodes.(i) <- Some n;
filter_node n false;
iter_in n
| Some n -> iter n
| _ -> ()
done
and iter_in node =
let len = Array.length node.nodes in
for i = 0 to len - 1 do
match node.nodes.(i) with
Filtered n ->
node.nodes.(i) <- Some n;
filter_node n false;
iter_in n
| Some n ->
filter_node n false;
iter_in n
| _ -> ()
done
in
iter index.node
let filtered doc = Doc.filtered doc
let find node s =
let len = String.length s in
let rec iter node pos =
if pos = len then node else
let c = s.[pos] in
let c = convert_char c in
if Array.length node.nodes > c then
match node.nodes.(c) with
None -> raise Not_found
| Some node -> iter node (pos+1)
| Filtered _ -> raise Not_found
else raise Not_found
in
try
iter node.node 0
with _ -> empty
let or_get_fields map node fields =
let rec iter node =
for i = 0 to node.next_doc - 1 do
if node.fields.(i) land fields <> 0 then
let doc = node.docs.(i) in
if not (Doc.filtered doc) &&
not (Intmap.mem (Doc.num doc) !map) then
map := Intmap.add (Doc.num doc) doc !map
done;
for i = 0 to Array.length node.nodes - 1 do
match node.nodes.(i) with
None -> ()
| Some node -> iter node
| Filtered _ -> ()
done;
in iter node;
!map
let and_get_fields node fields and_map =
let map = ref Intmap.empty in
let rec iter node =
for i = 0 to node.next_doc - 1 do
if node.fields.(i) land fields <> 0 then
let doc = node.docs.(i) in
if (Intmap.mem (Doc.num doc) and_map) &&
not (Intmap.mem (Doc.num doc) !map) then
map := Intmap.add (Doc.num doc) doc !map
done;
for i = 0 to Array.length node.nodes - 1 do
match node.nodes.(i) with
None -> ()
| Some node -> iter node
| Filtered _ -> ()
done;
in iter node;
!map
let size node = node.ndocs
end
module FullMake (Doc : Indexer.Doc) = Indexer.FullMake (Doc ) (Make)
|
464e0685b1932e477db993639f95a796c703270f9c2cba1ad4bac4c38c3e1a4b | shop-planner/shop3 | axiom-else-bug.lisp | (in-package :shop-user)
;;;---------------------------------------------------------------------------
;;; File Description:
;;;
;;; Example domain and problems created to illustrate a problem
with backtracking over if - then - else axioms in shop2 .
;;;
;;; History/Bugs/Notes:
;;;
[ 2004/01/23 : rpg ] Created .
;;;
;;;---------------------------------------------------------------------------
(defdomain test-axiom-else-branch
(
(:method (simple-test)
((ok-precondition 12))
(!operator))
(:method (test)
((labeled-precondition 12))
(!operator))
(:method (second-test)
((check-constraint (<= :unbound :unbound))
(:unbound 12))
(!operator))
(:method (test-patch)
()
(:ordered
(!establish-bad-axiom)
(!check-bad-axiom)))
(:operator (!operator)
()
()
())
(:- (ok-precondition ?x)
( (call = ?x 12) )
( (eval error "should not have reached this branch!") ))
(:- (bad-axiom ?x)
( (call = ?x 12) ))
(:operator (!establish-bad-axiom)
()
;; delete
()
;; add
(bad-axiom 22))
(:operator (!check-bad-axiom)
((bad-axiom 22))
;; delete
()
;; add
())
(:- (labeled-precondition ?x)
branch1 ( (call = ?x 12) )
branch2 ( (eval error "should not have reached this branch!") ))
(:- (check-constraint (<= ?x ?y))
arg1-unbound ( (:unbound ?x) )
arg2-unbound ( (:unbound ?y) )
tested ((call <= ?x ?y)))
;; I'm not entirely sure how to specify a simple ground clause
(:- (:unbound :unbound) nil)
))
;;; this will work fine --- just intended to show that my domain was
;;; well-formed.
(defproblem is-domain-ok
NIL
(simple-test))
;;; actually labeling the axiom branches was a red herring. This will
;;; plan correctly, as it should.
(defproblem testing-labeled-axioms
NIL
(test))
If the bug is n't fixed , this problem will cause to crash ,
;;; because it will backtrack from checking the arg1-unbound branch of
;;; the check-constraint axiom, and will then try to call
;;; (<= :unbound :unbound). This problem should NOT be plannable.
[ 2004/02/02 : rpg ]
(defproblem testing-check-constraint
NIL
(second-test))
(defproblem this-should-work-if-patch-is-ok
NIL
(test-patch))
(defun test-axiom-else-bug ()
"Tests the CUT behavior of axioms with if-then-else branches.
Should return T if behavior is correct."
(and
(let ((retval (find-plans 'is-domain-ok)))
(equalp retval '(((!OPERATOR) 1.0))))
(let ((retval (find-plans 'testing-labeled-axioms)))
(equalp retval '(((!OPERATOR) 1.0))))
(let ((retval (find-plans 'testing-check-constraint)))
(null retval))))
| null | https://raw.githubusercontent.com/shop-planner/shop3/ba429cf91a575e88f28b7f0e89065de7b4d666a6/shop3/examples/axiom-else-bug.lisp | lisp | ---------------------------------------------------------------------------
File Description:
Example domain and problems created to illustrate a problem
History/Bugs/Notes:
---------------------------------------------------------------------------
delete
add
delete
add
I'm not entirely sure how to specify a simple ground clause
this will work fine --- just intended to show that my domain was
well-formed.
actually labeling the axiom branches was a red herring. This will
plan correctly, as it should.
because it will backtrack from checking the arg1-unbound branch of
the check-constraint axiom, and will then try to call
(<= :unbound :unbound). This problem should NOT be plannable. | (in-package :shop-user)
with backtracking over if - then - else axioms in shop2 .
[ 2004/01/23 : rpg ] Created .
(defdomain test-axiom-else-branch
(
(:method (simple-test)
((ok-precondition 12))
(!operator))
(:method (test)
((labeled-precondition 12))
(!operator))
(:method (second-test)
((check-constraint (<= :unbound :unbound))
(:unbound 12))
(!operator))
(:method (test-patch)
()
(:ordered
(!establish-bad-axiom)
(!check-bad-axiom)))
(:operator (!operator)
()
()
())
(:- (ok-precondition ?x)
( (call = ?x 12) )
( (eval error "should not have reached this branch!") ))
(:- (bad-axiom ?x)
( (call = ?x 12) ))
(:operator (!establish-bad-axiom)
()
()
(bad-axiom 22))
(:operator (!check-bad-axiom)
((bad-axiom 22))
()
())
(:- (labeled-precondition ?x)
branch1 ( (call = ?x 12) )
branch2 ( (eval error "should not have reached this branch!") ))
(:- (check-constraint (<= ?x ?y))
arg1-unbound ( (:unbound ?x) )
arg2-unbound ( (:unbound ?y) )
tested ((call <= ?x ?y)))
(:- (:unbound :unbound) nil)
))
(defproblem is-domain-ok
NIL
(simple-test))
(defproblem testing-labeled-axioms
NIL
(test))
If the bug is n't fixed , this problem will cause to crash ,
[ 2004/02/02 : rpg ]
(defproblem testing-check-constraint
NIL
(second-test))
(defproblem this-should-work-if-patch-is-ok
NIL
(test-patch))
(defun test-axiom-else-bug ()
"Tests the CUT behavior of axioms with if-then-else branches.
Should return T if behavior is correct."
(and
(let ((retval (find-plans 'is-domain-ok)))
(equalp retval '(((!OPERATOR) 1.0))))
(let ((retval (find-plans 'testing-labeled-axioms)))
(equalp retval '(((!OPERATOR) 1.0))))
(let ((retval (find-plans 'testing-check-constraint)))
(null retval))))
|
d53296b9552987abe4f53483bd5415647cb7740f0ecf90d8663ce9692562a909 | gbwey/predicate-typed | Maybe.hs | # LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeApplications #
# LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
# LANGUAGE TypeFamilies #
# LANGUAGE PolyKinds #
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE NoStarIsType #
{-# LANGUAGE EmptyDataDeriving #-}
-- | promoted 'Maybe' functions
module Predicate.Data.Maybe (
-- ** predicates
IsNothing
, IsJust
-- ** constructors
, MkNothing
, MkNothing'
, MkJust
-- ** get rid of Maybe
, Just'
, JustDef
, JustFail
, MapMaybe
, CatMaybes
, MaybeBool
, MaybeIn
, MaybeId
-- ** type families
, MaybeInT
) where
import Predicate.Core
import Predicate.Misc
import Predicate.Util
import Predicate.Data.Foldable (ConcatMap)
import Predicate.Data.Monoid (MEmptyP)
import Predicate.Data.Lifted (EmptyBool)
import Data.Proxy (Proxy(..))
import Data.Kind (Type)
import Data.Maybe (isJust, isNothing)
import GHC.TypeLits (ErrorMessage((:$$:),(:<>:)))
import qualified GHC.TypeLits as GL
-- $setup
-- >>> :set -XDataKinds
-- >>> :set -XTypeApplications
-- >>> :set -XTypeOperators
-- >>> :set -XOverloadedStrings
-- >>> import qualified Data.Map.Strict as M
-- >>> import Predicate
> > > import qualified Data . Semigroup as SG
| similar to ' Data . Maybe.fromJust '
--
> > > pz @(Just ' > > Succ ) ( Just 20 )
-- Val 21
--
> > > pz @(Just ' > > Succ ) Nothing
-- Fail "Just' found Nothing"
--
data Just' deriving Show
instance Show a => P Just' (Maybe a) where
type PP Just' (Maybe a) = a
eval _ opts lr =
let msg0 = "Just'"
in pure $ case lr of
Nothing -> mkNode opts (Fail (msg0 <> " found Nothing")) "" []
Just a -> mkNode opts (Val a) (msg0 <> " " <> showL opts a) []
-- | constructs a Nothing for a given type
data MkNothing' t deriving Show
works always ! is a good alternative and then do nt need the extra ' t '
-- for this to be useful has to have 't' else we end up with tons of problems
instance P (MkNothing' t) a where
type PP (MkNothing' t) a = Maybe (PP t a)
eval _ opts _ =
let msg0 = "MkNothing"
in pure $ mkNode opts (Val Nothing) msg0 []
-- | constructs a Nothing for a given type
data MkNothing (t :: Type) deriving Show
type MkNothingT (t :: Type) = MkNothing' (Hole t)
instance P (MkNothing t) x where
type PP (MkNothing t) x = PP (MkNothingT t) x
eval _ = eval (Proxy @(MkNothingT t))
-- | 'GHC.Maybe.Just' constructor
--
> > > pz @(MkJust I d ) 44
-- Val (Just 44)
--
data MkJust p deriving Show
instance ( PP p x ~ a
, P p x
, Show a
) => P (MkJust p) x where
type PP (MkJust p) x = Maybe (PP p x)
eval _ opts x = do
let msg0 = "MkJust"
pp <- eval (Proxy @p) opts x
pure $ case getValueLR NoInline opts msg0 pp [] of
Left e -> e
Right p ->
let d = Just p
in mkNode opts (Val d) (msg0 <> " Just " <> showL opts p) [hh pp]
-- | similar to 'Data.Maybe.isJust'
--
-- >>> pz @IsJust Nothing
-- Val False
--
-- >>> pz @IsJust (Just 'a')
-- Val True
--
data IsJust deriving Show
instance x ~ Maybe a
=> P IsJust x where
type PP IsJust x = Bool
eval _ opts x = pure $ mkNodeB opts (isJust x) "IsJust" []
-- | similar to 'Data.Maybe.isNothing'
--
-- >>> pz @IsNothing (Just 123)
-- Val False
--
-- >>> pz @IsNothing Nothing
-- Val True
--
-- >>> pl @(Not IsNothing &&& ('Just Id >> Id + 12)) (Just 1)
Present ( True,13 ) ( ' ( True,13 ) )
-- Val (True,13)
--
-- >>> pl @(Not IsNothing &&& ('Just Id >> Id + 12)) Nothing
Error ' ) ( ' ( , ) )
Fail " ' ) "
--
data IsNothing deriving Show
instance x ~ Maybe a
=> P IsNothing x where
type PP IsNothing x = Bool
eval _ opts x = pure $ mkNodeB opts (isNothing x) "IsNothing" []
-- | like 'Data.Maybe.mapMaybe'
--
> > > pl @(MapMaybe ( ( Le 3 ) I d ) I d ) [ 1 .. 5 ]
-- Present [1,2,3] ((>>) [1,2,3] | {Concat [1,2,3] | [[1],[2],[3],[],[]]})
-- Val [1,2,3]
--
> > > pl @(MapMaybe ( ( Gt 3 ) I d ) I d ) [ 1 .. 5 ]
Present [ 4,5 ] ( ( > > ) [ 4,5 ] | { Concat [ 4,5 ] | [ [ ] , [ ] , [ ] , [ 4],[5 ] ] } )
Val [ 4,5 ]
--
data MapMaybe p q deriving Show
type MapMaybeT p q = ConcatMap (p >> MaybeId MEmptyP '[Id]) q
instance P (MapMaybeT p q) x => P (MapMaybe p q) x where
type PP (MapMaybe p q) x = PP (MapMaybeT p q) x
eval _ = eval (Proxy @(MapMaybeT p q))
| similar to ' Data . Maybe.catMaybes '
--
-- >>> pl @CatMaybes [Just 'a',Nothing,Just 'c',Just 'd',Nothing]
-- Present "acd" ((>>) "acd" | {Concat "acd" | ["a","","c","d",""]})
-- Val "acd"
--
data CatMaybes deriving Show
type CatMaybesT = MapMaybe Id Id
instance P CatMaybesT x => P CatMaybes x where
type PP CatMaybes x = PP CatMaybesT x
eval _ = eval (Proxy @CatMaybesT)
| Convenient method to convert a value @p@ to a ' Maybe ' based on a predicate
if then Just @p@ else Nothing
--
> > > pz @(MaybeBool ( I d > 4 ) I d ) 24
-- Val (Just 24)
--
-- >>> pz @(MaybeBool (Id > 4) Id) (-5)
-- Val Nothing
--
-- >>> pz @(MaybeBool 'True 10) ()
-- Val (Just 10)
--
data MaybeBool b p deriving Show
type MaybeBoolT b p = EmptyBool Maybe b p
instance P (MaybeBoolT b p) x => P (MaybeBool b p) x where
type PP (MaybeBool b p) x = PP (MaybeBoolT b p) x
eval _ = eval (Proxy @(MaybeBoolT b p))
-- | extract the value from a 'Maybe' otherwise use the default value: similar to 'Data.Maybe.fromMaybe'
--
> > > pl @(JustDef ' True I d ) Nothing -- preserves TrueP / FalseP in the default case
True ( Nothing )
-- Val True
--
> > > pl @(JustDef ( > 12 ) Snd ) ( 3,Just False ) -- for normal case
Present False ( Just )
-- Val False
--
> > > pl @(JustDef ) ( True , Nothing )
Present True ( Nothing )
-- Val True
--
> > > pz @(JustDef ( 1 % 4 ) I d ) ( Just 20.4 )
Val ( 102 % 5 )
--
> > > pz @(JustDef ( 1 % 4 ) I d ) Nothing
Val ( 1 % 4 )
--
-- >>> pz @(JustDef (MEmptyT _) Id) (Just "xy")
-- Val "xy"
--
-- >>> pz @(JustDef (MEmptyT _) Id) Nothing
-- Val ()
--
-- >>> pz @(JustDef (MEmptyT (SG.Sum _)) Id) Nothing
-- Val (Sum {getSum = 0})
--
> > > pl @(JustDef 0 I d ) ( Just 123 )
Present 123 ( Just )
Val 123
--
-- >>> pl @(JustDef 0 Id) Nothing
Present 0 ( Nothing )
-- Val 0
--
> > > pl @(JustDef 99 I d ) ( Just 12 )
Present 12 ( Just )
-- Val 12
--
> > > pl @(JustDef 99 I d ) Nothing
Present 99 ( Nothing )
-- Val 99
--
> > > pl @(JustDef ( 99 -% 1 ) I d ) Nothing
Present ( -99 ) % 1 ( Nothing )
-- Val ((-99) % 1)
--
> > > pl @(JustDef ( MEmptyT _ ) I d ) ( Just ( SG.Sum 123 ) )
Present Sum { getSum = 123 } ( Just )
Val ( Sum { getSum = 123 } )
--
-- >>> pl @(JustDef (MEmptyT _) Id) (Nothing @(SG.Sum _))
Present Sum { getSum = 0 } ( Nothing )
-- Val (Sum {getSum = 0})
--
data JustDef p q deriving Show
instance ( PP p x ~ a
, PP q x ~ Maybe a
, P p x
, P q x
)
=> P (JustDef p q) x where
type PP (JustDef p q) x = MaybeT (PP q x)
eval _ opts x = do
let msg0 = "JustDef"
qq <- eval (Proxy @q) opts x
case getValueLR NoInline opts msg0 qq [] of
Left e -> pure e
Right q ->
case q of
Just b -> pure $ mkNode opts (Val b) (msg0 <> " Just") [hh qq]
Nothing -> do
pp <- eval (Proxy @p) opts x
pure $ case getValueLR NoInline opts msg0 pp [hh qq] of
Left e -> e
Right _ -> mkNodeCopy opts pp (msg0 <> " Nothing") [hh qq]
-- | extract the value from a 'Maybe' or fail with the given message
--
> > > pz @(JustFail " nope " I d ) ( Just 99 )
-- Val 99
--
-- >>> pz @(JustFail "nope" Id) Nothing
-- Fail "nope"
--
> > > pz @(JustFail ( PrintF " oops=%d " Snd ) Fst ) ( Nothing , 123 )
-- Fail "oops=123"
--
> > > pz @(JustFail ( PrintF " oops=%d " Snd ) Fst ) ( Just ' x ' , 123 )
-- Val 'x'
--
data JustFail p q deriving Show
instance ( PP p x ~ String
, PP q x ~ Maybe a
, P p x
, P q x
)
=> P (JustFail p q) x where
type PP (JustFail p q) x = MaybeT (PP q x)
eval _ opts x = do
let msg0 = "JustFail"
qq <- eval (Proxy @q) opts x
case getValueLR NoInline opts msg0 qq [] of
Left e -> pure e
Right q ->
case q of
Just b -> pure $ mkNode opts (Val b) (msg0 <> " Just") [hh qq]
Nothing -> do
pp <- eval (Proxy @p) opts x
pure $ case getValueLR NoInline opts msg0 pp [hh qq] of
Left e -> e
Right p -> mkNode opts (Fail p) (msg0 <> " Nothing") [hh qq, hh pp]
-- | destructs an Maybe value
@n@ @Nothing@ receives @(PP s x , Proxy result)@ ( you can use the proxy with MEmptyP )
@p@ @Just a@ receives , a)@
-- @s@ points to the environment you want to pass in
-- @t@ points to the Maybe value
--
-- >>> pz @(MaybeIn Fst Snd Fst Snd) ('a', Just 'x')
-- Val 'x'
--
-- >>> pz @(MaybeIn Fst Snd Fst Snd) ('a', Nothing)
-- Val 'a'
--
> > > pl @(MaybeIn " none " " just " ( ) I d ) ( Just ( SG.Sum 12 ) )
Present " just " ( MaybeIn(Just ) " just " | Sum { getSum = 12 } )
-- Val "just"
--
> > > pl @(MaybeIn ( Snd > > FailP " oops " ) Snd Fst Snd ) ( " abc " , Nothing )
-- Error oops (Proxy | MaybeIn(Nothing) n failed)
-- Fail "oops"
--
> > > pl @(MaybeIn ( Snd > > MEmptyP ) Snd Fst Snd ) ( " abc " , Nothing )
-- Present () (MaybeIn(Nothing) () | ())
-- Val ()
--
data MaybeIn n p s t deriving Show
instance ( Show a
, Show (PP p (y,a))
, P n (y,Proxy z)
, P p (y,a)
, PP n (y,Proxy z) ~ PP p (y,a)
, z ~ PP p (y,a)
, P s x
, P t x
, PP t x ~ Maybe a
, PP s x ~ y
) => P (MaybeIn n p s t) x where
type PP (MaybeIn n p s t) x = MaybeInT p (PP s x) (PP t x)
eval _ opts x = do
let msg0 = "MaybeIn"
lr <- runPQ NoInline msg0 (Proxy @s) (Proxy @t) opts x []
case lr of
Left e -> pure e
Right (s,t,ss,tt) -> do
let hhs = [hh ss, hh tt]
case t of
Nothing -> do
let msg1 = msg0 <> "(Nothing)"
nn <- eval (Proxy @n) opts (s,Proxy @z)
pure $ case getValueLR NoInline opts (msg1 <> " n failed") nn hhs of
Left e -> e
Right c -> mkNodeCopy opts nn (show3 opts msg1 c ()) hhs
Just a -> do
let msg1 = msg0 <> "(Just)"
pp <- eval (Proxy @p) opts (s,a)
pure $ case getValueLR NoInline opts (msg1 <> " p failed") pp hhs of
Left e -> e
Right c -> mkNodeCopy opts pp (show3 opts msg1 c a) hhs
| calculate the return type for ' MaybeIn '
type family MaybeInT (p :: k) (y :: Type) (ma :: Type) where
MaybeInT p y (Maybe a) = PP p (y,a)
MaybeInT _ _ o = GL.TypeError (
'GL.Text "MaybeInT: expected 'Maybe a' "
':$$: 'GL.Text "o = "
':<>: 'GL.ShowType o)
-- | simple version of 'MaybeIn' with Id as the Maybe value and the environment set to ()
--
-- >>> pz @(MaybeId '("x","oops") '(Id,"fromjust")) (Just "ok")
-- Val ("ok","fromjust")
--
-- >>> pz @(MaybeId '("x","oops") '(Id,"fromjust")) Nothing
-- Val ("x","oops")
--
> > > pz @(MaybeId " found nothing " ( ShowP Pred ) ) ( Just 20 )
Val " 19 "
--
> > > pz @(MaybeId " found nothing " ( ShowP Pred ) ) Nothing
-- Val "found nothing"
--
-- >>> pl @(MaybeId 'True Id) Nothing
-- True (MaybeIn(Nothing) True | ())
-- Val True
--
-- >>> pl @(MaybeId 'True IdBool) (Just False)
-- False (MaybeIn(Just) False | False)
-- Val False
--
-- >>> pl @(MaybeId (FailT _ "failed4") Id) (Just 10)
-- Present 10 (MaybeIn(Just) 10 | 10)
-- Val 10
--
-- >>> pl @(MaybeId 'False Id) Nothing
-- False (MaybeIn(Nothing) False | ())
-- Val False
--
-- >>> pl @(MaybeId (FailT _ "err") Id) Nothing
-- Error err (Proxy | MaybeIn(Nothing) n failed)
-- Fail "err"
--
> > > pz @(MaybeId 99 I d ) ( Just 12 )
-- Val 12
--
-- >>> pz @(MaybeId 99 Id) Nothing
-- Val 99
--
-- >>> pl @(MaybeId MEmptyP Ones) (Just "ab")
-- Present ["a","b"] (MaybeIn(Just) ["a","b"] | "ab")
-- Val ["a","b"]
--
-- >>> pl @(MaybeId MEmptyP Ones) Nothing
-- Present [] (MaybeIn(Nothing) [] | ())
-- Val []
--
-- >>> pl @(MaybeId MEmptyP (Fst ==! Snd)) (Just ('x','z'))
-- Present LT (MaybeIn(Just) LT | ('x','z'))
--
> > > pl @(MaybeId MEmptyP ( Fst = = ! Snd ) ) ( Nothing @(Char , ) )
-- Present EQ (MaybeIn(Nothing) EQ | ())
-- Val EQ
--
data MaybeId n p deriving Show
type MaybeIdT n p = MaybeIn (Snd >> n) (Snd >> p) () Id
instance P (MaybeIdT n p) x => P (MaybeId n p) x where
type PP (MaybeId n p) x = PP (MaybeIdT n p) x
eval _ = eval (Proxy @(MaybeIdT n p))
| null | https://raw.githubusercontent.com/gbwey/predicate-typed/51f8d51f662722e1109d2ff35644aea1e0371b42/src/Predicate/Data/Maybe.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes #
# LANGUAGE OverloadedStrings #
# LANGUAGE ConstraintKinds #
# LANGUAGE EmptyDataDeriving #
| promoted 'Maybe' functions
** predicates
** constructors
** get rid of Maybe
** type families
$setup
>>> :set -XDataKinds
>>> :set -XTypeApplications
>>> :set -XTypeOperators
>>> :set -XOverloadedStrings
>>> import qualified Data.Map.Strict as M
>>> import Predicate
Val 21
Fail "Just' found Nothing"
| constructs a Nothing for a given type
for this to be useful has to have 't' else we end up with tons of problems
| constructs a Nothing for a given type
| 'GHC.Maybe.Just' constructor
Val (Just 44)
| similar to 'Data.Maybe.isJust'
>>> pz @IsJust Nothing
Val False
>>> pz @IsJust (Just 'a')
Val True
| similar to 'Data.Maybe.isNothing'
>>> pz @IsNothing (Just 123)
Val False
>>> pz @IsNothing Nothing
Val True
>>> pl @(Not IsNothing &&& ('Just Id >> Id + 12)) (Just 1)
Val (True,13)
>>> pl @(Not IsNothing &&& ('Just Id >> Id + 12)) Nothing
| like 'Data.Maybe.mapMaybe'
Present [1,2,3] ((>>) [1,2,3] | {Concat [1,2,3] | [[1],[2],[3],[],[]]})
Val [1,2,3]
>>> pl @CatMaybes [Just 'a',Nothing,Just 'c',Just 'd',Nothing]
Present "acd" ((>>) "acd" | {Concat "acd" | ["a","","c","d",""]})
Val "acd"
Val (Just 24)
>>> pz @(MaybeBool (Id > 4) Id) (-5)
Val Nothing
>>> pz @(MaybeBool 'True 10) ()
Val (Just 10)
| extract the value from a 'Maybe' otherwise use the default value: similar to 'Data.Maybe.fromMaybe'
preserves TrueP / FalseP in the default case
Val True
for normal case
Val False
Val True
>>> pz @(JustDef (MEmptyT _) Id) (Just "xy")
Val "xy"
>>> pz @(JustDef (MEmptyT _) Id) Nothing
Val ()
>>> pz @(JustDef (MEmptyT (SG.Sum _)) Id) Nothing
Val (Sum {getSum = 0})
>>> pl @(JustDef 0 Id) Nothing
Val 0
Val 12
Val 99
Val ((-99) % 1)
>>> pl @(JustDef (MEmptyT _) Id) (Nothing @(SG.Sum _))
Val (Sum {getSum = 0})
| extract the value from a 'Maybe' or fail with the given message
Val 99
>>> pz @(JustFail "nope" Id) Nothing
Fail "nope"
Fail "oops=123"
Val 'x'
| destructs an Maybe value
@s@ points to the environment you want to pass in
@t@ points to the Maybe value
>>> pz @(MaybeIn Fst Snd Fst Snd) ('a', Just 'x')
Val 'x'
>>> pz @(MaybeIn Fst Snd Fst Snd) ('a', Nothing)
Val 'a'
Val "just"
Error oops (Proxy | MaybeIn(Nothing) n failed)
Fail "oops"
Present () (MaybeIn(Nothing) () | ())
Val ()
| simple version of 'MaybeIn' with Id as the Maybe value and the environment set to ()
>>> pz @(MaybeId '("x","oops") '(Id,"fromjust")) (Just "ok")
Val ("ok","fromjust")
>>> pz @(MaybeId '("x","oops") '(Id,"fromjust")) Nothing
Val ("x","oops")
Val "found nothing"
>>> pl @(MaybeId 'True Id) Nothing
True (MaybeIn(Nothing) True | ())
Val True
>>> pl @(MaybeId 'True IdBool) (Just False)
False (MaybeIn(Just) False | False)
Val False
>>> pl @(MaybeId (FailT _ "failed4") Id) (Just 10)
Present 10 (MaybeIn(Just) 10 | 10)
Val 10
>>> pl @(MaybeId 'False Id) Nothing
False (MaybeIn(Nothing) False | ())
Val False
>>> pl @(MaybeId (FailT _ "err") Id) Nothing
Error err (Proxy | MaybeIn(Nothing) n failed)
Fail "err"
Val 12
>>> pz @(MaybeId 99 Id) Nothing
Val 99
>>> pl @(MaybeId MEmptyP Ones) (Just "ab")
Present ["a","b"] (MaybeIn(Just) ["a","b"] | "ab")
Val ["a","b"]
>>> pl @(MaybeId MEmptyP Ones) Nothing
Present [] (MaybeIn(Nothing) [] | ())
Val []
>>> pl @(MaybeId MEmptyP (Fst ==! Snd)) (Just ('x','z'))
Present LT (MaybeIn(Just) LT | ('x','z'))
Present EQ (MaybeIn(Nothing) EQ | ())
Val EQ
| # LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeApplications #
# LANGUAGE DataKinds #
# LANGUAGE TypeFamilies #
# LANGUAGE PolyKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE NoStarIsType #
module Predicate.Data.Maybe (
IsNothing
, IsJust
, MkNothing
, MkNothing'
, MkJust
, Just'
, JustDef
, JustFail
, MapMaybe
, CatMaybes
, MaybeBool
, MaybeIn
, MaybeId
, MaybeInT
) where
import Predicate.Core
import Predicate.Misc
import Predicate.Util
import Predicate.Data.Foldable (ConcatMap)
import Predicate.Data.Monoid (MEmptyP)
import Predicate.Data.Lifted (EmptyBool)
import Data.Proxy (Proxy(..))
import Data.Kind (Type)
import Data.Maybe (isJust, isNothing)
import GHC.TypeLits (ErrorMessage((:$$:),(:<>:)))
import qualified GHC.TypeLits as GL
> > > import qualified Data . Semigroup as SG
| similar to ' Data . Maybe.fromJust '
> > > pz @(Just ' > > Succ ) ( Just 20 )
> > > pz @(Just ' > > Succ ) Nothing
data Just' deriving Show
instance Show a => P Just' (Maybe a) where
type PP Just' (Maybe a) = a
eval _ opts lr =
let msg0 = "Just'"
in pure $ case lr of
Nothing -> mkNode opts (Fail (msg0 <> " found Nothing")) "" []
Just a -> mkNode opts (Val a) (msg0 <> " " <> showL opts a) []
data MkNothing' t deriving Show
works always ! is a good alternative and then do nt need the extra ' t '
instance P (MkNothing' t) a where
type PP (MkNothing' t) a = Maybe (PP t a)
eval _ opts _ =
let msg0 = "MkNothing"
in pure $ mkNode opts (Val Nothing) msg0 []
data MkNothing (t :: Type) deriving Show
type MkNothingT (t :: Type) = MkNothing' (Hole t)
instance P (MkNothing t) x where
type PP (MkNothing t) x = PP (MkNothingT t) x
eval _ = eval (Proxy @(MkNothingT t))
> > > pz @(MkJust I d ) 44
-- | wrap the result of evaluating @p@ in a 'Just'.
data MkJust p deriving Show

instance ( PP p x ~ a
         , P p x
         , Show a
         ) => P (MkJust p) x where
  type PP (MkJust p) x = Maybe (PP p x)
  eval _ opts x = do
    let msg0 = "MkJust"
    pp <- eval (Proxy @p) opts x
    pure $ case getValueLR NoInline opts msg0 pp [] of
      -- p itself failed: propagate its failure node
      Left e -> e
      Right p ->
        let d = Just p
        in mkNode opts (Val d) (msg0 <> " Just " <> showL opts p) [hh pp]
-- | predicate on 'Maybe': 'True' for a 'Just' (cf. 'Data.Maybe.isJust').
data IsJust deriving Show

instance x ~ Maybe a
         => P IsJust x where
  type PP IsJust x = Bool
  eval _ opts x = pure $ mkNodeB opts (isJust x) "IsJust" []
-- Present (True,13) ('(True,13))
-- Error '
-- Fail "'"
-- | predicate on 'Maybe': 'True' for 'Nothing' (cf. 'Data.Maybe.isNothing').
data IsNothing deriving Show

instance x ~ Maybe a
         => P IsNothing x where
  type PP IsNothing x = Bool
  eval _ opts x = pure $ mkNodeB opts (isNothing x) "IsNothing" []
-- >>> pl @(MapMaybe (MaybeBool (Le 3) Id) Id) [1..5]
-- >>> pl @(MapMaybe (MaybeBool (Gt 3) Id) Id) [1..5]
-- Present [4,5] ((>>) [4,5] | {Concat [4,5] | [[],[],[],[4],[5]]})
-- Val [4,5]
-- | similar to 'Data.Maybe.mapMaybe': apply @p@ to each element of the
-- foldable @q@ and keep only the 'Just' results.
data MapMaybe p q deriving Show
-- each Nothing becomes mempty ([]) and each Just a becomes [a], then concat
type MapMaybeT p q = ConcatMap (p >> MaybeId MEmptyP '[Id]) q

instance P (MapMaybeT p q) x => P (MapMaybe p q) x where
  type PP (MapMaybe p q) x = PP (MapMaybeT p q) x
  eval _ = eval (Proxy @(MapMaybeT p q))
-- | similar to 'Data.Maybe.catMaybes'
-- | collect the 'Just' values from a list of 'Maybe's
-- (cf. 'Data.Maybe.catMaybes'); defined as 'MapMaybe' with identities.
data CatMaybes deriving Show
type CatMaybesT = MapMaybe Id Id

instance P CatMaybesT x => P CatMaybes x where
  type PP CatMaybes x = PP CatMaybesT x
  eval _ = eval (Proxy @CatMaybesT)
-- | Convenient method to convert a value @p@ to a 'Maybe' based on a predicate
-- @b@: if @b@ holds then @Just p@ else @Nothing@.
--
-- >>> pz @(MaybeBool (Id > 4) Id) 24
-- | convert @p@ to a 'Maybe' using the predicate @b@: @Just p@ when @b@
-- holds, @Nothing@ otherwise; delegates to the generic 'EmptyBool'.
data MaybeBool b p deriving Show
type MaybeBoolT b p = EmptyBool Maybe b p

instance P (MaybeBoolT b p) x => P (MaybeBool b p) x where
  type PP (MaybeBool b p) x = PP (MaybeBoolT b p) x
  eval _ = eval (Proxy @(MaybeBoolT b p))
-- True (Nothing)
-- Present False (Just)
--
-- >>> pl @(JustDef Fst Snd) (True, Nothing)
-- Present True (Nothing)
--
-- >>> pz @(JustDef (1 % 4) Id) (Just 20.4)
-- Val (102 % 5)
--
-- >>> pz @(JustDef (1 % 4) Id) Nothing
-- Val (1 % 4)
--
-- >>> pl @(JustDef 0 Id) (Just 123)
-- Present 123 (Just)
-- Val 123
-- Present 0 (Nothing)
--
-- >>> pl @(JustDef 99 Id) (Just 12)
-- Present 12 (Just)
--
-- >>> pl @(JustDef 99 Id) Nothing
-- Present 99 (Nothing)
--
-- >>> pl @(JustDef (99 -% 1) Id) Nothing
-- Present (-99) % 1 (Nothing)
--
-- >>> pl @(JustDef (MEmptyT _) Id) (Just (SG.Sum 123))
-- Present Sum {getSum = 123} (Just)
-- Val (Sum {getSum = 123})
-- Present Sum {getSum = 0} (Nothing)
-- | extract the value from the 'Maybe' @q@, falling back to the default @p@
-- when @q@ evaluates to 'Nothing' (cf. 'Data.Maybe.fromMaybe').
data JustDef p q deriving Show

instance ( PP p x ~ a
         , PP q x ~ Maybe a
         , P p x
         , P q x
         )
         => P (JustDef p q) x where
  type PP (JustDef p q) x = MaybeT (PP q x)
  eval _ opts x = do
    let msg0 = "JustDef"
    qq <- eval (Proxy @q) opts x
    case getValueLR NoInline opts msg0 qq [] of
      Left e -> pure e
      Right q ->
        case q of
          Just b -> pure $ mkNode opts (Val b) (msg0 <> " Just") [hh qq]
          Nothing -> do
            -- the default p is only evaluated when q is Nothing
            pp <- eval (Proxy @p) opts x
            pure $ case getValueLR NoInline opts msg0 pp [hh qq] of
              Left e -> e
              Right _ -> mkNodeCopy opts pp (msg0 <> " Nothing") [hh qq]
-- >>> pz @(JustFail "nope" Id) (Just 99)
-- >>> pz @(JustFail (PrintF "oops=%d" Snd) Fst) (Nothing, 123)
-- >>> pz @(JustFail (PrintF "oops=%d" Snd) Fst) (Just 'x', 123)
-- | extract the value from the 'Maybe' @q@, failing with the message
-- produced by @p@ when @q@ evaluates to 'Nothing'.
data JustFail p q deriving Show

instance ( PP p x ~ String
         , PP q x ~ Maybe a
         , P p x
         , P q x
         )
         => P (JustFail p q) x where
  type PP (JustFail p q) x = MaybeT (PP q x)
  eval _ opts x = do
    let msg0 = "JustFail"
    qq <- eval (Proxy @q) opts x
    case getValueLR NoInline opts msg0 qq [] of
      Left e -> pure e
      Right q ->
        case q of
          Just b -> pure $ mkNode opts (Val b) (msg0 <> " Just") [hh qq]
          Nothing -> do
            -- the failure message p is only evaluated when q is Nothing
            pp <- eval (Proxy @p) opts x
            pure $ case getValueLR NoInline opts msg0 pp [hh qq] of
              Left e -> e
              Right p -> mkNode opts (Fail p) (msg0 <> " Nothing") [hh qq, hh pp]
-- @n@ @Nothing@ receives @(PP s x, Proxy result)@ (you can use the proxy with MEmptyP)
-- @p@ @Just a@ receives @(PP s x, a)@
--
-- >>> pl @(MaybeIn "none" "just" () Id) (Just (SG.Sum 12))
-- Present "just" (MaybeIn(Just) "just" | Sum {getSum = 12})
--
-- >>> pl @(MaybeIn (Snd >> FailP "oops") Snd Fst Snd) ("abc", Nothing)
-- >>> pl @(MaybeIn (Snd >> MEmptyP) Snd Fst Snd) ("abc", Nothing)
-- | eliminator for 'Maybe' (cf. 'Data.Maybe.maybe'):
-- evaluates the 'Maybe' @t@ against the environment @s@;
-- on 'Nothing', runs @n@ with @(PP s x, Proxy result)@ (usable with
-- 'MEmptyP'); on @Just a@, runs @p@ with @(PP s x, a)@.
data MaybeIn n p s t deriving Show

instance ( Show a
         , Show (PP p (y,a))
         , P n (y,Proxy z)
         , P p (y,a)
           -- both branches must produce the same result type z
         , PP n (y,Proxy z) ~ PP p (y,a)
         , z ~ PP p (y,a)
         , P s x
         , P t x
         , PP t x ~ Maybe a
         , PP s x ~ y
         ) => P (MaybeIn n p s t) x where
  type PP (MaybeIn n p s t) x = MaybeInT p (PP s x) (PP t x)
  eval _ opts x = do
    let msg0 = "MaybeIn"
    -- evaluate the environment s and the Maybe t together
    lr <- runPQ NoInline msg0 (Proxy @s) (Proxy @t) opts x []
    case lr of
      Left e -> pure e
      Right (s,t,ss,tt) -> do
        let hhs = [hh ss, hh tt]
        case t of
          Nothing -> do
            let msg1 = msg0 <> "(Nothing)"
            nn <- eval (Proxy @n) opts (s,Proxy @z)
            pure $ case getValueLR NoInline opts (msg1 <> " n failed") nn hhs of
              Left e -> e
              Right c -> mkNodeCopy opts nn (show3 opts msg1 c ()) hhs
          Just a -> do
            let msg1 = msg0 <> "(Just)"
            pp <- eval (Proxy @p) opts (s,a)
            pure $ case getValueLR NoInline opts (msg1 <> " p failed") pp hhs of
              Left e -> e
              Right c -> mkNodeCopy opts pp (show3 opts msg1 c a) hhs
-- | calculate the return type for 'MaybeIn'
-- | calculate the return type for 'MaybeIn'; reports a custom compile-time
-- error unless the scrutinee type is a 'Maybe'.
type family MaybeInT (p :: k) (y :: Type) (ma :: Type) where
  MaybeInT p y (Maybe a) = PP p (y,a)
  MaybeInT _ _ o = GL.TypeError (
      'GL.Text "MaybeInT: expected 'Maybe a' "
      ':$$: 'GL.Text "o = "
      ':<>: 'GL.ShowType o)
-- >>> pz @(MaybeId "found nothing" (ShowP Pred)) (Just 20)
-- Val "19"
--
-- >>> pz @(MaybeId "found nothing" (ShowP Pred)) Nothing
-- >>> pz @(MaybeId 99 Id) (Just 12)
-- >>> pl @(MaybeId MEmptyP (Fst ==! Snd)) (Nothing @(Char,Char))
-- | simplified 'MaybeIn' with no extra environment: @n@ handles the
-- 'Nothing' case and @p@ receives the unwrapped value directly.
data MaybeId n p deriving Show
type MaybeIdT n p = MaybeIn (Snd >> n) (Snd >> p) () Id

instance P (MaybeIdT n p) x => P (MaybeId n p) x where
  type PP (MaybeId n p) x = PP (MaybeIdT n p) x
  eval _ = eval (Proxy @(MaybeIdT n p))
|
a69177ae81411eb22c0601eaaec4cb5a430dd6a9aa6988009939e92ff87952d0 | didierverna/declt | assess.lisp | ;; assess.lisp --- Definitions extraction
;; Copyright (C) 2020-2022 Didier Verna

;; Author: Didier Verna

;; This file is part of Declt.
;; Permission to use, copy, modify, and distribute this software for any
;; purpose with or without fee is hereby granted, provided that the above
;; copyright notice and this permission notice appear in all copies.
;; THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
;; WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
;; MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
;; ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
;; WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
;; ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
;; OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
;;; Commentary:
;; #### NOTE: there are more clever ways to create definitions, notably by
;; avoiding traversing the same structures several times. For example, modules
;; belong to a single system, and files belong to a single module, etc. On the
;; other hand, there are corner cases which would make it tricky to be clever
;; (e.g. complex systems which belong to the same file as the corresponding
;; simple system). So the way it's done below is much simpler and less
;; error-prone: create definitions type after type, independently from each
;; other, and resolve the cross-references later (that's what the finalization
;; process does), even those which were known right from the start.
;;; Code:
(in-package :net.didierverna.declt.assess)
(in-readtable :net.didierverna.declt)
;; ==========================================================================
;; Definitions Creation
;; ==========================================================================
;; ---------------
;; Local utilities
;; ---------------
(defun components (module type)
  "Return the list of all (sub)TYPE components found in MODULE's tree."
  ;; #### NOTE: we accept subtypes of TYPE because ASDF components might be
  ;; subclassed. An example of this is SBCL's grovel facility which subclasses
  ;; asdf:cl-source-file.
  (loop :for component :in (component-children module)
	:if (typep component type)
	  :collect component
	;; Recurse into sub-modules to collect nested components too.
	:if (typep component 'asdf:module)
	  :nconc (components component type)))
;; ------------------
;; System definitions
;; ------------------
(defun system-dependencies (system)
  "Return all system names from SYSTEM dependencies.
This includes both :defsystem-depends-on and :depends-on."
  ;; Normalize each dependency definition before extracting its system name.
  (loop :for dependency-def
	  :in (append (system-defsystem-depends-on system)
		      (component-sideway-dependencies system))
	:collect (reordered-dependency-def-system
		  (reorder-dependency-def dependency-def))))
(defun sub-component-p
    (component directory
     ;; #### FIXME: not sure this is still valid, as we now have a specific
     ;; way of loading UIOP and ASDF.
     ;; #### NOTE: COMPONENT-PATHNAME can return nil when it's impossible to
     ;; locate the component's source. This happens for example with UIOP
     ;; when ASDF is embedded in a Lisp implementation like SBCL. Declt fell
     ;; on this issue when trying to document CL-PROJECT, which explicitly
     ;; depends on UIOP.
     &aux (component-pathname (component-pathname component)))
  "Return T if COMPONENT can be found under DIRECTORY."
  (when component-pathname
    ;; Match against DIRECTORY and all of its sub-directories.
    (pathname-match-p component-pathname
		      (make-pathname :name :wild
				     :directory
				     (append (pathname-directory directory)
					     '(:wild-inferiors))))))
(defun subsystem
    (name system directory
     ;; #### TODO: RESOLVE-DEPENDENCY-NAME can fail on components that are
     ;; not loaded (e.g. with a missing :feature). Currently, we simply
     ;; ignore the error, but this raises the general question of
     ;; representing unloaded components. There is a similar case in
     ;; finalize.lisp.
     &aux (subsystem (ignore-errors (resolve-dependency-name system name))))
  "Return NAME'd SYSTEM dependency if found under DIRECTORY, or nil."
  (when (and subsystem (sub-component-p subsystem directory))
    subsystem))
(defun subsystems (system directory)
  "Return the list of SYSTEM and all its dependencies found under DIRECTORY.
All dependencies are descended recursively. Both :defsystem-depends-on and
:depends-on are included. Potential duplicates are removed."
  (cons
   system
   (remove-duplicates
    ;; Recursively expand each local dependency into itself plus its own
    ;; local dependencies.
    (mapcan (lambda (subsystem) (subsystems subsystem directory))
	    (remove-if #'null
		       (mapcar
			(lambda (name) (subsystem name system directory))
			(system-dependencies system))))
    ;; :from-end keeps the FIRST occurrence of each system, preserving
    ;; dependency discovery order.
    :from-end t)))
(defun make-all-system-definitions (system)
  "Return a list of all system definitions for SYSTEM.
The only guarantee is that the definition for SYSTEM comes first.
The other considered systems are those found recursively in SYSTEM's
dependencies, and located under SYSTEM's directory.
See `subsystems' for more information."
  ;; SUBSYSTEMS returns SYSTEM first, so the resulting definition list
  ;; starts with SYSTEM's own definition.
  (loop :for subsystem :in (subsystems system (component-pathname system))
	:collect (make-system-definition subsystem)))
;; ------------------
;; Module definitions
;; ------------------
;; #### WARNING: do not confuse this function with ASDF's MODULE-COMPONENTS
;; (which, BTW, is deprecated in favor of COMPONENT-CHILDREN).
(defun module-components (module)
  "Return the list of all module components found in MODULE's tree."
  ;; Delegates to the generic COMPONENTS tree walker with type ASDF:MODULE.
  (components module 'asdf:module))
(defun make-all-module-definitions (definitions)
  "Return a list of all module definitions for system DEFINITIONS."
  ;; Walk each system's component tree and wrap every module found.
  (loop :for definition :in definitions
	:nconc (loop :for module :in (module-components (system definition))
		     :collect (make-module-definition module))))
;; ----------------
;; File definitions
;; ----------------
;; #### WARNING: in the unlikely but possible case that physical files would
;; be shared by different systems being documented at the same time, we would
;; end up with duplicate file documentation. The problem is that these files
;; would still be logically different, because they would belong to different
;; modules. We cannot really merge their definitions because they would have
;; different logical names (hence anchors etc.). So in the end, it's better to
;; leave it like that.
(defun file-components (module)
  "Return the list of all file components found in MODULE's tree."
  ;; Delegates to the generic COMPONENTS tree walker with type
  ;; ASDF:FILE-COMPONENT.
  (components module 'asdf:file-component))
(defun make-all-file-definitions
    (definitions &aux (systems (mapcar #'system definitions)))
  "Return a list of all file definitions for system DEFINITIONS."
  (append (make-system-file-definitions systems)
	  (mapcar #'make-file-definition
		  ;; #### WARNING: some systems (e.g. Declt) list their system
		  ;; files explicitly, for example as static files. We want to
		  ;; filter those out here, since they are already created as
		  ;; pseudo Lisp files by MAKE-SYSTEM-FILE-DEFINITIONS.
		  (remove-if
		   (lambda (file)
		     (string= (pathname-type (component-pathname file)) "asd"))
		   (mapcan #'file-components systems)))))
;; -------------------
;; Package definitions
;; -------------------
(defun make-all-package-definitions
    (file-definitions system-definitions
     &aux (packages (list-all-packages))
	  ;; #### NOTE: I don't bother filtering out non-Lisp files here. No
	  ;; package could be defined in those anyway.
	  (pathnames (mapcar (lambda (definition)
			       (component-pathname (file definition)))
		       file-definitions))
	  ;; "SYSTEM/" prefixes used to catch source-less packages named
	  ;; after one of the documented systems.
	  (prefixes (mapcar (lambda (definition)
			      (concatenate 'string
				(component-name (system definition))
				"/"))
		      system-definitions))
	  definitions)
  "Return a list of all package definitions for FILE- and SYSTEM-DEFINITIONS.
This list contains definitions for packages defined in the corresponding
files, or for which the source is not found, but the name is of the form
SYSTEM/... (case insensitive) for one of the corresponding systems."
  (dolist (package packages)
    (let ((pathname (source-by-object package))
	  (name (package-name package)))
      (when (or (member pathname pathnames :test #'equal)
		;; #### FIXME: remind me why we need that stuff?
		;; #### WARNING: shaky heuristic, bound to fail one day or
		;; another.
		(and (null pathname)
		     (find-if (lambda (prefix)
				(let ((pos (search prefix name
						   :test #'char-equal)))
				  (and pos (zerop pos))))
			      prefixes)))
	(push (make-package-definition package) definitions))))
  definitions)
;; ------------------
;; Symbol definitions
;; ------------------
;; #### PORTME: FUNCTION-LAMBDA-EXPRESSION's return values are not
;; standardized.
(defun funcoid-name (funcoid)
  "Return FUNCOID's name, or NIL.
FUNCOID may be a function, a macro function, or a compiler macro function.
Lambda expression are not considered as proper names, so NIL is returned."
  ;; Only the third return value (the name) is of interest here.
  (let ((name (nth-value 2 (function-lambda-expression funcoid))))
    (etypecase name
      ;; A bare symbol is the name itself (NIL matches here too).
      (symbol name)
      (list (case (first name)
	      ;; (MACRO-FUNCTION sym) / (COMPILER-MACRO sym): unwrap.
	      ((macro-function compiler-macro) (second name))
	      ;; (SETF sym) names are kept as-is.
	      (setf name)
	      ;; Anything else (e.g. a lambda expression) is not a name.
	      (otherwise nil))))))
;; #### PORTME.
(defun make-symbol-definitions
    (symbol packages pathnames &aux (setf-symbol `(setf ,symbol)) definitions)
  "Make and return a list of all existing domestic definitions for SYMBOL.
Domesticity is defined in relation to domestic PACKAGES and PATHNAMES; see
`domesticp'."
  ;; Constants.
  (when (and (eql (sb-int:info :variable :kind symbol) :constant)
	     (domesticp symbol (source-by-name symbol :constant)
			packages pathnames))
    (endpush (make-constant-definition symbol) definitions))
  ;; Special variables.
  (when (and (eql (sb-int:info :variable :kind symbol) :special)
	     (domesticp symbol (source-by-name symbol :variable)
			packages pathnames))
    (endpush (make-special-definition symbol) definitions))
  ;; Symbol macros.
  (when (and (eql (sb-int:info :variable :kind symbol) :macro)
	     (domesticp symbol (source-by-name symbol :symbol-macro)
			packages pathnames))
    (endpush (make-symbol-macro-definition symbol) definitions))
  ;; Macros. A macro whose original name differs from SYMBOL is documented
  ;; as an alias.
  (when-let (macro (macro-function symbol))
    (let ((original-name (funcoid-name macro)))
      (if (or (not original-name) (eq symbol original-name))
	(when (domesticp symbol (source-by-object macro)
			 packages pathnames)
	  (endpush (make-macro-definition symbol macro) definitions))
	(when (domesticp symbol nil packages pathnames)
	  (endpush (make-macro-alias-definition symbol) definitions)))))
  ;; Compiler macros.
  (when-let (compiler-macro (compiler-macro-function symbol))
    (let ((original-name (funcoid-name compiler-macro)))
      (if (or (not original-name) (eq symbol original-name))
	(when (domesticp symbol (source-by-object compiler-macro)
			 packages pathnames)
	  (endpush (make-compiler-macro-definition symbol compiler-macro)
		   definitions))
	(when (domesticp symbol nil packages pathnames)
	  (endpush (make-compiler-macro-alias-definition symbol)
		   definitions)))))
  ;; Setf compiler macros.
  (when-let (compiler-macro (compiler-macro-function setf-symbol))
    (let ((original-name (funcoid-name compiler-macro)))
      (if (or (not original-name) (equal setf-symbol original-name))
	(when (domesticp symbol (source-by-object compiler-macro)
			 packages pathnames)
	  (endpush
	   (make-compiler-macro-definition symbol compiler-macro :setf t)
	   definitions))
	(when (domesticp symbol nil packages pathnames)
	  (endpush (make-compiler-macro-alias-definition symbol t)
		   definitions)))))
  ;; Setf expanders.
  (when-let (expander (sb-int:info :setf :expander symbol))
    (when (domesticp symbol (source-by-name symbol :setf-expander)
		     packages pathnames)
      (endpush (make-expander-definition symbol expander) definitions)))
  ;; (Generic) functions.
  (when-let (function (and (fboundp symbol)
			   (not (macro-function symbol))
			   (fdefinition symbol)))
    (let ((original-name (funcoid-name function)))
      (if (or (not original-name) (eq symbol original-name))
	(cond ((typep function 'generic-function)
	       ;; #### NOTE: although we might be creating both generic
	       ;; function and associated method definitions here, we defer
	       ;; the computation of those cross-references until the
	       ;; finalization process: that process may add new method
	       ;; definitions, so anything computed now could end up being
	       ;; invalidated.
	       (when (domesticp symbol (source-by-object function)
				packages pathnames)
		 (endpush (make-generic-function-definition symbol function)
			  definitions))
	       (dolist (method (generic-function-methods function))
		 (when (domesticp symbol (source-by-object method)
				  packages pathnames)
		   (endpush (make-method-definition method) definitions))))
	      (t
	       (when (domesticp symbol (source-by-object function)
				packages pathnames)
		 (endpush (make-ordinary-function-definition symbol function)
			  definitions))))
	(when (domesticp symbol nil packages pathnames)
	  (endpush (make-function-alias-definition symbol) definitions)))))
  ;; (Generic) setf functions.
  (when-let (function (and (fboundp setf-symbol) (fdefinition setf-symbol)))
    (let ((original-name (funcoid-name function)))
      (if (or (not original-name) (equal setf-symbol original-name))
	(cond ((typep function 'generic-function)
	       (when (domesticp symbol (source-by-object function)
				packages pathnames)
		 (endpush (make-generic-function-definition symbol function
							    :setf t)
			  definitions))
	       (dolist (method (generic-function-methods function))
		 (when (domesticp symbol (source-by-object method)
				  packages pathnames)
		   (endpush (make-method-definition method) definitions))))
	      (t
	       (when (domesticp symbol (source-by-object function)
				packages pathnames)
		 (endpush (make-ordinary-function-definition symbol function
							     :setf t)
			  definitions))))
	(when (domesticp symbol nil packages pathnames)
	  (endpush (make-function-alias-definition symbol t) definitions)))))
  ;; Method combinations.
  ;; #### WARNING: method combinations are ill-defined in the Common Lisp
  ;; standard: they are not necessarily global objects and don't have an
  ;; actual namespace. To be fully correct we should document every single
  ;; generic function's method combination as a local object. We assume
  ;; instead that only one method combination is defined per name, document
  ;; it like the other definitions, and let generic functions using it
  ;; provide a cross-reference to it, also advertising the options in use.
  ;;
  ;; Modern SBCL reifies method combinations in the
  ;; SB-PCL::**METHOD-COMBINATIONS** hash table. Each entry is an
  ;; SB-PCL::METHOD-COMBINATION-INFO structure which contains, among other
  ;; things, a cache associating method combination options with actual
  ;; method combination objects. Thus a method combination, as a general
  ;; entity (as opposed to every single instantiation of it), is adequately
  ;; and uniquely represented by its entry in that table. See also the
  ;; comment about generic function stabilization in finalize.lisp.
  ;;
  ;; Finally, note that the source information must be retrieved with the
  ;; SOURCE-BY-NAME protocol, NOT the SOURCE-BY-OBJECT one: the latter would
  ;; return the method-combination-info structure's own source information,
  ;; which is an internal SBCL file.
  (when-let (combination (gethash symbol sb-pcl::**method-combinations**))
    (when (domesticp symbol (source-by-name symbol :method-combination)
		     packages pathnames)
      (endpush (make-combination-definition symbol combination) definitions)))
  ;; #### WARNING: classoids and their slots are treated differently from
  ;; generic functions and their methods: we never create standalone slots
  ;; or partial (slot-less) classoid definitions. This is because (1) a
  ;; classoid definition is always monolithic whereas a generic function may
  ;; be scattered all over the place, (2) a standalone slot cannot be
  ;; related back to its classoid by name alone, so rendering it would make
  ;; little sense, and (3) SBCL's typed structure slot description objects
  ;; have no back pointer to the structure description, so finalization
  ;; could not compute the cross-references. Consequently, domestic
  ;; classoids are created completely here, with all classoid / slot
  ;; cross-references (some slots may still get a foreign flag), and foreign
  ;; structures containing domestic slots are not created now (maybe later,
  ;; during the finalization process).
  ;; Structures, classes, and conditions.
  (when-let (classoid (find-class symbol nil))
    (let ((source (source-by-object classoid)))
      (when (domesticp symbol source packages pathnames)
	(let ((classoid-definition
		(make-classoid-definition symbol classoid packages pathnames)))
	  (endpush classoid-definition definitions)
	  (dolist (slot-definition (direct-slots classoid-definition))
	    (endpush slot-definition definitions))))))
  ;; Typed structures.
  (when-let (structure (sb-int:info :typed-structure :info symbol))
    (let ((source (source-by-object structure)))
      (when (domesticp symbol source packages pathnames)
	(let ((structure-definition
		(make-classoid-definition symbol structure packages pathnames)))
	  (endpush structure-definition definitions)
	  (dolist (slot-definition (direct-slots structure-definition))
	    (endpush slot-definition definitions))))))
  ;; Types.
  (when-let (expander (sb-int:info :type :expander symbol))
    (when (domesticp symbol (source-by-name symbol :type)
		     packages pathnames)
      (endpush (make-type-definition symbol expander) definitions)))
  definitions)
(defun package-symbols (package &aux symbols)
  "Return the list of symbols from home PACKAGE."
  (do-symbols (symbol package symbols)
    ;; Only keep symbols whose home package is PACKAGE (skip inherited ones).
    (when (eq (symbol-package symbol) package)
      ;; #### WARNING: we may encounter the same symbol several times, hence
      ;; the need to PUSHNEW here.
      (pushnew symbol symbols))))
(defun make-all-symbol-definitions
    (packages pathnames all-symbols-p &aux definitions processed)
  "Return a list of all domestic symbol definitions.
If ALL-SYMBOLS-P, introspect all accessible symbols in the current Lisp
environment. Otherwise (the default), limit introspection to the symbols from
domestic PACKAGES.
Domesticity is defined in relation to domestic PACKAGES and PATHNAMES; see
`domesticp'."
  (if all-symbols-p
    (do-all-symbols (symbol definitions)
      ;; DO-ALL-SYMBOLS may visit the same symbol several times, so keep a
      ;; list of the symbols already processed.
      (unless (member symbol processed)
	(push symbol processed)
	(when-let (symbol-definitions
		   (make-symbol-definitions symbol packages pathnames))
	  (setq definitions (nconc definitions symbol-definitions)))))
    ;; PACKAGE-SYMBOLS already de-duplicates within each package.
    (dolist (symbol (mapcan #'package-symbols packages) definitions)
      (when-let (symbol-definitions
		 (make-symbol-definitions symbol packages pathnames))
	(setq definitions (nconc definitions symbol-definitions))))))
;; ==========================================================================
;; Report Class
;; ==========================================================================
;; All slots except SYSTEM-NAME are filled in by `assess'.
(defclass report ()
  ((system-name :documentation "The main system's name, coerced to a string."
     :initarg :system-name :reader system-name)
   (library-name :documentation "The library's name."
     :accessor library-name)
   (tagline :documentation "The reference manual's tagline."
     :accessor tagline)
   (library-version :documentation "The library's version."
     :accessor library-version)
   (contacts :documentation "The list of contacts for the library.
Each element is of the form (NAME . EMAIL) where both NAME and EMAIL are
strings or NIL, and cannot be null at the same time."
     :accessor contacts)
   (copyright-years :documentation "A copyright years string."
     :accessor copyright-years)
   (license :documentation "The library's license."
     :accessor license)
   (introduction :documentation "Contents for an introduction chapter."
     :accessor introduction)
   (conclusion :documentation "Contents for a conclusion chapter."
     :accessor conclusion)
   (definitions :documentation "The list of definitions."
     :accessor definitions))
  (:documentation "The Report class.
This is the class holding all extracted documentation information."))
(defmethod print-object ((report report) stream)
  "Show REPORT's library name."
  (print-unreadable-object (report stream :type t)
    ;; ~A is equivalent to PRINC for the library name string.
    (format stream "~A" (library-name report))))
(defun make-report (system-name)
  "Make a new report for SYSTEM-NAME (an ASDF system designator)."
  ;; Only SYSTEM-NAME is provided here; the remaining slots are populated
  ;; later by `assess'.
  (make-instance 'report :system-name (asdf:coerce-name system-name)))
;; ==========================================================================
;; Entry Point
;; ==========================================================================
(defun load-system (system-name &aux (system (find-system system-name)))
  "Load ASDF SYSTEM-NAME in a manner suitable to extract documentation.
Return the corresponding ASDF system.
SYSTEM-NAME is an ASDF system designator."
  ;; Because of some bootstrapping issues, ASDF and UIOP need some
  ;; special-casing.
  (cond ((string= (asdf:coerce-name system-name) "uiop")
	 ;; Load UIOP's own system file, then its concatenated sources.
	 (load (merge-pathnames "uiop/uiop.asd"
				(system-source-directory
				 (asdf:find-system :asdf))))
	 (mapc #'load
	   (asdf:input-files :monolithic-concatenate-source-op
			     "asdf/driver")))
	((string= (asdf:coerce-name system-name) "asdf")
	 ;; Document "asdf/defsystem" rather than the bare "asdf" system.
	 (setq system (find-system "asdf/defsystem"))
	 (mapc #'load
	   (asdf:input-files :monolithic-concatenate-source-op
			     "asdf/defsystem")))
	(t
	 (asdf:load-system system-name)))
  system)
(defun assess
    (system-name
     &key (introspection-level 1)
	  (library-name (asdf:coerce-name system-name))
	  (tagline nil taglinep)
	  (library-version nil library-version-p)
	  (contact nil contactp)
	  copyright-years
	  license
	  introduction
	  conclusion
     &aux (system (load-system system-name))
	  (report (make-report system-name)))
  "Extract and return documentation information for ASDF SYSTEM-NAME.
The documentation information is returned in a REPORT structure, which see.

SYSTEM-NAME is an ASDF system designator. The following keyword parameters
allow to specify or override some bits of information.
- INTROSPECTION-LEVEL: how hard to introspect the Lisp environment. At level 1
  (the default), scan only the symbols from domestic packages. At level 2,
  scan all accessible symbols in the Lisp environment. Some additional
  information may be discovered in the process, at the expense of a much
  higher computation time.
- LIBRARY-NAME: name of the library being documented. Defaults to the system
  name.
- TAGLINE: small text to be used as the manual's subtitle, or NIL.
  Defaults to the system long name or description.
- LIBRARY-VERSION: version information, or NIL.
  Defaults to the system version.
- CONTACT: contact information, or NIL. The default value is computed from the
  system maintainer(s), author(s), and mailto information. Accepts a contact
  string, or a list of such. See `parse-contact-string' for more information.
- COPYRIGHT-YEARS: copyright years information or NIL. Defaults to the current
  year.
- LICENSE: license information. Defaults to NIL. Also accepts :mit, :boost,
  :bsd, :gpl, :lgpl, and :ms-pl.
- INTRODUCTION: introduction chapter contents in Texinfo format.
  Defaults to NIL.
- CONCLUSION: conclusion chapter contents in Texinfo format.
  Defaults to NIL."
  (check-type introspection-level (member 1 2))
  (check-type library-name non-empty-string)
  (setf (library-name report) library-name)
  ;; Tagline: default, reject multi-line values, and strip a trailing period.
  (unless taglinep
    (setq tagline (or (system-long-name system)
		      (component-description system))))
  (unless (one-liner-p tagline)
    (setq tagline nil))
  (when (and tagline (char= (aref tagline (1- (length tagline))) #\.))
    (setq tagline (subseq tagline 0 (1- (length tagline)))))
  (setf (tagline report) tagline)
  ;; Version: default to the system version; reject multi-line values.
  (unless library-version-p
    (setq library-version (component-version system)))
  (unless (one-liner-p library-version)
    (setq library-version nil))
  (setf (library-version report) library-version)
  ;; Contacts: either as explicitly provided, or computed from the system's
  ;; author, maintainer, and mailto information.
  (cond (contactp
	 (setf (contacts report) (|parse-contact(s)| contact)))
	(t
	 (setq contact (system-author system))
	 (when (stringp contact) (setq contact (list contact)))
	 ;; Maintainers take precedence over authors in the contact list.
	 (typecase (system-maintainer system)
	   (string (push (system-maintainer system) contact))
	   (cons (setq contact (append (system-maintainer system) contact))))
	 (let ((contacts (|parse-contact(s)| contact)))
	   ;; A valid mailto either completes a single email-less contact, or
	   ;; is added as a name-less one (unless already present).
	   (when-let (mailto (when (one-liner-p (system-mailto system))
			       (validate-email (system-mailto system))))
	     (if (and (= (length contacts) 1) (null (cdr (car contacts))))
	       (setf (cdr (car contacts)) mailto)
	       (unless (find mailto contacts :key #'cdr :test #'string=)
		 (endpush (cons nil mailto) contacts))))
	   (setf (contacts report) contacts))))
  ;; Copyright years: default to the current year; reject multi-line values.
  (setq copyright-years
	(or copyright-years
	    (multiple-value-bind (second minute hour date month year)
		(get-decoded-time)
	      (declare (ignore second minute hour date month))
	      (format nil "~A" year))))
  (unless (one-liner-p copyright-years)
    (setq copyright-years nil))
  (setf (copyright-years report) copyright-years)
  ;; License: must be one of the known entries in *LICENSES*.
  (when license
    (setq license (assoc license *licenses*))
    (unless license
      (error "License not found.")))
  (setf (license report) license)
  (setf (introduction report) introduction)
  (setf (conclusion report) conclusion)
  ;; Create all definitions (systems, modules, files, packages, symbols),
  ;; then resolve their cross-references.
  (let* ((system-definitions (make-all-system-definitions system))
	 (module-definitions (make-all-module-definitions system-definitions))
	 (file-definitions (make-all-file-definitions system-definitions))
	 (pathnames
	   (mapcar #'component-pathname
	     (mapcar #'file
	       (remove-if-not #'lisp-file-definition-p file-definitions))))
	 (package-definitions
	   (make-all-package-definitions file-definitions system-definitions))
	 (packages (mapcar #'definition-package package-definitions))
	 (symbol-definitions
	   (make-all-symbol-definitions
	    packages pathnames (> introspection-level 1))))
    (setf (definitions report)
	  (append system-definitions module-definitions file-definitions
		  package-definitions symbol-definitions))
    (finalize (definitions report) packages pathnames))
  report)
;;; assess.lisp ends here
| null | https://raw.githubusercontent.com/didierverna/declt/2b81350102d78b8122be908b8619f43fbf87da57/assess/src/assess.lisp | lisp | assess.lisp --- Definitions extraction
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Commentary:
avoiding traversing the same structures several times. For example, modules
belong to a single system, and files belong to a single module, etc. On the
other hand, there are corner cases which would make it tricky to be clever
(e.g. complex systems which belong to the same file as the corresponding
simple system). So the way it's done below is much simpler and less
error-prone: create definitions type after type, independently from each
other, and resolve the cross-references later (that's what the finalization
process does), even those which were known right from the start.
Code:
==========================================================================
Definitions Creation
==========================================================================
---------------
Local utilities
---------------
asdf:cl-source-file.
------------------
System definitions
------------------
loaded (e.g. with a missing :feature). Currently, we simply ignore the
error, but this raises the general question of representing unloaded
components. There is a similar case in finalize.lisp.
------------------
Module definitions
------------------
(which, BTW, is deprecated in favor of COMPONENT-CHILDREN).
----------------
File definitions
----------------
be shared by different systems being documented at the same time, we would
end up with duplicate file documentation. The problem is that these files
would still be logically different, because they would belong to different
modules. We cannot really merge their definitions because they would have
different logical names (hence anchors etc.). So in the end, it's better to
leave it like that.
files explicitly, for example as static files. We want to
filter those out here, since they are already created as pseudo
Lisp files by MAKE-SYSTEM-FILE-DEFINITIONS.
-------------------
Package definitions
-------------------
package could be defined in those anyway.
another.
------------------
Symbol definitions
------------------
standardized.
see
Constants.
Special variables.
Symbol macros.
function and associated method definitions here, we defer
the computation of those cross-references until the
finalization process. I haven't thought this through, but
since the finalization process may add new method
definitions, there may be cases where a computation done
here would end up being invalidated.
Method combinations.
standard. In particular, they are not necessarily global objects and
(), and then in a ELS paper
every single generic function's method combination as a local object. We
will assume, however, that the programmer has some sanity, and only
will be documented like the other ones, and generic functions using it
will provide a cross-reference to it, also advertising the options in
use.
considerably sanitized the situation. Method combinations are now reified
in the SB-PCL::**METHOD-COMBINATIONS** hash table. Each entry is in fact
things, a cache associating method combination options with actual method
combination objects. Thus, a method combination, as a general entity, and
as opposed to every single instantiation of it, is adequately and
also the comment about generic function stabilization in finalize.lisp.
Finally, note that the source information must be retrieved with the
SOURCE-BY-NAME protocol, NOT by the SOURCE-BY-OBJECT one. Indeed,
the later would return the method-combination-info structure's source
generic functions and their methods. We will never create standalone
reasons for this.
place, a classoid definition is always monolithic.
function (they have the same name), it's not the case for slots, so
rendering a standalone slot would make little sense.
have a back pointer to the structure description, so it would be
impossible for the finalization process to compute the
cross-references.
Because of all this, this is what we do.
slot cross-references here. Some slots may still get a foreign flag,
but that probably doesn't really matter.
but those won't be created now (maybe later, during the finalization
process).
Structures, classes, and conditions.
Typed structures.
Types.
see
==========================================================================
Report Class
==========================================================================
==========================================================================
Entry Point
==========================================================================
special-casing.
assess.lisp ends here |
Copyright ( C ) 2020 - 2022
Author : < >
This file is part of Declt .
THIS SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
# # # # NOTE : there are more clever ways to create definitions , notably by
(in-package :net.didierverna.declt.assess)
(in-readtable :net.didierverna.declt)
(defun components (module type)
"Return the list of all (sub)TYPE components found in MODULE's tree."
# # # # NOTE : we accept subtypes of TYPE because ASDF components might be
subclassed . An example of this is SBCL 's grovel facility which subclasses
(loop :for component :in (component-children module)
:if (typep component type)
:collect component
:if (typep component 'asdf:module)
:nconc (components component type)))
(defun system-dependencies (system)
"Return all system names from SYSTEM dependencies.
This includes both :defsystem-depends-on and :depends-on."
(mapcar (lambda (dependency-def)
(reordered-dependency-def-system
(reorder-dependency-def dependency-def)))
(append (system-defsystem-depends-on system)
(component-sideway-dependencies system))))
(defun sub-component-p
(component directory
# # # # FIXME : not sure this is still valid , as we now have a specific
way of loading UIOP and ASDF .
# # # # NOTE : COMPONENT - PATHNAME can return nil when it 's impossible to
locate the component 's source . This happens for example with UIOP when
ASDF is embedded in a Lisp implementation like SBCL .
fell on this issue when trying to document CL - PROJECT , which
explicitly depends on UIOP .
&aux (component-pathname (component-pathname component)))
"Return T if COMPONENT can be found under DIRECTORY."
(when component-pathname
(pathname-match-p component-pathname
(make-pathname :name :wild
:directory
(append (pathname-directory directory)
'(:wild-inferiors))))))
(defun subsystem
(name system directory
# # # # TODO : - NAME can fail on components that are not
&aux (subsystem (ignore-errors (resolve-dependency-name system name))))
"Return NAME'd SYSTEM dependency if found under DIRECTORY, or nil."
(when (and subsystem (sub-component-p subsystem directory))
subsystem))
(defun subsystems (system directory)
"Return the list of SYSTEM and all its dependencies found under DIRECTORY.
All dependencies are descended recursively. Both :defsystem-depends-on and
:depends-on are included. Potential duplicates are removed."
(cons
system
(remove-duplicates
(mapcan (lambda (subsystem) (subsystems subsystem directory))
(remove-if #'null
(mapcar
(lambda (name) (subsystem name system directory))
(system-dependencies system))))
:from-end t)))
(defun make-all-system-definitions (system)
"Return a list of all system definitions for SYSTEM.
The only guarantee is that the definition for SYSTEM comes first.
The other considered systems are those found recursively in SYSTEM's
dependencies, and located under SYSTEM's directory.
See `subsystems' for more information."
(mapcar #'make-system-definition
(subsystems system (component-pathname system))))
# # # # WARNING : do not confuse this function with ASDF 's MODULE - COMPONENTS
(defun module-components (module)
"Return the list of all module components found in MODULE's tree."
(components module 'asdf:module))
(defun make-all-module-definitions (definitions)
"Return a list of all module definitions for system DEFINITIONS."
(mapcar #'make-module-definition
(mapcan #'module-components
(mapcar #'system definitions))))
# # # # WARNING : in the unlikely but possible case that physical files would
(defun file-components (module)
"Return the list of all file components found in MODULE's tree."
(components module 'asdf:file-component))
(defun make-all-file-definitions
(definitions &aux (systems (mapcar #'system definitions)))
"Return a list of all file definitions for system DEFINITIONS."
(append (make-system-file-definitions systems)
(mapcar #'make-file-definition
# # # # WARNING : some systems ( e.g. ) list their system
(remove-if
(lambda (file)
(string= (pathname-type (component-pathname file)) "asd"))
(mapcan #'file-components systems)))))
(defun make-all-package-definitions
(file-definitions system-definitions
&aux (packages (list-all-packages))
# # # # NOTE : I do n't bother filtering out non - Lisp files here . No
(pathnames (mapcar (lambda (definition)
(component-pathname (file definition)))
file-definitions))
(prefixes (mapcar (lambda (definition)
(concatenate 'string
(component-name (system definition))
"/"))
system-definitions))
definitions)
"Return a list of all package definitions for FILE- and SYSTEM-DEFINITIONS.
This list contains definitions for packages defined in the corresponding
files, or for which the source is not found, but the name is of the form
SYSTEM/... (case insensitive) for one of the corresponding systems."
(dolist (package packages)
(let ((pathname (source-by-object package))
(name (package-name package)))
(when (or (member pathname pathnames :test #'equal)
# # # # FIXME : remind me why we need that stuff ?
# # # # WARNING : shaky heuristic , bound to fail one day or
(and (null pathname)
(find-if (lambda (prefix)
(let ((pos (search prefix name
:test #'char-equal)))
(and pos (zerop pos))))
prefixes)))
(push (make-package-definition package) definitions))))
definitions)
# # # # PORTME : FUNCTION - LAMBDA - EXPRESSION 's return values are not
(defun funcoid-name (funcoid)
"Return FUNCOID's name, or NIL.
FUNCOID may be a function, a macro function, or a compiler macro function.
Lambda expression are not considered as proper names, so NIL is returned."
(multiple-value-bind (lambda-expression closure-p name)
(function-lambda-expression funcoid)
(declare (ignore lambda-expression closure-p))
(etypecase name
(symbol name)
(list (case (first name)
((macro-function compiler-macro) (second name))
(setf name)
(otherwise nil))))))
# # # # PORTME .
(defun make-symbol-definitions
(symbol packages pathnames &aux (setf-symbol `(setf ,symbol)) definitions)
"Make and return a list of all existing domestic definitions for SYMBOL.
`domesticp'."
(when (and (eql (sb-int:info :variable :kind symbol) :constant)
(domesticp symbol (source-by-name symbol :constant)
packages pathnames))
(endpush (make-constant-definition symbol) definitions))
(when (and (eql (sb-int:info :variable :kind symbol) :special)
(domesticp symbol (source-by-name symbol :variable)
packages pathnames))
(endpush (make-special-definition symbol) definitions))
(when (and (eql (sb-int:info :variable :kind symbol) :macro)
(domesticp symbol (source-by-name symbol :symbol-macro)
packages pathnames))
(endpush (make-symbol-macro-definition symbol) definitions))
Macros .
(when-let (macro (macro-function symbol))
(let ((original-name (funcoid-name macro)))
(if (or (not original-name) (eq symbol original-name))
(when (domesticp symbol (source-by-object macro)
packages pathnames)
(endpush (make-macro-definition symbol macro) definitions))
(when (domesticp symbol nil packages pathnames)
(endpush (make-macro-alias-definition symbol) definitions)))))
Compiler macros .
(when-let (compiler-macro (compiler-macro-function symbol))
(let ((original-name (funcoid-name compiler-macro)))
(if (or (not original-name) (eq symbol original-name))
(when (domesticp symbol (source-by-object compiler-macro)
packages pathnames)
(endpush (make-compiler-macro-definition symbol compiler-macro)
definitions))
(when (domesticp symbol nil packages pathnames)
(endpush (make-compiler-macro-alias-definition symbol)
definitions)))))
Setf compiler macros .
(when-let (compiler-macro (compiler-macro-function setf-symbol))
(let ((original-name (funcoid-name compiler-macro)))
(if (or (not original-name) (equal setf-symbol original-name))
(when (domesticp symbol (source-by-object compiler-macro)
packages pathnames)
(endpush
(make-compiler-macro-definition symbol compiler-macro :setf t)
definitions))
(when (domesticp symbol nil packages pathnames)
(endpush (make-compiler-macro-alias-definition symbol t)
definitions)))))
Setf expanders .
(when-let (expander (sb-int:info :setf :expander symbol))
(when (domesticp symbol (source-by-name symbol :setf-expander)
packages pathnames)
(endpush (make-expander-definition symbol expander) definitions)))
( Generic ) functions .
(when-let (function (and (fboundp symbol)
(not (macro-function symbol))
(fdefinition symbol)))
(let ((original-name (funcoid-name function)))
(if (or (not original-name) (eq symbol original-name))
(cond ((typep function 'generic-function)
# # # # NOTE : although we might be creating both generic
(when (domesticp symbol (source-by-object function)
packages pathnames)
(endpush (make-generic-function-definition symbol function)
definitions))
(dolist (method (generic-function-methods function))
(when (domesticp symbol (source-by-object method)
packages pathnames)
(endpush (make-method-definition method) definitions))))
(t
(when (domesticp symbol (source-by-object function)
packages pathnames)
(endpush (make-ordinary-function-definition symbol function)
definitions))))
(when (domesticp symbol nil packages pathnames)
(endpush (make-function-alias-definition symbol) definitions)))))
( Generic ) setf functions .
(when-let (function (and (fboundp setf-symbol) (fdefinition setf-symbol)))
(let ((original-name (funcoid-name function)))
(if (or (not original-name) (equal setf-symbol original-name))
(cond ((typep function 'generic-function)
(when (domesticp symbol (source-by-object function)
packages pathnames)
(endpush (make-generic-function-definition symbol function
:setf t)
definitions))
(dolist (method (generic-function-methods function))
(when (domesticp symbol (source-by-object method)
packages pathnames)
(endpush (make-method-definition method) definitions))))
(t
(when (domesticp symbol (source-by-object function)
packages pathnames)
(endpush (make-ordinary-function-definition symbol function
:setf t)
definitions))))
(when (domesticp symbol nil packages pathnames)
(endpush (make-function-alias-definition symbol t) definitions)))))
# # # # WARNING : method combinations are ill - defined in the Common Lisp
do n't have an actual namespace . This has been explained , first in a blog
( ) . As a consequence , in order
to be 100 % correct ( and also 200 % pedantic ) , we should normally document
defines one method combination for every name . The corresponding object
After my ELS paper , made some changes to SBCL that
an SB - PCL::METHOD - COMBINATION - INFO structure , which contains , among other
uniquely represented by the entry in SB - PCL::**METHOD - COMBINATIONS * * . See
information , which is an internal SBCL file .
(when-let (combination (gethash symbol sb-pcl::**method-combinations**))
(when (domesticp symbol (source-by-name symbol :method-combination)
packages pathnames)
(endpush (make-combination-definition symbol combination) definitions)))
# # # # WARNING : classoids and their slots are treated differently from
slots or partial classoid definitions ( missing slots ) . There are several
1 . While a generic function definition may be scattered all over the
2 . While it 's easy to look at a method and figure out the generic
3 . On top of that , SBCL 's typed structure slot description objects do n't
1 . We create domestic classoids completely , and compute all classoid /
2 . Conversely , there may be foreign structures containing domestic slots ,
(when-let (classoid (find-class symbol nil))
(let ((source (source-by-object classoid)))
(when (domesticp symbol source packages pathnames)
(let ((classoid-definition
(make-classoid-definition symbol classoid packages pathnames)))
(endpush classoid-definition definitions)
(dolist (slot-definition (direct-slots classoid-definition))
(endpush slot-definition definitions))))))
(when-let (structure (sb-int:info :typed-structure :info symbol))
(let ((source (source-by-object structure)))
(when (domesticp symbol source packages pathnames)
(let ((structure-definition
(make-classoid-definition symbol structure packages pathnames)))
(endpush structure-definition definitions)
(dolist (slot-definition (direct-slots structure-definition))
(endpush slot-definition definitions))))))
(when-let (expander (sb-int:info :type :expander symbol))
(when (domesticp symbol (source-by-name symbol :type)
packages pathnames)
(endpush (make-type-definition symbol expander) definitions)))
definitions)
(defun package-symbols (package &aux symbols)
"Return the list of symbols from home PACKAGE."
(do-symbols (symbol package symbols)
(when (eq (symbol-package symbol) package)
# # # # WARNING : we may encounter the same symbol several times . Hence
the need to PUSHNEW here .
(pushnew symbol symbols))))
(defun make-all-symbol-definitions
(packages pathnames all-symbols-p &aux definitions processed)
"Return a list of all domestic symbol definitions.
If ALL-SYMBOLS-P, introspect all accessible symbols in the current Lisp
environment. Otherwise (the default), limit introspection to the symbols from
domestic PACKAGES.
`domesticp'."
(if all-symbols-p
(do-all-symbols (symbol definitions)
(unless (member symbol processed)
(push symbol processed)
(when-let (symbol-definitions
(make-symbol-definitions symbol packages pathnames))
(setq definitions (nconc definitions symbol-definitions)))))
(dolist (symbol (mapcan #'package-symbols packages) definitions)
(when-let (symbol-definitions
(make-symbol-definitions symbol packages pathnames))
(setq definitions (nconc definitions symbol-definitions))))))
(defclass report ()
((system-name :documentation "The main system's name, coerced to a string."
:initarg :system-name :reader system-name)
(library-name :documentation "The library's name."
:accessor library-name)
(tagline :documentation "The reference manual's tagline."
:accessor tagline)
(library-version :documentation "The library's version."
:accessor library-version)
(contacts :documentation "The list of contacts for the library.
Each element is of the form (NAME . EMAIL) where both NAME and EMAIL are
strings or NIL, and cannot be null at the same time."
:accessor contacts)
(copyright-years :documentation "A copyright years string."
:accessor copyright-years)
(license :documentation "The library's license."
:accessor license)
(introduction :documentation "Contents for an introduction chapter."
:accessor introduction)
(conclusion :documentation "Contents for a conclusion chapter."
:accessor conclusion)
(definitions :documentation "The list of definitions."
:accessor definitions))
(:documentation "The Report class.
This is the class holding all extracted documentation information."))
(defmethod print-object ((report report) stream)
"Show REPORT's library name."
(print-unreadable-object (report stream :type t)
(princ (library-name report) stream)))
(defun make-report (system-name)
"Make a new report for SYSTEM-NAME (an ASDF system designator)."
(make-instance 'report :system-name (asdf:coerce-name system-name)))
(defun load-system (system-name &aux (system (find-system system-name)))
"Load ASDF SYSTEM-NAME in a manner suitable to extract documentation.
Return the corresponding ASDF system.
SYSTEM-NAME is an ASDF system designator."
Because of some bootstrapping issues , ASDF and UIOP need some
(cond ((string= (asdf:coerce-name system-name) "uiop")
(load (merge-pathnames "uiop/uiop.asd"
(system-source-directory
(asdf:find-system :asdf))))
(mapc #'load
(asdf:input-files :monolithic-concatenate-source-op
"asdf/driver")))
((string= (asdf:coerce-name system-name) "asdf")
(setq system (find-system "asdf/defsystem"))
(mapc #'load
(asdf:input-files :monolithic-concatenate-source-op
"asdf/defsystem")))
(t
(asdf:load-system system-name)))
system)
(defun assess
(system-name
&key (introspection-level 1)
(library-name (asdf:coerce-name system-name))
(tagline nil taglinep)
(library-version nil library-version-p)
(contact nil contactp)
copyright-years
license
introduction
conclusion
&aux (system (load-system system-name))
(report (make-report system-name)))
"Extract and return documentation information for ASDF SYSTEM-NAME.
The documentation information is returned in a REPORT structure, which see.
SYSTEM-NAME is an ASDF system designator. The following keyword parameters
allow to specify or override some bits of information.
- INTROSPECTION-LEVEL: how hard to introspect the Lisp environment. At level 1
(the default), scan only the symbols from domestic packages. At level 2,
scan all accessible symbols in the Lisp environment. Some additional
information may be discovered in the process, at the expense of a much
higher computation time.
- LIBRARY-NAME: name of the library being documented. Defaults to the system
name.
- TAGLINE: small text to be used as the manual's subtitle, or NIL.
Defaults to the system long name or description.
- LIBRARY-VERSION: version information, or NIL.
Defaults to the system version.
- CONTACT: contact information, or NIL. The default value is computed from the
system maintainer(s), author(s), and mailto information. Accepts a contact
string, or a list of such. See `parse-contact-string' for more information.
- COPYRIGHT-YEARS: copyright years information or NIL. Defaults to the current
year.
- LICENSE: license information. Defaults to NIL. Also accepts :mit, :boost,
:bsd, :gpl, :lgpl, and :ms-pl.
- INTRODUCTION: introduction chapter contents in Texinfo format.
Defaults to NIL.
- CONCLUSION: conclusion chapter contents in Texinfo format.
Defaults to NIL."
(check-type introspection-level (member 1 2))
(check-type library-name non-empty-string)
(setf (library-name report) library-name)
(unless taglinep
(setq tagline (or (system-long-name system)
(component-description system))))
(unless (one-liner-p tagline)
(setq tagline nil))
(when (and tagline (char= (aref tagline (1- (length tagline))) #\.))
(setq tagline (subseq tagline 0 (1- (length tagline)))))
(setf (tagline report) tagline)
(unless library-version-p
(setq library-version (component-version system)))
(unless (one-liner-p library-version)
(setq library-version nil))
(setf (library-version report) library-version)
(cond (contactp
(setf (contacts report) (|parse-contact(s)| contact)))
(t
(setq contact (system-author system))
(when (stringp contact) (setq contact (list contact)))
(typecase (system-maintainer system)
(string (push (system-maintainer system) contact))
(cons (setq contact (append (system-maintainer system) contact))))
(let ((contacts (|parse-contact(s)| contact)))
(when-let (mailto (when (one-liner-p (system-mailto system))
(validate-email (system-mailto system))))
(if (and (= (length contacts) 1) (null (cdr (car contacts))))
(setf (cdr (car contacts)) mailto)
(unless (find mailto contacts :key #'cdr :test #'string=)
(endpush (cons nil mailto) contacts))))
(setf (contacts report) contacts))))
(setq copyright-years
(or copyright-years
(multiple-value-bind (second minute hour date month year)
(get-decoded-time)
(declare (ignore second minute hour date month))
(format nil "~A" year))))
(unless (one-liner-p copyright-years)
(setq copyright-years nil))
(setf (copyright-years report) copyright-years)
(when license
(setq license (assoc license *licenses*))
(unless license
(error "License not found.")))
(setf (license report) license)
(setf (introduction report) introduction)
(setf (conclusion report) conclusion)
(let* ((system-definitions (make-all-system-definitions system))
(module-definitions (make-all-module-definitions system-definitions))
(file-definitions (make-all-file-definitions system-definitions))
(pathnames
(mapcar #'component-pathname
(mapcar #'file
(remove-if-not #'lisp-file-definition-p file-definitions))))
(package-definitions
(make-all-package-definitions file-definitions system-definitions))
(packages (mapcar #'definition-package package-definitions))
(symbol-definitions
(make-all-symbol-definitions
packages pathnames (> introspection-level 1))))
(setf (definitions report)
(append system-definitions module-definitions file-definitions
package-definitions symbol-definitions))
(finalize (definitions report) packages pathnames))
report)
|
c7daa28f974b4c82b840f4a73f8be16beef8912c793e2d5e401d8ffb0ace2198 | DSiSc/why3 | glob.ml | (********************************************************************)
(* *)
The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
General Public License version 2.1 , with the special exception
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
open Ident
let flag = Debug.register_flag "track_symbol_use"
~desc:"Track@ symbol@ occurrences@ in@ source@ files.@ Used@ by@ why3doc."
let () = Debug.unset_flag flag (* make sure it is unset by default *)
let dummy_id = id_register (id_fresh "dummy")
type def_use = Def | Use
let glob = Hashtbl.create 5003
(* could be improved with nested hash tables *)
let key loc = let f, l, c, _ = Loc.get loc in f, l, c
let add loc idk =
let k = key loc in
if not (Hashtbl.mem glob k) then Hashtbl.add glob k idk
let def ~kind id =
Opt.iter (fun loc -> add loc (id, Def, kind)) id.id_loc
let use ~kind loc id =
add loc (id, Use, kind)
let find loc =
Hashtbl.find glob (key loc)
FIXME allow several entries for the same loc , find returns all of them ,
and why3doc inserts several anchors
and why3doc inserts several anchors *)
| null | https://raw.githubusercontent.com/DSiSc/why3/8ba9c2287224b53075adc51544bc377bc8ea5c75/src/parser/glob.ml | ocaml | ******************************************************************
This software is distributed under the terms of the GNU Lesser
on linking described in file LICENSE.
******************************************************************
make sure it is unset by default
could be improved with nested hash tables | The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
General Public License version 2.1 , with the special exception
open Ident
let flag = Debug.register_flag "track_symbol_use"
~desc:"Track@ symbol@ occurrences@ in@ source@ files.@ Used@ by@ why3doc."
let dummy_id = id_register (id_fresh "dummy")
type def_use = Def | Use
let glob = Hashtbl.create 5003
let key loc = let f, l, c, _ = Loc.get loc in f, l, c
let add loc idk =
let k = key loc in
if not (Hashtbl.mem glob k) then Hashtbl.add glob k idk
let def ~kind id =
Opt.iter (fun loc -> add loc (id, Def, kind)) id.id_loc
let use ~kind loc id =
add loc (id, Use, kind)
let find loc =
Hashtbl.find glob (key loc)
FIXME allow several entries for the same loc , find returns all of them ,
and why3doc inserts several anchors
and why3doc inserts several anchors *)
|
a5ea905b742a45bd30b0e82c6e0e32ae5c8438653e0700e0aae513c06d77ab8c | juhp/fbrnch | Koji.hs | module Koji (
kojiNVRTags,
kojiBuildStatus,
kojiBuildTags,
kojiGetBuildID,
kojiGetBuildTaskID,
kojiLatestNVR,
kojiOpenTasks,
kojiScratchBuild,
kojiUserSideTags,
buildIDInfo,
BuildState(..),
kojiBuildBranch,
kojiBuildBranchNoWait,
kojiSource,
kojiBuildTarget,
kojiTagArchs,
kojiWaitRepo,
kojiWatchTask,
kojiWaitTask,
TaskID,
displayID,
fedoraHub,
maybeTimeout,
targetMaybeSidetag
) where
import Data.Char (isDigit)
import Control.Concurrent (threadDelay)
import qualified Data.ByteString.Lazy.Char8 as B
import Data.Fixed (Micro)
import Distribution.Koji
import qualified Distribution.Koji.API as Koji
import System.Exit
import System.Process.Typed
import System.Timeout (timeout)
import System.Time.Extra (sleep)
import Branches
import Common
import Common.System
import Git
import Krb
import Package (fedpkg, Package, unPackage)
import Pagure
import Prompt
import Types
fedoraHub :: String
fedoraHub = fedoraKojiHub
kojiNVRTags :: String -> IO [String]
kojiNVRTags nvr = do
mbldid <- kojiGetBuildID fedoraHub nvr
case mbldid of
Nothing -> error' $ nvr ++ " koji build not found"
Just bldid -> kojiBuildTags fedoraHub (buildIDInfo bldid)
kojiBuildStatus :: String -> IO (Maybe BuildState)
kojiBuildStatus nvr =
kojiGetBuildState fedoraHub (BuildInfoNVR nvr)
kojiLatestNVR :: String -> String -> IO (Maybe String)
kojiLatestNVR tag pkg = do
mbld <- kojiLatestBuild fedoraHub tag pkg
return $ case mbld of
Nothing -> Nothing
Just bld -> lookupStruct "nvr" bld
kojiLatestNVRRepo :: String -> Int -> String -> IO (Maybe String)
kojiLatestNVRRepo tag event pkg = do
mbld <- kojiLatestBuildRepo fedoraHub tag event pkg
return $ case mbld of
Nothing -> Nothing
Just bld -> lookupStruct "nvr" bld
kojiOpenTasks :: Package -> Maybe String -> String -> IO [TaskID]
kojiOpenTasks pkg mref target = do
user <- fasIdFromKrb
muserid <- kojiGetUserID fedoraHub user
let userid = fromMaybe (error' $ "Koji failed to return userid for '" ++ user ++ "'") muserid
commit <- maybe (git "rev-parse" ["HEAD"]) return mref
let source = kojiSource pkg commit
kojiUserBuildTasks fedoraHub userid (Just source) (Just target)
* building
kojiScratchBuild :: String -> [String] -> FilePath -> IO String
kojiScratchBuild target args srpm = do
Right url <- kojiBuild' True target $ args ++ ["--scratch", "--no-rebuild-srpm", srpm]
return url
type KojiBuildTask = Either TaskID String
-- FIXME setTermTitle nvr
kojiBuild' :: Bool -> String -> [String] -> IO KojiBuildTask
kojiBuild' wait target args = do
krbTicket
let srpm = if null args
then error' "no args passed to koji build"
else ".src.rpm" `isSuffixOf` last args
FIXME use tee functionality
when srpm $ putStrLn "koji srpm build: uploading..."
-- can fail like:
[ ERROR ] : Request error : POST:: / kojihub / ssllogin::<PreparedRequest [ POST ] >
[ ERROR ] : AuthError : unable to obtain a session
readCreateProcess : " build " " --nowait " " f33 - build - side-25385 " " --fail - fast " " --background " ... ( exit 1 ): failed
(ret,out) <- readProcessStdout $ proc "koji" $ ["build", "--nowait", target] ++ args
-- for srpm: drop uploading line until doing tee
-- for git: drop "Created task: "
-- init to drop final newline
unless (B.null out) $
logMsg $ (B.unpack . B.init . B.unlines . tail . B.lines) out
if ret == ExitSuccess
then do
let kojiurl = B.unpack $ last $ B.words out
task = (TaskId . read) $ takeWhileEnd isDigit kojiurl
when wait $ do
timeIO $ kojiWatchTask task
cmd_ "date" ["+%T"]
return $ if wait then Right kojiurl else Left task
else do
prompt_ "Press Enter to resubmit Koji build"
kojiBuild' wait target args
-- kojiBuild :: String -> [String] -> IO String
-- kojiBuild target args = do
-- Right url <- kojiBuild' True target args
-- return url
FIXME filter / simplify output
FIXME implement native watchTask
kojiWatchTask :: TaskID -> IO ()
kojiWatchTask task = do
FIXME can error :
eg1 [ ERROR ] : HTTPError : 503 Server Error : Service Unavailable for url :
eg2 [ ERROR ] : ServerOffline : database outage : - user error ( Error 1014 : database outage )
eg3 [ ERROR ] : ReadTimeout : HTTPSConnectionPool(host='koji.fedoraproject.org ' , port=443 ): Read timed out . ( read timeout=43200 )
-- This might error with exit 0 occasionally so we check the taskstate always
void $ cmdBool "koji" ["watch-task", displayID task]
mst <- kojiGetTaskState fedoraHub task
case mst of
Just TaskClosed -> return ()
Just TaskFailed -> do
let kojitool = "/usr/bin/koji-tool"
whenM (doesFileExist kojitool) $
cmd_ kojitool ["tasks", displayID task, "--tail", "-s", "fail"]
error' "Task failed!"
Just TaskCanceled -> return ()
_ -> kojiWatchTask task
FIXME at 4 am
Connection timed out : retrying
Connection timed out : retrying
: < socket : 11 > : does not exist ( No route to host )
kojiWaitTask :: TaskID -> IO Bool
kojiWaitTask task = do
FIXME can error :
eg1 [ ERROR ] : HTTPError : 503 Server Error : Service Unavailable for url :
eg2 [ ERROR ] : ServerOffline : database outage : - user error ( Error 1014 : database outage )
mst <- maybeTimeout 45 $ kojiGetTaskState fedoraHub task
case mst of
Just ts ->
if ts `elem` openTaskStates
then do
-- this makes parallel too noisy
--putChar '.'
sleep 20
kojiWaitTask task
else return $ ts == TaskClosed
Nothing -> do
error $ "failed to get info for koji task " ++ displayID task
kojiSource :: Package -> String -> String
kojiSource pkg ref =
"git+https://" ++ srcfpo ++ "/rpms" +/+ unPackage pkg ++ ".git#" ++ ref
kojiBuildBranch' :: Bool -> String -> Package -> Maybe String -> [String]
-> IO KojiBuildTask
kojiBuildBranch' wait target pkg mref args = do
commit <- maybe (git "rev-parse" ["HEAD"]) return mref
kojiBuild' wait target $ args ++ [kojiSource pkg commit]
kojiBuildBranch :: String -> Package -> Maybe String -> [String] -> IO ()
kojiBuildBranch target pkg mref args =
checkResult <$> kojiBuildBranch' True target pkg mref args
where
checkResult = either (\ task -> error' (displayID task ++ " not completed")) (const ())
kojiBuildBranchNoWait ::String -> Package -> Maybe String -> [String] -> IO TaskID
kojiBuildBranchNoWait target pkg mref args = do
Left task <- kojiBuildBranch' False target pkg mref args
return task
kojiWaitRepo :: Bool -> Bool -> String -> String -> IO ()
kojiWaitRepo dryrun quiet target nvr = do
Just (buildtag,_desttag) <- kojiBuildTarget fedoraHub target
unless dryrun $
waitRepo buildtag Nothing
where
waitRepo :: String -> Maybe Struct -> IO ()
waitRepo buildtag moldrepo = do
when (isJust moldrepo) $ do
50s
mrepo <- kojiGetRepo fedoraHub buildtag Nothing Nothing
case mrepo of
Nothing -> error' $ "failed to find koji repo for " ++ buildtag
Just repo ->
if moldrepo == mrepo
then waitRepo buildtag mrepo
else do
let mevent = lookupStruct "create_event" repo
case mevent of
Nothing -> error "create_event not found"
Just event -> do
latest <- kojiLatestNVRRepo buildtag event (nameOfNVR nvr)
if latest == Just nvr
then logMsg $ nvr ++
if isNothing moldrepo
then " is in " ++ buildtag
else " appeared"
else do
when (isNothing moldrepo && not quiet) $
logMsg $ "Waiting for " ++ buildtag ++ " to have " ++ nvr
waitRepo buildtag mrepo
FIXME : obsolete by using NVR
-- n-v-r -> n
nameOfNVR :: String -> String
nameOfNVR = removeSeg . removeSeg
where
removeSeg = init . dropWhileEnd (/= '-')
kojiTagArchs :: String -> IO [String]
kojiTagArchs tag = do
st <- Koji.getTag fedoraHub (Koji.InfoString tag) Nothing
return $ maybe [] words $ lookupStruct "arches" st
kojiUserSideTags :: Maybe Branch -> IO [String]
kojiUserSideTags mbr = do
user <- fasIdFromKrb
case mbr of
Nothing -> do
maybeTimeout 55 $ kojiListSideTags fedoraKojiHub Nothing (Just user)
Just br -> do
mtags <- kojiBuildTarget fedoraHub (branchTarget br)
case mtags of
Nothing -> return []
Just (buildtag,_desttag) -> do
kojiListSideTags fedoraKojiHub (Just buildtag) (Just user)
maybeTimeout :: Micro -> IO a -> IO a
maybeTimeout secs act = do
mres <- timeout (fromEnum secs) act
case mres of
Nothing -> do
warning "Connection timed out: retrying"
maybeTimeout (secs + 5) act
Just res -> return res
targetMaybeSidetag :: Bool -> Branch -> Maybe SideTagTarget -> IO String
targetMaybeSidetag dryrun br msidetagTarget =
case msidetagTarget of
Nothing -> return $ branchTarget br
Just (Target t) -> return t
Just SideTag -> do
tags <- map (head . words) <$> kojiUserSideTags (Just br)
case tags of
[] -> do
Just (buildtag,_desttag) <- kojiBuildTarget fedoraHub (show br)
out <-
if dryrun
then return $ "Side tag '" ++ buildtag ++ "-dryrun'"
else head . lines <$> fedpkg "request-side-tag" ["--base-tag", buildtag]
if "Side tag '" `isPrefixOf` out
then do
putStrLn out
let sidetag =
init . dropWhileEnd (/= '\'') $ dropPrefix "Side tag '" out
logMsg $ "Waiting for " ++ sidetag ++ " repo"
unless dryrun $
cmd_ "koji" ["wait-repo", sidetag]
return sidetag
else error' "'fedpkg request-side-tag' failed"
[tag] -> return tag
_ -> error' $ "More than one user side-tag found for " ++ show br
| null | https://raw.githubusercontent.com/juhp/fbrnch/521d268e90801366f1fefba995257f18e125a10c/src/Koji.hs | haskell | FIXME setTermTitle nvr
can fail like:
for srpm: drop uploading line until doing tee
for git: drop "Created task: "
init to drop final newline
kojiBuild :: String -> [String] -> IO String
kojiBuild target args = do
Right url <- kojiBuild' True target args
return url
This might error with exit 0 occasionally so we check the taskstate always
this makes parallel too noisy
putChar '.'
n-v-r -> n | module Koji (
kojiNVRTags,
kojiBuildStatus,
kojiBuildTags,
kojiGetBuildID,
kojiGetBuildTaskID,
kojiLatestNVR,
kojiOpenTasks,
kojiScratchBuild,
kojiUserSideTags,
buildIDInfo,
BuildState(..),
kojiBuildBranch,
kojiBuildBranchNoWait,
kojiSource,
kojiBuildTarget,
kojiTagArchs,
kojiWaitRepo,
kojiWatchTask,
kojiWaitTask,
TaskID,
displayID,
fedoraHub,
maybeTimeout,
targetMaybeSidetag
) where
import Data.Char (isDigit)
import Control.Concurrent (threadDelay)
import qualified Data.ByteString.Lazy.Char8 as B
import Data.Fixed (Micro)
import Distribution.Koji
import qualified Distribution.Koji.API as Koji
import System.Exit
import System.Process.Typed
import System.Timeout (timeout)
import System.Time.Extra (sleep)
import Branches
import Common
import Common.System
import Git
import Krb
import Package (fedpkg, Package, unPackage)
import Pagure
import Prompt
import Types
fedoraHub :: String
fedoraHub = fedoraKojiHub
kojiNVRTags :: String -> IO [String]
kojiNVRTags nvr = do
mbldid <- kojiGetBuildID fedoraHub nvr
case mbldid of
Nothing -> error' $ nvr ++ " koji build not found"
Just bldid -> kojiBuildTags fedoraHub (buildIDInfo bldid)
kojiBuildStatus :: String -> IO (Maybe BuildState)
kojiBuildStatus nvr =
kojiGetBuildState fedoraHub (BuildInfoNVR nvr)
kojiLatestNVR :: String -> String -> IO (Maybe String)
kojiLatestNVR tag pkg = do
mbld <- kojiLatestBuild fedoraHub tag pkg
return $ case mbld of
Nothing -> Nothing
Just bld -> lookupStruct "nvr" bld
kojiLatestNVRRepo :: String -> Int -> String -> IO (Maybe String)
kojiLatestNVRRepo tag event pkg = do
mbld <- kojiLatestBuildRepo fedoraHub tag event pkg
return $ case mbld of
Nothing -> Nothing
Just bld -> lookupStruct "nvr" bld
kojiOpenTasks :: Package -> Maybe String -> String -> IO [TaskID]
kojiOpenTasks pkg mref target = do
user <- fasIdFromKrb
muserid <- kojiGetUserID fedoraHub user
let userid = fromMaybe (error' $ "Koji failed to return userid for '" ++ user ++ "'") muserid
commit <- maybe (git "rev-parse" ["HEAD"]) return mref
let source = kojiSource pkg commit
kojiUserBuildTasks fedoraHub userid (Just source) (Just target)
* building
kojiScratchBuild :: String -> [String] -> FilePath -> IO String
kojiScratchBuild target args srpm = do
Right url <- kojiBuild' True target $ args ++ ["--scratch", "--no-rebuild-srpm", srpm]
return url
type KojiBuildTask = Either TaskID String
kojiBuild' :: Bool -> String -> [String] -> IO KojiBuildTask
kojiBuild' wait target args = do
krbTicket
let srpm = if null args
then error' "no args passed to koji build"
else ".src.rpm" `isSuffixOf` last args
FIXME use tee functionality
when srpm $ putStrLn "koji srpm build: uploading..."
[ ERROR ] : Request error : POST:: / kojihub / ssllogin::<PreparedRequest [ POST ] >
[ ERROR ] : AuthError : unable to obtain a session
readCreateProcess : " build " " --nowait " " f33 - build - side-25385 " " --fail - fast " " --background " ... ( exit 1 ): failed
(ret,out) <- readProcessStdout $ proc "koji" $ ["build", "--nowait", target] ++ args
unless (B.null out) $
logMsg $ (B.unpack . B.init . B.unlines . tail . B.lines) out
if ret == ExitSuccess
then do
let kojiurl = B.unpack $ last $ B.words out
task = (TaskId . read) $ takeWhileEnd isDigit kojiurl
when wait $ do
timeIO $ kojiWatchTask task
cmd_ "date" ["+%T"]
return $ if wait then Right kojiurl else Left task
else do
prompt_ "Press Enter to resubmit Koji build"
kojiBuild' wait target args
FIXME filter / simplify output
FIXME implement native watchTask
kojiWatchTask :: TaskID -> IO ()
kojiWatchTask task = do
FIXME can error :
eg1 [ ERROR ] : HTTPError : 503 Server Error : Service Unavailable for url :
eg2 [ ERROR ] : ServerOffline : database outage : - user error ( Error 1014 : database outage )
eg3 [ ERROR ] : ReadTimeout : HTTPSConnectionPool(host='koji.fedoraproject.org ' , port=443 ): Read timed out . ( read timeout=43200 )
void $ cmdBool "koji" ["watch-task", displayID task]
mst <- kojiGetTaskState fedoraHub task
case mst of
Just TaskClosed -> return ()
Just TaskFailed -> do
let kojitool = "/usr/bin/koji-tool"
whenM (doesFileExist kojitool) $
cmd_ kojitool ["tasks", displayID task, "--tail", "-s", "fail"]
error' "Task failed!"
Just TaskCanceled -> return ()
_ -> kojiWatchTask task
FIXME at 4 am
Connection timed out : retrying
Connection timed out : retrying
: < socket : 11 > : does not exist ( No route to host )
kojiWaitTask :: TaskID -> IO Bool
kojiWaitTask task = do
FIXME can error :
eg1 [ ERROR ] : HTTPError : 503 Server Error : Service Unavailable for url :
eg2 [ ERROR ] : ServerOffline : database outage : - user error ( Error 1014 : database outage )
mst <- maybeTimeout 45 $ kojiGetTaskState fedoraHub task
case mst of
Just ts ->
if ts `elem` openTaskStates
then do
sleep 20
kojiWaitTask task
else return $ ts == TaskClosed
Nothing -> do
error $ "failed to get info for koji task " ++ displayID task
kojiSource :: Package -> String -> String
kojiSource pkg ref =
"git+https://" ++ srcfpo ++ "/rpms" +/+ unPackage pkg ++ ".git#" ++ ref
kojiBuildBranch' :: Bool -> String -> Package -> Maybe String -> [String]
-> IO KojiBuildTask
kojiBuildBranch' wait target pkg mref args = do
commit <- maybe (git "rev-parse" ["HEAD"]) return mref
kojiBuild' wait target $ args ++ [kojiSource pkg commit]
kojiBuildBranch :: String -> Package -> Maybe String -> [String] -> IO ()
kojiBuildBranch target pkg mref args =
checkResult <$> kojiBuildBranch' True target pkg mref args
where
checkResult = either (\ task -> error' (displayID task ++ " not completed")) (const ())
kojiBuildBranchNoWait ::String -> Package -> Maybe String -> [String] -> IO TaskID
kojiBuildBranchNoWait target pkg mref args = do
Left task <- kojiBuildBranch' False target pkg mref args
return task
kojiWaitRepo :: Bool -> Bool -> String -> String -> IO ()
kojiWaitRepo dryrun quiet target nvr = do
Just (buildtag,_desttag) <- kojiBuildTarget fedoraHub target
unless dryrun $
waitRepo buildtag Nothing
where
waitRepo :: String -> Maybe Struct -> IO ()
waitRepo buildtag moldrepo = do
when (isJust moldrepo) $ do
50s
mrepo <- kojiGetRepo fedoraHub buildtag Nothing Nothing
case mrepo of
Nothing -> error' $ "failed to find koji repo for " ++ buildtag
Just repo ->
if moldrepo == mrepo
then waitRepo buildtag mrepo
else do
let mevent = lookupStruct "create_event" repo
case mevent of
Nothing -> error "create_event not found"
Just event -> do
latest <- kojiLatestNVRRepo buildtag event (nameOfNVR nvr)
if latest == Just nvr
then logMsg $ nvr ++
if isNothing moldrepo
then " is in " ++ buildtag
else " appeared"
else do
when (isNothing moldrepo && not quiet) $
logMsg $ "Waiting for " ++ buildtag ++ " to have " ++ nvr
waitRepo buildtag mrepo
FIXME : obsolete by using NVR
nameOfNVR :: String -> String
nameOfNVR = removeSeg . removeSeg
where
removeSeg = init . dropWhileEnd (/= '-')
kojiTagArchs :: String -> IO [String]
kojiTagArchs tag = do
st <- Koji.getTag fedoraHub (Koji.InfoString tag) Nothing
return $ maybe [] words $ lookupStruct "arches" st
kojiUserSideTags :: Maybe Branch -> IO [String]
kojiUserSideTags mbr = do
user <- fasIdFromKrb
case mbr of
Nothing -> do
maybeTimeout 55 $ kojiListSideTags fedoraKojiHub Nothing (Just user)
Just br -> do
mtags <- kojiBuildTarget fedoraHub (branchTarget br)
case mtags of
Nothing -> return []
Just (buildtag,_desttag) -> do
kojiListSideTags fedoraKojiHub (Just buildtag) (Just user)
maybeTimeout :: Micro -> IO a -> IO a
maybeTimeout secs act = do
mres <- timeout (fromEnum secs) act
case mres of
Nothing -> do
warning "Connection timed out: retrying"
maybeTimeout (secs + 5) act
Just res -> return res
targetMaybeSidetag :: Bool -> Branch -> Maybe SideTagTarget -> IO String
targetMaybeSidetag dryrun br msidetagTarget =
case msidetagTarget of
Nothing -> return $ branchTarget br
Just (Target t) -> return t
Just SideTag -> do
tags <- map (head . words) <$> kojiUserSideTags (Just br)
case tags of
[] -> do
Just (buildtag,_desttag) <- kojiBuildTarget fedoraHub (show br)
out <-
if dryrun
then return $ "Side tag '" ++ buildtag ++ "-dryrun'"
else head . lines <$> fedpkg "request-side-tag" ["--base-tag", buildtag]
if "Side tag '" `isPrefixOf` out
then do
putStrLn out
let sidetag =
init . dropWhileEnd (/= '\'') $ dropPrefix "Side tag '" out
logMsg $ "Waiting for " ++ sidetag ++ " repo"
unless dryrun $
cmd_ "koji" ["wait-repo", sidetag]
return sidetag
else error' "'fedpkg request-side-tag' failed"
[tag] -> return tag
_ -> error' $ "More than one user side-tag found for " ++ show br
|
c97b58ba108b2c5c99f562f4eb1e422c473d936712534014fa4728c0571de7b9 | VisionsGlobalEmpowerment/webchange | syllables.clj | (ns webchange.dev-templates.syllables
(:require [webchange.dev-templates :as t]
[webchange.templates.core :as templates]
[webchange.course.core :as core]))
(comment
(def test-course-slug (-> (t/create-test-course) :slug))
(def scene-slug "syllables")
(def test-course-slug "english")
(def scene-slug "syllables-1")
(def test-course-slug "test-course-english-jsbqsmnp")
(def scene-slug "syllables")
(core/update-course-activity-template! test-course-slug scene-slug t/user-id)
(core/get-scene-latest-version test-course-slug scene-slug)
(-> (core/save-scene! test-course-slug scene-slug data t/user-id)
first)
(t/update-activity course-slug scene-slug)
(let [data {:activity-name "Syllables"
:template-id 33
:characters [{:name "teacher"
:skeleton "senoravaca"}]
:lang "English"
:skills []}
activity (templates/activity-from-template data)
metadata (templates/metadata-from-template data)
[_ {scene-slug :scene-slug}] (core/create-scene! activity metadata test-course-slug scene-slug [] t/user-id)]
(str "/courses/" test-course-slug "/editor-v2/" scene-slug)))
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/118ba5ee407ba1261bac40a6ba5729ccda6e8150/env/dev/clj/webchange/dev_templates/syllables.clj | clojure | (ns webchange.dev-templates.syllables
(:require [webchange.dev-templates :as t]
[webchange.templates.core :as templates]
[webchange.course.core :as core]))
(comment
(def test-course-slug (-> (t/create-test-course) :slug))
(def scene-slug "syllables")
(def test-course-slug "english")
(def scene-slug "syllables-1")
(def test-course-slug "test-course-english-jsbqsmnp")
(def scene-slug "syllables")
(core/update-course-activity-template! test-course-slug scene-slug t/user-id)
(core/get-scene-latest-version test-course-slug scene-slug)
(-> (core/save-scene! test-course-slug scene-slug data t/user-id)
first)
(t/update-activity course-slug scene-slug)
(let [data {:activity-name "Syllables"
:template-id 33
:characters [{:name "teacher"
:skeleton "senoravaca"}]
:lang "English"
:skills []}
activity (templates/activity-from-template data)
metadata (templates/metadata-from-template data)
[_ {scene-slug :scene-slug}] (core/create-scene! activity metadata test-course-slug scene-slug [] t/user-id)]
(str "/courses/" test-course-slug "/editor-v2/" scene-slug)))
| |
da664db90ee74a2b62db9db650bbae042e708b270b257aef4152fea6a13ddfdc | mmirman/caledon | Choice.hs | # LANGUAGE
DeriveFunctor ,
FlexibleContexts ,
TypeSynonymInstances ,
FlexibleInstances ,
MultiParamTypeClasses
#
DeriveFunctor,
FlexibleContexts,
TypeSynonymInstances,
FlexibleInstances,
MultiParamTypeClasses
#-}
module Choice where
import Control.Monad
import Data.Functor
import Control.Applicative
import Control.Monad.Error.Class (catchError, throwError, MonadError)
import Control.Monad.Cont
data Choice a = Choice a :<|>: Choice a
| Fail String
| Success a
deriving (Functor)
instance Monad Choice where
fail = Fail
return = Success
Fail a >>= _ = Fail a
(m :<|>: m') >>= f = (m >>= f) :<|>: (m' >>= f)
Success a >>= f = f a
instance Applicative Choice where
pure = Success
mf <*> ma = mf >>= (<$> ma)
determine a b = appendErr "" $ (:<|>:) (appendErr "" a) (appendErr "" b)
--determine = (:<|>:)
instance Alternative Choice where
empty = Fail ""
(<|>) = determine
instance MonadPlus Choice where
mzero = Fail ""
mplus = determine
class RunChoice m where
runError :: m a -> Either String a
instance RunChoice Choice where
runError chs = case dropWhile notSuccess lst of
[] -> case dropWhile notFail lst of
Fail a:_ -> Left a
_ -> error "this result makes no sense"
Success a : _ -> Right a
_ -> error "this result makes no sense"
where lst = chs:queue lst 1
queue _ 0 = []
queue [] _ = error "queue size should be empty when queue is empty"
queue ((a :<|>: b):l) q = a:b:queue l (q + 1)
queue (_:l) q = queue l (q - 1)
notFail (Fail _) = False
notFail _ = True
notSuccess (Success _) = False
notSuccess _ = True
appendErr :: (MonadError String m) => String -> m a -> m a
appendErr s m = catchError m $ \s' -> throwError $ s' ++ "\n" ++ s
instance MonadError String Choice where
throwError = Fail
catchError try1 foo_try2 = case runError try1 of
Left s -> foo_try2 s
Right a -> Success a
type CONT_T a m c = ((c -> m a) -> m a)
-- why doesn't this exist in the standard library?
instance (Monad m, Alternative m) => Alternative (ContT a m) where
empty = lift $ empty
c1 <|> c2 = ContT $ \cont -> m1f cont <|> m2f cont
where m1f = runContT c1
m2f = runContT c2
| null | https://raw.githubusercontent.com/mmirman/caledon/7e97110270362d46d8784a5197a84b6a4b7e2982/sources/Choice.hs | haskell | determine = (:<|>:)
why doesn't this exist in the standard library? | # LANGUAGE
DeriveFunctor ,
FlexibleContexts ,
TypeSynonymInstances ,
FlexibleInstances ,
MultiParamTypeClasses
#
DeriveFunctor,
FlexibleContexts,
TypeSynonymInstances,
FlexibleInstances,
MultiParamTypeClasses
#-}
module Choice where
import Control.Monad
import Data.Functor
import Control.Applicative
import Control.Monad.Error.Class (catchError, throwError, MonadError)
import Control.Monad.Cont
data Choice a = Choice a :<|>: Choice a
| Fail String
| Success a
deriving (Functor)
instance Monad Choice where
fail = Fail
return = Success
Fail a >>= _ = Fail a
(m :<|>: m') >>= f = (m >>= f) :<|>: (m' >>= f)
Success a >>= f = f a
instance Applicative Choice where
pure = Success
mf <*> ma = mf >>= (<$> ma)
determine a b = appendErr "" $ (:<|>:) (appendErr "" a) (appendErr "" b)
instance Alternative Choice where
empty = Fail ""
(<|>) = determine
instance MonadPlus Choice where
mzero = Fail ""
mplus = determine
class RunChoice m where
runError :: m a -> Either String a
instance RunChoice Choice where
runError chs = case dropWhile notSuccess lst of
[] -> case dropWhile notFail lst of
Fail a:_ -> Left a
_ -> error "this result makes no sense"
Success a : _ -> Right a
_ -> error "this result makes no sense"
where lst = chs:queue lst 1
queue _ 0 = []
queue [] _ = error "queue size should be empty when queue is empty"
queue ((a :<|>: b):l) q = a:b:queue l (q + 1)
queue (_:l) q = queue l (q - 1)
notFail (Fail _) = False
notFail _ = True
notSuccess (Success _) = False
notSuccess _ = True
appendErr :: (MonadError String m) => String -> m a -> m a
appendErr s m = catchError m $ \s' -> throwError $ s' ++ "\n" ++ s
instance MonadError String Choice where
throwError = Fail
catchError try1 foo_try2 = case runError try1 of
Left s -> foo_try2 s
Right a -> Success a
type CONT_T a m c = ((c -> m a) -> m a)
instance (Monad m, Alternative m) => Alternative (ContT a m) where
empty = lift $ empty
c1 <|> c2 = ContT $ \cont -> m1f cont <|> m2f cont
where m1f = runContT c1
m2f = runContT c2
|
54768c74cf53562eea9693a4af9907e512f91ec00f9bfb6cf0fc9e7ad072d187 | Opetushallitus/aipal | kysely.clj | Copyright ( c ) 2014 The Finnish National Board of Education - Opetushallitus
;;
This program is free software : Licensed under the EUPL , Version 1.1 or - as
soon as they will be approved by the European Commission - subsequent versions
of the EUPL ( the " Licence " ) ;
;;
;; You may not use this work except in compliance with the Licence.
;; You may obtain a copy of the Licence at: /
;;
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
European Union Public Licence for more details .
(ns aipal.rest-api.kysely
(:require [compojure.api.core :refer [defroutes DELETE GET POST PUT]]
[schema.core :as s]
[aipal.compojure-util :as cu]
[aipal.arkisto.kysely :as arkisto]
[aipal.arkisto.kyselykerta :as kyselykerta-arkisto]
[aipal.arkisto.kysymysryhma :as kysymysryhma-arkisto]
[aipal.infra.kayttaja :refer [*kayttaja* yllapitaja?]]
[aipal.rest-api.kysymysryhma :refer [lisaa-jarjestys]]
[aipal.toimiala.kayttajaoikeudet :refer [kysymysryhma-luku? kysymysryhma-on-julkaistu?]]
[oph.common.util.http-util :refer [response-or-404 parse-iso-date]]
[oph.common.util.util :refer [map-by paivita-arvot]]))
(defn lisaa-kysymysryhma!
[kyselyid kysymysryhma]
(let [kayttajan-kysymykset (map-by :kysymysid (:kysymykset kysymysryhma))]
(doseq [kysymys (arkisto/hae-kysymysten-poistettavuus (:kysymysryhmaid kysymysryhma))
:let [kysymysid (:kysymysid kysymys)
kayttajan-kysymys (get kayttajan-kysymykset kysymysid)]
:when (not (and (:poistettu kayttajan-kysymys)
(:poistettava kysymys)))]
vain on muokannut poistetuksi kysymyksen
(assert (not (:poistettu kayttajan-kysymys)))
(arkisto/lisaa-kysymys! kyselyid kysymysid)))
(arkisto/lisaa-kysymysryhma! kyselyid kysymysryhma))
(defn lisakysymysten-lukumaara
[kysymysryhmat]
(->> kysymysryhmat
(remove :valtakunnallinen)
(mapcat :kysymykset)
(remove :poistettu)
count))
(defn valtakunnallisia-ryhmia?
[kysymysryhmat]
(some :valtakunnallinen kysymysryhmat))
(defn paivita-kysely!
[kysely]
(let [valtakunnallisia (valtakunnallisia-ryhmia? (:kysymysryhmat kysely))
max-kysymyksia (if valtakunnallisia 10 30)]
(assert (not (> (lisakysymysten-lukumaara (:kysymysryhmat kysely)) max-kysymyksia))))
(arkisto/poista-kysymysryhmat! (:kyselyid kysely))
(arkisto/poista-kysymykset! (:kyselyid kysely))
(doseq [kysymysryhma (lisaa-jarjestys (:kysymysryhmat kysely))]
(assert (kysymysryhma-luku? (:kysymysryhmaid kysymysryhma)))
(assert (kysymysryhma-on-julkaistu? (:kysymysryhmaid kysymysryhma)))
(lisaa-kysymysryhma! (:kyselyid kysely) kysymysryhma))
(arkisto/muokkaa-kyselya! kysely)
kysely)
(defroutes reitit
(GET "/" []
:kayttooikeus :kysely
(response-or-404 (arkisto/hae-kaikki (:aktiivinen-koulutustoimija *kayttaja*))))
(POST "/" []
:body [kysely s/Any]
:kayttooikeus :kysely-luonti
(let [kysely (assoc (paivita-arvot kysely
[:voimassa_alkupvm :voimassa_loppupvm]
parse-iso-date)
:koulutustoimija (:aktiivinen-koulutustoimija *kayttaja*))]
(if (arkisto/samanniminen-kysely? kysely)
{:status 400
:body "kysely.samanniminen_kysely"}
(response-or-404
(let [{:keys [kyselyid]}
(arkisto/lisaa! (select-keys kysely [:nimi_fi :nimi_sv :selite_fi :selite_sv :voimassa_alkupvm :voimassa_loppupvm :tila :koulutustoimija]))]
(paivita-kysely! (assoc kysely :kyselyid kyselyid)))))))
(POST "/:kyselyid" []
:path-params [kyselyid :- s/Int]
:body [kysely s/Any]
:kayttooikeus [:kysely-muokkaus kyselyid]
(let [kysely (paivita-arvot (assoc kysely :kyselyid kyselyid)
[:voimassa_alkupvm :voimassa_loppupvm]
parse-iso-date)]
(if (arkisto/samanniminen-kysely? (assoc kysely :koulutustoimija (:aktiivinen-koulutustoimija *kayttaja*)))
{:status 400
:body "kysely.samanniminen_kysely"}
(response-or-404 (paivita-kysely! kysely)))))
(DELETE "/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-poisto kyselyid]
(if (arkisto/kysely-poistettavissa? kyselyid)
(do
(arkisto/poista-kysely! kyselyid)
{:status 204})
{:status 403}))
(GET "/:kyselyid/vastaustunnustiedot" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-luku kyselyid]
(response-or-404 (kyselykerta-arkisto/hae-vastaustunnustiedot-kyselylta kyselyid)))
(GET "/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-luku kyselyid]
(response-or-404 (when-let [kysely (arkisto/hae kyselyid)]
(assoc kysely :kysymysryhmat (kysymysryhma-arkisto/hae-kyselysta kyselyid)))))
(PUT "/julkaise/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-tilamuutos kyselyid]
(if (> (arkisto/laske-kysymysryhmat kyselyid) 0)
(response-or-404 (arkisto/julkaise-kysely! kyselyid))
{:status 403}))
(PUT "/sulje/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-tilamuutos kyselyid]
(response-or-404 (arkisto/sulje-kysely! kyselyid)))
(PUT "/palauta/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-tilamuutos kyselyid]
(response-or-404 (arkisto/julkaise-kysely! kyselyid)))
(PUT "/palauta-luonnokseksi/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-tilamuutos kyselyid]
(if (= (arkisto/laske-kyselykerrat kyselyid) 0)
(response-or-404 (arkisto/palauta-luonnokseksi! kyselyid))
{:status 403}))) | null | https://raw.githubusercontent.com/Opetushallitus/aipal/767bd14ec7153dc97fdf688443b9687cdb70082f/aipal/src/clj/aipal/rest_api/kysely.clj | clojure |
You may not use this work except in compliance with the Licence.
You may obtain a copy of the Licence at: /
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | Copyright ( c ) 2014 The Finnish National Board of Education - Opetushallitus
This program is free software : Licensed under the EUPL , Version 1.1 or - as
soon as they will be approved by the European Commission - subsequent versions
European Union Public Licence for more details .
(ns aipal.rest-api.kysely
(:require [compojure.api.core :refer [defroutes DELETE GET POST PUT]]
[schema.core :as s]
[aipal.compojure-util :as cu]
[aipal.arkisto.kysely :as arkisto]
[aipal.arkisto.kyselykerta :as kyselykerta-arkisto]
[aipal.arkisto.kysymysryhma :as kysymysryhma-arkisto]
[aipal.infra.kayttaja :refer [*kayttaja* yllapitaja?]]
[aipal.rest-api.kysymysryhma :refer [lisaa-jarjestys]]
[aipal.toimiala.kayttajaoikeudet :refer [kysymysryhma-luku? kysymysryhma-on-julkaistu?]]
[oph.common.util.http-util :refer [response-or-404 parse-iso-date]]
[oph.common.util.util :refer [map-by paivita-arvot]]))
(defn lisaa-kysymysryhma!
[kyselyid kysymysryhma]
(let [kayttajan-kysymykset (map-by :kysymysid (:kysymykset kysymysryhma))]
(doseq [kysymys (arkisto/hae-kysymysten-poistettavuus (:kysymysryhmaid kysymysryhma))
:let [kysymysid (:kysymysid kysymys)
kayttajan-kysymys (get kayttajan-kysymykset kysymysid)]
:when (not (and (:poistettu kayttajan-kysymys)
(:poistettava kysymys)))]
vain on muokannut poistetuksi kysymyksen
(assert (not (:poistettu kayttajan-kysymys)))
(arkisto/lisaa-kysymys! kyselyid kysymysid)))
(arkisto/lisaa-kysymysryhma! kyselyid kysymysryhma))
(defn lisakysymysten-lukumaara
[kysymysryhmat]
(->> kysymysryhmat
(remove :valtakunnallinen)
(mapcat :kysymykset)
(remove :poistettu)
count))
(defn valtakunnallisia-ryhmia?
[kysymysryhmat]
(some :valtakunnallinen kysymysryhmat))
(defn paivita-kysely!
[kysely]
(let [valtakunnallisia (valtakunnallisia-ryhmia? (:kysymysryhmat kysely))
max-kysymyksia (if valtakunnallisia 10 30)]
(assert (not (> (lisakysymysten-lukumaara (:kysymysryhmat kysely)) max-kysymyksia))))
(arkisto/poista-kysymysryhmat! (:kyselyid kysely))
(arkisto/poista-kysymykset! (:kyselyid kysely))
(doseq [kysymysryhma (lisaa-jarjestys (:kysymysryhmat kysely))]
(assert (kysymysryhma-luku? (:kysymysryhmaid kysymysryhma)))
(assert (kysymysryhma-on-julkaistu? (:kysymysryhmaid kysymysryhma)))
(lisaa-kysymysryhma! (:kyselyid kysely) kysymysryhma))
(arkisto/muokkaa-kyselya! kysely)
kysely)
(defroutes reitit
(GET "/" []
:kayttooikeus :kysely
(response-or-404 (arkisto/hae-kaikki (:aktiivinen-koulutustoimija *kayttaja*))))
(POST "/" []
:body [kysely s/Any]
:kayttooikeus :kysely-luonti
(let [kysely (assoc (paivita-arvot kysely
[:voimassa_alkupvm :voimassa_loppupvm]
parse-iso-date)
:koulutustoimija (:aktiivinen-koulutustoimija *kayttaja*))]
(if (arkisto/samanniminen-kysely? kysely)
{:status 400
:body "kysely.samanniminen_kysely"}
(response-or-404
(let [{:keys [kyselyid]}
(arkisto/lisaa! (select-keys kysely [:nimi_fi :nimi_sv :selite_fi :selite_sv :voimassa_alkupvm :voimassa_loppupvm :tila :koulutustoimija]))]
(paivita-kysely! (assoc kysely :kyselyid kyselyid)))))))
(POST "/:kyselyid" []
:path-params [kyselyid :- s/Int]
:body [kysely s/Any]
:kayttooikeus [:kysely-muokkaus kyselyid]
(let [kysely (paivita-arvot (assoc kysely :kyselyid kyselyid)
[:voimassa_alkupvm :voimassa_loppupvm]
parse-iso-date)]
(if (arkisto/samanniminen-kysely? (assoc kysely :koulutustoimija (:aktiivinen-koulutustoimija *kayttaja*)))
{:status 400
:body "kysely.samanniminen_kysely"}
(response-or-404 (paivita-kysely! kysely)))))
(DELETE "/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-poisto kyselyid]
(if (arkisto/kysely-poistettavissa? kyselyid)
(do
(arkisto/poista-kysely! kyselyid)
{:status 204})
{:status 403}))
(GET "/:kyselyid/vastaustunnustiedot" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-luku kyselyid]
(response-or-404 (kyselykerta-arkisto/hae-vastaustunnustiedot-kyselylta kyselyid)))
(GET "/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-luku kyselyid]
(response-or-404 (when-let [kysely (arkisto/hae kyselyid)]
(assoc kysely :kysymysryhmat (kysymysryhma-arkisto/hae-kyselysta kyselyid)))))
(PUT "/julkaise/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-tilamuutos kyselyid]
(if (> (arkisto/laske-kysymysryhmat kyselyid) 0)
(response-or-404 (arkisto/julkaise-kysely! kyselyid))
{:status 403}))
(PUT "/sulje/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-tilamuutos kyselyid]
(response-or-404 (arkisto/sulje-kysely! kyselyid)))
(PUT "/palauta/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-tilamuutos kyselyid]
(response-or-404 (arkisto/julkaise-kysely! kyselyid)))
(PUT "/palauta-luonnokseksi/:kyselyid" []
:path-params [kyselyid :- s/Int]
:kayttooikeus [:kysely-tilamuutos kyselyid]
(if (= (arkisto/laske-kyselykerrat kyselyid) 0)
(response-or-404 (arkisto/palauta-luonnokseksi! kyselyid))
{:status 403}))) |
038d425509ce088ba6d24b63e56c12f85da575b8431eb0e705c624ef53ad3b6b | nineties/Choco | Scheduling.hs | -------------------------------------------------
Choco --
Chikadzume Oriented Compiler --
Copyright 2007 - 2008 by Basement fairy --
-------------------------------------------------
module Scheduling (
opLatency,
opIssueCycles,
opInBasicBlock
) where
import Mach hiding (Inst(..))
import qualified Data.IntMap as I
import qualified Data.Map as M
import qualified Control.Monad.State as ST
import Control.Monad
{- instruction informations -}
opLatency op = case op of
Ireload -> 2
Iload _ -> 2
Istore _ -> 2
Iconst_float _ -> 2
Iput -> 2
Iget -> 2
_ | op `elem` [Isqrt, Ifinv, Ifadd, Ifsub, Ifmul]
-> 3
_ -> 1
opIssueCycles op = case op of
Iconst_float _ -> 2
Iconst_symbol _ -> 2
_ -> 1
opInBasicBlock op = case op of
Icall_ind -> False
Icall_imm _ -> False
Itailcall_ind -> False
Itailcall_imm _ -> False
_ -> True
| null | https://raw.githubusercontent.com/nineties/Choco/0081351d0b556ff74f096accb65c9ab45d29ddfe/tsubaki/Scheduling.hs | haskell | -----------------------------------------------
-----------------------------------------------
instruction informations | module Scheduling (
opLatency,
opIssueCycles,
opInBasicBlock
) where
import Mach hiding (Inst(..))
import qualified Data.IntMap as I
import qualified Data.Map as M
import qualified Control.Monad.State as ST
import Control.Monad
-- | Number of cycles before an operation's result is available.
-- Reload, load, store, float-constant and put/get ops take 2 cycles;
-- sqrt, finv, fadd, fsub and fmul take 3; every other op takes 1.
opLatency Ireload          = 2
opLatency (Iload _)        = 2
opLatency (Istore _)       = 2
opLatency (Iconst_float _) = 2
opLatency Iput             = 2
opLatency Iget             = 2
opLatency op
  | op `elem` [Isqrt, Ifinv, Ifadd, Ifsub, Ifmul] = 3
  | otherwise                                     = 1
-- | Number of cycles an operation occupies the issue stage:
-- loading a float or symbol constant takes 2, all other ops take 1.
opIssueCycles (Iconst_float _)  = 2
opIssueCycles (Iconst_symbol _) = 2
opIssueCycles _                 = 1
-- | Whether an operation may appear inside a basic block.
-- The call and tail-call operations are the only ones excluded.
opInBasicBlock Icall_ind         = False
opInBasicBlock (Icall_imm _)     = False
opInBasicBlock Itailcall_ind     = False
opInBasicBlock (Itailcall_imm _) = False
opInBasicBlock _                 = True
|
ea0c76cc3c490d9a17e06ae6e49fcd1bfeedace1c62f6501b0970c80ca01a78e | ovotech/ring-jwt | jwk.clj | (ns ring.middleware.jwk
(:import (java.net URL)
(com.auth0.jwk GuavaCachedJwkProvider UrlJwkProvider)
(com.auth0.jwt.interfaces RSAKeyProvider ECDSAKeyProvider)))
(defn- new-jwk-provider
  "Wraps the JWKS endpoint at `url` in a UrlJwkProvider and layers a
  GuavaCachedJwkProvider on top so repeated key lookups are cached."
  [url]
  (GuavaCachedJwkProvider. (UrlJwkProvider. (URL. url))))
(def rsa-key-provider
  "Memoized factory of RSAKeyProvider instances, one per JWKS `url`.
  The returned provider resolves RSA public keys by key id through a
  cached JWK provider; private-key lookups always return nil."
  (memoize
   (fn [url]
     (let [provider (new-jwk-provider url)]
       (reify RSAKeyProvider
         (getPublicKeyById [_ kid]
           (.getPublicKey (.get provider kid)))
         (getPrivateKey [_] nil)
         (getPrivateKeyId [_] nil))))))
| null | https://raw.githubusercontent.com/ovotech/ring-jwt/793ac1aef8709811b1a48001fa9bf3bf9f631b9e/src/ring/middleware/jwk.clj | clojure | (ns ring.middleware.jwk
(:import (java.net URL)
(com.auth0.jwk GuavaCachedJwkProvider UrlJwkProvider)
(com.auth0.jwt.interfaces RSAKeyProvider ECDSAKeyProvider)))
(defn- new-jwk-provider
  "Wraps the JWKS endpoint at `url` in a UrlJwkProvider and layers a
  GuavaCachedJwkProvider on top so repeated key lookups are cached."
  [url]
  (GuavaCachedJwkProvider. (UrlJwkProvider. (URL. url))))
(def rsa-key-provider
  "Memoized factory of RSAKeyProvider instances, one per JWKS `url`.
  The returned provider resolves RSA public keys by key id through a
  cached JWK provider; private-key lookups always return nil."
  (memoize
   (fn [url]
     (let [provider (new-jwk-provider url)]
       (reify RSAKeyProvider
         (getPublicKeyById [_ kid]
           (.getPublicKey (.get provider kid)))
         (getPrivateKey [_] nil)
         (getPrivateKeyId [_] nil))))))
| |
bb7bfb5964323ad77fe91ac14d65b2645bbff7c1b0d670695fb16d17f40160d7 | areina/elfeed-cljsrn | user.clj | (ns user
(:use [figwheel-sidecar.repl-api :as ra]))
(defn start-figwheel
  "Start figwheel for one or more build ids, then open a ClojureScript REPL."
  [& ids]
  (apply ra/start-figwheel! ids)
  (ra/cljs-repl))
(defn stop-figwheel
  "Stops the running figwheel process."
  []
  (ra/stop-figwheel!))
| null | https://raw.githubusercontent.com/areina/elfeed-cljsrn/4dea27f785d24a16da05c0ab2ac0c6a6f23360f1/env/dev/user.clj | clojure | (ns user
(:use [figwheel-sidecar.repl-api :as ra]))
(defn start-figwheel
  "Start figwheel for one or more build ids, then open a ClojureScript REPL."
  [& ids]
  (apply ra/start-figwheel! ids)
  (ra/cljs-repl))
(defn stop-figwheel
  "Stops the running figwheel process."
  []
  (ra/stop-figwheel!))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.