_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
3ea31962ffb8ec2cdcf51afd9b0961cd7bf7ad900e02c6ef4f7aecf3b7e08bc9 | c-cube/zulip-log-viewer | zulip_log_view.ml |
module Fmt = CCFormat
module H = Tyxml_html
module Log = (val Logs.src_log (Logs.Src.create "zulip_log_view"))
module DJ = Decoders_yojson.Basic.Decode
let spf = Printf.sprintf
module Date = struct
type t = float
let now = Unix.gettimeofday
let to_string self =
let t = Unix.gmtime self in
spf "%d-%02d-%02d %d:%d:%d (GMT)" (1900+t.tm_year) (1+t.tm_mon) t.tm_mday t.tm_hour t.tm_min t.tm_sec
let pp out self = Fmt.string out (to_string self)
end
module Topic_idx = struct
type t = {
title: string;
latest_date: Date.t;
size: int;
}
let dec_l : t list DJ.decoder =
let open DJ in
key_value_pairs_seq @@ fun title ->
let+ latest_date = field "latest_date" int >|= float_of_int
and+ size = field "size" int in
{title; latest_date; size}
end
module Stream_idx = struct
type t = {
name: string;
id: int;
latest_id: int;
topic_data: Topic_idx.t list;
}
let dec_l : t list DJ.decoder =
let open DJ in
field "streams" @@
key_value_pairs_seq @@ fun name ->
let+ id = field "id" int
and+ latest_id = field "latest_id" int
and+ topic_data = field "topic_data" Topic_idx.dec_l in
{ name; id; latest_id; topic_data }
end
module Msg = struct
type t = {
content: string; (* raw html *)
id: int;
sender_full_name: string;
timestamp: Date.t;
}
let dec =
let open DJ in
let+ content = field "content" string
and+ id = field "id" int
and+ sender_full_name = field "sender_full_name" string
and+ timestamp = field "timestamp" int >|= float_of_int in
{content; id; sender_full_name; timestamp}
let dec_l : t list DJ.decoder = DJ.list dec
end
exception Err of int * string
let error ?(code=500) s = raise (Err (code,s))
let errorf ?code fmt = Fmt.kasprintf (error ?code) fmt
let ret_html (h:_ H.elt list) =
let open H in
let h =
html
(head (title (txt "zulip log viewer"))
[meta ~a:[a_charset "utf-8"] ();
link ~rel:[`Stylesheet] ~href:"/css/" ();
])
(body h)
in
Fmt.asprintf "%a@." (H.pp ()) h
module Server(Config: sig
val dir : string
val port: int
end)
= struct
module Serv = Tiny_httpd
let urlencode = Tiny_httpd_util.percent_encode
let urldecode = Tiny_httpd_util.percent_decode
let to_hex s = spf "\"%s\"" @@ CCString.flat_map (fun c -> spf "%x" (Char.code c)) s
(* the weird '.' encoding into files for topics *)
let dotencode_filename s =
CCString.flat_map
(function
| (' ' | '!' | '"' | '#' | '$' | '%' | '&' | '\'' | '*' | '+'
| ',' | '/' | ':' | ';' | '=' | '?' | '@' | '[' | ']' | '~'
| '^' | '.')
as c ->
spf ".%X" (Char.code c)
| c when Char.code c > 127 -> spf ".%X" (Char.code c)
| c -> String.make 1 c)
s
let slugify = CCString.map (function ' ' -> '-' | c -> c)
let dotencode s = urlencode s |> CCString.map (function ' ' -> '-' | '%' -> '.' | c -> c)
let dotdecode s = CCString.map (function '.' -> '%' | c -> c) s |> urldecode
let (//) = Filename.concat
let (let*?) x f =
let res =
try
let resp = f x in
Ok resp
with Err (code, msg) ->
Log.err (fun k->k "error (code %d):\n%s" code msg);
Error (code, msg)
in
Serv.Response.make_string res
let root _req : Serv.Response.t =
Log.debug (fun k->k "at %a: serve /" Date.pp (Date.now()));
let*? h =
let file = Config.dir // "stream_index.json" in
let streams = match DJ.decode_file Stream_idx.dec_l file with
| Ok x -> x
| Error e ->
errorf ~code:404 "cannot read '%s':\n%s" file (DJ.string_of_error e)
in
let open H in
let h_str (str:Stream_idx.t) =
let url =
spf "/stream/%d/%s" str.id (urlencode str.name)
in
li ~a:[a_class ["list-group-item"]] [
a ~a:[a_href url] [h3 [txt (str.name)]];
details ~a:[a_open ()] (summary [txt "topics"]) [
let h_top (top:Topic_idx.t) =
let open Topic_idx in
let url =
spf "/topic/%d/%s/%s" str.id (urlencode str.name)
(urlencode top.title)
in
li ~a:[a_class ["list-group-item"]] [
span [
a ~a:[a_href url] [b [txt top.title]]
];
span ~a:[a_class ["badge"; "bg-warning"]]
[ txt (spf " %d" top.size) ];
span ~a:[a_class ["text-secondary"]] [
txt (spf " [%s]" (Date.to_string top.latest_date));
]
]
in
ul ~a:[a_class ["list-group"]] @@ List.map h_top str.topic_data
];
]
in
[h1 [txt "Streams"];
ul ~a:[a_class ["list-group"]] @@ List.map h_str streams;
]
in
ret_html h
let goto_root () = H.(a ~a:[a_href "/"] [txt "back to root"])
let dir_of_stream id name : string =
spf "%d-%s" id (slugify name)
let stream id name _req : Serv.Response.t =
Log.debug (fun k->k "at %a: serve stream %d/%s" Date.pp (Date.now()) id name);
let open H in
let*? h =
let dirname = Config.dir // dir_of_stream id name in
Log.debug (fun k->k "dirname should be '%s'" dirname);
let items =
try Sys.readdir dirname |> Array.to_list
with _ -> errorf "error while reading stream dir '%s'" dirname
in
let topics =
CCList.filter_map
(fun s ->
if Filename.check_suffix s ".json" then (
let name = Filename.chop_suffix s ".json" in
let name = match dotdecode name with
| None -> "<error>"
| Some s -> s
in
Some name
) else None)
items
in
let h_topic topic_name : _ H.elt =
let url =
spf "/topic/%d/%s/%s" id (urlencode name) (urlencode topic_name)
in
li ~a:[a_class ["list-group-item"]] [
a ~a:[a_href url] [txt topic_name]
]
in
[ goto_root();
h2 [txt (spf "Stream %s" name)];
ul ~a:[a_class ["list-group"]] (List.map h_topic topics);
]
in
ret_html h
let topic id strname name _req : Serv.Response.t =
Log.debug (fun k->k "at %a: serve topic %d/%s/%s" Date.pp (Date.now()) id strname name);
let open H in
let*? h =
let file =
Config.dir // dir_of_stream id strname
// spf "%s.json" (dotencode_filename name) in
Log.debug (fun k->k "file should be '%s' (exists: %b)" file (Sys.file_exists file));
let messages = match DJ.decode_file Msg.dec_l file with
| Ok x -> x
| Error e -> errorf "could not read messages:\n%s" (DJ.string_of_error e)
in
(* render a single message *)
let h_msg (m:Msg.t) : _ H.elt =
li ~a:[a_class ["list-group-item"; "card"]] [
div ~a:[a_class ["card-title"]] [
h5 [txt m.sender_full_name];
p ~a:[a_class ["text-secondary"]] [txt (Date.to_string m.timestamp)];
];
div ~a:[a_class ["card-body"]] [
div ~a:[a_class ["card-text"]] [
(H.Unsafe.data m.content : [>`P] H.elt);
];
];
div ~a:[a_class ["card-footer"]] [
];
]
in
[ p[goto_root()];
p[a ~a:[a_href (spf "/stream/%d/%s" id (urlencode strname))]
[txt (spf "back to %s" strname)]];
h2 [txt name];
(*txt (spf "topic id=%d %s/%s" id strname name) *)
ul ~a:[a_class ["list-group"]] (List.map h_msg messages)
]
in
ret_html h
let css req =
let md5 = to_hex @@ Digest.string Web_data.css in
if Serv.Request.get_header req "If-None-Match" = Some md5 then (
Serv.Response.make_raw ~code:304 "" (* cache hit *)
) else (
let headers = ["ETag", md5; "Content-Type", "text/css"] in
Serv.Response.make_string ~headers (Ok Web_data.css)
)
let run () =
let server = Serv.create ~port:Config.port () in
Serv.add_route_handler server Serv.Route.(return) root;
Serv.add_route_handler server
Serv.Route.(exact "stream" @/ int @/ string_urlencoded @/ return) stream;
Serv.add_route_handler server
Serv.Route.(exact "topic" @/ int @/ string_urlencoded @/ string_urlencoded @/ return) topic;
Serv.add_route_handler server Serv.Route.(exact "css" @/ return) css;
Serv.run server
end
let () =
let debug = ref false in
let dirs = ref [] in
let port = ref 8085 in
let opts = [
"-d", Arg.Set debug, " enable debug";
"-p", Arg.Set_int port, " port to listen on";
] |> Arg.align in
Arg.parse opts (fun f->dirs := f :: !dirs) "zulip_log_view [dir]+ [opt]*";
Logs.set_reporter (Logs.format_reporter ());
if !debug then Logs.set_level ~all:true (Some Logs.Debug);
begin match !dirs with
| [dir] ->
Log.app (fun k->k "running on :%d" !port);
let module S = Server(struct
let port = !port
let dir = dir
end)
in
begin match S.run () with
| Ok () -> ()
| Error e ->
Log.err (fun k->k "error: %s" (Printexc.to_string e));
exit 1
end
| _ -> failwith "provide exactly one directory";
end
| null | https://raw.githubusercontent.com/c-cube/zulip-log-viewer/d0a128506cf63abd1dd2deee4ee179ce3c6d578c/src/zulip_log_view.ml | ocaml | raw html
the weird '.' encoding into files for topics
render a single message
txt (spf "topic id=%d %s/%s" id strname name)
cache hit |
module Fmt = CCFormat
module H = Tyxml_html
module Log = (val Logs.src_log (Logs.Src.create "zulip_log_view"))
module DJ = Decoders_yojson.Basic.Decode
let spf = Printf.sprintf
module Date = struct
type t = float
let now = Unix.gettimeofday
let to_string self =
let t = Unix.gmtime self in
spf "%d-%02d-%02d %d:%d:%d (GMT)" (1900+t.tm_year) (1+t.tm_mon) t.tm_mday t.tm_hour t.tm_min t.tm_sec
let pp out self = Fmt.string out (to_string self)
end
module Topic_idx = struct
type t = {
title: string;
latest_date: Date.t;
size: int;
}
let dec_l : t list DJ.decoder =
let open DJ in
key_value_pairs_seq @@ fun title ->
let+ latest_date = field "latest_date" int >|= float_of_int
and+ size = field "size" int in
{title; latest_date; size}
end
module Stream_idx = struct
type t = {
name: string;
id: int;
latest_id: int;
topic_data: Topic_idx.t list;
}
let dec_l : t list DJ.decoder =
let open DJ in
field "streams" @@
key_value_pairs_seq @@ fun name ->
let+ id = field "id" int
and+ latest_id = field "latest_id" int
and+ topic_data = field "topic_data" Topic_idx.dec_l in
{ name; id; latest_id; topic_data }
end
module Msg = struct
type t = {
id: int;
sender_full_name: string;
timestamp: Date.t;
}
let dec =
let open DJ in
let+ content = field "content" string
and+ id = field "id" int
and+ sender_full_name = field "sender_full_name" string
and+ timestamp = field "timestamp" int >|= float_of_int in
{content; id; sender_full_name; timestamp}
let dec_l : t list DJ.decoder = DJ.list dec
end
exception Err of int * string
let error ?(code=500) s = raise (Err (code,s))
let errorf ?code fmt = Fmt.kasprintf (error ?code) fmt
let ret_html (h:_ H.elt list) =
let open H in
let h =
html
(head (title (txt "zulip log viewer"))
[meta ~a:[a_charset "utf-8"] ();
link ~rel:[`Stylesheet] ~href:"/css/" ();
])
(body h)
in
Fmt.asprintf "%a@." (H.pp ()) h
module Server(Config: sig
val dir : string
val port: int
end)
= struct
module Serv = Tiny_httpd
let urlencode = Tiny_httpd_util.percent_encode
let urldecode = Tiny_httpd_util.percent_decode
let to_hex s = spf "\"%s\"" @@ CCString.flat_map (fun c -> spf "%x" (Char.code c)) s
let dotencode_filename s =
CCString.flat_map
(function
| (' ' | '!' | '"' | '#' | '$' | '%' | '&' | '\'' | '*' | '+'
| ',' | '/' | ':' | ';' | '=' | '?' | '@' | '[' | ']' | '~'
| '^' | '.')
as c ->
spf ".%X" (Char.code c)
| c when Char.code c > 127 -> spf ".%X" (Char.code c)
| c -> String.make 1 c)
s
let slugify = CCString.map (function ' ' -> '-' | c -> c)
let dotencode s = urlencode s |> CCString.map (function ' ' -> '-' | '%' -> '.' | c -> c)
let dotdecode s = CCString.map (function '.' -> '%' | c -> c) s |> urldecode
let (//) = Filename.concat
let (let*?) x f =
let res =
try
let resp = f x in
Ok resp
with Err (code, msg) ->
Log.err (fun k->k "error (code %d):\n%s" code msg);
Error (code, msg)
in
Serv.Response.make_string res
let root _req : Serv.Response.t =
Log.debug (fun k->k "at %a: serve /" Date.pp (Date.now()));
let*? h =
let file = Config.dir // "stream_index.json" in
let streams = match DJ.decode_file Stream_idx.dec_l file with
| Ok x -> x
| Error e ->
errorf ~code:404 "cannot read '%s':\n%s" file (DJ.string_of_error e)
in
let open H in
let h_str (str:Stream_idx.t) =
let url =
spf "/stream/%d/%s" str.id (urlencode str.name)
in
li ~a:[a_class ["list-group-item"]] [
a ~a:[a_href url] [h3 [txt (str.name)]];
details ~a:[a_open ()] (summary [txt "topics"]) [
let h_top (top:Topic_idx.t) =
let open Topic_idx in
let url =
spf "/topic/%d/%s/%s" str.id (urlencode str.name)
(urlencode top.title)
in
li ~a:[a_class ["list-group-item"]] [
span [
a ~a:[a_href url] [b [txt top.title]]
];
span ~a:[a_class ["badge"; "bg-warning"]]
[ txt (spf " %d" top.size) ];
span ~a:[a_class ["text-secondary"]] [
txt (spf " [%s]" (Date.to_string top.latest_date));
]
]
in
ul ~a:[a_class ["list-group"]] @@ List.map h_top str.topic_data
];
]
in
[h1 [txt "Streams"];
ul ~a:[a_class ["list-group"]] @@ List.map h_str streams;
]
in
ret_html h
let goto_root () = H.(a ~a:[a_href "/"] [txt "back to root"])
let dir_of_stream id name : string =
spf "%d-%s" id (slugify name)
let stream id name _req : Serv.Response.t =
Log.debug (fun k->k "at %a: serve stream %d/%s" Date.pp (Date.now()) id name);
let open H in
let*? h =
let dirname = Config.dir // dir_of_stream id name in
Log.debug (fun k->k "dirname should be '%s'" dirname);
let items =
try Sys.readdir dirname |> Array.to_list
with _ -> errorf "error while reading stream dir '%s'" dirname
in
let topics =
CCList.filter_map
(fun s ->
if Filename.check_suffix s ".json" then (
let name = Filename.chop_suffix s ".json" in
let name = match dotdecode name with
| None -> "<error>"
| Some s -> s
in
Some name
) else None)
items
in
let h_topic topic_name : _ H.elt =
let url =
spf "/topic/%d/%s/%s" id (urlencode name) (urlencode topic_name)
in
li ~a:[a_class ["list-group-item"]] [
a ~a:[a_href url] [txt topic_name]
]
in
[ goto_root();
h2 [txt (spf "Stream %s" name)];
ul ~a:[a_class ["list-group"]] (List.map h_topic topics);
]
in
ret_html h
let topic id strname name _req : Serv.Response.t =
Log.debug (fun k->k "at %a: serve topic %d/%s/%s" Date.pp (Date.now()) id strname name);
let open H in
let*? h =
let file =
Config.dir // dir_of_stream id strname
// spf "%s.json" (dotencode_filename name) in
Log.debug (fun k->k "file should be '%s' (exists: %b)" file (Sys.file_exists file));
let messages = match DJ.decode_file Msg.dec_l file with
| Ok x -> x
| Error e -> errorf "could not read messages:\n%s" (DJ.string_of_error e)
in
let h_msg (m:Msg.t) : _ H.elt =
li ~a:[a_class ["list-group-item"; "card"]] [
div ~a:[a_class ["card-title"]] [
h5 [txt m.sender_full_name];
p ~a:[a_class ["text-secondary"]] [txt (Date.to_string m.timestamp)];
];
div ~a:[a_class ["card-body"]] [
div ~a:[a_class ["card-text"]] [
(H.Unsafe.data m.content : [>`P] H.elt);
];
];
div ~a:[a_class ["card-footer"]] [
];
]
in
[ p[goto_root()];
p[a ~a:[a_href (spf "/stream/%d/%s" id (urlencode strname))]
[txt (spf "back to %s" strname)]];
h2 [txt name];
ul ~a:[a_class ["list-group"]] (List.map h_msg messages)
]
in
ret_html h
let css req =
let md5 = to_hex @@ Digest.string Web_data.css in
if Serv.Request.get_header req "If-None-Match" = Some md5 then (
) else (
let headers = ["ETag", md5; "Content-Type", "text/css"] in
Serv.Response.make_string ~headers (Ok Web_data.css)
)
let run () =
let server = Serv.create ~port:Config.port () in
Serv.add_route_handler server Serv.Route.(return) root;
Serv.add_route_handler server
Serv.Route.(exact "stream" @/ int @/ string_urlencoded @/ return) stream;
Serv.add_route_handler server
Serv.Route.(exact "topic" @/ int @/ string_urlencoded @/ string_urlencoded @/ return) topic;
Serv.add_route_handler server Serv.Route.(exact "css" @/ return) css;
Serv.run server
end
let () =
let debug = ref false in
let dirs = ref [] in
let port = ref 8085 in
let opts = [
"-d", Arg.Set debug, " enable debug";
"-p", Arg.Set_int port, " port to listen on";
] |> Arg.align in
Arg.parse opts (fun f->dirs := f :: !dirs) "zulip_log_view [dir]+ [opt]*";
Logs.set_reporter (Logs.format_reporter ());
if !debug then Logs.set_level ~all:true (Some Logs.Debug);
begin match !dirs with
| [dir] ->
Log.app (fun k->k "running on :%d" !port);
let module S = Server(struct
let port = !port
let dir = dir
end)
in
begin match S.run () with
| Ok () -> ()
| Error e ->
Log.err (fun k->k "error: %s" (Printexc.to_string e));
exit 1
end
| _ -> failwith "provide exactly one directory";
end
|
e98a94acdf0c287080cc8f42e4ebb022bd2b17b407d3b59f9cc3d3d508400acc | maxhbr/LDBcollector | IfrOSS.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
module Collectors.IfrOSS
( loadIfrOSSFacts
, ifrOSSLFC
, ifrOSSKindToText
) where
import qualified Prelude as P
import MyPrelude hiding (id)
import qualified Data.Text as T
import qualified Data.Vector as V
import Model.License
import Collectors.Common
ifrOSSURL :: URL
ifrOSSURL = ""
ifrOSSLFC :: LicenseFactClassifier
ifrOSSLFC = LFCWithURLAndLicense ifrOSSURL (LFLWithURL "" "ODbL") "ifrOSS"
ifrOSSKindToText :: CopyleftKind -> Maybe String
ifrOSSKindToText StrongCopyleft = Just "Bei Lizenzen mit einem strengen Copyleft-Effekt wird der Lizenznehmer verpflichtet von der ursprünglichen Software abgeleitete Werke ebenfalls nur unter den Bedingungen der Ursprungslizenz weiterzuverbreiten."
ifrOSSKindToText WeakCopyleft = Just "Lizenzen mit beschränktem Copyleft-Effekt haben ebenfalls einen Copyleft-Effekt, der aber nicht alle Berbeitungen und abgeleiteten Werke umfasst, sondern definierte Ausnahmen enthält."
ifrOSSKindToText SaaSCopyleft = ifrOSSKindToText StrongCopyleft
ifrOSSKindToText Copyleft = Just $ unwords [ fromJust $ ifrOSSKindToText StrongCopyleft
, fromJust $ ifrOSSKindToText WeakCopyleft]
ifrOSSKindToText MaybeCopyleft = Nothing
ifrOSSKindToText NoCopyleft = Just "Lizenzen ohne Copyleft-Effekt zeichnen sich dadurch aus, dass sie dem Lizenznehmer alle Freiheiten einer Open Source Lizenz einräumen und für Veränderungen der Software keine Bedingungen hinsichtlich des zu verwendenden Lizenztyps enthalten. Damit kann der Lizenznehmer veränderte Versionen der Software unter beliebigen Lizenzbedingungen weiterverbreiten, also auch in proprietäre Software überführen."
data IfrCopyleftKind
= IfrStrongCopyleft
| IfrStrongCopyleft_GPLlike
| IfrWeakCopyleft
| IfrWeakCopyleft_MPLlike
| IfrLicenseWithChoice
| IfrLicenseWithSpecialRights
| IfrNoCopyleft
deriving (Eq, Show, Generic)
instance ToJSON IfrCopyleftKind
ifrOSSIfrKindToText :: IfrCopyleftKind -> String
ifrOSSIfrKindToText IfrStrongCopyleft = fromJust $ ifrOSSKindToText StrongCopyleft
ifrOSSIfrKindToText IfrStrongCopyleft_GPLlike = unwords [ fromJust $ ifrOSSKindToText StrongCopyleft
, "Die hier aufgeführten Lizenzen enthalten die verschiedenen GPL-Versionen und davon abgeleitete Lizenztexte. Zudem finden sich hier einige GPL-Varianten mit Ausnahmeregelungen vom strengen Copyleft. Diese Lizenzen mit Ausnahmen können auch als beschränktes Copyleft verstanden werden."]
ifrOSSIfrKindToText IfrWeakCopyleft = fromJust $ ifrOSSKindToText WeakCopyleft
ifrOSSIfrKindToText IfrWeakCopyleft_MPLlike = unwords [ fromJust $ ifrOSSKindToText WeakCopyleft
, "Sofern Modifikationen der Software unter MPLartigen Lizenzen in eigenen Dateien realisiert werden, können diese Dateien auch unter anderen, z.B. proprietären Lizenzbedingungen weiterverbreitet werden. Damit soll die Kombination von Software unter verschiedenen Lizenztypen erleichtert werden."]
ifrOSSIfrKindToText IfrLicenseWithChoice = "Diese Lizenzen sehen unterschiedliche rechtliche Folgen vor, je nachdem wie umfangreich eine Modifikation ist. Zudem werden dem Lizenznehmer verschiedene Wahlmöglichkeiten eingeräumt, wie Weiterentwicklungen weiterverbreitet werden können."
ifrOSSIfrKindToText IfrLicenseWithSpecialRights = "Die Lizenzen mit Sonderrechten gewähren den Lizenznehmern zwar alle diejenigen Rechte, die Freie Software ausmachen, sehen aber zugleich besondere Privilegien für den Lizenzgeber bei Weiterentwicklungen durch den Lizenznehmer vor. Diese Lizenzen werden zumeist bei Programmen verwendet, die ursprünglich proprietär vertrieben wurden."
ifrOSSIfrKindToText IfrNoCopyleft = fromJust $ ifrOSSKindToText NoCopyleft
copyleftKindFromIfrOSSKind :: IfrCopyleftKind -> Maybe CopyleftKind
copyleftKindFromIfrOSSKind IfrStrongCopyleft = Just StrongCopyleft
copyleftKindFromIfrOSSKind IfrStrongCopyleft_GPLlike = Just StrongCopyleft
copyleftKindFromIfrOSSKind IfrWeakCopyleft = Just WeakCopyleft
copyleftKindFromIfrOSSKind IfrWeakCopyleft_MPLlike = Just WeakCopyleft
copyleftKindFromIfrOSSKind IfrLicenseWithChoice = Just MaybeCopyleft
copyleftKindFromIfrOSSKind IfrLicenseWithSpecialRights = Nothing
copyleftKindFromIfrOSSKind IfrNoCopyleft = Just NoCopyleft
data IfrOSSFact
= IfrOSSFact
{ ifrName :: LicenseName
, ifrId :: Maybe LicenseName
, ifrKind :: IfrCopyleftKind
, ifrURL :: URL
}
deriving (Show, Generic)
instance ToJSON IfrOSSFact
instance LicenseFactClassifiable IfrOSSFact where
getLicenseFactClassifier _ = ifrOSSLFC
instance LFRaw IfrOSSFact where
getImpliedNames i = CLSR (ifrName i : maybeToList (ifrId i))
getImpliedURLs i = CLSR [(Nothing, ifrURL i)]
getImpliedComments i = mkSLSR i [ifrOSSIfrKindToText (ifrKind i)]
getImpliedCopyleft i = case copyleftKindFromIfrOSSKind (ifrKind i) of
Just c -> mkSLSR i c
Nothing -> NoSLSR
rawIfrOSSFacts :: [IfrOSSFact]
rawIfrOSSFacts = let
noCopyleftLics =
[ (Nothing, "4Suite License (v. 1.1)", "")
, (Nothing, "Academic Free License (AFL) (v. 1.0)", ":80/afl.html")
, (Nothing, "Academic Free License (AFL) (v. 1.1)", ":80/afl.html")
, (Nothing, "Academic Free License (AFL) (v. 1.2)", ":80/afl1.2.html")
, (Nothing, "Academic Free License (AFL) (v. 2.0)", ":80/afl2.0.html")
, (Nothing, "Academic Free License (AFL) (v. 2.1)", ":80/afl21.htm")
, (Nothing, "Academic Free License (AFL) (v. 3.0)", "")
, (Nothing, "Apache License (v. 1.0)", "-1.0")
, (Nothing, "Apache License (v. 1.1)", "-1.1")
, (Nothing, "Apache License (v. 2.0)", "-2.0.html")
, (Nothing, "Beerware License", "/~phk/")
, (Nothing, "Boost Software License (Einordnung unklar)", "")
, (Nothing, "BSD 2-clause \"Simplified\" or \"FreeBSD\" License", "-license.html")
, (Nothing, "BSD 3-clause \"New\" or \"Revised\" License", "-3-Clause")
, (Nothing, "BSD 4-clause \"Original\" or \"Old\" License", "")
, (Nothing, "CeCILL-B Free Software License Agreement", "-B_V1-en.html")
, (Nothing, "Christian Software Public License", "")
, (Nothing, "CNRI Open Source License Agreement", "-5-00.html")
, (Nothing, "Condor Public License (v. 1.1)", "#condor")
, (Nothing, "Contrat de License de Logiciel Libre CeCILL-B", "-B_V1-fr.html")
, (Nothing, "Cougaar Open Source License", "")
, (Nothing, "Cryptix General License", "")
, (Nothing, "Curl License", "")
, (Nothing, "Do What The Fuck You Want To Public License (v. 1.0)", ":WTFPL-1")
, (Nothing, "Do What The Fuck You Want To Public License (v. 2.0)", "/ ")
, (Nothing, "Eclipse Distribution License (v 1.0)", "-v10.php")
, (Nothing, "Educational Community License (v. 1.0)", ":80/license.html")
, (Nothing, "Educational Community License (v. 2.0)", "-2.0")
, (Nothing, "Eiffel Forum License (v. 1.0)", "-nice.org/license/forum.txt")
, (Nothing, "Eiffel Forum License (v. 2.0)", "-nice.org/license/eiffel-forum-license-2.html")
, (Nothing, "Entessa Public License (v. 1.0)", "/")
, (Nothing, "EU DataGrid Software License (v. 2.0)", "-datagrid.web.cern.ch:80/eu-datagrid/license.html")
, (Nothing, "Fair License", "")
, (Nothing, "Free Fuzzy Logic Library Open Source License", "")
, (Nothing, "FreeType Project License", "")
, (Nothing, "FSF Unlimited License", "")
, (Nothing, "Galen Open Source License (GOSL)", "")
, (Nothing, "Globus Toolkit Public License", "")
, (Nothing, "Globus Toolkit Public License (GTPL) (v. 2.0)", "")
, (Nothing, "ICU License", "-project.org:80/repos/icu/icu/trunk/license.html")
, (Nothing, "ImageMagick Terms and Conditions for Use, Reproduction, and Distribution", "")
, (Nothing, "Independent JPEG Group License", ":IJG?rd=Licensing/IJG")
, (Nothing, "ISC License", "-Licence")
, (Nothing, "Historical Permission Notice and Disclaimer (HPND)", "")
, (Nothing, "Horde Apache-like License", "")
, (Nothing, "Horde BSD-like License", "")
, (Nothing, "Indiana University Extreme! Lab Software License, Version 1.2", "")
, (Nothing, "Intel Open Source License for CDSA/CSSM Implementation", "-open-source-license.php")
, (Nothing, "ISC License", "-support-policy/isc-license/")
, (Nothing, "JasPer License Version 2.0", "/~frodo/jasper/LICENSE")
, (Nothing, "JSON License", "")
, (Nothing, "Libpng License", "-LICENSE.txt")
, (Nothing, "Lua Copyright notice", "")
, (Nothing, "Lucent Public License Version (v. 1)", "-labs.com/hidden/lpl4-2-03.html")
, (Nothing, "Lucent Public License Version (v. 1.02)", "-labs.com/plan9/license.html")
, (Nothing, "Microsoft Permissive License (Ms-PL)", ":80/resources/sharedsource/licensingbasics/permissivelicense.mspx (Einordnung unklar)")
, (Nothing, "Microsoft Public License (Ms-PL)", ":80/resources/sharedsource/licensingbasics/publiclicense.mspx (Einordnung unklar)")
, (Nothing, "MirOS License", "-Licence")
, (Nothing, "MIT License", "#licenseText")
, (Nothing, "Mozart License", "-info/license.html")
, (Nothing, "Naumen Public License", "")
, (Nothing, "NTP License", "/~mills/ntp/html/copyright.html")
, (Nothing, "NUnit License", "")
, (Nothing, "Open Group Test Suite License", "")
, (Nothing, "Open Media Group Open Source License", "")
, (Nothing, "OpenLDAP Public License (v. 1.1)", ";a=blob;f=LICENSE;hb=806557a5ad59804ef3a44d5abfbe91d706b0791f")
, (Nothing, "OpenLDAP Public License (v. 1.2)", ";a=blob;f=LICENSE;hb=42b0383c50c299977b5893ee695cf4e486fb0dc7")
, (Nothing, "OpenLDAP Public License (v. 1.3)", ";a=blob;f=LICENSE;hb=e5f8117f0ce088d0bd7a8e18ddf37eaa40eb09b1")
, (Nothing, "OpenLDAP Public License (v. 1.4)", ";a=blob;f=LICENSE;hb=c9f95c2f3f2ffb5e0ae55fe7388af75547660941")
, (Nothing, "OpenLDAP Public License (v. 2.0)", ";a=blob;f=LICENSE;hb=cbf50f4e1185a21abd4c0a54d3f4341fe28f36ea")
, (Nothing, "OpenLDAP Public License (v. 2.0.1)", ";a=blob;f=LICENSE;hb=b6d68acd14e51ca3aab4428bf26522aa74873f0e")
, (Nothing, "OpenLDAP Public License (v. 2.1)", ";a=blob;f=LICENSE;hb=b0d176738e96a0d3b9f85cb51e140a86f21be715")
, (Nothing, "OpenLDAP Public License (v. 2.2)", ";a=blob;f=LICENSE;hb=470b0c18ec67621c85881b2733057fecf4a1acc3")
, (Nothing, "OpenLDAP Public License (v. 2.3)", "")
, (Nothing, "OpenLDAP Public License (v. 2.4)", ";a=blob;f=LICENSE;hb=cd1284c4a91a8a380d904eee68d1583f989ed386")
, (Nothing, "OpenLDAP Public License (v. 2.5)", ";a=blob;f=LICENSE;hb=6852b9d90022e8593c98205413380536b1b5a7cf")
, (Nothing, "OpenLDAP Public License (v. 2.6)", ";a=blob;f=LICENSE;hb=1cae062821881f41b73012ba816434897abf4205")
, (Nothing, "OpenLDAP Public License (v. 2.7)", "")
, (Nothing, "OpenLDAP Public License (v. 2.8)", "")
, (Nothing, "OpenSSL License (Einordnung unklar)", "")
, (Nothing, "Pangeia Informatica Copyright (v. 1.2)", "")
, (Nothing, "Phorum License (v. 2.0)", "")
, (Nothing, "PHP License (v. 3.0)", "")
, (Nothing, "PHP License (v. 3.1)", "")
, (Nothing, "PostgreSQL License", "/")
, (Nothing, "Privaria Attribution Assurance License", "/#license")
, (Nothing, "Python 2.0.1 License", "/")
, (Nothing, "Python 2.4.2 license", "/")
, (Nothing, "Python Software Foundation License (v. 2)", "/")
, (Nothing, "Ruby License", "-lang.org/en/LICENSE.txt")
, (Nothing, "Sendmail License", "")
, (Nothing, "skyBuilders Open Source License", "")
, (Nothing, "SpeechWorks Public License - Software (v. 1.1)", "")
, (Nothing, "Standard ML of New Jersey Copyright Notice", "-labs.com/cm/cs/what/smlnj/license.html")
, (Nothing, "Suneido Free Software License", "-content/uploads/delightful-downloads/2014/10/free_license.txt")
, (Nothing, "Tcl/Tk License Terms", "")
, (Nothing, "Tea Software License", "")
, (Nothing, "The SpeechWorks Public License (v. 1.1 )", "")
, (Nothing, "Trusster Open Source License (v. 1.0a)", "")
, (Nothing, "Udanax Open-Source License", "")
, (Nothing, "Universal Permissive License (v. 1.0)", "-license-2927578.html")
, (Nothing, "University of Illinois/NCSA Open Source License (NSCA)", "")
, (Nothing, "Unlicense", "/")
, (Nothing, "Vovida Software License v 1.0", "-1.4.0/license.txt")
, (Nothing, "W3C Software and Document Notice and License", "-software.html")
, (Nothing, "Wide Open License (WOL)", "")
, (Nothing, "X11 License", "#3")
, (Nothing, "X Window System License", "")
, (Nothing, "X.Net License", "")
, (Nothing, "XFree86 Licence", "")
, (Nothing, "xinetd License", "-bin/cvsweb.cgi/xinetd/COPYRIGHT?rev=1.1.1.1;content-type=text%2Fplain")
, (Nothing, "Zlib license", "")
, (Nothing, "Zope Public License (v. 1.1)", "-1.1")
, (Nothing, "Zope Public License (v. 2.0)", "-2.0")
, (Nothing, "Zope Public License (v. 2.1)", ":80/Resources/ZPL/")
]
strongCopyleftGPLLics =
[ (Nothing, "Affero General Public License (v. 1)", "")
, (Nothing, "Affero General Public License (v. 2)", "")
, (Nothing, "Alternate Route Open Source License (v. 1.1)", "")
, (Nothing, "CrossPoint Quelltextlizenz", "")
, (Nothing, "eCos License (v. 2.0)", "-license.html")
, (Nothing, "FreeCard License", "")
, (Nothing, "GNU Affero General Public License (AGPL-3.0) (v. 3.0)", "-3.0.html")
, (Nothing, "GNU Classpath - GPL with special exception", "")
, (Nothing, "GNU Emacs General Public License", "-soft.org/gpl_history/emacs_gpl.html")
, (Nothing, "GNU General Public License (GPL) (v. 1.0)", "-1.0.html")
, (Nothing, "GNU General Public License (GPL) (v. 2.0)", "-licenses/gpl-2.0.html")
, (Nothing, "GNU General Public License (GPL) (v. 3.0)", "")
, (Nothing, "GNU General Public License (GPL) (v. 3.0)", "-ger.html (Inoffizielle deutsche Übersetzung)")
, (Nothing, "GNU General Public License v2.0 w/Bison exception", "-Gliederung/Bison-exception-2.2")
, (Nothing, "GNU General Public License v2.0 w/Classpath exception", "")
, (Nothing, "GNU General Public License v2.0 w/Font exception", "-faq.html#FontException")
, (Nothing, "GNU General Public License v2.0 w/GCC Runtime Library exception", ";a=blob;f=gcc/libgcc1.c;h=762f5143fc6eed57b6797c82710f3538aa52b40b;hb=cb143a3ce4fb417c68f5fa2691a1b1b1053dfba9")
, (Nothing, "GNU General Public License v3.0 w/Autoconf exception", "-exception-3.0.html")
, (Nothing, "GNU General Public License v3.0 w/GCC Runtime Library exception (RLE 3.0)", "-exception-3.0.html")
, (Nothing, "GNU General Public License v3.0 w/GCC Runtime Library exception (RLE 3.1)", "-exception-3.1.html")
, (Nothing, "Honest Public License (HPL) (v 1.0)", "")
, (Nothing, "Honest Public License (HPL) (v 1.1)", "")
, (Nothing, "Nethack General Public License", "")
, (Nothing, "Open RTLinux Patent License", "")
, (Nothing, "RedHat eCos Public License (v. 2.0)", "-overview.html")
, (Nothing, "Simple Public License (v. 2.0)", "")
]
strongCopyleftLics =
[ (Nothing, "Arphic Public License", "-gnu/chinese-fonts-truetype/LICENSE")
, (Nothing, "CeCILL Free Software License Agreement (v. 1.0)", "-US.html")
, (Nothing, "CeCILL Free Software License Agreement (v. 1.1)", "-US.html")
, (Nothing, "CeCILL Free Software License Agreement (v. 2.0)", "-en.html")
, (Nothing, "CeCILL Free Software License Agreement (v. 2.1)", "-en.html")
, (Nothing, "Contrat de License de Logiciel Libre CeCILL (v. 1.0)", "-fr.html")
, (Nothing, "Contrat de License de Logiciel Libre CeCILL (v. 2.0)", "-fr.html")
, (Nothing, "Contrat de License de Logiciel Libre CeCILL (v. 2.1)", "-fr.html")
, (Nothing, "Common Public License (v. 0.5)", "-v05.html")
, (Nothing, "Common Public License (v. 1.0)", "-v10.html")
, (Nothing, "Deutsche Freie Softwarelizenz (d-fsl)", "-fsl.org/")
, (Nothing, "Eclipse Public License (v. 1.0)", "-v10.html")
, (Nothing, "European Union Public License (v. 1.0)", "")
, (Nothing, "European Union Public Licence (v. 1.1)", "-text-11-12")
, (Nothing, "European Union Public Licence (v. 1.2)", "-text-11-12")
, (Nothing, "German Free Software License", "-nrw.de/produkte/open-access/lizenzen/dfsl/german-free-software-license")
, (Nothing, "IBM Public License", ":80/developerworks/opensource/license10.html")
, (Nothing, "Intel Open Source License", "-open-source-license.php")
, (Nothing, "IPA Font License", "#en")
, (Nothing, "No Limit Public License", "")
, (Nothing, "Non-Profit Open Software License 3.0", "-3.0.html")
, (Nothing, "Open Group Public License", "")
, (Nothing, "Open Software License 1.0", ":80/osl.html")
, (Nothing, "Open Software License 2.0", "")
, (Nothing, "Open Software License 2.1", "")
, (Nothing, "Open Software License 3.0", ":80/OSL3.0.htm")
, (Nothing, "Reciprocal Public License (v. 1.0)", ":80/Biz_RPL.html")
, (Nothing, "Reciprocal Public License (v. 1.1)", "-1.1")
, (Nothing, "Reciprocal Public License (v. 1.3)", "")
, (Nothing, "Reciprocal Public License (v. 1.5)", "-discuss_lists.opensource.org/attachments/20070724/6944e582/attachment.txt")
, (Nothing, "SIL Open Font License (v. 1.0)", "")
, (Nothing, "SIL Open Font License (v. 1.1)", "")
, (Nothing, "Salutation Public License", "")
, (Nothing, "Software AG License Terms (Quip License) (v. 1.3)", "/~dqg/cse350/xml/quip/License.txt")
, (Nothing, "VOSTROM Public License for Open Source", "")
]
weakCopyleftMPLLics =
[ (Nothing, "Common Development and Distribution License (CDDL) (v. 1.0)", "")
, (Nothing, "Common Development and Distribution License, Version 1.1 (CDDL v 1.1)", "")
, (Nothing, "Common Public Attribution License (v. 1.0)", "-marketing-manager/email-marketing-software/openemm/license/")
, (Nothing, "Computer Associates Trusted Open Source License (v. 1.1)", "-vertraege/open-source-lizenz/computer-associates-trusted-open-source-license-version-11.html")
, (Nothing, "CUA Office Public License (v. 1.0)", "")
, (Nothing, "Erlang Public License (v. 1.1)", "")
, (Nothing, "gSOAP Public License (v. 1.0)", ":80/~engelen/license.html")
, (Nothing, "gSOAP Public License (v. 1.1)", ":80/~engelen/license.html")
, (Nothing, "gSOAP Public License (v. 1.2)", ":80/~engelen/license.html")
, (Nothing, "gSOAP Public License (v. 1.3)", ":80/~engelen/license.html")
, (Nothing, "gSOAP Public License (v. 1.3a)", ":80/~engelen/license.html")
, (Nothing, "gSOAP Public License (v. 1.3b)", "/~engelen/license.html")
, (Nothing, "ICS Open Source Public License", "")
, (Nothing, "Interbase Public License", ":80/article/0,1410,30198,00.html")
, (Nothing, "Mozilla Public License (v. 1.0)", "-1.0.html")
, (Nothing, "Mozilla Public License (v. 1.1)", "-1.1.html")
, (Nothing, "Mozilla Public License (v. 2.0)", "/")
, (Nothing, "NASA Open Source Agreement (v. 1.1)", "-Source/NASA_Open_Source_Agreement_1.1.txt")
, (Nothing, "NASA Open Source Agreement (v. 1.3)", "/")
, (Nothing, "Netizen Open Source License (NOSL)", "/")
, (Nothing, "Nokia Open Source License", "")
, (Nothing, "Open Public License (v. 1.0)", "")
, (Nothing, "Open Telecom Public License", "")
, (Nothing, "Openbravo Public License", "")
, (Nothing, "OpenC++ License Terms", "/")
, (Nothing, "RedHat eCos Public License (v. 1.1)", "-license.html")
, (Nothing, "Ricoh Source Code Public License", "-1.0A.shtml")
, (Nothing, "SNIA Public License Version (v.1.1)", "")
, (Nothing, "SugarCRM Public License (v. 1.1.3)", "-public-license/en")
, (Nothing, "Sun Industry Standards Source License (v. 1.0)", ":80/project/www/sissl_license.html")
, (Nothing, "Sun Industry Standards Source License (v. 1.1)", "")
, (Nothing, "Sun Industry Standards Source License (v. 1.2)", "")
, (Nothing, "Sun Public License", "")
, (Nothing, "Sun Public License v1.0", "")
, (Nothing, "Sybase Open Watcom Public License 1.0", "ftp")
, (Nothing, "Zend Engine License (v. 2.0)", "")
, (Nothing, "Zenplex Public License", "")
, (Nothing, "Zimbra Public License (ZPL) (v. 1.2)", "")
, (Nothing, "Zimbra Publice License (v. 1.3)", "-public-license-1-3.html")
, (Nothing, "Zimbra Publice License (v. 1.4)", "-public-license-1-4/")
]
weakCopyleftLics =
[ (Nothing, "Adaptive Public License (v.1.0)", "")
, (Nothing, "Apple Public Source License (v. 2.0)", "")
, (Nothing, "BitTorrent Open Source License (v. 1.0)", "-bin/viewvc.cgi/gentoo-x86/licenses/BitTorrent?diff_format=s&revision=1.1.1.1&view=markup")
, (Nothing, "Bremer Lizenz für freie Softwarebibliotheken (OSCI-Lizenz) (v. 1.0)", " (.pdf-Dokument)")
, (Nothing, "CeCILL-C Free Software License Agreement", "-C_V1-en.html")
, (Nothing, "Code Project Open License (CPOL) (v. 1.02)", "")
, (Nothing, "Contrat de License de Logiciel Libre CeCILL-C", "-C_V1-fr.html")
, (Nothing, "Cougaar Open Source License Agreement", ":80/docman/view.php/17/126/old_cosl_license.html (Einordnung unklar)")
, (Nothing, "Eclipse Public License (v. 2.0)", "-2.0/EPL-2.0.html")
, (Nothing, "GNU Library General Public License (LGPL) (v. 2.0)", "")
, (Nothing, "GNU Lesser General Public License (LGPL) (v. 2.1)", "-licenses/lgpl-2.1.html")
, (Nothing, "GNU Lesser General Public License (LGPL) (v. 3.0)", "")
, (Nothing, "GNU Lesser General Public License (LGPL) (v. 3.0)", "-ger.html (Inoffizielle deutsche Übersetzung)")
, (Nothing, "Hi-Potent Open Source License", "-potent.com/license.html")
, (Nothing, "Jabber Open Source License", "")
, (Nothing, "Microsoft Reciprocal License (Ms-RL)", ":80/resources/sharedsource/licensingbasics/reciprocallicense.mspx")
, (Nothing, "Motosoto Open Source License (v. 0.9.1)", "")
, (Nothing, "Open CASCADE Technology Public License (v. 6.6)", "-public-license")
, (Nothing, "wxWindows License (v. 1.0)", ":80/licence.htm")
, (Nothing, "wxWindows Library License (v. 3.0)", ":80/licence3.txt")
, (Nothing, "wxWindows Library License (v. 3.1)", "/")
, (Nothing, "Yahoo! Public License (YPL) (v. 1.1)", "")
]
withChoiceLics =
[ (Nothing, "ANTLR 2 License", "")
, (Nothing, "Artistic License (v. 1.0)", " (Einordnung unklar)")
, (Nothing, "Artistic License (v. 2.0)", "")
, (Nothing, "Clarified Artistic License", "")
, (Nothing, "Frameworx Open License (v. 1.0)", "-1.0.html")
, (Nothing, "Keith Devens' Open Source License", "/")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.0)", "-project.org/lppl/lppl-1-0.html")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.1)", "-project.org/lppl/lppl-1-1.html")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.2)", "-project.org/lppl/lppl-1-2.html")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.3b)", "-project.org/lppl/lppl-1-3b/")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.3b)", "-project.org/lppl/lppl-1-3b-de.html (Inoffizielle deutsche Übersetzung)")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.3c)", "-project.org/lppl/lppl-1-3c.html")
, (Nothing, "Physnet Package License", "")
, (Nothing, "Ruby License", "-lang.org/en/about/license.txt")
, (Nothing, "SFL License Agreement", "")
, (Nothing, "SGI Free Software License B (v. 1.0)", " (Einordnung unklar)")
, (Nothing, "SGI Free Software License B (v. 1.1)", ":80/projects/FreeB/SGIFreeSWLicB.1.1.doc (Einordnung unklar)")
, (Nothing, "Sleepycat License", "")
, (Nothing, "Sleepycat Software Product License", "")
, (Nothing, "Vim License", "#license")
]
withSpecialRightsLics =
[ (Nothing, "Apple Public Source License 1.0", " (Einordnung unklar)")
, (Nothing, "Apple Public Source License 1.1", "-7/APPLE_LICENSE (Einordnung unklar)")
, (Nothing, "Apple Public Source License (v. 1.2)", " (Einordnung unklar)")
, (Nothing, "Macromedia Open Source License Agreement (v. 1.0)", "")
, (Nothing, "Netscape Public License (NPL) (v. 1.0)", "-1.0.html")
, (Nothing, "Netscape Public License (NPL) (v. 1.1)", ":80/MPL/NPL-1.1.html")
, (Nothing, "OCLC Research Public License (v. 1.0)", "/")
, (Nothing, "OCLC Research Public License (v. 2.0)", "")
, (Nothing, "Open Map Software License Agreement", ":80/license.html")
, (Nothing, "Q Public License (v. 1.0) (QPL)", ":80/4.0/qpl.html")
, (Nothing, "RealNetworks Community Source License - Research and Development Use (RCSL R&D) (v 2.0)", "")
, (Nothing, "RealNetworks Community Source License - Research and Development Use (RCSL R&D) (v 3.0)", "")
, (Nothing, "RealNetworks Public Source License (RPSL) (v. 1.0)", "")
]
mkRaws kind = map (\(i, n, u) -> IfrOSSFact n i kind u)
in mkRaws IfrNoCopyleft noCopyleftLics
++ mkRaws IfrStrongCopyleft_GPLlike strongCopyleftGPLLics
++ mkRaws IfrStrongCopyleft strongCopyleftLics
++ mkRaws IfrWeakCopyleft_MPLlike weakCopyleftMPLLics
++ mkRaws IfrWeakCopyleft weakCopyleftLics
++ mkRaws IfrLicenseWithChoice withChoiceLics
++ mkRaws IfrLicenseWithSpecialRights withSpecialRightsLics
loadIfrOSSFacts :: IO Facts
loadIfrOSSFacts = do
logThatFactsAreLoadedFrom "ifrOSS"
return . V.fromList $ map (LicenseFact (Just ifrOSSURL)) rawIfrOSSFacts
| null | https://raw.githubusercontent.com/maxhbr/LDBcollector/51d940f0af00b2acdd7de246b2be16fa30fc8a6b/src/Collectors/IfrOSS.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE DeriveGeneric #
# LANGUAGE LambdaCase #
module Collectors.IfrOSS
( loadIfrOSSFacts
, ifrOSSLFC
, ifrOSSKindToText
) where
import qualified Prelude as P
import MyPrelude hiding (id)
import qualified Data.Text as T
import qualified Data.Vector as V
import Model.License
import Collectors.Common
ifrOSSURL :: URL
ifrOSSURL = ""
ifrOSSLFC :: LicenseFactClassifier
ifrOSSLFC = LFCWithURLAndLicense ifrOSSURL (LFLWithURL "" "ODbL") "ifrOSS"
ifrOSSKindToText :: CopyleftKind -> Maybe String
ifrOSSKindToText StrongCopyleft = Just "Bei Lizenzen mit einem strengen Copyleft-Effekt wird der Lizenznehmer verpflichtet von der ursprünglichen Software abgeleitete Werke ebenfalls nur unter den Bedingungen der Ursprungslizenz weiterzuverbreiten."
ifrOSSKindToText WeakCopyleft = Just "Lizenzen mit beschränktem Copyleft-Effekt haben ebenfalls einen Copyleft-Effekt, der aber nicht alle Berbeitungen und abgeleiteten Werke umfasst, sondern definierte Ausnahmen enthält."
ifrOSSKindToText SaaSCopyleft = ifrOSSKindToText StrongCopyleft
ifrOSSKindToText Copyleft = Just $ unwords [ fromJust $ ifrOSSKindToText StrongCopyleft
, fromJust $ ifrOSSKindToText WeakCopyleft]
ifrOSSKindToText MaybeCopyleft = Nothing
ifrOSSKindToText NoCopyleft = Just "Lizenzen ohne Copyleft-Effekt zeichnen sich dadurch aus, dass sie dem Lizenznehmer alle Freiheiten einer Open Source Lizenz einräumen und für Veränderungen der Software keine Bedingungen hinsichtlich des zu verwendenden Lizenztyps enthalten. Damit kann der Lizenznehmer veränderte Versionen der Software unter beliebigen Lizenzbedingungen weiterverbreiten, also auch in proprietäre Software überführen."
data IfrCopyleftKind
= IfrStrongCopyleft
| IfrStrongCopyleft_GPLlike
| IfrWeakCopyleft
| IfrWeakCopyleft_MPLlike
| IfrLicenseWithChoice
| IfrLicenseWithSpecialRights
| IfrNoCopyleft
deriving (Eq, Show, Generic)
instance ToJSON IfrCopyleftKind
ifrOSSIfrKindToText :: IfrCopyleftKind -> String
ifrOSSIfrKindToText IfrStrongCopyleft = fromJust $ ifrOSSKindToText StrongCopyleft
ifrOSSIfrKindToText IfrStrongCopyleft_GPLlike = unwords [ fromJust $ ifrOSSKindToText StrongCopyleft
, "Die hier aufgeführten Lizenzen enthalten die verschiedenen GPL-Versionen und davon abgeleitete Lizenztexte. Zudem finden sich hier einige GPL-Varianten mit Ausnahmeregelungen vom strengen Copyleft. Diese Lizenzen mit Ausnahmen können auch als beschränktes Copyleft verstanden werden."]
ifrOSSIfrKindToText IfrWeakCopyleft = fromJust $ ifrOSSKindToText WeakCopyleft
ifrOSSIfrKindToText IfrWeakCopyleft_MPLlike = unwords [ fromJust $ ifrOSSKindToText WeakCopyleft
, "Sofern Modifikationen der Software unter MPLartigen Lizenzen in eigenen Dateien realisiert werden, können diese Dateien auch unter anderen, z.B. proprietären Lizenzbedingungen weiterverbreitet werden. Damit soll die Kombination von Software unter verschiedenen Lizenztypen erleichtert werden."]
ifrOSSIfrKindToText IfrLicenseWithChoice = "Diese Lizenzen sehen unterschiedliche rechtliche Folgen vor, je nachdem wie umfangreich eine Modifikation ist. Zudem werden dem Lizenznehmer verschiedene Wahlmöglichkeiten eingeräumt, wie Weiterentwicklungen weiterverbreitet werden können."
ifrOSSIfrKindToText IfrLicenseWithSpecialRights = "Die Lizenzen mit Sonderrechten gewähren den Lizenznehmern zwar alle diejenigen Rechte, die Freie Software ausmachen, sehen aber zugleich besondere Privilegien für den Lizenzgeber bei Weiterentwicklungen durch den Lizenznehmer vor. Diese Lizenzen werden zumeist bei Programmen verwendet, die ursprünglich proprietär vertrieben wurden."
ifrOSSIfrKindToText IfrNoCopyleft = fromJust $ ifrOSSKindToText NoCopyleft
copyleftKindFromIfrOSSKind :: IfrCopyleftKind -> Maybe CopyleftKind
copyleftKindFromIfrOSSKind IfrStrongCopyleft = Just StrongCopyleft
copyleftKindFromIfrOSSKind IfrStrongCopyleft_GPLlike = Just StrongCopyleft
copyleftKindFromIfrOSSKind IfrWeakCopyleft = Just WeakCopyleft
copyleftKindFromIfrOSSKind IfrWeakCopyleft_MPLlike = Just WeakCopyleft
copyleftKindFromIfrOSSKind IfrLicenseWithChoice = Just MaybeCopyleft
copyleftKindFromIfrOSSKind IfrLicenseWithSpecialRights = Nothing
copyleftKindFromIfrOSSKind IfrNoCopyleft = Just NoCopyleft
data IfrOSSFact
= IfrOSSFact
{ ifrName :: LicenseName
, ifrId :: Maybe LicenseName
, ifrKind :: IfrCopyleftKind
, ifrURL :: URL
}
deriving (Show, Generic)
instance ToJSON IfrOSSFact
instance LicenseFactClassifiable IfrOSSFact where
getLicenseFactClassifier _ = ifrOSSLFC
instance LFRaw IfrOSSFact where
getImpliedNames i = CLSR (ifrName i : maybeToList (ifrId i))
getImpliedURLs i = CLSR [(Nothing, ifrURL i)]
getImpliedComments i = mkSLSR i [ifrOSSIfrKindToText (ifrKind i)]
getImpliedCopyleft i = case copyleftKindFromIfrOSSKind (ifrKind i) of
Just c -> mkSLSR i c
Nothing -> NoSLSR
rawIfrOSSFacts :: [IfrOSSFact]
rawIfrOSSFacts = let
noCopyleftLics =
[ (Nothing, "4Suite License (v. 1.1)", "")
, (Nothing, "Academic Free License (AFL) (v. 1.0)", ":80/afl.html")
, (Nothing, "Academic Free License (AFL) (v. 1.1)", ":80/afl.html")
, (Nothing, "Academic Free License (AFL) (v. 1.2)", ":80/afl1.2.html")
, (Nothing, "Academic Free License (AFL) (v. 2.0)", ":80/afl2.0.html")
, (Nothing, "Academic Free License (AFL) (v. 2.1)", ":80/afl21.htm")
, (Nothing, "Academic Free License (AFL) (v. 3.0)", "")
, (Nothing, "Apache License (v. 1.0)", "-1.0")
, (Nothing, "Apache License (v. 1.1)", "-1.1")
, (Nothing, "Apache License (v. 2.0)", "-2.0.html")
, (Nothing, "Beerware License", "/~phk/")
, (Nothing, "Boost Software License (Einordnung unklar)", "")
, (Nothing, "BSD 2-clause \"Simplified\" or \"FreeBSD\" License", "-license.html")
, (Nothing, "BSD 3-clause \"New\" or \"Revised\" License", "-3-Clause")
, (Nothing, "BSD 4-clause \"Original\" or \"Old\" License", "")
, (Nothing, "CeCILL-B Free Software License Agreement", "-B_V1-en.html")
, (Nothing, "Christian Software Public License", "")
, (Nothing, "CNRI Open Source License Agreement", "-5-00.html")
, (Nothing, "Condor Public License (v. 1.1)", "#condor")
, (Nothing, "Contrat de License de Logiciel Libre CeCILL-B", "-B_V1-fr.html")
, (Nothing, "Cougaar Open Source License", "")
, (Nothing, "Cryptix General License", "")
, (Nothing, "Curl License", "")
, (Nothing, "Do What The Fuck You Want To Public License (v. 1.0)", ":WTFPL-1")
, (Nothing, "Do What The Fuck You Want To Public License (v. 2.0)", "/ ")
, (Nothing, "Eclipse Distribution License (v 1.0)", "-v10.php")
, (Nothing, "Educational Community License (v. 1.0)", ":80/license.html")
, (Nothing, "Educational Community License (v. 2.0)", "-2.0")
, (Nothing, "Eiffel Forum License (v. 1.0)", "-nice.org/license/forum.txt")
, (Nothing, "Eiffel Forum License (v. 2.0)", "-nice.org/license/eiffel-forum-license-2.html")
, (Nothing, "Entessa Public License (v. 1.0)", "/")
, (Nothing, "EU DataGrid Software License (v. 2.0)", "-datagrid.web.cern.ch:80/eu-datagrid/license.html")
, (Nothing, "Fair License", "")
, (Nothing, "Free Fuzzy Logic Library Open Source License", "")
, (Nothing, "FreeType Project License", "")
, (Nothing, "FSF Unlimited License", "")
, (Nothing, "Galen Open Source License (GOSL)", "")
, (Nothing, "Globus Toolkit Public License", "")
, (Nothing, "Globus Toolkit Public License (GTPL) (v. 2.0)", "")
, (Nothing, "ICU License", "-project.org:80/repos/icu/icu/trunk/license.html")
, (Nothing, "ImageMagick Terms and Conditions for Use, Reproduction, and Distribution", "")
, (Nothing, "Independent JPEG Group License", ":IJG?rd=Licensing/IJG")
, (Nothing, "ISC License", "-Licence")
, (Nothing, "Historical Permission Notice and Disclaimer (HPND)", "")
, (Nothing, "Horde Apache-like License", "")
, (Nothing, "Horde BSD-like License", "")
, (Nothing, "Indiana University Extreme! Lab Software License, Version 1.2", "")
, (Nothing, "Intel Open Source License for CDSA/CSSM Implementation", "-open-source-license.php")
, (Nothing, "ISC License", "-support-policy/isc-license/")
, (Nothing, "JasPer License Version 2.0", "/~frodo/jasper/LICENSE")
, (Nothing, "JSON License", "")
, (Nothing, "Libpng License", "-LICENSE.txt")
, (Nothing, "Lua Copyright notice", "")
, (Nothing, "Lucent Public License Version (v. 1)", "-labs.com/hidden/lpl4-2-03.html")
, (Nothing, "Lucent Public License Version (v. 1.02)", "-labs.com/plan9/license.html")
, (Nothing, "Microsoft Permissive License (Ms-PL)", ":80/resources/sharedsource/licensingbasics/permissivelicense.mspx (Einordnung unklar)")
, (Nothing, "Microsoft Public License (Ms-PL)", ":80/resources/sharedsource/licensingbasics/publiclicense.mspx (Einordnung unklar)")
, (Nothing, "MirOS License", "-Licence")
, (Nothing, "MIT License", "#licenseText")
, (Nothing, "Mozart License", "-info/license.html")
, (Nothing, "Naumen Public License", "")
, (Nothing, "NTP License", "/~mills/ntp/html/copyright.html")
, (Nothing, "NUnit License", "")
, (Nothing, "Open Group Test Suite License", "")
, (Nothing, "Open Media Group Open Source License", "")
, (Nothing, "OpenLDAP Public License (v. 1.1)", ";a=blob;f=LICENSE;hb=806557a5ad59804ef3a44d5abfbe91d706b0791f")
, (Nothing, "OpenLDAP Public License (v. 1.2)", ";a=blob;f=LICENSE;hb=42b0383c50c299977b5893ee695cf4e486fb0dc7")
, (Nothing, "OpenLDAP Public License (v. 1.3)", ";a=blob;f=LICENSE;hb=e5f8117f0ce088d0bd7a8e18ddf37eaa40eb09b1")
, (Nothing, "OpenLDAP Public License (v. 1.4)", ";a=blob;f=LICENSE;hb=c9f95c2f3f2ffb5e0ae55fe7388af75547660941")
, (Nothing, "OpenLDAP Public License (v. 2.0)", ";a=blob;f=LICENSE;hb=cbf50f4e1185a21abd4c0a54d3f4341fe28f36ea")
, (Nothing, "OpenLDAP Public License (v. 2.0.1)", ";a=blob;f=LICENSE;hb=b6d68acd14e51ca3aab4428bf26522aa74873f0e")
, (Nothing, "OpenLDAP Public License (v. 2.1)", ";a=blob;f=LICENSE;hb=b0d176738e96a0d3b9f85cb51e140a86f21be715")
, (Nothing, "OpenLDAP Public License (v. 2.2)", ";a=blob;f=LICENSE;hb=470b0c18ec67621c85881b2733057fecf4a1acc3")
, (Nothing, "OpenLDAP Public License (v. 2.3)", "")
, (Nothing, "OpenLDAP Public License (v. 2.4)", ";a=blob;f=LICENSE;hb=cd1284c4a91a8a380d904eee68d1583f989ed386")
, (Nothing, "OpenLDAP Public License (v. 2.5)", ";a=blob;f=LICENSE;hb=6852b9d90022e8593c98205413380536b1b5a7cf")
, (Nothing, "OpenLDAP Public License (v. 2.6)", ";a=blob;f=LICENSE;hb=1cae062821881f41b73012ba816434897abf4205")
, (Nothing, "OpenLDAP Public License (v. 2.7)", "")
, (Nothing, "OpenLDAP Public License (v. 2.8)", "")
, (Nothing, "OpenSSL License (Einordnung unklar)", "")
, (Nothing, "Pangeia Informatica Copyright (v. 1.2)", "")
, (Nothing, "Phorum License (v. 2.0)", "")
, (Nothing, "PHP License (v. 3.0)", "")
, (Nothing, "PHP License (v. 3.1)", "")
, (Nothing, "PostgreSQL License", "/")
, (Nothing, "Privaria Attribution Assurance License", "/#license")
, (Nothing, "Python 2.0.1 License", "/")
, (Nothing, "Python 2.4.2 license", "/")
, (Nothing, "Python Software Foundation License (v. 2)", "/")
, (Nothing, "Ruby License", "-lang.org/en/LICENSE.txt")
, (Nothing, "Sendmail License", "")
, (Nothing, "skyBuilders Open Source License", "")
, (Nothing, "SpeechWorks Public License - Software (v. 1.1)", "")
, (Nothing, "Standard ML of New Jersey Copyright Notice", "-labs.com/cm/cs/what/smlnj/license.html")
, (Nothing, "Suneido Free Software License", "-content/uploads/delightful-downloads/2014/10/free_license.txt")
, (Nothing, "Tcl/Tk License Terms", "")
, (Nothing, "Tea Software License", "")
, (Nothing, "The SpeechWorks Public License (v. 1.1 )", "")
, (Nothing, "Trusster Open Source License (v. 1.0a)", "")
, (Nothing, "Udanax Open-Source License", "")
, (Nothing, "Universal Permissive License (v. 1.0)", "-license-2927578.html")
, (Nothing, "University of Illinois/NCSA Open Source License (NSCA)", "")
, (Nothing, "Unlicense", "/")
, (Nothing, "Vovida Software License v 1.0", "-1.4.0/license.txt")
, (Nothing, "W3C Software and Document Notice and License", "-software.html")
, (Nothing, "Wide Open License (WOL)", "")
, (Nothing, "X11 License", "#3")
, (Nothing, "X Window System License", "")
, (Nothing, "X.Net License", "")
, (Nothing, "XFree86 Licence", "")
, (Nothing, "xinetd License", "-bin/cvsweb.cgi/xinetd/COPYRIGHT?rev=1.1.1.1;content-type=text%2Fplain")
, (Nothing, "Zlib license", "")
, (Nothing, "Zope Public License (v. 1.1)", "-1.1")
, (Nothing, "Zope Public License (v. 2.0)", "-2.0")
, (Nothing, "Zope Public License (v. 2.1)", ":80/Resources/ZPL/")
]
strongCopyleftGPLLics =
[ (Nothing, "Affero General Public License (v. 1)", "")
, (Nothing, "Affero General Public License (v. 2)", "")
, (Nothing, "Alternate Route Open Source License (v. 1.1)", "")
, (Nothing, "CrossPoint Quelltextlizenz", "")
, (Nothing, "eCos License (v. 2.0)", "-license.html")
, (Nothing, "FreeCard License", "")
, (Nothing, "GNU Affero General Public License (AGPL-3.0) (v. 3.0)", "-3.0.html")
, (Nothing, "GNU Classpath - GPL with special exception", "")
, (Nothing, "GNU Emacs General Public License", "-soft.org/gpl_history/emacs_gpl.html")
, (Nothing, "GNU General Public License (GPL) (v. 1.0)", "-1.0.html")
, (Nothing, "GNU General Public License (GPL) (v. 2.0)", "-licenses/gpl-2.0.html")
, (Nothing, "GNU General Public License (GPL) (v. 3.0)", "")
, (Nothing, "GNU General Public License (GPL) (v. 3.0)", "-ger.html (Inoffizielle deutsche Übersetzung)")
, (Nothing, "GNU General Public License v2.0 w/Bison exception", "-Gliederung/Bison-exception-2.2")
, (Nothing, "GNU General Public License v2.0 w/Classpath exception", "")
, (Nothing, "GNU General Public License v2.0 w/Font exception", "-faq.html#FontException")
, (Nothing, "GNU General Public License v2.0 w/GCC Runtime Library exception", ";a=blob;f=gcc/libgcc1.c;h=762f5143fc6eed57b6797c82710f3538aa52b40b;hb=cb143a3ce4fb417c68f5fa2691a1b1b1053dfba9")
, (Nothing, "GNU General Public License v3.0 w/Autoconf exception", "-exception-3.0.html")
, (Nothing, "GNU General Public License v3.0 w/GCC Runtime Library exception (RLE 3.0)", "-exception-3.0.html")
, (Nothing, "GNU General Public License v3.0 w/GCC Runtime Library exception (RLE 3.1)", "-exception-3.1.html")
, (Nothing, "Honest Public License (HPL) (v 1.0)", "")
, (Nothing, "Honest Public License (HPL) (v 1.1)", "")
, (Nothing, "Nethack General Public License", "")
, (Nothing, "Open RTLinux Patent License", "")
, (Nothing, "RedHat eCos Public License (v. 2.0)", "-overview.html")
, (Nothing, "Simple Public License (v. 2.0)", "")
]
strongCopyleftLics =
[ (Nothing, "Arphic Public License", "-gnu/chinese-fonts-truetype/LICENSE")
, (Nothing, "CeCILL Free Software License Agreement (v. 1.0)", "-US.html")
, (Nothing, "CeCILL Free Software License Agreement (v. 1.1)", "-US.html")
, (Nothing, "CeCILL Free Software License Agreement (v. 2.0)", "-en.html")
, (Nothing, "CeCILL Free Software License Agreement (v. 2.1)", "-en.html")
, (Nothing, "Contrat de License de Logiciel Libre CeCILL (v. 1.0)", "-fr.html")
, (Nothing, "Contrat de License de Logiciel Libre CeCILL (v. 2.0)", "-fr.html")
, (Nothing, "Contrat de License de Logiciel Libre CeCILL (v. 2.1)", "-fr.html")
, (Nothing, "Common Public License (v. 0.5)", "-v05.html")
, (Nothing, "Common Public License (v. 1.0)", "-v10.html")
, (Nothing, "Deutsche Freie Softwarelizenz (d-fsl)", "-fsl.org/")
, (Nothing, "Eclipse Public License (v. 1.0)", "-v10.html")
, (Nothing, "European Union Public License (v. 1.0)", "")
, (Nothing, "European Union Public Licence (v. 1.1)", "-text-11-12")
, (Nothing, "European Union Public Licence (v. 1.2)", "-text-11-12")
, (Nothing, "German Free Software License", "-nrw.de/produkte/open-access/lizenzen/dfsl/german-free-software-license")
, (Nothing, "IBM Public License", ":80/developerworks/opensource/license10.html")
, (Nothing, "Intel Open Source License", "-open-source-license.php")
, (Nothing, "IPA Font License", "#en")
, (Nothing, "No Limit Public License", "")
, (Nothing, "Non-Profit Open Software License 3.0", "-3.0.html")
, (Nothing, "Open Group Public License", "")
, (Nothing, "Open Software License 1.0", ":80/osl.html")
, (Nothing, "Open Software License 2.0", "")
, (Nothing, "Open Software License 2.1", "")
, (Nothing, "Open Software License 3.0", ":80/OSL3.0.htm")
, (Nothing, "Reciprocal Public License (v. 1.0)", ":80/Biz_RPL.html")
, (Nothing, "Reciprocal Public License (v. 1.1)", "-1.1")
, (Nothing, "Reciprocal Public License (v. 1.3)", "")
, (Nothing, "Reciprocal Public License (v. 1.5)", "-discuss_lists.opensource.org/attachments/20070724/6944e582/attachment.txt")
, (Nothing, "SIL Open Font License (v. 1.0)", "")
, (Nothing, "SIL Open Font License (v. 1.1)", "")
, (Nothing, "Salutation Public License", "")
, (Nothing, "Software AG License Terms (Quip License) (v. 1.3)", "/~dqg/cse350/xml/quip/License.txt")
, (Nothing, "VOSTROM Public License for Open Source", "")
]
weakCopyleftMPLLics =
[ (Nothing, "Common Development and Distribution License (CDDL) (v. 1.0)", "")
, (Nothing, "Common Development and Distribution License, Version 1.1 (CDDL v 1.1)", "")
, (Nothing, "Common Public Attribution License (v. 1.0)", "-marketing-manager/email-marketing-software/openemm/license/")
, (Nothing, "Computer Associates Trusted Open Source License (v. 1.1)", "-vertraege/open-source-lizenz/computer-associates-trusted-open-source-license-version-11.html")
, (Nothing, "CUA Office Public License (v. 1.0)", "")
, (Nothing, "Erlang Public License (v. 1.1)", "")
, (Nothing, "gSOAP Public License (v. 1.0)", ":80/~engelen/license.html")
, (Nothing, "gSOAP Public License (v. 1.1)", ":80/~engelen/license.html")
, (Nothing, "gSOAP Public License (v. 1.2)", ":80/~engelen/license.html")
, (Nothing, "gSOAP Public License (v. 1.3)", ":80/~engelen/license.html")
, (Nothing, "gSOAP Public License (v. 1.3a)", ":80/~engelen/license.html")
, (Nothing, "gSOAP Public License (v. 1.3b)", "/~engelen/license.html")
, (Nothing, "ICS Open Source Public License", "")
, (Nothing, "Interbase Public License", ":80/article/0,1410,30198,00.html")
, (Nothing, "Mozilla Public License (v. 1.0)", "-1.0.html")
, (Nothing, "Mozilla Public License (v. 1.1)", "-1.1.html")
, (Nothing, "Mozilla Public License (v. 2.0)", "/")
, (Nothing, "NASA Open Source Agreement (v. 1.1)", "-Source/NASA_Open_Source_Agreement_1.1.txt")
, (Nothing, "NASA Open Source Agreement (v. 1.3)", "/")
, (Nothing, "Netizen Open Source License (NOSL)", "/")
, (Nothing, "Nokia Open Source License", "")
, (Nothing, "Open Public License (v. 1.0)", "")
, (Nothing, "Open Telecom Public License", "")
, (Nothing, "Openbravo Public License", "")
, (Nothing, "OpenC++ License Terms", "/")
, (Nothing, "RedHat eCos Public License (v. 1.1)", "-license.html")
, (Nothing, "Ricoh Source Code Public License", "-1.0A.shtml")
, (Nothing, "SNIA Public License Version (v.1.1)", "")
, (Nothing, "SugarCRM Public License (v. 1.1.3)", "-public-license/en")
, (Nothing, "Sun Industry Standards Source License (v. 1.0)", ":80/project/www/sissl_license.html")
, (Nothing, "Sun Industry Standards Source License (v. 1.1)", "")
, (Nothing, "Sun Industry Standards Source License (v. 1.2)", "")
, (Nothing, "Sun Public License", "")
, (Nothing, "Sun Public License v1.0", "")
, (Nothing, "Sybase Open Watcom Public License 1.0", "ftp")
, (Nothing, "Zend Engine License (v. 2.0)", "")
, (Nothing, "Zenplex Public License", "")
, (Nothing, "Zimbra Public License (ZPL) (v. 1.2)", "")
, (Nothing, "Zimbra Publice License (v. 1.3)", "-public-license-1-3.html")
, (Nothing, "Zimbra Publice License (v. 1.4)", "-public-license-1-4/")
]
weakCopyleftLics =
[ (Nothing, "Adaptive Public License (v.1.0)", "")
, (Nothing, "Apple Public Source License (v. 2.0)", "")
, (Nothing, "BitTorrent Open Source License (v. 1.0)", "-bin/viewvc.cgi/gentoo-x86/licenses/BitTorrent?diff_format=s&revision=1.1.1.1&view=markup")
, (Nothing, "Bremer Lizenz für freie Softwarebibliotheken (OSCI-Lizenz) (v. 1.0)", " (.pdf-Dokument)")
, (Nothing, "CeCILL-C Free Software License Agreement", "-C_V1-en.html")
, (Nothing, "Code Project Open License (CPOL) (v. 1.02)", "")
, (Nothing, "Contrat de License de Logiciel Libre CeCILL-C", "-C_V1-fr.html")
, (Nothing, "Cougaar Open Source License Agreement", ":80/docman/view.php/17/126/old_cosl_license.html (Einordnung unklar)")
, (Nothing, "Eclipse Public License (v. 2.0)", "-2.0/EPL-2.0.html")
, (Nothing, "GNU Library General Public License (LGPL) (v. 2.0)", "")
, (Nothing, "GNU Lesser General Public License (LGPL) (v. 2.1)", "-licenses/lgpl-2.1.html")
, (Nothing, "GNU Lesser General Public License (LGPL) (v. 3.0)", "")
, (Nothing, "GNU Lesser General Public License (LGPL) (v. 3.0)", "-ger.html (Inoffizielle deutsche Übersetzung)")
, (Nothing, "Hi-Potent Open Source License", "-potent.com/license.html")
, (Nothing, "Jabber Open Source License", "")
, (Nothing, "Microsoft Reciprocal License (Ms-RL)", ":80/resources/sharedsource/licensingbasics/reciprocallicense.mspx")
, (Nothing, "Motosoto Open Source License (v. 0.9.1)", "")
, (Nothing, "Open CASCADE Technology Public License (v. 6.6)", "-public-license")
, (Nothing, "wxWindows License (v. 1.0)", ":80/licence.htm")
, (Nothing, "wxWindows Library License (v. 3.0)", ":80/licence3.txt")
, (Nothing, "wxWindows Library License (v. 3.1)", "/")
, (Nothing, "Yahoo! Public License (YPL) (v. 1.1)", "")
]
withChoiceLics =
[ (Nothing, "ANTLR 2 License", "")
, (Nothing, "Artistic License (v. 1.0)", " (Einordnung unklar)")
, (Nothing, "Artistic License (v. 2.0)", "")
, (Nothing, "Clarified Artistic License", "")
, (Nothing, "Frameworx Open License (v. 1.0)", "-1.0.html")
, (Nothing, "Keith Devens' Open Source License", "/")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.0)", "-project.org/lppl/lppl-1-0.html")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.1)", "-project.org/lppl/lppl-1-1.html")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.2)", "-project.org/lppl/lppl-1-2.html")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.3b)", "-project.org/lppl/lppl-1-3b/")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.3b)", "-project.org/lppl/lppl-1-3b-de.html (Inoffizielle deutsche Übersetzung)")
, (Nothing, "LaTeX Project Public License (LPPL) (v. 1.3c)", "-project.org/lppl/lppl-1-3c.html")
, (Nothing, "Physnet Package License", "")
, (Nothing, "Ruby License", "-lang.org/en/about/license.txt")
, (Nothing, "SFL License Agreement", "")
, (Nothing, "SGI Free Software License B (v. 1.0)", " (Einordnung unklar)")
, (Nothing, "SGI Free Software License B (v. 1.1)", ":80/projects/FreeB/SGIFreeSWLicB.1.1.doc (Einordnung unklar)")
, (Nothing, "Sleepycat License", "")
, (Nothing, "Sleepycat Software Product License", "")
, (Nothing, "Vim License", "#license")
]
withSpecialRightsLics =
[ (Nothing, "Apple Public Source License 1.0", " (Einordnung unklar)")
, (Nothing, "Apple Public Source License 1.1", "-7/APPLE_LICENSE (Einordnung unklar)")
, (Nothing, "Apple Public Source License (v. 1.2)", " (Einordnung unklar)")
, (Nothing, "Macromedia Open Source License Agreement (v. 1.0)", "")
, (Nothing, "Netscape Public License (NPL) (v. 1.0)", "-1.0.html")
, (Nothing, "Netscape Public License (NPL) (v. 1.1)", ":80/MPL/NPL-1.1.html")
, (Nothing, "OCLC Research Public License (v. 1.0)", "/")
, (Nothing, "OCLC Research Public License (v. 2.0)", "")
, (Nothing, "Open Map Software License Agreement", ":80/license.html")
, (Nothing, "Q Public License (v. 1.0) (QPL)", ":80/4.0/qpl.html")
, (Nothing, "RealNetworks Community Source License - Research and Development Use (RCSL R&D) (v 2.0)", "")
, (Nothing, "RealNetworks Community Source License - Research and Development Use (RCSL R&D) (v 3.0)", "")
, (Nothing, "RealNetworks Public Source License (RPSL) (v. 1.0)", "")
]
mkRaws kind = map (\(i, n, u) -> IfrOSSFact n i kind u)
in mkRaws IfrNoCopyleft noCopyleftLics
++ mkRaws IfrStrongCopyleft_GPLlike strongCopyleftGPLLics
++ mkRaws IfrStrongCopyleft strongCopyleftLics
++ mkRaws IfrWeakCopyleft_MPLlike weakCopyleftMPLLics
++ mkRaws IfrWeakCopyleft weakCopyleftLics
++ mkRaws IfrLicenseWithChoice withChoiceLics
++ mkRaws IfrLicenseWithSpecialRights withSpecialRightsLics
loadIfrOSSFacts :: IO Facts
loadIfrOSSFacts = do
logThatFactsAreLoadedFrom "ifrOSS"
return . V.fromList $ map (LicenseFact (Just ifrOSSURL)) rawIfrOSSFacts
|
0a33ba4bdd087c6e58271e32d321b320de7017cdf15519c2396b9adf44e7a523 | asmala/clj-simple-form | test_support.clj | (ns clj-simple-form.test-support
(:require [taoensso.tower :as tower]
[clj-simple-form.util :as util]))
(def dictionary
{:en {:missing "Translation missing"
:simple-form {:profile {:labels {:email "Email"}
:hints {:email "Double-check your email address."}}
:address {:labels {:street "Street"}}
:defaults {:labels {:name "Name"}}}}})
(tower/set-config! [:dictionary] dictionary)
| null | https://raw.githubusercontent.com/asmala/clj-simple-form/b1c566b1f0fe532639b15832b557f1608598a0a2/clj-simple-form-core/test/clj_simple_form/test_support.clj | clojure | (ns clj-simple-form.test-support
(:require [taoensso.tower :as tower]
[clj-simple-form.util :as util]))
(def dictionary
{:en {:missing "Translation missing"
:simple-form {:profile {:labels {:email "Email"}
:hints {:email "Double-check your email address."}}
:address {:labels {:street "Street"}}
:defaults {:labels {:name "Name"}}}}})
(tower/set-config! [:dictionary] dictionary)
| |
2f52b088600e794f66dd9737e4f9a6199a0b08b6511d76fda4009422c7669c20 | facebook/duckling | Tests.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Numeral.TR.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Numeral.TR.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "TR Tests"
[ makeCorpusTest [Seal Numeral] corpus
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/tests/Duckling/Numeral/TR/Tests.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. | Copyright ( c ) 2016 - present , Facebook , Inc.
module Duckling.Numeral.TR.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Numeral.TR.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "TR Tests"
[ makeCorpusTest [Seal Numeral] corpus
]
|
57f0a5d7e4173b637bff24318d80666cd24b623f217cf0da55a5160897971d9c | sgbj/MaximaSharp | derf.lisp | ;;; Compiled by f2cl version:
( " f2cl1.l , v 46c1f6a93b0d 2012/05/03 04:40:28 toy $ "
" f2cl2.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl3.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl4.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
;;; "f2cl5.l,v 46c1f6a93b0d 2012/05/03 04:40:28 toy $"
" f2cl6.l , v 1d5cbacbb977 2008/08/24 00:56:27 rtoy $ "
" macros.l , v fceac530ef0c 2011/11/26 04:02:26 toy $ " )
Using Lisp CMU Common Lisp snapshot-2012 - 04 ( )
;;;
;;; Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
;;; (:coerce-assigns :as-needed) (:array-type ':simple-array)
;;; (:array-slicing nil) (:declare-common nil)
;;; (:float-format double-float))
(in-package :slatec)
(let ((nterf 0)
(xbig 0.0)
(sqeps 0.0)
(erfcs
(make-array 21
:element-type 'double-float
:initial-contents '(-0.049046121234691806
-0.14226120510371365
0.010035582187599796
-5.768764699767485e-4
2.741993125219606e-5
-1.1043175507344507e-6
3.8488755420345036e-8
-1.1808582533875466e-9
3.2334215826050907e-11
-7.991015947004549e-13
1.7990725113961456e-14
-3.718635487818693e-16
7.103599003714253e-18
-1.2612455119155226e-19
2.0916406941769294e-21
-3.2539731029314073e-23
4.766867209797675e-25
-6.598012078285134e-27
8.655011469963763e-29
-1.0788925177498064e-30
1.2811883993017003e-32)))
(sqrtpi 1.772453850905516)
(first$ nil))
(declare (type (f2cl-lib:integer4) nterf)
(type (double-float) xbig sqeps sqrtpi)
(type (simple-array double-float (21)) erfcs)
(type f2cl-lib:logical first$))
(setq first$ f2cl-lib:%true%)
(defun derf (x)
(declare (type (double-float) x))
(prog ((y 0.0) (derf 0.0))
(declare (type (double-float) derf y))
(cond
(first$
(setf nterf
(initds erfcs 21
(* 0.1f0 (f2cl-lib:freal (f2cl-lib:d1mach 3)))))
(setf xbig
(f2cl-lib:fsqrt
(- (f2cl-lib:flog (* sqrtpi (f2cl-lib:d1mach 3))))))
(setf sqeps (f2cl-lib:fsqrt (* 2.0 (f2cl-lib:d1mach 3))))))
(setf first$ f2cl-lib:%false%)
(setf y (abs x))
(if (> y 1.0) (go label20))
(if (<= y sqeps) (setf derf (/ (* 2.0 x) sqrtpi)))
(if (> y sqeps)
(setf derf (* x (+ 1.0 (dcsevl (- (* 2.0 x x) 1.0) erfcs nterf)))))
(go end_label)
label20
(if (<= y xbig) (setf derf (f2cl-lib:sign (- 1.0 (derfc y)) x)))
(if (> y xbig) (setf derf (f2cl-lib:sign 1.0 x)))
(go end_label)
end_label
(return (values derf nil)))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::derf fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo :arg-types '((double-float))
:return-values '(nil)
:calls '(fortran-to-lisp::derfc
fortran-to-lisp::dcsevl
fortran-to-lisp::initds
fortran-to-lisp::d1mach))))
| null | https://raw.githubusercontent.com/sgbj/MaximaSharp/75067d7e045b9ed50883b5eb09803b4c8f391059/Test/bin/Debug/Maxima-5.30.0/share/maxima/5.30.0/src/numerical/slatec/derf.lisp | lisp | Compiled by f2cl version:
"f2cl5.l,v 46c1f6a93b0d 2012/05/03 04:40:28 toy $"
Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
(:coerce-assigns :as-needed) (:array-type ':simple-array)
(:array-slicing nil) (:declare-common nil)
(:float-format double-float)) | ( " f2cl1.l , v 46c1f6a93b0d 2012/05/03 04:40:28 toy $ "
" f2cl2.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl3.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl4.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl6.l , v 1d5cbacbb977 2008/08/24 00:56:27 rtoy $ "
" macros.l , v fceac530ef0c 2011/11/26 04:02:26 toy $ " )
Using Lisp CMU Common Lisp snapshot-2012 - 04 ( )
(in-package :slatec)
(let ((nterf 0)
(xbig 0.0)
(sqeps 0.0)
(erfcs
(make-array 21
:element-type 'double-float
:initial-contents '(-0.049046121234691806
-0.14226120510371365
0.010035582187599796
-5.768764699767485e-4
2.741993125219606e-5
-1.1043175507344507e-6
3.8488755420345036e-8
-1.1808582533875466e-9
3.2334215826050907e-11
-7.991015947004549e-13
1.7990725113961456e-14
-3.718635487818693e-16
7.103599003714253e-18
-1.2612455119155226e-19
2.0916406941769294e-21
-3.2539731029314073e-23
4.766867209797675e-25
-6.598012078285134e-27
8.655011469963763e-29
-1.0788925177498064e-30
1.2811883993017003e-32)))
(sqrtpi 1.772453850905516)
(first$ nil))
(declare (type (f2cl-lib:integer4) nterf)
(type (double-float) xbig sqeps sqrtpi)
(type (simple-array double-float (21)) erfcs)
(type f2cl-lib:logical first$))
(setq first$ f2cl-lib:%true%)
(defun derf (x)
(declare (type (double-float) x))
(prog ((y 0.0) (derf 0.0))
(declare (type (double-float) derf y))
(cond
(first$
(setf nterf
(initds erfcs 21
(* 0.1f0 (f2cl-lib:freal (f2cl-lib:d1mach 3)))))
(setf xbig
(f2cl-lib:fsqrt
(- (f2cl-lib:flog (* sqrtpi (f2cl-lib:d1mach 3))))))
(setf sqeps (f2cl-lib:fsqrt (* 2.0 (f2cl-lib:d1mach 3))))))
(setf first$ f2cl-lib:%false%)
(setf y (abs x))
(if (> y 1.0) (go label20))
(if (<= y sqeps) (setf derf (/ (* 2.0 x) sqrtpi)))
(if (> y sqeps)
(setf derf (* x (+ 1.0 (dcsevl (- (* 2.0 x x) 1.0) erfcs nterf)))))
(go end_label)
label20
(if (<= y xbig) (setf derf (f2cl-lib:sign (- 1.0 (derfc y)) x)))
(if (> y xbig) (setf derf (f2cl-lib:sign 1.0 x)))
(go end_label)
end_label
(return (values derf nil)))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::derf fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo :arg-types '((double-float))
:return-values '(nil)
:calls '(fortran-to-lisp::derfc
fortran-to-lisp::dcsevl
fortran-to-lisp::initds
fortran-to-lisp::d1mach))))
|
f04b773949db441c92e3f098065b82cc1cae8db24acf0eb4c70646f7533050f7 | vimus/libmpd-haskell | PlaybackControlSpec.hs | {-# LANGUAGE OverloadedStrings #-}
module Network.MPD.Applicative.PlaybackControlSpec (main, spec) where
import TestUtil
import Network.MPD.Applicative.PlaybackControl
import Network.MPD.Commands.Types
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "next" $ do
it "sends a next request" $ do
next `with` [("next", Right "OK")] `shouldBe` Right ()
describe "pause" $ do
it "sends a play request" $ do
pause False `with` [("pause 0", Right "OK")] `shouldBe` Right ()
describe "toggle" $ do
it "toggles playback" $ do
toggle `with` [("pause", Right "OK")] `shouldBe` Right ()
describe "play" $ do
it "sends a play request" $ do
play Nothing `with` [("play", Right "OK")] `shouldBe` Right ()
it "optionally takes a position to start playback at" $ do
play (Just 1) `with` [("play 1", Right "OK")] `shouldBe` Right ()
describe "playId" $ do
it "like 'play' but takes an id instead" $ do
playId (Id 1) `with` [("playid 1", Right "OK")] `shouldBe` Right ()
describe "previous" $ do
it "sends a request" $ do
previous `with` [("previous", Right "OK")] `shouldBe` Right ()
describe "seek" $ do
it "sends a seek request" $ do
seek 1 10 `with` [("seek 1 10.0", Right "OK")] `shouldBe` Right ()
describe "seekId" $ do
it "is like 'seek' but takes an id" $ do
seekId (Id 1) 10
`with` [("seekid 1 10.0", Right "OK")]
`shouldBe` Right ()
describe "seekCur" $ do
it "sends a seek request on the current song, absolute time" $ do
seekCur True 10
`with` [("seekcur 10.0", Right "OK")]
`shouldBe` Right ()
it "sends a seek request on the current song, positive relative time" $ do
seekCur False 10
`with` [("seekcur +10.0", Right "OK")]
`shouldBe` Right ()
it "sends a seek request on the current song, positive negative time" $ do
seekCur False (-10)
`with` [("seekcur -10.0", Right "OK")]
`shouldBe` Right ()
describe "stop" $ do
it "sends a stop request" $ do
stop `with` [("stop", Right "OK")] `shouldBe` Right ()
| null | https://raw.githubusercontent.com/vimus/libmpd-haskell/1ec02deba33ce2a16012d8f0954e648eb4b5c485/tests/Network/MPD/Applicative/PlaybackControlSpec.hs | haskell | # LANGUAGE OverloadedStrings # |
module Network.MPD.Applicative.PlaybackControlSpec (main, spec) where
import TestUtil
import Network.MPD.Applicative.PlaybackControl
import Network.MPD.Commands.Types
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "next" $ do
it "sends a next request" $ do
next `with` [("next", Right "OK")] `shouldBe` Right ()
describe "pause" $ do
it "sends a play request" $ do
pause False `with` [("pause 0", Right "OK")] `shouldBe` Right ()
describe "toggle" $ do
it "toggles playback" $ do
toggle `with` [("pause", Right "OK")] `shouldBe` Right ()
describe "play" $ do
it "sends a play request" $ do
play Nothing `with` [("play", Right "OK")] `shouldBe` Right ()
it "optionally takes a position to start playback at" $ do
play (Just 1) `with` [("play 1", Right "OK")] `shouldBe` Right ()
describe "playId" $ do
it "like 'play' but takes an id instead" $ do
playId (Id 1) `with` [("playid 1", Right "OK")] `shouldBe` Right ()
describe "previous" $ do
it "sends a request" $ do
previous `with` [("previous", Right "OK")] `shouldBe` Right ()
describe "seek" $ do
it "sends a seek request" $ do
seek 1 10 `with` [("seek 1 10.0", Right "OK")] `shouldBe` Right ()
describe "seekId" $ do
it "is like 'seek' but takes an id" $ do
seekId (Id 1) 10
`with` [("seekid 1 10.0", Right "OK")]
`shouldBe` Right ()
describe "seekCur" $ do
it "sends a seek request on the current song, absolute time" $ do
seekCur True 10
`with` [("seekcur 10.0", Right "OK")]
`shouldBe` Right ()
it "sends a seek request on the current song, positive relative time" $ do
seekCur False 10
`with` [("seekcur +10.0", Right "OK")]
`shouldBe` Right ()
it "sends a seek request on the current song, positive negative time" $ do
seekCur False (-10)
`with` [("seekcur -10.0", Right "OK")]
`shouldBe` Right ()
describe "stop" $ do
it "sends a stop request" $ do
stop `with` [("stop", Right "OK")] `shouldBe` Right ()
|
1fafc8a99e8038cf5a68861cafe236ab9f2911b0d8299875e23b8149732da646 | ekmett/rcu | IncCounterExperiment.hs | # LANGUAGE CPP #
# LANGUAGE MagicHash #
# LANGUAGE UnboxedTuples #
-----------------------------------------------------------------------------
-- |
Copyright : ( C ) 2015 and
-- License : BSD-style (see the file LICENSE)
Maintainer : < > ,
< >
-- Stability : experimental
-- Portability : non-portable
--
-- Which counter increment is faster?
-----------------------------------------------------------------------------
module Main where
import Control.Monad (forM_)
import Control.Monad.Primitive (primitive)
import Criterion.Main (bench, bgroup, defaultMain, nfIO)
import Data.Word (Word64)
import Data.Primitive
( MutableByteArray , plusWord , readWord64Array , writeWord64Array )
import GHC.Word (Word64 (W64#))
-- | Counter for causal ordering.
newtype Counter = Counter (MutableByteArray RealWorld)
instance Eq Counter where
Counter m == Counter n = sameMutableByteArray m n
offline :: Word64
offline = 0
online :: Word64
online = 1
-- counterInc :: Word64
counterInc = 2 -- online threads will never overflow to 0
newCounter :: IO Counter
newCounter = do
b <- newByteArray 8
writeByteArray b 0 online
return (Counter b)
# INLINE newCounter #
readCounter :: Counter -> IO Word64
readCounter (Counter c) = readByteArray c 0
# INLINE readCounter #
writeCounter :: Counter -> Word64 -> IO ()
writeCounter (Counter c) w = writeByteArray c 0 w
# INLINE writeCounter #
incCounterAtomic :: Counter -> IO Word64
incCounterAtomic (Counter (MutableByteArray c)) = primitive $ \ s ->
case fetchAddIntArray# c 0# 2# s of
(# s', r #) ->
#if MIN_VERSION_base(4,17,0)
(# s', W64# (wordToWord64# (int2Word# r)) #)
#else
(# s', W64# (int2Word# r) #)
#endif
# INLINE incCounterAtomic #
incCounterNonAtomicFancy :: Counter -> IO Word64
incCounterNonAtomicFancy (Counter (MutableByteArray c)) = primitive $ \ s ->
case readWord64Array# c 0# s of
(# s', r #) ->
#if MIN_VERSION_base(4,17,0)
case plusWord64# r (wordToWord64# 2##) of
#else
case plusWord# r 2## of
#endif
r' -> case writeWord64Array# c 0# r' s' of
s'' -> (# s'', W64# r' #)
# INLINE incCounterNonAtomicFancy #
incCounterNonAtomic :: Counter -> IO Word64
incCounterNonAtomic c = do
x <- (+ 2) <$> readCounter c
writeCounter c x
return x
# INLINE incCounterNonAtomic #
main :: IO ()
main = defaultMain [ bgroup "incCounterAtomic" $ bunches incCounterAtomic
, bgroup "incCounterNonAtomicFancy" $ bunches incCounterNonAtomicFancy
, bgroup "incCounterNonAtomic" $ bunches incCounterNonAtomic ]
where bunches m = [ bench (show n)
$ nfIO $ do c <- newCounter
forM_ [1..n] $ \ _ -> m c
| n <- map ((10 :: Word64) ^) [(6 :: Word64)..7] ]
| null | https://raw.githubusercontent.com/ekmett/rcu/d81f03d7fcf3267f6efa0da14b8e91984bfdb7df/examples/IncCounterExperiment.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-style (see the file LICENSE)
Stability : experimental
Portability : non-portable
Which counter increment is faster?
---------------------------------------------------------------------------
| Counter for causal ordering.
counterInc :: Word64
online threads will never overflow to 0 | # LANGUAGE CPP #
# LANGUAGE MagicHash #
# LANGUAGE UnboxedTuples #
Copyright : ( C ) 2015 and
Maintainer : < > ,
< >
module Main where
import Control.Monad (forM_)
import Control.Monad.Primitive (primitive)
import Criterion.Main (bench, bgroup, defaultMain, nfIO)
import Data.Word (Word64)
import Data.Primitive
( MutableByteArray , plusWord , readWord64Array , writeWord64Array )
import GHC.Word (Word64 (W64#))
newtype Counter = Counter (MutableByteArray RealWorld)
instance Eq Counter where
Counter m == Counter n = sameMutableByteArray m n
offline :: Word64
offline = 0
online :: Word64
online = 1
newCounter :: IO Counter
newCounter = do
b <- newByteArray 8
writeByteArray b 0 online
return (Counter b)
# INLINE newCounter #
readCounter :: Counter -> IO Word64
readCounter (Counter c) = readByteArray c 0
# INLINE readCounter #
writeCounter :: Counter -> Word64 -> IO ()
writeCounter (Counter c) w = writeByteArray c 0 w
# INLINE writeCounter #
incCounterAtomic :: Counter -> IO Word64
incCounterAtomic (Counter (MutableByteArray c)) = primitive $ \ s ->
case fetchAddIntArray# c 0# 2# s of
(# s', r #) ->
#if MIN_VERSION_base(4,17,0)
(# s', W64# (wordToWord64# (int2Word# r)) #)
#else
(# s', W64# (int2Word# r) #)
#endif
# INLINE incCounterAtomic #
incCounterNonAtomicFancy :: Counter -> IO Word64
incCounterNonAtomicFancy (Counter (MutableByteArray c)) = primitive $ \ s ->
case readWord64Array# c 0# s of
(# s', r #) ->
#if MIN_VERSION_base(4,17,0)
case plusWord64# r (wordToWord64# 2##) of
#else
case plusWord# r 2## of
#endif
r' -> case writeWord64Array# c 0# r' s' of
s'' -> (# s'', W64# r' #)
# INLINE incCounterNonAtomicFancy #
incCounterNonAtomic :: Counter -> IO Word64
incCounterNonAtomic c = do
x <- (+ 2) <$> readCounter c
writeCounter c x
return x
# INLINE incCounterNonAtomic #
main :: IO ()
main = defaultMain [ bgroup "incCounterAtomic" $ bunches incCounterAtomic
, bgroup "incCounterNonAtomicFancy" $ bunches incCounterNonAtomicFancy
, bgroup "incCounterNonAtomic" $ bunches incCounterNonAtomic ]
where bunches m = [ bench (show n)
$ nfIO $ do c <- newCounter
forM_ [1..n] $ \ _ -> m c
| n <- map ((10 :: Word64) ^) [(6 :: Word64)..7] ]
|
7e62567fdfe5b9090c9d55a1392fbc08abca899d0013270ed95d09599211dd54 | joinr/spork | tags.clj | ;;an abstract set of functions for working on maps, treating them as
;;simple tag databases. tags are categories applied to multiple
;;subjects. Subjects are unique identifiers that are subjected to a tag.
;;Tags are basically meta-data....except it's explicit.
Update Feb 2017 - Looking to revisit this . There are more effecient
;;ways to store tags. Additionally, the original use-case may no longer
;;be a production need. Possible alternatives are logic-databases like
;;datalog/datascript that store relations and facts to be queried.
(ns spork.util.tags
(:require [spork.util [general :as gen]]
[clojure.set :as cljset]))
(defprotocol ITagStore
(get-tags [store subject] "Fetch any tags associated with a subject from database m.")
(get-subjects [store tag] "Fetch any subjects associated with a tag, from database m.")
(add-tag [store tag] "Add a tag to the database. Should require a subject to exist.")
(add-subject [store subject])
(drop-tag [store tag] "Drops a tag from all subjects, removes from store.")
(drop-subject [store subject] "Drops a subject from all tags, removes from store.")
(tag-subject [store tag subject] "Impose a tag on a subject.")
(untag-subject [store tag subject] "Remove a tag from a subject."))
(defmacro try-get [m k not-found]
`(if-let [v# (get ~m ~k)]
v#
~not-found))
(declare mutable-tags)
;;Generate a simple tag database implemented using maps.
;;tags correspond to simple attributes (usually keywords)
;;associated with a subject, where subjects are unique
identifiers . It 's really just two tables with many->many
;;relationships. Calling it a tag is more for semantic
;;clarity...
(defrecord tags [tags subjects]
ITagStore
(get-tags [store subject] (get subjects subject))
(get-subjects [store tag] (get tags tag))
(add-tag [store tag] (tags. (assoc tags tag #{}) subjects))
(add-subject [store subject] (tags. tags (assoc subjects subject #{})))
(drop-tag [store tag] (tags. (dissoc tags tag)
(reduce (fn [acc subj]
(if-let [restags (disj (get acc subj) tag)]
(assoc acc subj restags)
(dissoc acc subj)))
subjects
(get tags tag))))
(drop-subject [store subject] (tags. (reduce (fn [acc tag]
(if-let [ressubjs (disj (get acc tag) subject)]
(assoc acc tag ressubjs)
(dissoc acc tag)))
tags
(get subjects subject))
(dissoc subjects subject)))
(tag-subject [store tag subject]
(let [oldt (try-get tags tag #{})
olds (try-get subjects subject #{})]
(tags. (assoc tags tag (conj oldt subject))
(assoc subjects subject (conj olds tag)))))
(untag-subject [store tag subject]
(let [new-tags (disj (try-get tags tag #{}) subject)
new-subjects (disj (try-get subjects subject #{}) tag)]
(tags. (if (empty? new-tags)
(dissoc tags tag)
(assoc tags tag new-tags))
(if (empty? new-subjects)
(dissoc subjects subject)
(assoc subjects subject new-subjects)))))
clojure.lang.IEditableCollection
(asTransient [coll] (mutable-tags (gen/transient2 tags) (gen/transient2 subjects))))
We 're generally not storing a ton of stuff in the tag entries . Can
;;probabably pull this back a bit to use normal hashsets for the
;;actual entries.
(defrecord mtags [tags subjects]
ITagStore
(get-tags [store subject] (get subjects subject))
(get-subjects [store tag] (get tags tag))
(add-tag [store tag] (mtags. (assoc! tags tag (transient #{})) subjects))
(add-subject [store subject] (mtags. tags (assoc! subjects subject (transient #{}))))
(drop-tag [store tag] (mtags. (dissoc! tags tag)
(reduce (fn [acc subj]
(if-let [restags (disj! (get acc subj) tag)]
(assoc! acc subj restags)
(dissoc! acc subj)))
subjects
(get tags tag))))
(drop-subject [store subject] (mtags. (reduce (fn [acc tag]
(if-let [ressubjs (disj! (get acc tag) subject)]
(assoc! acc tag ressubjs)
(dissoc! acc tag)))
tags
(get subjects subject))
(dissoc! subjects subject)))
(tag-subject [store tag subject]
(let [oldt (try-get tags tag (transient #{}))
olds (try-get subjects subject (transient #{}))]
(mtags. (assoc! tags tag (conj! oldt subject))
(assoc! subjects subject (conj! olds tag)))))
(untag-subject [store tag subject]
(let [new-tags (disj! (try-get tags tag (transient #{})) subject)
new-subjects (disj! (try-get subjects subject (transient #{})) tag)]
(mtags. (if (zero? (count new-tags))
(dissoc! tags tag)
(assoc! tags tag new-tags))
(if (zero? (count new-subjects))
(dissoc! subjects subject)
(assoc! subjects subject new-subjects)))))
clojure.lang.ITransientCollection
(persistent [coll] (->tags (gen/persistent2! tags) (gen/persistent2! subjects)))
(conj [coll v] (.tag-subject coll (key v) (val v))))
(def empty-tags (tags. {} {}))
(defn mutable-tags [tgs subjcs] (mtags. tgs subjcs))
(extend-protocol ITagStore nil
(get-tags [store subject] nil)
(get-subjects [store tag] nil)
(add-tag [store tag] (add-tag empty-tags tag))
(add-subject [store subject] (add-subject empty-tags subject))
(drop-tag [store tag] nil)
(drop-subject [store subject] nil)
(tag-subject [store tag subject] (tag-subject empty-tags tag subject))
(untag-subject [store tag subject] nil))
(definline subject->tags [tags subject] `(get-subjects ~tags ~subject))
(definline tag->subjects [tags tag] `(get-tags ~tags ~tag))
(defn has-tag? [tags tag subject] (contains? (tag->subjects tags tag) subject))
(defn has-tags? [tags subject xs]
(when-let [knowns (subject->tags tags subject)]
(every? knowns xs)))
(defn some-tags? [tags subject xs]
(let [knowns (subject->tags tags subject)]
(reduce (fn [acc x]
(if (contains? knowns x)
(reduced x)
acc)) nil xs)))
(defn has-subject? [tags tag subject] (has-tag? tags tag subject))
(defn has-subjects? [tags tag xs]
(when-let [knowns (tag->subjects tags tag)]
(every? knowns xs)))
(defn some-subjects? [tags tag xs]
(let [knowns (tag->subjects tags tag)]
(reduce (fn [acc x]
(if (contains? knowns x)
(reduced x)
acc)) nil xs)))
(defn and-tags
"Select subjects that have every tag in xs."
[m xs]
(reduce #(cljset/intersection %1 (tag->subjects m %2)) #{} xs))
(defn or-tags
"Select subjects that have any tag in xs."
[m xs]
(reduce #(cljset/union %1 (tag->subjects m %2)) #{} xs))
(defn multi-tag
"Impose many tags on a subject."
[m subject tags]
(reduce #(tag-subject %1 subject %2 ) m tags))
(defn multi-untag
"Remove multiple tags from a subject."
[m subject tags]
(reduce #(untag-subject %1 subject %2 ) m tags))
(comment ;testing
(def simple-tags
(-> empty-tags
(multi-tag "tom" [:has-name :male])))
(def dropped-tags
(-> simple-tags
(untag-subject "tom" :has-name)))
(def tagset [:old :fat :mean :onery :bald :goof])
)
| null | https://raw.githubusercontent.com/joinr/spork/bb80eddadf90bf92745bf5315217e25a99fbf9d6/src/spork/util/tags.clj | clojure | an abstract set of functions for working on maps, treating them as
simple tag databases. tags are categories applied to multiple
subjects. Subjects are unique identifiers that are subjected to a tag.
Tags are basically meta-data....except it's explicit.
ways to store tags. Additionally, the original use-case may no longer
be a production need. Possible alternatives are logic-databases like
datalog/datascript that store relations and facts to be queried.
Generate a simple tag database implemented using maps.
tags correspond to simple attributes (usually keywords)
associated with a subject, where subjects are unique
relationships. Calling it a tag is more for semantic
clarity...
probabably pull this back a bit to use normal hashsets for the
actual entries.
testing |
Update Feb 2017 - Looking to revisit this . There are more effecient
(ns spork.util.tags
(:require [spork.util [general :as gen]]
[clojure.set :as cljset]))
(defprotocol ITagStore
(get-tags [store subject] "Fetch any tags associated with a subject from database m.")
(get-subjects [store tag] "Fetch any subjects associated with a tag, from database m.")
(add-tag [store tag] "Add a tag to the database. Should require a subject to exist.")
(add-subject [store subject])
(drop-tag [store tag] "Drops a tag from all subjects, removes from store.")
(drop-subject [store subject] "Drops a subject from all tags, removes from store.")
(tag-subject [store tag subject] "Impose a tag on a subject.")
(untag-subject [store tag subject] "Remove a tag from a subject."))
(defmacro try-get [m k not-found]
`(if-let [v# (get ~m ~k)]
v#
~not-found))
(declare mutable-tags)
identifiers . It 's really just two tables with many->many
(defrecord tags [tags subjects]
ITagStore
(get-tags [store subject] (get subjects subject))
(get-subjects [store tag] (get tags tag))
(add-tag [store tag] (tags. (assoc tags tag #{}) subjects))
(add-subject [store subject] (tags. tags (assoc subjects subject #{})))
(drop-tag [store tag] (tags. (dissoc tags tag)
(reduce (fn [acc subj]
(if-let [restags (disj (get acc subj) tag)]
(assoc acc subj restags)
(dissoc acc subj)))
subjects
(get tags tag))))
(drop-subject [store subject] (tags. (reduce (fn [acc tag]
(if-let [ressubjs (disj (get acc tag) subject)]
(assoc acc tag ressubjs)
(dissoc acc tag)))
tags
(get subjects subject))
(dissoc subjects subject)))
(tag-subject [store tag subject]
(let [oldt (try-get tags tag #{})
olds (try-get subjects subject #{})]
(tags. (assoc tags tag (conj oldt subject))
(assoc subjects subject (conj olds tag)))))
(untag-subject [store tag subject]
(let [new-tags (disj (try-get tags tag #{}) subject)
new-subjects (disj (try-get subjects subject #{}) tag)]
(tags. (if (empty? new-tags)
(dissoc tags tag)
(assoc tags tag new-tags))
(if (empty? new-subjects)
(dissoc subjects subject)
(assoc subjects subject new-subjects)))))
clojure.lang.IEditableCollection
(asTransient [coll] (mutable-tags (gen/transient2 tags) (gen/transient2 subjects))))
We 're generally not storing a ton of stuff in the tag entries . Can
(defrecord mtags [tags subjects]
ITagStore
(get-tags [store subject] (get subjects subject))
(get-subjects [store tag] (get tags tag))
(add-tag [store tag] (mtags. (assoc! tags tag (transient #{})) subjects))
(add-subject [store subject] (mtags. tags (assoc! subjects subject (transient #{}))))
(drop-tag [store tag] (mtags. (dissoc! tags tag)
(reduce (fn [acc subj]
(if-let [restags (disj! (get acc subj) tag)]
(assoc! acc subj restags)
(dissoc! acc subj)))
subjects
(get tags tag))))
(drop-subject [store subject] (mtags. (reduce (fn [acc tag]
(if-let [ressubjs (disj! (get acc tag) subject)]
(assoc! acc tag ressubjs)
(dissoc! acc tag)))
tags
(get subjects subject))
(dissoc! subjects subject)))
(tag-subject [store tag subject]
(let [oldt (try-get tags tag (transient #{}))
olds (try-get subjects subject (transient #{}))]
(mtags. (assoc! tags tag (conj! oldt subject))
(assoc! subjects subject (conj! olds tag)))))
(untag-subject [store tag subject]
(let [new-tags (disj! (try-get tags tag (transient #{})) subject)
new-subjects (disj! (try-get subjects subject (transient #{})) tag)]
(mtags. (if (zero? (count new-tags))
(dissoc! tags tag)
(assoc! tags tag new-tags))
(if (zero? (count new-subjects))
(dissoc! subjects subject)
(assoc! subjects subject new-subjects)))))
clojure.lang.ITransientCollection
(persistent [coll] (->tags (gen/persistent2! tags) (gen/persistent2! subjects)))
(conj [coll v] (.tag-subject coll (key v) (val v))))
(def empty-tags (tags. {} {}))
(defn mutable-tags [tgs subjcs] (mtags. tgs subjcs))
(extend-protocol ITagStore nil
(get-tags [store subject] nil)
(get-subjects [store tag] nil)
(add-tag [store tag] (add-tag empty-tags tag))
(add-subject [store subject] (add-subject empty-tags subject))
(drop-tag [store tag] nil)
(drop-subject [store subject] nil)
(tag-subject [store tag subject] (tag-subject empty-tags tag subject))
(untag-subject [store tag subject] nil))
(definline subject->tags [tags subject] `(get-subjects ~tags ~subject))
(definline tag->subjects [tags tag] `(get-tags ~tags ~tag))
(defn has-tag? [tags tag subject] (contains? (tag->subjects tags tag) subject))
(defn has-tags? [tags subject xs]
(when-let [knowns (subject->tags tags subject)]
(every? knowns xs)))
(defn some-tags? [tags subject xs]
(let [knowns (subject->tags tags subject)]
(reduce (fn [acc x]
(if (contains? knowns x)
(reduced x)
acc)) nil xs)))
(defn has-subject? [tags tag subject] (has-tag? tags tag subject))
(defn has-subjects? [tags tag xs]
(when-let [knowns (tag->subjects tags tag)]
(every? knowns xs)))
(defn some-subjects? [tags tag xs]
(let [knowns (tag->subjects tags tag)]
(reduce (fn [acc x]
(if (contains? knowns x)
(reduced x)
acc)) nil xs)))
(defn and-tags
"Select subjects that have every tag in xs."
[m xs]
(reduce #(cljset/intersection %1 (tag->subjects m %2)) #{} xs))
(defn or-tags
"Select subjects that have any tag in xs."
[m xs]
(reduce #(cljset/union %1 (tag->subjects m %2)) #{} xs))
(defn multi-tag
"Impose many tags on a subject."
[m subject tags]
(reduce #(tag-subject %1 subject %2 ) m tags))
(defn multi-untag
"Remove multiple tags from a subject."
[m subject tags]
(reduce #(untag-subject %1 subject %2 ) m tags))
(def simple-tags
(-> empty-tags
(multi-tag "tom" [:has-name :male])))
(def dropped-tags
(-> simple-tags
(untag-subject "tom" :has-name)))
(def tagset [:old :fat :mean :onery :bald :goof])
)
|
ffeb2ef6ea83e2c751ae72c6d0f2ec4f7ba3f1074bb93ebf07d09d6cd01d942a | joelburget/lvca | DeBruijn_2d.ml | open Base
open Lvca_util
open Option.Let_syntax
type 'info term =
| Operator of 'info * string * 'info scope list
| BoundVar of 'info * int * int
| FreeVar of 'info * string
| Primitive of 'info Primitive.All.t
and 'info scope = Scope of 'info Pattern.t list * 'info term
let rec to_nominal' ctx = function
| BoundVar (info, ix1, ix2) ->
List.nth ctx ix1
|> Option.bind ~f:(Fn.flip List.nth ix2)
|> Option.map ~f:(fun name -> Nominal.Term.Var (info, name))
| Operator (info, tag, subtms) ->
subtms
|> List.map ~f:(scope_to_nominal ctx)
|> Option.all
|> Option.map ~f:(fun subtms' -> Nominal.Term.Operator (info, tag, subtms'))
| FreeVar (info, name) -> Some (Var (info, name))
| Primitive prim -> Some (Nominal.Term.Primitive prim)
and scope_to_nominal ctx (Scope (binders, body)) =
let ctx =
binders
|> List.map ~f:(fun pat -> pat |> Pattern.list_vars_of_pattern |> List.map ~f:snd)
|> List.append ctx
in
let%map body = to_nominal' ctx body in
Nominal.Scope.Scope (binders, body)
;;
let to_nominal tm = to_nominal' [] tm
let rec of_nominal_with_bindings env = function
| Nominal.Term.Operator (info, tag, subtms) ->
let open Result.Let_syntax in
let%map subtms' = subtms |> List.map ~f:(scope_of_nominal env) |> Result.all in
Operator (info, tag, subtms')
| Var (info, name) ->
Ok
(match Map.find env name with
| None -> FreeVar (info, name)
| Some (i, j) -> BoundVar (info, i, j))
| Primitive prim -> Ok (Primitive prim)
and scope_of_nominal env (Nominal.Scope.Scope (pats, body) as scope) =
let open Result.Let_syntax in
let n = List.length pats in
let var_nums : (string * (int * int)) list =
pats
|> List.mapi ~f:(fun i pat ->
pat
|> Pattern.list_vars_of_pattern
|> List.mapi ~f:(fun j (_, var) -> var, (i, j)))
|> List.join
in
match String.Map.of_alist var_nums with
| `Ok var_map ->
let env' : (int * int) String.Map.t =
env |> Map.map ~f:(fun (i, j) -> i + n, j) |> Map.union_right_biased var_map
in
let%map body' = of_nominal_with_bindings env' body in
Scope (pats, body')
| `Duplicate_key _key -> Error scope
;;
let of_nominal tm = of_nominal_with_bindings String.Map.empty tm
let rec alpha_equivalent t1 t2 =
match t1, t2 with
| Operator (_, h1, subtms1), Operator (_, h2, subtms2) ->
String.(h1 = h2)
&&
(match List.zip subtms1 subtms2 with
| Ok zipped ->
List.for_all zipped ~f:(fun (Scope (_, body1), Scope (_, body2)) ->
alpha_equivalent body1 body2)
| Unequal_lengths -> false)
| BoundVar (_, i1, j1), BoundVar (_, i2, j2) -> Int.(i1 = i2 && j1 = j2)
| FreeVar (_, name1), FreeVar (_, name2) -> String.(name1 = name2)
| Primitive p1, Primitive p2 ->
Primitive.All.equal
~info_eq:Unit.( = )
(Primitive.All.erase p1)
(Primitive.All.erase p2)
| _, _ -> false
;;
let rec select_path ~path tm =
match path with
| [] -> Ok tm
| i :: path ->
(match tm with
| BoundVar _ | FreeVar _ | Primitive _ -> Error "TODO: message"
| Operator (_, _, scopes) ->
(match List.nth scopes i with
| None -> Error "TODO: message"
| Some (Scope (_pats, tm)) -> select_path ~path tm))
;;
| null | https://raw.githubusercontent.com/joelburget/lvca/f8a3b8e294f564d1fc9836af63e6169b26ca0234/experimental/DeBruijn_2d.ml | ocaml | open Base
open Lvca_util
open Option.Let_syntax
type 'info term =
| Operator of 'info * string * 'info scope list
| BoundVar of 'info * int * int
| FreeVar of 'info * string
| Primitive of 'info Primitive.All.t
and 'info scope = Scope of 'info Pattern.t list * 'info term
let rec to_nominal' ctx = function
| BoundVar (info, ix1, ix2) ->
List.nth ctx ix1
|> Option.bind ~f:(Fn.flip List.nth ix2)
|> Option.map ~f:(fun name -> Nominal.Term.Var (info, name))
| Operator (info, tag, subtms) ->
subtms
|> List.map ~f:(scope_to_nominal ctx)
|> Option.all
|> Option.map ~f:(fun subtms' -> Nominal.Term.Operator (info, tag, subtms'))
| FreeVar (info, name) -> Some (Var (info, name))
| Primitive prim -> Some (Nominal.Term.Primitive prim)
and scope_to_nominal ctx (Scope (binders, body)) =
let ctx =
binders
|> List.map ~f:(fun pat -> pat |> Pattern.list_vars_of_pattern |> List.map ~f:snd)
|> List.append ctx
in
let%map body = to_nominal' ctx body in
Nominal.Scope.Scope (binders, body)
;;
let to_nominal tm = to_nominal' [] tm
let rec of_nominal_with_bindings env = function
| Nominal.Term.Operator (info, tag, subtms) ->
let open Result.Let_syntax in
let%map subtms' = subtms |> List.map ~f:(scope_of_nominal env) |> Result.all in
Operator (info, tag, subtms')
| Var (info, name) ->
Ok
(match Map.find env name with
| None -> FreeVar (info, name)
| Some (i, j) -> BoundVar (info, i, j))
| Primitive prim -> Ok (Primitive prim)
and scope_of_nominal env (Nominal.Scope.Scope (pats, body) as scope) =
let open Result.Let_syntax in
let n = List.length pats in
let var_nums : (string * (int * int)) list =
pats
|> List.mapi ~f:(fun i pat ->
pat
|> Pattern.list_vars_of_pattern
|> List.mapi ~f:(fun j (_, var) -> var, (i, j)))
|> List.join
in
match String.Map.of_alist var_nums with
| `Ok var_map ->
let env' : (int * int) String.Map.t =
env |> Map.map ~f:(fun (i, j) -> i + n, j) |> Map.union_right_biased var_map
in
let%map body' = of_nominal_with_bindings env' body in
Scope (pats, body')
| `Duplicate_key _key -> Error scope
;;
let of_nominal tm = of_nominal_with_bindings String.Map.empty tm
let rec alpha_equivalent t1 t2 =
match t1, t2 with
| Operator (_, h1, subtms1), Operator (_, h2, subtms2) ->
String.(h1 = h2)
&&
(match List.zip subtms1 subtms2 with
| Ok zipped ->
List.for_all zipped ~f:(fun (Scope (_, body1), Scope (_, body2)) ->
alpha_equivalent body1 body2)
| Unequal_lengths -> false)
| BoundVar (_, i1, j1), BoundVar (_, i2, j2) -> Int.(i1 = i2 && j1 = j2)
| FreeVar (_, name1), FreeVar (_, name2) -> String.(name1 = name2)
| Primitive p1, Primitive p2 ->
Primitive.All.equal
~info_eq:Unit.( = )
(Primitive.All.erase p1)
(Primitive.All.erase p2)
| _, _ -> false
;;
let rec select_path ~path tm =
match path with
| [] -> Ok tm
| i :: path ->
(match tm with
| BoundVar _ | FreeVar _ | Primitive _ -> Error "TODO: message"
| Operator (_, _, scopes) ->
(match List.nth scopes i with
| None -> Error "TODO: message"
| Some (Scope (_pats, tm)) -> select_path ~path tm))
;;
| |
b9e67eea9bb7c35a9788403b1115ce78a92a00ae6b79510d6184a5e510936505 | chenyukang/eopl | 06.scm |
;; How many different evaluation orders are possible for the procedure
;; calls in (lambda (x y) (+ (f (g x)) (h (j y))))? For each evaluation
order , write a CPS expression that calls the procedures in that order .
4 ! ? for function f , , h , j.
(lambda (x y)
(+ (f (g x))
(h (j y))))
;; g -> j -> f -> h -> +
(lambda (x y cont)
(g x (lambda (val)
(j y (lambda (val2)
(f val (lambda (val3)
(h val2 (lambda (val4)
(cont (+ val3 val4)))))))))))
;; j -> g -> f -> h
(lambda (x y cont)
(j y (lambda (val)
(g x (lambda (val2)
(f val2 (lambda (val3)
(h val (lambda (val4)
(cont (+ val3 val4)))))))))))
;; g -> j -> h -> f
(lambda (x y cont)
(g x (lambda (val)
(j y (lambda (val2)
(h val2 (lambda (val3)
(f val (lambda (val4)
(cont (+ val4 val3)))))))))))
;; j -> g -> h -> f
(lambda (x y cont)
(j y (lambda (val)
(g x (lambda (val2)
(h val (lambda (val3)
(f val2 (lambda (val4)
(cont (+ val3 val4)))))))))))
;; g -> f -> j -> h
(lambda (x y cont)
(g x (lambda (val)
(f val (lambda (val2)
(j y (lambda (val3)
(h val3 (lambda (val4)
(cont (+ val2 val4)))))))))))
;; j -> h -> g -> f
(lambda (x y cont)
(j y (lambda (val)
(h val (lambda (val2)
(g x (lambda (val3)
(f val3 (lambda (val4)
(cont (+ val4 val2)))))))))))
| null | https://raw.githubusercontent.com/chenyukang/eopl/0406ff23b993bfe020294fa70d2597b1ce4f9b78/ch6/06.scm | scheme | How many different evaluation orders are possible for the procedure
calls in (lambda (x y) (+ (f (g x)) (h (j y))))? For each evaluation
g -> j -> f -> h -> +
j -> g -> f -> h
g -> j -> h -> f
j -> g -> h -> f
g -> f -> j -> h
j -> h -> g -> f |
order , write a CPS expression that calls the procedures in that order .
4 ! ? for function f , , h , j.
(lambda (x y)
(+ (f (g x))
(h (j y))))
(lambda (x y cont)
(g x (lambda (val)
(j y (lambda (val2)
(f val (lambda (val3)
(h val2 (lambda (val4)
(cont (+ val3 val4)))))))))))
(lambda (x y cont)
(j y (lambda (val)
(g x (lambda (val2)
(f val2 (lambda (val3)
(h val (lambda (val4)
(cont (+ val3 val4)))))))))))
(lambda (x y cont)
(g x (lambda (val)
(j y (lambda (val2)
(h val2 (lambda (val3)
(f val (lambda (val4)
(cont (+ val4 val3)))))))))))
(lambda (x y cont)
(j y (lambda (val)
(g x (lambda (val2)
(h val (lambda (val3)
(f val2 (lambda (val4)
(cont (+ val3 val4)))))))))))
(lambda (x y cont)
(g x (lambda (val)
(f val (lambda (val2)
(j y (lambda (val3)
(h val3 (lambda (val4)
(cont (+ val2 val4)))))))))))
(lambda (x y cont)
(j y (lambda (val)
(h val (lambda (val2)
(g x (lambda (val3)
(f val3 (lambda (val4)
(cont (+ val4 val2)))))))))))
|
7a314c6ed1c06d9deb24f44ab0f692b22fab0d7159d1a6809c19d5e30f4affe2 | nextjournal/clerk-slideshow | simple_slideshow.clj | # Hello there 👋
;;
;; `clerk-slideshow` enables you to create beautiful interactive slide decks
;; using Clerk notebooks.
;; ---
;; ## How does it work?
;;
;; Simply require `clerk-slideshow`
(ns simple-slideshow
(:require [nextjournal.clerk :as clerk]
[nextjournal.clerk-slideshow :as slideshow]))
;; and add it to Clerk’s existing viewers
^{::clerk/visibility {:code :hide :result :hide}}
(clerk/add-viewers! [slideshow/viewer])
;; ---
;; ## What now?
;;
With that in place , you can use comments to write your slides ’ content .
;; Use Markdown rulers (`---`) to separate your slides. You can use everything that
;; you’ll normally use in your Clerk notebooks:
;; Markdown, plots, code blocks, you name it.
;;
Press ` ← ` and ` → ` to navigate between slides or ` Escape ` to get an overview .
;;
;; Now some demos 👉
;; ---
# # 📊 A Plotly graph
^{::clerk/visibility {:code :hide}}
(clerk/plotly {:data [{:z [[1 2 3] [3 2 1]] :type "surface"}]})
;; ---
# # 📈 A Vega Lite graph
^{::clerk/visibility {:code :hide}}
(clerk/vl {:width 650 :height 400 :data {:url "-datasets/data/us-10m.json"
:format {:type "topojson" :feature "counties"}}
:transform [{:lookup "id" :from {:data {:url "-datasets/data/unemployment.tsv"}
:key "id" :fields ["rate"]}}]
:projection {:type "albersUsa"} :mark "geoshape" :encoding {:color {:field "rate" :type "quantitative"}}})
;; ---
# # And that ’s it for now ! 👋
;;
;; More demos will follow soon!
| null | https://raw.githubusercontent.com/nextjournal/clerk-slideshow/562f634494a1e1a9149ed78d5d39fd9486cc00ba/notebooks/simple_slideshow.clj | clojure |
`clerk-slideshow` enables you to create beautiful interactive slide decks
using Clerk notebooks.
---
## How does it work?
Simply require `clerk-slideshow`
and add it to Clerk’s existing viewers
---
## What now?
Use Markdown rulers (`---`) to separate your slides. You can use everything that
you’ll normally use in your Clerk notebooks:
Markdown, plots, code blocks, you name it.
Now some demos 👉
---
---
---
More demos will follow soon! | # Hello there 👋
(ns simple-slideshow
(:require [nextjournal.clerk :as clerk]
[nextjournal.clerk-slideshow :as slideshow]))
^{::clerk/visibility {:code :hide :result :hide}}
(clerk/add-viewers! [slideshow/viewer])
With that in place , you can use comments to write your slides ’ content .
Press ` ← ` and ` → ` to navigate between slides or ` Escape ` to get an overview .
# # 📊 A Plotly graph
^{::clerk/visibility {:code :hide}}
(clerk/plotly {:data [{:z [[1 2 3] [3 2 1]] :type "surface"}]})
# # 📈 A Vega Lite graph
^{::clerk/visibility {:code :hide}}
(clerk/vl {:width 650 :height 400 :data {:url "-datasets/data/us-10m.json"
:format {:type "topojson" :feature "counties"}}
:transform [{:lookup "id" :from {:data {:url "-datasets/data/unemployment.tsv"}
:key "id" :fields ["rate"]}}]
:projection {:type "albersUsa"} :mark "geoshape" :encoding {:color {:field "rate" :type "quantitative"}}})
# # And that ’s it for now ! 👋
|
8f5f80e8080a9ae4e826c1434bfc4ff768708023af9c79a8fde3def27cbf1e5f | fmthoma/vgrep | KeybindingMap.hs | # LANGUAGE GeneralizedNewtypeDeriving #
module Vgrep.KeybindingMap where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Vgrep.Command
import qualified Vgrep.Key as Key
newtype KeybindingMap = KeybindingMap { unKeybindingMap :: Map Key.Chord Command }
deriving (Show, Eq, Semigroup, Monoid)
lookup :: Key.Chord -> KeybindingMap -> Maybe Command
lookup chord (KeybindingMap m) = M.lookup chord m
fromList :: [(Key.Chord, Command)] -> KeybindingMap
fromList = KeybindingMap . M.fromList
| null | https://raw.githubusercontent.com/fmthoma/vgrep/f3b140bf3150a3699234469c34ff8c13a298998e/src/Vgrep/KeybindingMap.hs | haskell | # LANGUAGE GeneralizedNewtypeDeriving #
module Vgrep.KeybindingMap where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Vgrep.Command
import qualified Vgrep.Key as Key
newtype KeybindingMap = KeybindingMap { unKeybindingMap :: Map Key.Chord Command }
deriving (Show, Eq, Semigroup, Monoid)
lookup :: Key.Chord -> KeybindingMap -> Maybe Command
lookup chord (KeybindingMap m) = M.lookup chord m
fromList :: [(Key.Chord, Command)] -> KeybindingMap
fromList = KeybindingMap . M.fromList
| |
1cda4e8e722bbfc5f56459d18fcede051ffa5bcbb90aa18729965eeada969e15 | Frama-C/Frama-C-snapshot | split_return.mli | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
(** This module is used to merge together the final states of a function
according to a given strategy. Default is to merge all states together *)
val pretty_strategies: unit -> unit
val kf_strategy: Kernel_function.t -> Split_strategy.t
(*
Local Variables:
compile-command: "make -C ../../../.."
End:
*)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/value/partitioning/split_return.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* This module is used to merge together the final states of a function
according to a given strategy. Default is to merge all states together
Local Variables:
compile-command: "make -C ../../../.."
End:
| This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
val pretty_strategies: unit -> unit
val kf_strategy: Kernel_function.t -> Split_strategy.t
|
e9d62db714ce16ac355017a435b50479d38e273667b86f23a6c1958b52af7e16 | geoffder/dometyl-keyboard | bottom.mli | open OCADml
open OSCADml
type bump_loc
* [ thumb ? loc col row ]
Compute a bumpon location under the thumb key at [ col ] and [ row ] with a
relative position of [ loc ] in a coordinate space where the origin of
the key is [ ( v2 0.5 0.5 ) ] .
Compute a bumpon location under the thumb key at [col] and [row] with a
relative position of [loc] in a coordinate space where the origin of
the key is [(v2 0.5 0.5)]. *)
val thumb : ?loc:v2 -> Idx.t -> Idx.t -> bump_loc
* [ body ? loc col row ]
Compute a bumpon location under the body key at [ col ] and [ row ] with a
relative position of [ loc ] in a coordinate space where the origin of
the key is [ ( v2 0.5 0.5 ) ] .
Compute a bumpon location under the body key at [col] and [row] with a
relative position of [loc] in a coordinate space where the origin of
the key is [(v2 0.5 0.5)]. *)
val body : ?loc:v2 -> Idx.t -> Idx.t -> bump_loc
(** [point p]
Specify a a bumpon location at the absolute xy coordinate [p]. *)
val point : v2 -> bump_loc
(** A list of default relative (placed under keyholes) bumpon locations *)
val default_bumps : bump_loc list
* [ locate_bump plate loc ]
Get the xy coordinate specified by [ loc ] , if it is a legal position .
Get the xy coordinate specified by [loc], if it is a legal position. *)
val locate_bump : Plate.t -> bump_loc -> V2.t option
* [ make case ]
Generate a base plate to fit [ case ] .
- [ thickness ] sets the thickness of the plate in ( default [= 1.65 ] )
- [ bumpon_rad ] sets the bumpon inset radius ( default [= 5.5 ] )
- [ bumpon_inset ] sets how deep the bumpon insets should be ( default [= 0.8 ] )
- if provided , [ fastener ] overrides the fastener information carried by the
case
Generate a base plate to fit [case].
- [thickness] sets the thickness of the plate in mm (default [= 1.65])
- [bumpon_rad] sets the bumpon inset radius (default [= 5.5])
- [bumpon_inset] sets how deep the bumpon insets should be (default [= 0.8])
- if provided, [fastener] overrides the fastener information carried by the
case *)
val make
: ?thickness:float
-> ?fastener:Eyelet.fastener
-> ?bumpon_rad:float
-> ?bumpon_inset:float
-> ?bump_locs:bump_loc list
-> Case.t
-> Scad.d3
| null | https://raw.githubusercontent.com/geoffder/dometyl-keyboard/5efa847361e0ceb80de5b03dd28a702d6b42412d/lib/bottom.mli | ocaml | * [point p]
Specify a a bumpon location at the absolute xy coordinate [p].
* A list of default relative (placed under keyholes) bumpon locations | open OCADml
open OSCADml
type bump_loc
* [ thumb ? loc col row ]
Compute a bumpon location under the thumb key at [ col ] and [ row ] with a
relative position of [ loc ] in a coordinate space where the origin of
the key is [ ( v2 0.5 0.5 ) ] .
Compute a bumpon location under the thumb key at [col] and [row] with a
relative position of [loc] in a coordinate space where the origin of
the key is [(v2 0.5 0.5)]. *)
val thumb : ?loc:v2 -> Idx.t -> Idx.t -> bump_loc
* [ body ? loc col row ]
Compute a bumpon location under the body key at [ col ] and [ row ] with a
relative position of [ loc ] in a coordinate space where the origin of
the key is [ ( v2 0.5 0.5 ) ] .
Compute a bumpon location under the body key at [col] and [row] with a
relative position of [loc] in a coordinate space where the origin of
the key is [(v2 0.5 0.5)]. *)
val body : ?loc:v2 -> Idx.t -> Idx.t -> bump_loc
val point : v2 -> bump_loc
val default_bumps : bump_loc list
* [ locate_bump plate loc ]
Get the xy coordinate specified by [ loc ] , if it is a legal position .
Get the xy coordinate specified by [loc], if it is a legal position. *)
val locate_bump : Plate.t -> bump_loc -> V2.t option
* [ make case ]
Generate a base plate to fit [ case ] .
- [ thickness ] sets the thickness of the plate in ( default [= 1.65 ] )
- [ bumpon_rad ] sets the bumpon inset radius ( default [= 5.5 ] )
- [ bumpon_inset ] sets how deep the bumpon insets should be ( default [= 0.8 ] )
- if provided , [ fastener ] overrides the fastener information carried by the
case
Generate a base plate to fit [case].
- [thickness] sets the thickness of the plate in mm (default [= 1.65])
- [bumpon_rad] sets the bumpon inset radius (default [= 5.5])
- [bumpon_inset] sets how deep the bumpon insets should be (default [= 0.8])
- if provided, [fastener] overrides the fastener information carried by the
case *)
val make
: ?thickness:float
-> ?fastener:Eyelet.fastener
-> ?bumpon_rad:float
-> ?bumpon_inset:float
-> ?bump_locs:bump_loc list
-> Case.t
-> Scad.d3
|
ce70656ae2c2141d92248219d835ccb7c8c4ed2c1d9828317082977d20db5615 | kelamg/HtDP2e-workthrough | ex205.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-abbr-reader.ss" "lang")((modname ex205) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(require 2htdp/batch-io)
(require 2htdp/itunes)
; itunes export file from:
;
(define ITUNES-LOCATION "IO/itunes.xml")
LLists
uncomment to run on .xml file
;(define list-tracks
( read - itunes - as - lists ITUNES - LOCATION ) )
the 2htdp / itunes library documentation , part 2 :
An LLists is one of :
; – '()
– ( cons LAssoc LLists )
; An LAssoc is one of:
; – '()
– ( cons Association LAssoc )
;
An Association is a list of two items :
( cons String ( cons BSDN ' ( ) ) )
A BSDN is one of :
; – Boolean
; – Number
; – String
; – Date
;; Helpers for creating associations
;; BDSN (String) -> Association
;; produce a name association
(define (create-title s)
(list "name" s))
;; BDSN (String) -> Association
;; produce an artist association
(define (create-artist s)
(list "artist" s))
;; BDSN (String) -> Association
;; produce an album title association
(define (create-album s)
(list "album" s))
;; BDSN (Number) -> Association
;; produce a track duration association
(define (create-time n)
(list "time" n))
;; BDSN (Number) -> Association
;; produce a track number association
(define (create-track# n)
(list "track#" n))
;; BDSN (Date) -> Association
;; produce a date added association
(define (create-added d)
(list "added" d))
;; BDSN (Number) -> Association
;; produce a play count association
(define (create-play# n)
(list "play#" n))
;; BDSN (Date) -> Association
;; produce a last played association
(define (create-played d)
(list "played" d))
(define AD1 (create-date 2017 11 3 1 55 34))
(define AD2 (create-date 2010 2 14 11 23 5))
(define AD3 (create-date 2011 12 19 23 50 42))
(define AD4 (create-date 2011 11 25 22 00 20))
(define AD5 (create-date 2012 10 22 9 59 55))
(define PD1 (create-date 2018 8 17 2 30 12))
(define PD2 (create-date 2015 12 5 15 47 58))
(define PD3 (create-date 2014 6 21 15 13 29))
(define PD4 (create-date 2017 3 30 17 45 22))
(define PD5 (create-date 2018 1 3 9 3 50))
(define PD6 (create-date 2018 8 17 2 22 56))
(define PD7 (create-date 2015 12 5 15 55 13))
;; A Track is LAssoc
(define A1
(list (create-title "I Hope I Sleep Tonight")
(create-artist "DJ Seinfeld")
(create-album "Time Spent Away From U")
(create-time 4060)
(create-track# 1)
(create-added AD1)
(create-play# 323)
(create-played PD1)))
(define A2
(list (create-title "Beat It")
(create-artist "Michael Jackson")
(create-album "Thriller")
(create-time 4180)
(create-track# 5)
(create-added AD2)
(create-play# 123)
(create-played PD2)))
(define A3
(list (create-title "Memory Lane")
(create-artist "Netsky")
(create-album "UKF Drum & Bass 2010")
(create-time 5350)
(create-track# 1)
(create-added AD3)
(create-play# 148)
(create-played PD3)))
(define A4
(list (create-title "All of the Lights")
(create-artist "Kanye West")
(create-album "My Beautiful Dark Twisted Fantasy")
(create-time 5000)
(create-track# 5)
(create-added AD4)
(create-play# 93)
(create-played PD4)))
(define A5
(list (create-title "Lose Yourself")
(create-artist "Eminem")
(create-album "8 Mile: Music from and Inspired by the Motion Picture")
(create-time 5200)
(create-track# 1)
(create-added AD5)
(create-play# 231)
(create-played PD5)))
(define A6
(list (create-title "U")
(create-artist "DJ Seinfeld")
(create-album "Time Spent Away From U")
(create-time 6080)
(create-track# 9)
(create-added AD1)
(create-play# 351)
(create-played PD6)))
(define A7
(list (create-title "Billie Jean")
(create-artist "Michael Jackson")
(create-album "Thriller")
(create-time 4540)
(create-track# 6)
(create-added AD2)
(create-play# 170)
(create-played PD7)))
LLists
(define LL1 (list A1))
(define LL2 (list A1 A2 A3))
(define LL3 (list A1 A2 A3 A4 A5))
(define LL4 (list A1 A2 A3 A4 A5 A6))
(define LL5 (list A1 A2 A3 A4 A5 A6 A7))
| null | https://raw.githubusercontent.com/kelamg/HtDP2e-workthrough/ec05818d8b667a3c119bea8d1d22e31e72e0a958/HtDP/Arbitrarily-Large-Data/ex205.rkt | racket | about the language level of this file in a form that our tools can easily process.
itunes export file from:
(define list-tracks
– '()
An LAssoc is one of:
– '()
– Boolean
– Number
– String
– Date
Helpers for creating associations
BDSN (String) -> Association
produce a name association
BDSN (String) -> Association
produce an artist association
BDSN (String) -> Association
produce an album title association
BDSN (Number) -> Association
produce a track duration association
BDSN (Number) -> Association
produce a track number association
BDSN (Date) -> Association
produce a date added association
BDSN (Number) -> Association
produce a play count association
BDSN (Date) -> Association
produce a last played association
A Track is LAssoc | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-abbr-reader.ss" "lang")((modname ex205) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(require 2htdp/batch-io)
(require 2htdp/itunes)
(define ITUNES-LOCATION "IO/itunes.xml")
LLists
uncomment to run on .xml file
( read - itunes - as - lists ITUNES - LOCATION ) )
the 2htdp / itunes library documentation , part 2 :
An LLists is one of :
– ( cons LAssoc LLists )
– ( cons Association LAssoc )
An Association is a list of two items :
( cons String ( cons BSDN ' ( ) ) )
A BSDN is one of :
(define (create-title s)
(list "name" s))
(define (create-artist s)
(list "artist" s))
(define (create-album s)
(list "album" s))
(define (create-time n)
(list "time" n))
(define (create-track# n)
(list "track#" n))
(define (create-added d)
(list "added" d))
(define (create-play# n)
(list "play#" n))
(define (create-played d)
(list "played" d))
(define AD1 (create-date 2017 11 3 1 55 34))
(define AD2 (create-date 2010 2 14 11 23 5))
(define AD3 (create-date 2011 12 19 23 50 42))
(define AD4 (create-date 2011 11 25 22 00 20))
(define AD5 (create-date 2012 10 22 9 59 55))
(define PD1 (create-date 2018 8 17 2 30 12))
(define PD2 (create-date 2015 12 5 15 47 58))
(define PD3 (create-date 2014 6 21 15 13 29))
(define PD4 (create-date 2017 3 30 17 45 22))
(define PD5 (create-date 2018 1 3 9 3 50))
(define PD6 (create-date 2018 8 17 2 22 56))
(define PD7 (create-date 2015 12 5 15 55 13))
(define A1
(list (create-title "I Hope I Sleep Tonight")
(create-artist "DJ Seinfeld")
(create-album "Time Spent Away From U")
(create-time 4060)
(create-track# 1)
(create-added AD1)
(create-play# 323)
(create-played PD1)))
(define A2
(list (create-title "Beat It")
(create-artist "Michael Jackson")
(create-album "Thriller")
(create-time 4180)
(create-track# 5)
(create-added AD2)
(create-play# 123)
(create-played PD2)))
(define A3
(list (create-title "Memory Lane")
(create-artist "Netsky")
(create-album "UKF Drum & Bass 2010")
(create-time 5350)
(create-track# 1)
(create-added AD3)
(create-play# 148)
(create-played PD3)))
(define A4
(list (create-title "All of the Lights")
(create-artist "Kanye West")
(create-album "My Beautiful Dark Twisted Fantasy")
(create-time 5000)
(create-track# 5)
(create-added AD4)
(create-play# 93)
(create-played PD4)))
(define A5
(list (create-title "Lose Yourself")
(create-artist "Eminem")
(create-album "8 Mile: Music from and Inspired by the Motion Picture")
(create-time 5200)
(create-track# 1)
(create-added AD5)
(create-play# 231)
(create-played PD5)))
(define A6
(list (create-title "U")
(create-artist "DJ Seinfeld")
(create-album "Time Spent Away From U")
(create-time 6080)
(create-track# 9)
(create-added AD1)
(create-play# 351)
(create-played PD6)))
(define A7
(list (create-title "Billie Jean")
(create-artist "Michael Jackson")
(create-album "Thriller")
(create-time 4540)
(create-track# 6)
(create-added AD2)
(create-play# 170)
(create-played PD7)))
LLists
(define LL1 (list A1))
(define LL2 (list A1 A2 A3))
(define LL3 (list A1 A2 A3 A4 A5))
(define LL4 (list A1 A2 A3 A4 A5 A6))
(define LL5 (list A1 A2 A3 A4 A5 A6 A7))
|
897a6f91a73b66a66ece2a7061c99a165989f8646e45b8ec3951cdffe8cdd3e0 | GaloisInc/network-hans | Types.hs | module Network.Socket.Types(
Socket
, newSocket
, connectIP4
, bindIP4
, listen
, acceptIP4
, SocketStatus(..)
, getSocketStatus
, Direction(..)
, directionOpen
, withTcpSocket
, withUdpSocket
, socketToHandle
, ShutdownCmd(..)
, shutdown
, close
, toIP4, fromIP4
, setNetworkStack
)
where
import Control.Concurrent.MVar(MVar, newMVar, withMVar,
readMVar, swapMVar,
modifyMVar, modifyMVar_)
import Control.Monad(void)
import Data.Bits(shiftL, shiftR, (.|.), (.&.))
import Data.Maybe(fromMaybe)
import Data.Typeable(Typeable)
import Data.Word(Word64, Word32)
import Hans.Addr(wildcardAddr)
import Hans.IP4(IP4, packIP4, unpackIP4)
import Hans.Lens(view)
import Hans.Socket(TcpSocket, TcpListenSocket, UdpSocket, SockPort,
tcpRemoteAddr, tcpRemotePort, defaultSocketConfig,
newUdpSocket, sConnect, sListen, sAccept, sClose)
import Hans.Socket.Handle(makeHansHandle)
import Hans.Types(NetworkStack)
import System.IO(Handle, IOMode(..))
import System.IO.Unsafe(unsafePerformIO)
data Socket = Socket (MVar SocketState) Word64
instance Eq Socket where
(Socket _ a) == (Socket _ b) = a == b
newSocket :: Bool -> IO Socket
newSocket isTcp =
do mv <- newMVar (if isTcp then CreatedTcp else CreatedUdp)
ident <- nextSocketIdent
return (Socket mv ident)
data SocketState = CreatedTcp | CreatedUdp
| BoundUdp (UdpSocket IP4)
| BoundTcp (Maybe IP4) (Maybe SockPort)
| ListeningTcp (TcpListenSocket IP4)
| ConnectedTcp (TcpSocket IP4) Direction
| Converted
| ClosedSocket
connectIP4 :: Socket -> IP4 -> SockPort -> IO ()
connectIP4 (Socket mvs _) addr port =
modifyMVar_ mvs $ \ curstate ->
case curstate of
CreatedTcp ->
do let conf = defaultSocketConfig
src = wildcardAddr undefined
ns <- readMVar evilNetworkStackMVar
sock <- sConnect ns conf Nothing src Nothing addr port
return (ConnectedTcp sock ForBoth)
CreatedUdp ->
fail "Cannot connect UDP socket."
BoundUdp _ ->
fail "Cannot connect bound UDP socket."
BoundTcp maddr mport ->
do let conf = defaultSocketConfig
src = fromMaybe (wildcardAddr undefined) maddr
ns <- readMVar evilNetworkStackMVar
sock <- sConnect ns conf Nothing src mport addr port
return (ConnectedTcp sock ForBoth)
ListeningTcp _ ->
fail "Canoot connect listening TCP socket."
ConnectedTcp _ _ ->
fail "Cannot connect connected TCP socket."
Converted ->
fail "Cannot connect converted socket."
ClosedSocket ->
fail "Cannot connect closed socket."
bindIP4 :: Socket -> Maybe IP4 -> Maybe SockPort -> IO ()
bindIP4 (Socket mvs _) maddr mport =
modifyMVar_ mvs $ \ curstate ->
case curstate of
CreatedTcp ->
return (BoundTcp maddr mport)
CreatedUdp ->
do let addr = fromMaybe (wildcardAddr undefined) maddr
conf = defaultSocketConfig
ns <- readMVar evilNetworkStackMVar
sock <- newUdpSocket ns conf Nothing addr mport
return (BoundUdp sock)
BoundUdp _ ->
fail "Cannot re-bind bound UDP port."
BoundTcp _ _ ->
fail "Cannot re-bind bound TCP port."
ListeningTcp _ ->
fail "Cannot bind listening TCP socket."
ConnectedTcp _ _ ->
fail "Cannot bind connected TCP socket."
Converted ->
fail "Cannot bind converted socket."
ClosedSocket ->
fail "Cannot connect closed socket."
listen :: Socket -> Int -> IO ()
listen (Socket mvs _) backlog =
modifyMVar_ mvs $ \ curstate ->
case curstate of
BoundTcp maddr (Just port) ->
do let addr = fromMaybe (wildcardAddr undefined) maddr
conf = defaultSocketConfig
ns <- readMVar evilNetworkStackMVar
lsock <- sListen ns conf addr port backlog
return (ListeningTcp lsock)
BoundTcp _ Nothing ->
fail "Cannot listen on socket with unbound port."
_ ->
fail "Cannot listen on unbound TCP port."
acceptIP4 :: Socket -> IO (Socket, IP4, SockPort)
acceptIP4 (Socket mvs _) =
do curstate <- readMVar mvs
case curstate of
ListeningTcp lsock ->
do sock <- sAccept lsock
let addr = view tcpRemoteAddr sock
port = view tcpRemotePort sock
stateMV <- newMVar (ConnectedTcp sock ForBoth)
ident <- nextSocketIdent
return (Socket stateMV ident, addr, port)
_ ->
fail "Illegal state for accept socket."
data ShutdownCmd = ShutdownReceive | ShutdownSend | ShutdownBoth
deriving (Typeable, Eq)
shutdown :: Socket -> ShutdownCmd -> IO ()
shutdown (Socket mvs _) cmd =
modifyMVar_ mvs $ \ curstate ->
case curstate of
ConnectedTcp sock _ | cmd == ShutdownBoth ->
sClose sock >> return ClosedSocket
ConnectedTcp sock ForRead | cmd == ShutdownReceive ->
sClose sock >> return ClosedSocket
ConnectedTcp sock ForWrite | cmd == ShutdownSend ->
sClose sock >> return ClosedSocket
ConnectedTcp sock _ | cmd == ShutdownReceive ->
return (ConnectedTcp sock ForWrite)
ConnectedTcp sock _ | cmd == ShutdownSend ->
return (ConnectedTcp sock ForRead)
ConnectedTcp _ _ ->
fail "Internal consistency error in shutdown."
_ ->
fail "Shutdown called on un-connected socket."
close :: Socket -> IO ()
close (Socket mvs _) =
modifyMVar_ mvs (\ _ -> return ClosedSocket)
-- | Convert a connected TCP socket into an ordinary 'Handle'.
--
-- The requested 'IOMode' must be compatible with the direction(s) the
-- socket still has open ('ForBoth' accepts any mode; 'ForRead' only
-- 'ReadMode'; 'ForWrite' only 'AppendMode'/'WriteMode').  On success
-- the socket transitions to 'Converted' and must no longer be used
-- through the socket API.
socketToHandle :: Socket -> IOMode -> IO Handle
socketToHandle (Socket mvs _) mode =
  modifyMVar mvs $ \ curstate ->
    case curstate of
      ConnectedTcp sock ForBoth ->
        do hndl <- makeHansHandle sock mode
           return (Converted, hndl)
      ConnectedTcp sock ForRead | mode == ReadMode ->
        do hndl <- makeHansHandle sock mode
           return (Converted, hndl)
      ConnectedTcp sock ForWrite | mode `elem` [AppendMode, WriteMode] ->
        do hndl <- makeHansHandle sock mode
           return (Converted, hndl)
      ConnectedTcp _ allowed ->
        -- BUG FIX: message previously read "Access error converted
        -- socket to handle."
        fail ("Access error converting socket to handle. Socket is in " ++
              show allowed ++ " mode, but IOMode was " ++ show mode)
      _ ->
        fail ("Cannot convert unconnected socket to a handle.")
data SocketStatus = NotConnected
| Bound
| Listening
| Connected
| ConvertedToHandle
| Closed
deriving (Eq, Show)
getSocketStatus :: Socket -> IO SocketStatus
getSocketStatus (Socket mvs _) =
withMVar mvs $ \ curstate ->
case curstate of
CreatedTcp -> return NotConnected
CreatedUdp -> return NotConnected
BoundUdp _ -> return Bound
BoundTcp _ _ -> return Bound
ListeningTcp _ -> return Listening
ConnectedTcp _ _ -> return Connected
Converted -> return ConvertedToHandle
ClosedSocket -> return Closed
data Direction = ForWrite | ForRead | ForBoth | ForNeither
deriving (Show)
directionOpen :: Socket -> Direction -> IO Bool
directionOpen (Socket mvs _) req =
withMVar mvs $ \ curstate ->
case curstate of
ConnectedTcp _ dir -> return (modesMatch req dir)
_ -> return False
withTcpSocket :: Socket -> Direction -> (TcpSocket IP4 -> IO a) -> IO a
withTcpSocket (Socket mvs _) dir action =
withMVar mvs $ \ curstate ->
case curstate of
ConnectedTcp sock dir' | modesMatch dir dir' ->
action sock
ConnectedTcp _ dir' ->
fail ("Mismatch between requested direction (" ++ show dir ++
") and allowed (" ++ show dir' ++ ")")
_ ->
fail ("TCP operation on non-TCP socket.")
withUdpSocket :: Socket -> (UdpSocket IP4 -> IO a) -> IO a
withUdpSocket (Socket mvs _) action =
withMVar mvs $ \ curstate ->
case curstate of
BoundUdp udps ->
action udps
_ ->
fail "UDP operation on non-UDP socket."
toIP4 :: Word32 -> IP4
toIP4 w32 = packIP4 a b c d
where
a = fromIntegral ((w32 `shiftR` 24) .&. 0xFF)
b = fromIntegral ((w32 `shiftR` 16) .&. 0xFF)
c = fromIntegral ((w32 `shiftR` 8) .&. 0xFF)
d = fromIntegral ((w32 `shiftR` 0) .&. 0xFF)
fromIP4 :: IP4 -> Word32
fromIP4 ipaddr = w32
where
(a, b, c, d) = unpackIP4 ipaddr
--
w32 = a' .|. b' .|. c' .|. d'
a' = fromIntegral a `shiftL` 24
b' = fromIntegral b `shiftL` 16
c' = fromIntegral c `shiftL` 8
d' = fromIntegral d `shiftL` 0
modesMatch :: Direction -> Direction -> Bool
modesMatch ForBoth ForBoth = True
modesMatch ForBoth _ = False
modesMatch ForRead ForBoth = True
modesMatch ForRead ForRead = True
modesMatch ForRead _ = False
modesMatch ForWrite ForBoth = True
modesMatch ForWrite ForWrite = True
modesMatch ForWrite _ = False
modesMatch ForNeither _ = True
{-# NOINLINE evilNetworkStackMVar #-}
-- | Process-global network stack, installed via 'setNetworkStack'.
-- The NOINLINE pragma (restored here: the braces had been stripped,
-- leaving an invalid "# NOINLINE ... #") is required so the
-- 'unsafePerformIO' allocation happens exactly once.  Reading it
-- before 'setNetworkStack' hits the 'error' thunk.
evilNetworkStackMVar :: MVar NetworkStack
evilNetworkStackMVar =
  unsafePerformIO (newMVar (error "Access before network stack set!"))
{-# NOINLINE evilSocketIDMVar #-}
-- | Process-global counter handing out unique socket identifiers; see
-- 'nextSocketIdent'.  Pragma braces restored (were stripped to
-- "# NOINLINE ... #"); NOINLINE keeps the 'unsafePerformIO' cell unique.
evilSocketIDMVar :: MVar Word64
evilSocketIDMVar =
  unsafePerformIO (newMVar 1)
setNetworkStack :: NetworkStack -> IO ()
setNetworkStack = void . swapMVar evilNetworkStackMVar
nextSocketIdent :: IO Word64
nextSocketIdent = modifyMVar evilSocketIDMVar (\ x -> return (x + 1, x))
| null | https://raw.githubusercontent.com/GaloisInc/network-hans/5bc00507ca34cc33c57065f420c9e4742abf0397/src/Network/Socket/Types.hs | haskell | module Network.Socket.Types(
Socket
, newSocket
, connectIP4
, bindIP4
, listen
, acceptIP4
, SocketStatus(..)
, getSocketStatus
, Direction(..)
, directionOpen
, withTcpSocket
, withUdpSocket
, socketToHandle
, ShutdownCmd(..)
, shutdown
, close
, toIP4, fromIP4
, setNetworkStack
)
where
import Control.Concurrent.MVar(MVar, newMVar, withMVar,
readMVar, swapMVar,
modifyMVar, modifyMVar_)
import Control.Monad(void)
import Data.Bits(shiftL, shiftR, (.|.), (.&.))
import Data.Maybe(fromMaybe)
import Data.Typeable(Typeable)
import Data.Word(Word64, Word32)
import Hans.Addr(wildcardAddr)
import Hans.IP4(IP4, packIP4, unpackIP4)
import Hans.Lens(view)
import Hans.Socket(TcpSocket, TcpListenSocket, UdpSocket, SockPort,
tcpRemoteAddr, tcpRemotePort, defaultSocketConfig,
newUdpSocket, sConnect, sListen, sAccept, sClose)
import Hans.Socket.Handle(makeHansHandle)
import Hans.Types(NetworkStack)
import System.IO(Handle, IOMode(..))
import System.IO.Unsafe(unsafePerformIO)
-- | A user-visible socket: its mutable state plus a process-unique
-- identifier.  Equality compares the identifier only, never the state.
data Socket = Socket (MVar SocketState) Word64

instance Eq Socket where
  (Socket _ a) == (Socket _ b) = a == b
-- | Allocate a fresh, unconnected socket: TCP when the flag is 'True',
-- UDP otherwise.  The socket gets a process-unique identifier.
newSocket :: Bool -> IO Socket
newSocket isTcp =
  do let initial = if isTcp then CreatedTcp else CreatedUdp
     mv    <- newMVar initial
     ident <- nextSocketIdent
     return (Socket mv ident)
-- | Internal socket state machine.  TCP sockets move
-- CreatedTcp -> BoundTcp -> (ListeningTcp | ConnectedTcp) -> Converted;
-- UDP sockets move CreatedUdp -> BoundUdp; 'close' forces any state to
-- ClosedSocket.  'BoundTcp' only records the requested address/port;
-- the real bind happens at connect/listen time.
data SocketState = CreatedTcp | CreatedUdp
                 | BoundUdp (UdpSocket IP4)
                 | BoundTcp (Maybe IP4) (Maybe SockPort)
                 | ListeningTcp (TcpListenSocket IP4)
                 | ConnectedTcp (TcpSocket IP4) Direction
                 | Converted
                 | ClosedSocket
-- | Connect a TCP socket to the given remote IPv4 address and port.
--
-- Only sockets in the 'CreatedTcp' or 'BoundTcp' states may connect;
-- every other state calls 'fail'.  On success the socket becomes
-- 'ConnectedTcp' with both directions ('ForBoth') open.
connectIP4 :: Socket -> IP4 -> SockPort -> IO ()
connectIP4 (Socket mvs _) addr port =
  modifyMVar_ mvs $ \ curstate ->
    case curstate of
      CreatedTcp ->
        do let conf = defaultSocketConfig
               -- no local address bound; let the stack choose one
               src  = wildcardAddr undefined
           ns   <- readMVar evilNetworkStackMVar
           sock <- sConnect ns conf Nothing src Nothing addr port
           return (ConnectedTcp sock ForBoth)
      CreatedUdp ->
        fail "Cannot connect UDP socket."
      BoundUdp _ ->
        fail "Cannot connect bound UDP socket."
      BoundTcp maddr mport ->
        do let conf = defaultSocketConfig
               -- honor the previously bound local address/port
               src = fromMaybe (wildcardAddr undefined) maddr
           ns   <- readMVar evilNetworkStackMVar
           sock <- sConnect ns conf Nothing src mport addr port
           return (ConnectedTcp sock ForBoth)
      ListeningTcp _ ->
        -- BUG FIX: message previously read "Canoot".
        fail "Cannot connect listening TCP socket."
      ConnectedTcp _ _ ->
        fail "Cannot connect connected TCP socket."
      Converted ->
        fail "Cannot connect converted socket."
      ClosedSocket ->
        fail "Cannot connect closed socket."
-- | Bind a socket to an optional local IPv4 address and optional port.
--
-- For TCP the binding is merely recorded ('BoundTcp') and applied when
-- the socket later connects or listens; for UDP a Hans UDP socket is
-- created immediately.  Rebinding, or binding in any later state, fails.
bindIP4 :: Socket -> Maybe IP4 -> Maybe SockPort -> IO ()
bindIP4 (Socket mvs _) maddr mport =
  modifyMVar_ mvs $ \ curstate ->
    case curstate of
      CreatedTcp ->
        -- TCP defers the actual bind until connect/listen time
        return (BoundTcp maddr mport)
      CreatedUdp ->
        do let addr = fromMaybe (wildcardAddr undefined) maddr
               conf = defaultSocketConfig
           ns <- readMVar evilNetworkStackMVar
           sock <- newUdpSocket ns conf Nothing addr mport
           return (BoundUdp sock)
      BoundUdp _ ->
        fail "Cannot re-bind bound UDP port."
      BoundTcp _ _ ->
        fail "Cannot re-bind bound TCP port."
      ListeningTcp _ ->
        fail "Cannot bind listening TCP socket."
      ConnectedTcp _ _ ->
        fail "Cannot bind connected TCP socket."
      Converted ->
        fail "Cannot bind converted socket."
      ClosedSocket ->
        -- BUG FIX: message previously said "connect" for a bind failure
        fail "Cannot bind closed socket."
-- | Put a bound TCP socket into the listening state.
--
-- Requires state 'BoundTcp' with a concrete local port; @backlog@ is
-- the maximum queue of pending connections.  A 'BoundTcp' without a
-- port, or any other state, calls 'fail'.
listen :: Socket -> Int -> IO ()
listen (Socket mvs _) backlog =
  modifyMVar_ mvs $ \ curstate ->
    case curstate of
      BoundTcp maddr (Just port) ->
        do let addr = fromMaybe (wildcardAddr undefined) maddr
               conf = defaultSocketConfig
           ns <- readMVar evilNetworkStackMVar
           lsock <- sListen ns conf addr port backlog
           return (ListeningTcp lsock)
      BoundTcp _ Nothing ->
        fail "Cannot listen on socket with unbound port."
      _ ->
        fail "Cannot listen on unbound TCP port."
-- | Accept one incoming connection on a listening TCP socket.
--
-- Returns a fresh 'Socket' (already 'ConnectedTcp', open in both
-- directions) together with the peer's address and port.  The state is
-- only read, not taken, so the listening socket stays available.
acceptIP4 :: Socket -> IO (Socket, IP4, SockPort)
acceptIP4 (Socket mvs _) =
  do curstate <- readMVar mvs
     case curstate of
       ListeningTcp lsock ->
         do sock <- sAccept lsock
            let addr = view tcpRemoteAddr sock
                port = view tcpRemotePort sock
            stateMV <- newMVar (ConnectedTcp sock ForBoth)
            ident <- nextSocketIdent
            return (Socket stateMV ident, addr, port)
       _ ->
         fail "Illegal state for accept socket."
-- | Which direction(s) of a connected socket to shut down, mirroring
-- the classic BSD shutdown commands.
data ShutdownCmd = ShutdownReceive | ShutdownSend | ShutdownBoth
 deriving (Typeable, Eq)
-- | Shut down one or both directions of a connected TCP socket.
--
-- If the requested direction is the only one still open, the socket is
-- closed outright; otherwise the remaining direction stays usable.
-- Guard order matters: the fully-closing cases are listed before the
-- partial ones so they are tried first.
shutdown :: Socket -> ShutdownCmd -> IO ()
shutdown (Socket mvs _) cmd =
  modifyMVar_ mvs $ \ curstate ->
    case curstate of
      ConnectedTcp sock _ | cmd == ShutdownBoth ->
        sClose sock >> return ClosedSocket
      ConnectedTcp sock ForRead | cmd == ShutdownReceive ->
        -- only the read side was open and it is being shut down
        sClose sock >> return ClosedSocket
      ConnectedTcp sock ForWrite | cmd == ShutdownSend ->
        sClose sock >> return ClosedSocket
      ConnectedTcp sock _ | cmd == ShutdownReceive ->
        return (ConnectedTcp sock ForWrite)
      ConnectedTcp sock _ | cmd == ShutdownSend ->
        return (ConnectedTcp sock ForRead)
      ConnectedTcp _ _ ->
        -- unreachable given the three commands above, but guards give
        -- the compiler no exhaustiveness guarantee
        fail "Internal consistency error in shutdown."
      _ ->
        fail "Shutdown called on un-connected socket."
-- | Close a socket unconditionally: whatever state it was in, it
-- becomes 'ClosedSocket'.  Note that no underlying Hans socket is
-- released here; only the state is replaced.
close :: Socket -> IO ()
close (Socket mvs _) =
  modifyMVar_ mvs (const (return ClosedSocket))
-- | Convert a connected TCP socket into an ordinary 'Handle'.
--
-- The requested 'IOMode' must be compatible with the direction(s) the
-- socket still has open ('ForBoth' accepts any mode; 'ForRead' only
-- 'ReadMode'; 'ForWrite' only 'AppendMode'/'WriteMode').  On success
-- the socket transitions to 'Converted' and must no longer be used
-- through the socket API.
socketToHandle :: Socket -> IOMode -> IO Handle
socketToHandle (Socket mvs _) mode =
  modifyMVar mvs $ \ curstate ->
    case curstate of
      ConnectedTcp sock ForBoth ->
        do hndl <- makeHansHandle sock mode
           return (Converted, hndl)
      ConnectedTcp sock ForRead | mode == ReadMode ->
        do hndl <- makeHansHandle sock mode
           return (Converted, hndl)
      ConnectedTcp sock ForWrite | mode `elem` [AppendMode, WriteMode] ->
        do hndl <- makeHansHandle sock mode
           return (Converted, hndl)
      ConnectedTcp _ allowed ->
        -- BUG FIX: message previously read "Access error converted
        -- socket to handle."
        fail ("Access error converting socket to handle. Socket is in " ++
              show allowed ++ " mode, but IOMode was " ++ show mode)
      _ ->
        fail ("Cannot convert unconnected socket to a handle.")
-- | Coarse, user-facing view of a socket's state, collapsing the
-- TCP/UDP distinction kept internally by 'SocketState'.
data SocketStatus = NotConnected
                  | Bound
                  | Listening
                  | Connected
                  | ConvertedToHandle
                  | Closed
 deriving (Eq, Show)
-- | Report the current 'SocketStatus' of a socket, mapping each
-- internal 'SocketState' to its public counterpart.
getSocketStatus :: Socket -> IO SocketStatus
getSocketStatus (Socket mvs _) =
  withMVar mvs $ \ st ->
    return $ case st of
      CreatedTcp       -> NotConnected
      CreatedUdp       -> NotConnected
      BoundUdp _       -> Bound
      BoundTcp _ _     -> Bound
      ListeningTcp _   -> Listening
      ConnectedTcp _ _ -> Connected
      Converted        -> ConvertedToHandle
      ClosedSocket     -> Closed
-- | Which direction(s) of a connected socket are usable; 'ForNeither'
-- expresses a request for no particular access (see 'modesMatch').
data Direction = ForWrite | ForRead | ForBoth | ForNeither
 deriving (Show)
-- | Is the socket connected and open in (at least) the requested
-- direction?  Any non-connected state answers 'False'.
directionOpen :: Socket -> Direction -> IO Bool
directionOpen (Socket mvs _) req =
  withMVar mvs $ \ st ->
    return $ case st of
      ConnectedTcp _ dir -> modesMatch req dir
      _                  -> False
-- | Run an action on the underlying Hans TCP socket, provided the
-- socket is connected and its open direction satisfies the requested
-- one (per 'modesMatch').  The MVar is held for the whole action.
withTcpSocket :: Socket -> Direction -> (TcpSocket IP4 -> IO a) -> IO a
withTcpSocket (Socket mvs _) dir action =
  withMVar mvs $ \ curstate ->
    case curstate of
      ConnectedTcp sock dir' | modesMatch dir dir' ->
        action sock
      ConnectedTcp _ dir' ->
        fail ("Mismatch between requested direction (" ++ show dir ++
              ") and allowed (" ++ show dir' ++ ")")
      _ ->
        fail ("TCP operation on non-TCP socket.")
-- | Run an action on the underlying Hans UDP socket; any state other
-- than 'BoundUdp' is an error.  The MVar is held for the whole action.
withUdpSocket :: Socket -> (UdpSocket IP4 -> IO a) -> IO a
withUdpSocket (Socket mvs _) action =
  withMVar mvs $ \ st ->
    case st of
      BoundUdp sock -> action sock
      _             -> fail "UDP operation on non-UDP socket."
-- | Split a host-order 'Word32' into its four octets (most significant
-- first) and pack them into an 'IP4'.
toIP4 :: Word32 -> IP4
toIP4 w32 = packIP4 (byte 24) (byte 16) (byte 8) (byte 0)
  where
    byte n = fromIntegral ((w32 `shiftR` n) .&. 0xFF)
-- | Pack the four octets of an 'IP4' into a host-order 'Word32',
-- most significant byte first (the inverse of 'toIP4').
fromIP4 :: IP4 -> Word32
fromIP4 ipaddr = foldl step 0 [a, b, c, d]
  where
    (a, b, c, d) = unpackIP4 ipaddr
    step acc o = (acc `shiftL` 8) .|. fromIntegral o
-- | @modesMatch requested allowed@: may a request for @requested@
-- access proceed on a socket whose open direction is @allowed@?
-- A 'ForNeither' request always succeeds, an 'allowed' of 'ForBoth'
-- satisfies any request, and otherwise the directions must agree
-- exactly.  (Clauses reordered but truth-table identical.)
modesMatch :: Direction -> Direction -> Bool
modesMatch ForNeither _        = True
modesMatch _          ForBoth  = True
modesMatch ForRead    ForRead  = True
modesMatch ForWrite   ForWrite = True
modesMatch _          _        = False
{-# NOINLINE evilNetworkStackMVar #-}
-- | Process-global network stack, installed via 'setNetworkStack'.
-- The NOINLINE pragma (restored here: the braces had been stripped,
-- leaving an invalid "# NOINLINE ... #") is required so the
-- 'unsafePerformIO' allocation happens exactly once.  Reading it
-- before 'setNetworkStack' hits the 'error' thunk.
evilNetworkStackMVar :: MVar NetworkStack
evilNetworkStackMVar =
  unsafePerformIO (newMVar (error "Access before network stack set!"))
{-# NOINLINE evilSocketIDMVar #-}
-- | Process-global counter handing out unique socket identifiers; see
-- 'nextSocketIdent'.  Pragma braces restored (were stripped to
-- "# NOINLINE ... #"); NOINLINE keeps the 'unsafePerformIO' cell unique.
evilSocketIDMVar :: MVar Word64
evilSocketIDMVar =
  unsafePerformIO (newMVar 1)
-- | Install the global network stack used by every socket operation,
-- replacing whatever was there before.
setNetworkStack :: NetworkStack -> IO ()
setNetworkStack ns = void (swapMVar evilNetworkStackMVar ns)
-- | Atomically hand out the next unique socket identifier and bump
-- the global counter.
nextSocketIdent :: IO Word64
nextSocketIdent =
  modifyMVar evilSocketIDMVar $ \ n -> return (n + 1, n)
| |
a7d77e4813676a5423704ad8fd72dd760043176605e1b152d31e803e552fd1f2 | facebook/pyre-check | aliasEnvironmentTest.ml |
(*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
open Core
open OUnit2
open Ast
open Analysis
open Pyre
open Test
let test_simple_registration context =
let assert_registers source name expected =
let project = ScratchProject.setup ["test.py", source] ~include_typeshed_stubs:false ~context in
let read_only =
ScratchProject.errors_environment project
|> ErrorsEnvironment.Testing.ReadOnly.alias_environment
in
let expected = expected >>| fun expected -> Type.TypeAlias (Type.Primitive expected) in
let printer v = v >>| Type.show_alias |> Option.value ~default:"none" in
assert_equal ~printer expected (AliasEnvironment.ReadOnly.get_alias read_only name)
in
assert_registers {|
class C:
pass
X = C
|} "test.X" (Some "test.C");
assert_registers {|
class D:
pass
X = D
Y = X
|} "test.Y" (Some "test.D");
assert_registers
{|
class E:
pass
X = E
Y = X
Z = Y
|}
"test.Z"
(Some "test.E");
assert_registers {|
X = Z
Y = X
Z = Y
|} "test.Z" None;
assert_registers {|
x = None
|} "test.x" None;
()
let test_harder_registrations context =
let assert_registers ?external_sources ~expected_alias source name =
let project = ScratchProject.setup ?external_sources ["test.py", source] ~context in
let read_only =
ScratchProject.errors_environment project
|> ErrorsEnvironment.Testing.ReadOnly.alias_environment
in
let printer alias =
alias >>| Type.sexp_of_alias >>| Sexp.to_string_hum |> Option.value ~default:"none"
in
assert_equal ~printer expected_alias (AliasEnvironment.ReadOnly.get_alias read_only name)
in
let parsed_assert_registers source name expected =
let parser expression =
parse_single_expression expression |> Type.create ~aliases:Type.empty_aliases
in
let expected_alias = expected >>| parser >>| fun alias -> Type.TypeAlias alias in
assert_registers ~expected_alias source name
in
let unparsed_assert_registers source name expected =
let expected_alias = expected >>| fun alias -> Type.TypeAlias alias in
assert_registers ~expected_alias source name
in
parsed_assert_registers {|
X = int
|} "test.X" (Some "int");
parsed_assert_registers
{|
from typing import Tuple
X = int
Y = Tuple[X, X]
|}
"test.Y"
(Some "typing.Tuple[int, int]");
parsed_assert_registers
{|
from typing import Tuple, List
B = int
A = List[B]
Z = Tuple[A, B]
|}
"test.Z"
(Some "typing.Tuple[typing.List[int], int]");
unparsed_assert_registers
{|
from mypy_extensions import TypedDict
X = int
class Q(TypedDict):
a: X
|}
"test.Q"
(* TypedDicts are treated as proper classes, not aliases. *)
None;
parsed_assert_registers {|
class Foo: ...
X = Foo[unknown.get("key")]
|} "test.X" None;
Do n't treat a global string assignment as an alias unless it is marked as ` TypeAlias ` .
assert_registers {|
X = int
Y = "X"
|} "test.Y" ~expected_alias:None;
assert_registers
{|
import typing
X = int
Y: typing_extensions.TypeAlias = "X"
|}
"test.Y"
~expected_alias:(Some (Type.TypeAlias Type.integer));
(* Recursive alias. *)
assert_registers
{|
from typing import Tuple, Union
Tree = Union[int, Tuple["Tree", "Tree"]]
|}
"test.Tree"
~expected_alias:
(Some
(Type.TypeAlias
(Type.RecursiveType.create
~name:"test.Tree"
~body:
(Type.union
[
Type.integer;
Type.tuple [Type.Primitive "test.Tree"; Type.Primitive "test.Tree"];
]))));
(* Forbid directly-recursive aliases. *)
assert_registers {|
Tree = "Tree"
|} "test.Tree" ~expected_alias:None;
assert_registers
{|
from typing import Union
X = Union[int, "X"]
|}
"test.X"
~expected_alias:None;
assert_registers
{|
from typing import Annotated
X = Annotated["X", int]
|}
"test.X"
~expected_alias:None;
assert_registers
{|
from typing import Tuple, TypeVar, Union
T = TypeVar("T")
GenericTree = Union[T, Tuple["GenericTree[T]", "GenericTree[T]"]]
|}
"test.GenericTree"
~expected_alias:None;
(* Aliases referring to recursive aliases. *)
assert_registers
{|
from typing import List, Union
X = List["X"]
Y = List[X]
|}
"test.Y"
~expected_alias:
(Some
(Type.TypeAlias
(Type.list
(Type.RecursiveType.create
~name:"test.X"
~body:(Type.list (Type.Primitive "test.X"))))));
assert_registers
{|
from typing import List, Sequence, Union
X = Union[
Sequence["X"],
List["X"]
]
Y = Union[int, X]
|}
"test.Y"
~expected_alias:
(Some
(Type.TypeAlias
(Type.union
[
Type.integer;
Type.RecursiveType.create
~name:"test.X"
~body:
(Type.union
[
Type.list (Type.Primitive "test.X");
Type.parametric "typing.Sequence" [Single (Type.Primitive "test.X")];
]);
])));
assert_registers
{|
from typing import List, Sequence, Union
X = Union[
Sequence["X"],
List["X"]
]
Y = Union[int, X]
Z = List[Y]
|}
"test.Z"
~expected_alias:
(Some
(Type.TypeAlias
(Type.list
(Type.union
[
Type.integer;
Type.RecursiveType.create
~name:"test.X"
~body:
(Type.union
[
Type.list (Type.Primitive "test.X");
Type.parametric "typing.Sequence" [Single (Type.Primitive "test.X")];
]);
]))));
assert_registers
{|
from typing import Generic, TypeVar
from pyre_extensions import TypeVarTuple, Unpack
from typing_extensions import Literal as L
T = TypeVar("T")
Ts = TypeVarTuple("Ts")
class Tensor(Generic[T, Unpack[Ts]]): ...
FloatTensor = Tensor[float, Unpack[Ts]]
|}
"test.FloatTensor"
~expected_alias:
(Some
(Type.TypeAlias
(Type.parametric
"test.Tensor"
[
Single Type.float;
Unpacked
(Type.OrderedTypes.Concatenation.create_unpackable
(Type.Variable.Variadic.Tuple.create "test.Ts"));
])));
(* An alias containing "..." should not mistake the "..." for some unknown alias. *)
assert_registers
~external_sources:["reexports_callable.py", {|
from typing import Callable as Callable
|}]
{|
from reexports_callable import Callable
F = Callable[..., int]
|}
"test.F"
~expected_alias:(Some (Type.TypeAlias (Type.Callable.create ~annotation:Type.integer ())));
(* Allow the union syntax in type aliases. *)
parsed_assert_registers {|
X = int | str
|} "test.X" (Some "typing.Union[int, str]");
()
let test_updates context =
let assert_updates
?(original_sources = [])
?(new_sources = [])
~middle_actions
~expected_triggers
?post_actions
()
=
Memory.reset_shared_memory ();
let project =
ScratchProject.setup
~include_typeshed_stubs:false
~track_dependencies:true
~in_memory:false
original_sources
~context
in
let configuration = ScratchProject.configuration_of project in
let read_only =
ScratchProject.errors_environment project
|> ErrorsEnvironment.Testing.ReadOnly.alias_environment
in
let execute_action (alias_name, dependency, expectation) =
let printer v =
v >>| Type.sexp_of_alias >>| Sexp.to_string_hum |> Option.value ~default:"none"
in
let expectation =
expectation
>>| parse_single_expression
>>| Type.create ~aliases:Type.empty_aliases
>>| fun alias -> Type.TypeAlias alias
in
AliasEnvironment.ReadOnly.get_alias read_only ~dependency alias_name
|> assert_equal ~printer expectation
in
List.iter middle_actions ~f:execute_action;
List.iter original_sources ~f:(fun (relative, _) ->
ScratchProject.delete_file project ~relative);
List.iter new_sources ~f:(fun (relative, content) ->
ScratchProject.add_file project ~relative content);
let update_result =
let { Configuration.Analysis.local_root; _ } = configuration in
List.map new_sources ~f:(fun (relative, _) ->
Test.relative_artifact_path ~root:local_root ~relative
|> ArtifactPath.Event.(create ~kind:Kind.Unknown))
|> ScratchProject.update_environment project
|> ErrorsEnvironment.Testing.UpdateResult.alias_environment
in
let printer set =
SharedMemoryKeys.DependencyKey.RegisteredSet.elements set
|> List.map ~f:SharedMemoryKeys.DependencyKey.get_key
|> List.to_string ~f:SharedMemoryKeys.show_dependency
in
let expected_triggers =
SharedMemoryKeys.DependencyKey.RegisteredSet.of_list expected_triggers
in
assert_equal
~printer
expected_triggers
(AliasEnvironment.UpdateResult.locally_triggered_dependencies update_result);
post_actions >>| List.iter ~f:execute_action |> Option.value ~default:()
in
let dependency =
SharedMemoryKeys.DependencyKey.Registry.register (TypeCheckDefine (Reference.create "dep"))
in
let assert_test_py_updates ?original_source ?new_source =
assert_updates
?original_sources:(original_source >>| fun source -> ["test.py", source])
?new_sources:(new_source >>| fun source -> ["test.py", source])
in
assert_test_py_updates
~original_source:{|
class C:
pass
X = C
|}
~new_source:{|
class C:
pass
X = C
|}
~middle_actions:["test.X", dependency, Some "test.C"]
~expected_triggers:[]
~post_actions:["test.X", dependency, Some "test.C"]
();
assert_test_py_updates
~original_source:{|
class C:
pass
X = C
|}
~new_source:{|
X = C
|}
~middle_actions:["test.X", dependency, Some "test.C"]
~expected_triggers:[dependency]
~post_actions:["test.X", dependency, None]
();
assert_test_py_updates
~original_source:{|
class C:
pass
X = C
|}
~new_source:{|
class C:
pass
Y = C
X = Y
|}
~middle_actions:["test.X", dependency, Some "test.C"]
(* Even if the route to the alias changed, no trigger *)
~expected_triggers:[]
~post_actions:["test.X", dependency, Some "test.C"]
();
assert_updates
~original_sources:
[
"test.py", {|
from placeholder import Q
X = Q
|};
"placeholder.pyi", {|
# pyre-placeholder-stub
|};
]
~new_sources:
[
"test.py", {|
from placeholder import Q
X = Q
|};
"placeholder.pyi", {|
|};
]
~middle_actions:["test.X", dependency, Some "typing.Any"]
~expected_triggers:[dependency]
~post_actions:["test.X", dependency, None]
();
(* Addition should trigger previous failed reads *)
assert_updates
~original_sources:["test.py", {|
|}]
~new_sources:["test.py", {|
class C:
pass
X = C
|}]
~middle_actions:["test.X", dependency, None]
~expected_triggers:[dependency]
~post_actions:["test.X", dependency, Some "test.C"]
();
()
(* Assemble the three test groups into the "environment" suite and run
   it under OUnit2 via the shared [Test.run] harness. *)
let () =
  "environment"
  >::: [
         "simple_registration" >:: test_simple_registration;
         "compounds" >:: test_harder_registrations;
         "updates" >:: test_updates;
       ]
  |> Test.run
| null | https://raw.githubusercontent.com/facebook/pyre-check/b01273d38792dc2b92ea6b2aa37668c4e86651fb/source/analysis/test/aliasEnvironmentTest.ml | ocaml | TypedDicts are treated as proper classes, not aliases.
Recursive alias.
Forbid directly-recursive aliases.
Aliases referring to recursive aliases.
An alias containing "..." should not mistake the "..." for some unknown alias.
Allow the union syntax in type aliases.
Even if the route to the alias changed, no trigger
Addition should trigger previous failed reads |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open Core
open OUnit2
open Ast
open Analysis
open Pyre
open Test
let test_simple_registration context =
let assert_registers source name expected =
let project = ScratchProject.setup ["test.py", source] ~include_typeshed_stubs:false ~context in
let read_only =
ScratchProject.errors_environment project
|> ErrorsEnvironment.Testing.ReadOnly.alias_environment
in
let expected = expected >>| fun expected -> Type.TypeAlias (Type.Primitive expected) in
let printer v = v >>| Type.show_alias |> Option.value ~default:"none" in
assert_equal ~printer expected (AliasEnvironment.ReadOnly.get_alias read_only name)
in
assert_registers {|
class C:
pass
X = C
|} "test.X" (Some "test.C");
assert_registers {|
class D:
pass
X = D
Y = X
|} "test.Y" (Some "test.D");
assert_registers
{|
class E:
pass
X = E
Y = X
Z = Y
|}
"test.Z"
(Some "test.E");
assert_registers {|
X = Z
Y = X
Z = Y
|} "test.Z" None;
assert_registers {|
x = None
|} "test.x" None;
()
let test_harder_registrations context =
let assert_registers ?external_sources ~expected_alias source name =
let project = ScratchProject.setup ?external_sources ["test.py", source] ~context in
let read_only =
ScratchProject.errors_environment project
|> ErrorsEnvironment.Testing.ReadOnly.alias_environment
in
let printer alias =
alias >>| Type.sexp_of_alias >>| Sexp.to_string_hum |> Option.value ~default:"none"
in
assert_equal ~printer expected_alias (AliasEnvironment.ReadOnly.get_alias read_only name)
in
let parsed_assert_registers source name expected =
let parser expression =
parse_single_expression expression |> Type.create ~aliases:Type.empty_aliases
in
let expected_alias = expected >>| parser >>| fun alias -> Type.TypeAlias alias in
assert_registers ~expected_alias source name
in
let unparsed_assert_registers source name expected =
let expected_alias = expected >>| fun alias -> Type.TypeAlias alias in
assert_registers ~expected_alias source name
in
parsed_assert_registers {|
X = int
|} "test.X" (Some "int");
parsed_assert_registers
{|
from typing import Tuple
X = int
Y = Tuple[X, X]
|}
"test.Y"
(Some "typing.Tuple[int, int]");
parsed_assert_registers
{|
from typing import Tuple, List
B = int
A = List[B]
Z = Tuple[A, B]
|}
"test.Z"
(Some "typing.Tuple[typing.List[int], int]");
unparsed_assert_registers
{|
from mypy_extensions import TypedDict
X = int
class Q(TypedDict):
a: X
|}
"test.Q"
None;
parsed_assert_registers {|
class Foo: ...
X = Foo[unknown.get("key")]
|} "test.X" None;
Do n't treat a global string assignment as an alias unless it is marked as ` TypeAlias ` .
assert_registers {|
X = int
Y = "X"
|} "test.Y" ~expected_alias:None;
assert_registers
{|
import typing
X = int
Y: typing_extensions.TypeAlias = "X"
|}
"test.Y"
~expected_alias:(Some (Type.TypeAlias Type.integer));
assert_registers
{|
from typing import Tuple, Union
Tree = Union[int, Tuple["Tree", "Tree"]]
|}
"test.Tree"
~expected_alias:
(Some
(Type.TypeAlias
(Type.RecursiveType.create
~name:"test.Tree"
~body:
(Type.union
[
Type.integer;
Type.tuple [Type.Primitive "test.Tree"; Type.Primitive "test.Tree"];
]))));
assert_registers {|
Tree = "Tree"
|} "test.Tree" ~expected_alias:None;
assert_registers
{|
from typing import Union
X = Union[int, "X"]
|}
"test.X"
~expected_alias:None;
assert_registers
{|
from typing import Annotated
X = Annotated["X", int]
|}
"test.X"
~expected_alias:None;
assert_registers
{|
from typing import Tuple, TypeVar, Union
T = TypeVar("T")
GenericTree = Union[T, Tuple["GenericTree[T]", "GenericTree[T]"]]
|}
"test.GenericTree"
~expected_alias:None;
assert_registers
{|
from typing import List, Union
X = List["X"]
Y = List[X]
|}
"test.Y"
~expected_alias:
(Some
(Type.TypeAlias
(Type.list
(Type.RecursiveType.create
~name:"test.X"
~body:(Type.list (Type.Primitive "test.X"))))));
assert_registers
{|
from typing import List, Sequence, Union
X = Union[
Sequence["X"],
List["X"]
]
Y = Union[int, X]
|}
"test.Y"
~expected_alias:
(Some
(Type.TypeAlias
(Type.union
[
Type.integer;
Type.RecursiveType.create
~name:"test.X"
~body:
(Type.union
[
Type.list (Type.Primitive "test.X");
Type.parametric "typing.Sequence" [Single (Type.Primitive "test.X")];
]);
])));
assert_registers
{|
from typing import List, Sequence, Union
X = Union[
Sequence["X"],
List["X"]
]
Y = Union[int, X]
Z = List[Y]
|}
"test.Z"
~expected_alias:
(Some
(Type.TypeAlias
(Type.list
(Type.union
[
Type.integer;
Type.RecursiveType.create
~name:"test.X"
~body:
(Type.union
[
Type.list (Type.Primitive "test.X");
Type.parametric "typing.Sequence" [Single (Type.Primitive "test.X")];
]);
]))));
assert_registers
{|
from typing import Generic, TypeVar
from pyre_extensions import TypeVarTuple, Unpack
from typing_extensions import Literal as L
T = TypeVar("T")
Ts = TypeVarTuple("Ts")
class Tensor(Generic[T, Unpack[Ts]]): ...
FloatTensor = Tensor[float, Unpack[Ts]]
|}
"test.FloatTensor"
~expected_alias:
(Some
(Type.TypeAlias
(Type.parametric
"test.Tensor"
[
Single Type.float;
Unpacked
(Type.OrderedTypes.Concatenation.create_unpackable
(Type.Variable.Variadic.Tuple.create "test.Ts"));
])));
assert_registers
~external_sources:["reexports_callable.py", {|
from typing import Callable as Callable
|}]
{|
from reexports_callable import Callable
F = Callable[..., int]
|}
"test.F"
~expected_alias:(Some (Type.TypeAlias (Type.Callable.create ~annotation:Type.integer ())));
parsed_assert_registers {|
X = int | str
|} "test.X" (Some "typing.Union[int, str]");
()
let test_updates context =
let assert_updates
?(original_sources = [])
?(new_sources = [])
~middle_actions
~expected_triggers
?post_actions
()
=
Memory.reset_shared_memory ();
let project =
ScratchProject.setup
~include_typeshed_stubs:false
~track_dependencies:true
~in_memory:false
original_sources
~context
in
let configuration = ScratchProject.configuration_of project in
let read_only =
ScratchProject.errors_environment project
|> ErrorsEnvironment.Testing.ReadOnly.alias_environment
in
let execute_action (alias_name, dependency, expectation) =
let printer v =
v >>| Type.sexp_of_alias >>| Sexp.to_string_hum |> Option.value ~default:"none"
in
let expectation =
expectation
>>| parse_single_expression
>>| Type.create ~aliases:Type.empty_aliases
>>| fun alias -> Type.TypeAlias alias
in
AliasEnvironment.ReadOnly.get_alias read_only ~dependency alias_name
|> assert_equal ~printer expectation
in
List.iter middle_actions ~f:execute_action;
List.iter original_sources ~f:(fun (relative, _) ->
ScratchProject.delete_file project ~relative);
List.iter new_sources ~f:(fun (relative, content) ->
ScratchProject.add_file project ~relative content);
let update_result =
let { Configuration.Analysis.local_root; _ } = configuration in
List.map new_sources ~f:(fun (relative, _) ->
Test.relative_artifact_path ~root:local_root ~relative
|> ArtifactPath.Event.(create ~kind:Kind.Unknown))
|> ScratchProject.update_environment project
|> ErrorsEnvironment.Testing.UpdateResult.alias_environment
in
let printer set =
SharedMemoryKeys.DependencyKey.RegisteredSet.elements set
|> List.map ~f:SharedMemoryKeys.DependencyKey.get_key
|> List.to_string ~f:SharedMemoryKeys.show_dependency
in
let expected_triggers =
SharedMemoryKeys.DependencyKey.RegisteredSet.of_list expected_triggers
in
assert_equal
~printer
expected_triggers
(AliasEnvironment.UpdateResult.locally_triggered_dependencies update_result);
post_actions >>| List.iter ~f:execute_action |> Option.value ~default:()
in
let dependency =
SharedMemoryKeys.DependencyKey.Registry.register (TypeCheckDefine (Reference.create "dep"))
in
let assert_test_py_updates ?original_source ?new_source =
assert_updates
?original_sources:(original_source >>| fun source -> ["test.py", source])
?new_sources:(new_source >>| fun source -> ["test.py", source])
in
assert_test_py_updates
~original_source:{|
class C:
pass
X = C
|}
~new_source:{|
class C:
pass
X = C
|}
~middle_actions:["test.X", dependency, Some "test.C"]
~expected_triggers:[]
~post_actions:["test.X", dependency, Some "test.C"]
();
assert_test_py_updates
~original_source:{|
class C:
pass
X = C
|}
~new_source:{|
X = C
|}
~middle_actions:["test.X", dependency, Some "test.C"]
~expected_triggers:[dependency]
~post_actions:["test.X", dependency, None]
();
assert_test_py_updates
~original_source:{|
class C:
pass
X = C
|}
~new_source:{|
class C:
pass
Y = C
X = Y
|}
~middle_actions:["test.X", dependency, Some "test.C"]
~expected_triggers:[]
~post_actions:["test.X", dependency, Some "test.C"]
();
assert_updates
~original_sources:
[
"test.py", {|
from placeholder import Q
X = Q
|};
"placeholder.pyi", {|
# pyre-placeholder-stub
|};
]
~new_sources:
[
"test.py", {|
from placeholder import Q
X = Q
|};
"placeholder.pyi", {|
|};
]
~middle_actions:["test.X", dependency, Some "typing.Any"]
~expected_triggers:[dependency]
~post_actions:["test.X", dependency, None]
();
assert_updates
~original_sources:["test.py", {|
|}]
~new_sources:["test.py", {|
class C:
pass
X = C
|}]
~middle_actions:["test.X", dependency, None]
~expected_triggers:[dependency]
~post_actions:["test.X", dependency, Some "test.C"]
();
()
(* Assemble the test suite for this module and hand it to the shared
   [Test.run] driver. [>:::]/[>::] are the usual suite/case combinators
   (OUnit-style); suite order is preserved. *)
let () =
  let suite =
    "environment"
    >::: [
           "simple_registration" >:: test_simple_registration;
           "compounds" >:: test_harder_registrations;
           "updates" >:: test_updates;
         ]
  in
  Test.run suite
|
5a52b1c8718ccc0d2d6a8cc73ddd4a05ceb0dc428019ed5cdb4709887633de36 | HaskellCNOrg/snap-web | Tests.hs | {-# LANGUAGE OverloadedStrings #-}
module Views.Tests (tests) where
import Test.Framework (Test, testGroup)
import qualified Views.PaginationSplicesTest (tests)
import qualified Views.ReplySplicesTest (tests)
-- | All view-layer test groups, bundled under the "Test.Views" label.
tests :: Test
tests = testGroup "Test.Views" viewGroups
  where
    -- Order preserved: reply splices first, then pagination splices.
    viewGroups =
      [ Views.ReplySplicesTest.tests
      , Views.PaginationSplicesTest.tests
      ]
| null | https://raw.githubusercontent.com/HaskellCNOrg/snap-web/f104fd9b8fc5ae74fc7b8002f0eb3f182a61529e/tests/Views/Tests.hs | haskell | # LANGUAGE OverloadedStrings # |
module Views.Tests (tests) where
import Test.Framework (Test, testGroup)
import qualified Views.PaginationSplicesTest (tests)
import qualified Views.ReplySplicesTest (tests)
tests :: Test
tests = testGroup "Test.Views"
[ Views.ReplySplicesTest.tests
, Views.PaginationSplicesTest.tests
]
|
af889fb86a2a20b6e9383e19fe6e217f3f45a184a7963d98795f069c213f8f72 | samply/blaze | all.clj | (ns blaze.db.impl.search-param.all
"A internal search parameter returning all resources of a type.
This search param is used to put the date search param on _lastUpdated into
second position if no other search param is available for the first position.
The date search param on _lastUpdated can't be in first position, because it
will return resources more than once if multiple updates with the same hash
are index with different lastUpdate times."
(:require
[blaze.db.impl.index.resource-as-of :as rao]
[blaze.db.impl.protocols :as p]))
(def search-param
  ;; A pseudo search parameter that just lists every resource of a type.
  ;; Per the ns docstring above, it exists so the `_lastUpdated` date
  ;; param never has to sit in first position during search evaluation.
  (reify p/SearchParam
    ;; No value compilation needed; the empty body always yields nil.
    (-compile-value [_ _ _])
    ;; Without a start id: all resource handles of type `tid`.
    (-resource-handles [_ context tid _ _]
      (rao/type-list context tid))
    ;; With a start id: continue the same type listing from `start-id`.
    (-resource-handles [_ context tid _ _ start-id]
      (rao/type-list context tid start-id))
    ;; This parameter indexes nothing; the empty body always yields nil.
    (-index-values [_ _ _])))
| null | https://raw.githubusercontent.com/samply/blaze/ccfad24c890c25a87ba4e3cde035ba8dbfd4d239/modules/db/src/blaze/db/impl/search_param/all.clj | clojure | (ns blaze.db.impl.search-param.all
"A internal search parameter returning all resources of a type.
This search param is used to put the date search param on _lastUpdated into
second position if no other search param is available for the first position.
The date search param on _lastUpdated can't be in first position, because it
will return resources more than once if multiple updates with the same hash
are index with different lastUpdate times."
(:require
[blaze.db.impl.index.resource-as-of :as rao]
[blaze.db.impl.protocols :as p]))
(def search-param
(reify p/SearchParam
(-compile-value [_ _ _])
(-resource-handles [_ context tid _ _]
(rao/type-list context tid))
(-resource-handles [_ context tid _ _ start-id]
(rao/type-list context tid start-id))
(-index-values [_ _ _])))
| |
6b6ef053b9df539417cb35c67fb9557c84d1799fbde1e27994fab743617c734a | jgm/texmath | OMML.hs | # LANGUAGE PatternGuards #
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) 2014 Jesse Rosenthal <>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.TeXMath.Readers.OMML
Copyright : Copyright (C) 2014 Jesse Rosenthal
License : GNU GPL, version 2 or above
Maintainer : Jesse Rosenthal <>
Stability : alpha
Portability : portable
Types and functions for conversion of OMML into TeXMath 'Exp's.
-}
module Text.TeXMath.Readers.OMML (readOMML) where
import Text.XML.Light
import Data.Maybe (isJust, mapMaybe, fromMaybe)
import Data.List (intercalate)
import Data.Char (isDigit, readLitChar)
import qualified Data.Text as T
import Text.TeXMath.Types
import Text.TeXMath.Shared (fixTree, getSpaceWidth, getOperator)
import Text.TeXMath.Unicode.ToTeX (getSymbolType)
import Text.TeXMath.Unicode.Fonts (getUnicode, textToFont)
import Data.List.Split (splitWhen)
-- | Parse an OMML document (given as text) into TeXMath expressions.
-- Fails with a message if the input is not well-formed XML, or if the
-- root is neither an <m:oMathPara> nor an <m:oMath> element.
readOMML :: T.Text -> Either T.Text [Exp]
readOMML s =
  case parseXMLDoc s of
    Nothing -> Left "Couldn't parse OMML file"
    Just e ->
      case elemToOMML e of
        Just exs -> Right (map fixTree (unGroup exs))
        Nothing -> Left "xml file was not an <m:oMathPara> or <m:oMath> element."
-- | Strip a single outer 'EGrouped' wrapper when it is the whole list;
-- any other shape is returned unchanged.
unGroup :: [Exp] -> [Exp]
unGroup xs =
  case xs of
    [EGrouped inner] -> inner
    _ -> xs
-- | Convert a top-level OMML element to TeXMath expressions.
-- An <m:oMathPara> converts each child formula, collapsing one-element
-- results to a bare expression and grouping longer ones; an <m:oMath>
-- converts its (unwrapped) children directly.  Other elements fail.
elemToOMML :: Element -> Maybe [Exp]
elemToOMML element
  | isElem "m" "oMathPara" element =
      let groups = mapMaybe elemToOMML (elChildren element)
          collapse [single] = single
          collapse several  = EGrouped several
      in Just (map collapse groups)
  | isElem "m" "oMath" element =
      Just (concat (mapMaybe elemToExps (unwrapWTags (elChildren element))))
  | otherwise = Nothing
-- oMath can contain w:hyperlink, w:sdt, etc. I can't find a complete
-- documentation of this, so we just unwrap any w:tag immediately
-- beneath oMath. Note that this shouldn't affect the "w" tags in
-- elemToOMathRunElem(s) because, those are underneath an "m:r" tag.
-- | Replace any "w"-prefixed element by its children, leaving all other
-- elements in place (see the comment above for why).
unwrapWTags :: [Element] -> [Element]
unwrapWTags = concatMap unwrap
  where
    unwrap el
      | qPrefix (elName el) == Just "w" = elChildren el
      | otherwise = [el]
-- Kept as String because of Text.XML.Light
-- | True when the element's qualified name has the given prefix and
-- local name (a missing prefix is treated as the empty string).
isElem :: String -> String -> Element -> Bool
isElem prefix name element =
  qName qn == name && fromMaybe "" (qPrefix qn) == prefix
  where
    qn = elName element
-- Kept as String because of Text.XML.Light
-- | Predicate form of 'isElem' working directly on a 'QName' (as used
-- with filterChildName / findAttrBy).
hasElemName :: String -> String -> QName -> Bool
hasElemName prefix name qn =
  qName qn == name && fromMaybe "" (qPrefix qn) == prefix
-- | One atomic piece of an OMML run: literal text, a line break, or a tab.
data OMathRunElem = TextRun T.Text
                   | LnBrk
                   | Tab
                   deriving Show
-- | Style attached to a run: none at all, explicitly "normal" (<m:nor>),
-- or a (script, style) pair collected from <m:scr>/<m:sty>.
data OMathRunTextStyle = NoStyle
                       | Normal
                       | Styled { oMathScript :: Maybe OMathTextScript
                                , oMathStyle :: Maybe OMathTextStyle }
                       deriving Show
-- | OMML script families (the recognized <m:scr> attribute values).
data OMathTextScript = ORoman
                     | OScript
                     | OFraktur
                     | ODoubleStruck
                     | OSansSerif
                     | OMonospace
                     deriving (Show, Eq)
-- | OMML style values (<m:sty>): plain, bold, italic, bold-italic.
data OMathTextStyle = OPlain
                    | OBold
                    | OItalic
                    | OBoldItalic
                    deriving (Show, Eq)
-- | Convert an <m:e> (base) element to one expression, wrapping multiple
-- children in 'EGrouped'.  Non-<m:e> elements fail.
elemToBase :: Element -> Maybe Exp
elemToBase element
  | isElem "m" "e" element = do
      bs <- elemToBases element
      case bs of
        [single] -> return single
        several  -> return (EGrouped several)
  | otherwise = Nothing
-- | Convert an <m:e> (base) element to the flat list of expressions of
-- its children; non-<m:e> elements fail.
elemToBases :: Element -> Maybe [Exp]
elemToBases element
  | isElem "m" "e" element =
      Just . concat . mapMaybe elemToExps' $ elChildren element
  | otherwise = Nothing
-- | Split a row of expressions into columns at each "&" symbol (the
-- separators themselves are dropped by splitWhen).
breakOnAmpersand :: [Exp] -> [[Exp]]
breakOnAmpersand = splitWhen isAmp
  where
    isAmp e = case e of
      ESymbol _ "&" -> True
      _ -> False
-- | Read the text style of a run from its <m:rPr> (run properties):
-- an <m:nor> child forces 'Normal'; otherwise the <m:scr> (script) and
-- <m:sty> (style) attribute values are collected into 'Styled'; a run
-- with no <m:rPr> at all is 'NoStyle'.
elemToOMathRunTextStyle :: Element -> OMathRunTextStyle
elemToOMathRunTextStyle element
  | Just mrPr <- filterChildName (hasElemName"m" "rPr") element
  , Just _ <- filterChildName (hasElemName"m" "nor") mrPr =
    Normal
  | Just mrPr <- filterChildName (hasElemName"m" "rPr") element =
    let scr =
          -- Script family: unrecognized or absent values map to Nothing.
          case
            filterChildName (hasElemName"m" "scr") mrPr >>=
            findAttrBy (hasElemName"m" "val")
          of
            Just "roman" -> Just ORoman
            Just "script" -> Just OScript
            Just "fraktur" -> Just OFraktur
            Just "double-struck" -> Just ODoubleStruck
            Just "sans-serif" -> Just OSansSerif
            Just "monospace" -> Just OMonospace
            _ -> Nothing
        sty =
          -- Style axis: plain / bold / italic / bold-italic.
          case
            filterChildName (hasElemName"m" "sty") mrPr >>=
            findAttrBy (hasElemName"m" "val")
          of
            Just "p" -> Just OPlain
            Just "b" -> Just OBold
            Just "i" -> Just OItalic
            Just "bi" -> Just OBoldItalic
            _ -> Nothing
    in
      Styled { oMathScript = scr, oMathStyle = sty }
  | otherwise = NoStyle
-- | Convert a single child of a run into a run element: literal text
-- (<w:t>, <m:t>, <w:delText>), a line break (<w:br>), a tab (<w:tab>),
-- or a symbol (<w:sym>, resolved through its font by 'getSymChar').
-- Anything else is dropped (Nothing).
elemToOMathRunElem :: Element -> Maybe OMathRunElem
elemToOMathRunElem element
  | isElem "w" "t" element
    || isElem "m" "t" element
    || isElem "w" "delText" element = Just $ TextRun $ T.pack $ strContent element
  | isElem "w" "br" element = Just LnBrk
  | isElem "w" "tab" element = Just Tab
  | isElem "w" "sym" element = Just $ TextRun $ getSymChar element
  | otherwise = Nothing
-- | Convert a run element (<w:r> or <m:r>) into its list of run
-- elements; any other element fails.
elemToOMathRunElems :: Element -> Maybe [OMathRunElem]
elemToOMathRunElems element =
  if isElem "w" "r" element || isElem "m" "r" element
    then Just (mapMaybe elemToOMathRunElem (elChildren element))
    else Nothing
----- And now the TeXMath Creation
-- | Flatten one run element to text; breaks and tabs become their
-- literal control characters.
oMathRunElemToText :: OMathRunElem -> T.Text
oMathRunElemToText el = case el of
  TextRun s -> s
  LnBrk     -> "\n"
  Tab       -> "\t"
-- | Flatten a whole run to a single piece of text.
oMathRunElemsToText :: [OMathRunElem] -> T.Text
oMathRunElemsToText elems = T.concat (map oMathRunElemToText elems)
-- | Map the parsed OMML run style onto a TeXMath 'TextType'.
-- Guard order is load-bearing: the combined (style, script) cases must
-- precede the single-axis fallbacks below them, otherwise e.g. bold
-- sans-serif would be reported as plain bold.
oMathRunTextStyleToTextType :: OMathRunTextStyle -> Maybe TextType
oMathRunTextStyleToTextType (Normal) = Just $ TextNormal
oMathRunTextStyleToTextType (NoStyle) = Nothing
oMathRunTextStyleToTextType (Styled scr sty)
  -- Combined style+script cases first.
  | Just OBold <- sty
  , Just OSansSerif <- scr =
    Just $ TextSansSerifBold
  | Just OBoldItalic <- sty
  , Just OSansSerif <- scr =
    Just $ TextSansSerifBoldItalic
  | Just OBold <- sty
  , Just OScript <- scr =
    Just $ TextBoldScript
  | Just OBold <- sty
  , Just OFraktur <- scr =
    Just $ TextBoldFraktur
  | Just OItalic <- sty
  , Just OSansSerif <- scr =
    Just $ TextSansSerifItalic
  -- Single-axis fallbacks.
  | Just OBold <- sty =
    Just $ TextBold
  | Just OItalic <- sty =
    Just $ TextItalic
  | Just OMonospace <- scr =
    Just $ TextMonospace
  | Just OSansSerif <- scr =
    Just $ TextSansSerif
  | Just ODoubleStruck <- scr =
    Just $ TextDoubleStruck
  | Just OScript <- scr =
    Just $ TextScript
  | Just OFraktur <- scr =
    Just $ TextFraktur
  | Just OBoldItalic <- sty =
    Just $ TextBoldItalic
  | otherwise = Nothing
-- | Like 'elemToExps'', but with a single outer grouping stripped.
elemToExps :: Element -> Maybe [Exp]
elemToExps = fmap unGroup . elemToExps'
-- | Workhorse translator: convert one OMML layout element into TeXMath
-- expressions.  Each clause handles one <m:*> construct; unrecognized
-- elements yield Nothing.
--
-- Fix: the "bot" branch of the <m:groupChr> clause had lost its default
-- alternative (the line had been replaced by bare comment text, which is
-- a syntax error); restored as @_ -> "\65080"@, the underbrace
-- presentation form (U+FE38), mirroring the overbrace default (U+FE37)
-- in the "top" branch.
elemToExps' :: Element -> Maybe [Exp]
-- <m:acc>: accent over a base; m:accPr/m:chr supplies the accent char.
-- NOTE(review): 'Just . head' errors if the m:val attribute is the empty
-- string; the <m:d> clause guards against null attribute values but this
-- one does not -- confirm such input cannot occur.
elemToExps' element | isElem "m" "acc" element = do
  let chr = filterChildName (hasElemName "m" "accPr") element >>=
            filterChildName (hasElemName "m" "chr") >>=
            findAttrBy (hasElemName "m" "val") >>=
            Just . head
      chr' = case chr of
        Just c -> T.singleton c
        Nothing -> "\x302" -- default to wide hat.
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  return $ [EOver False baseExp (ESymbol Accent chr')]
-- <m:bar>: over- or under-bar, depending on m:barPr/m:pos.
elemToExps' element | isElem "m" "bar" element = do
  let pos = filterChildName (hasElemName "m" "barPr") element >>=
            filterChildName (hasElemName "m" "pos") >>=
            findAttrBy (hasElemName "m" "val")
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  -- According to the OMML specification (ECMA-376), the default value of
  -- pos (whether the element exists or not) is "bot".
  case pos of
    Just "top" -> Just [EOver False baseExp (ESymbol TOver "\773")]
    _ -> Just [EUnder False baseExp (ESymbol TUnder "\818")]
-- <m:box>: box container; rendered as just its base.
elemToExps' element | isElem "m" "box" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  return [baseExp]
-- <m:borderBox>: boxed expression.
elemToExps' element | isElem "m" "borderBox" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  return [EBoxed baseExp]
-- <m:d>: delimited group; open/sep/close chars come from m:dPr, with
-- "(", "|", ")" as defaults and a space when the attribute is empty.
elemToExps' element | isElem "m" "d" element =
  let baseExps = mapMaybe
                 elemToBases
                 (elChildren element)
      inDelimExps = map (map Right) baseExps
      dPr = filterChildName (hasElemName "m" "dPr") element
      begChr = dPr >>=
               filterChildName (hasElemName "m" "begChr") >>=
               findAttrBy (hasElemName "m" "val") >>=
               (\c -> if null c then (Just ' ') else (Just $ head c))
      sepChr = dPr >>=
               filterChildName (hasElemName "m" "sepChr") >>=
               findAttrBy (hasElemName "m" "val") >>=
               (\c -> if null c then (Just ' ') else (Just $ head c))
      endChr = dPr >>=
               filterChildName (hasElemName "m" "endChr") >>=
               findAttrBy (hasElemName "m" "val") >>=
               (\c -> if null c then (Just ' ') else (Just $ head c))
      beg = maybe "(" T.singleton begChr
      end = maybe ")" T.singleton endChr
      sep = maybe "|" T.singleton sepChr
      exps = intercalate [Left sep] inDelimExps
  in
    Just [EDelimited beg end exps]
-- <m:eqArr>: equation array; rows split into columns at "&", columns
-- alternate right/left alignment.
-- NOTE(review): 'maximum' throws on an <m:eqArr> with no rows.
elemToExps' element | isElem "m" "eqArr" element =
  let expLst = mapMaybe elemToBases (elChildren element)
      expLst' = map breakOnAmpersand expLst
      cols = maximum (map length expLst')
      colspecs = take cols $ cycle [AlignRight , AlignLeft]
  in
    return [EArray colspecs expLst']
-- <m:f>: fraction; m:fPr/m:type "noBar" selects a bar-less fraction.
elemToExps' element | isElem "m" "f" element = do
  num <- filterChildName (hasElemName "m" "num") element
  den <- filterChildName (hasElemName "m" "den") element
  let barType = filterChildName (hasElemName "m" "fPr") element >>=
                filterChildName (hasElemName "m" "type") >>=
                findAttrBy (hasElemName "m" "val")
  let numExp = EGrouped $ concat $ mapMaybe (elemToExps) (elChildren num)
      denExp = EGrouped $ concat $ mapMaybe (elemToExps) (elChildren den)
  case barType of
    Just "noBar" -> Just [EFraction NoLineFrac numExp denExp]
    _ -> Just [EFraction NormalFrac numExp denExp]
-- <m:func>: function application, as function name followed by base.
elemToExps' element | isElem "m" "func" element = do
  fName <- filterChildName (hasElemName "m" "fName") element
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  let fnameExp = case mconcat $ mapMaybe (elemToExps') (elChildren fName) of
        [x] -> x
        xs -> EGrouped xs
  return [fnameExp, baseExp]
-- <m:groupChr>: grouping character (e.g. over/underbrace) above or
-- below the base; m:pos picks the side, m:vertJC the justification.
elemToExps' element | isElem "m" "groupChr" element = do
  let gPr = filterChildName (hasElemName "m" "groupChrPr") element
      chr = gPr >>=
            filterChildName (hasElemName "m" "chr") >>=
            findAttrBy (hasElemName "m" "val")
      pos = gPr >>=
            filterChildName (hasElemName "m" "pos") >>=
            findAttrBy (hasElemName "m" "val")
      justif = gPr >>=
               filterChildName (hasElemName "m" "vertJC") >>=
               findAttrBy (hasElemName "m" "val")
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  case pos of
    Just "top" ->
      let chr' = case chr of
                   Just (c:_) -> T.singleton c
                   _ -> "\65079" -- default to overbrace
      in
        return $
        case justif of
          Just "top" -> [EUnder False (ESymbol TOver chr') baseExp]
          _ -> [EOver False baseExp (ESymbol TOver chr')]
    _ -> -- bot is default
      let chr' = case chr of
                   Just (c:_) -> T.singleton c
                   _ -> "\65080" -- default to underbrace
      in
        return $
        case justif of
          Just "top" -> [EUnder False baseExp (ESymbol TUnder chr')]
          _ -> [EOver False (ESymbol TUnder chr') baseExp]
-- <m:limLow>: limit placed under the base.
elemToExps' element | isElem "m" "limLow" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element
             >>= elemToBase
  limExp <- filterChildName (hasElemName "m" "lim") element
            >>= (\e -> Just $ concat $ mapMaybe (elemToExps) (elChildren e))
            >>= (return . EGrouped)
  return [EUnder True baseExp limExp]
-- <m:limUpp>: limit placed over the base.
elemToExps' element | isElem "m" "limUpp" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element
             >>= elemToBase
  limExp <- filterChildName (hasElemName "m" "lim") element
            >>= (\e -> Just $ concat $ mapMaybe (elemToExps) (elChildren e))
            >>= (return . EGrouped)
  return [EOver True baseExp limExp]
-- <m:m>: matrix; one row per <m:mr>, centered columns.
elemToExps' element | isElem "m" "m" element =
  let rows = filterChildrenName (hasElemName "m" "mr") element
      rowExps = map
                (\mr -> mapMaybe
                        elemToBases
                        (elChildren mr))
                rows
  in
    return [EArray [AlignCenter] rowExps]
-- <m:nary>: n-ary operator (sum, integral, ...) with sub/sup limits;
-- m:limLoc "undOvr" puts the limits under/over instead of as scripts.
elemToExps' element | isElem "m" "nary" element = do
  let naryPr = filterChildName (hasElemName "m" "naryPr") element
      naryChr = naryPr >>=
                filterChildName (hasElemName "m" "chr") >>=
                findAttrBy (hasElemName "m" "val")
      opChr = case naryChr of
        Just (c:_) -> T.singleton c
        _ -> "\8747" -- default to integral
      limLoc = naryPr >>=
               filterChildName (hasElemName "m" "limLoc") >>=
               findAttrBy (hasElemName "m" "val")
  subExps <- filterChildName (hasElemName "m" "sub") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  supExps <- filterChildName (hasElemName "m" "sup") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  case limLoc of
    Just "undOvr" -> return [EUnderover True
                             (ESymbol Op opChr)
                             (EGrouped subExps)
                             (EGrouped supExps)
                            , baseExp]
    _ -> return [ESubsup
                 (ESymbol Op opChr)
                 (EGrouped subExps)
                 (EGrouped supExps)
                , baseExp]
-- <m:phant>: phantom (takes space, renders nothing).
elemToExps' element | isElem "m" "phant" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  return [EPhantom baseExp]
-- <m:rad>: radical; an empty degree means a plain square root.
elemToExps' element | isElem "m" "rad" element = do
  degExps <- filterChildName (hasElemName "m" "deg") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  return $ case degExps of
    [] -> [ESqrt baseExp]
    ds -> [ERoot (EGrouped ds) baseExp]
-- <m:sPre>: pre-sub/superscripts, attached to an empty identifier.
elemToExps' element | isElem "m" "sPre" element = do
  subExps <- filterChildName (hasElemName "m" "sub") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  supExps <- filterChildName (hasElemName "m" "sup") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  return [ESubsup
          (EIdentifier "")
          (EGrouped subExps)
          (EGrouped supExps)
         , baseExp]
-- <m:sSub>: subscript.
elemToExps' element | isElem "m" "sSub" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  subExps <- filterChildName (hasElemName "m" "sub") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  return [ESub baseExp (EGrouped subExps)]
-- <m:sSubSup>: combined sub- and superscript.
elemToExps' element | isElem "m" "sSubSup" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  subExps <- filterChildName (hasElemName "m" "sub") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  supExps <- filterChildName (hasElemName "m" "sup") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  return [ESubsup baseExp (EGrouped subExps) (EGrouped supExps)]
-- <m:sSup>: superscript.
elemToExps' element | isElem "m" "sSup" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  supExps <- filterChildName (hasElemName "m" "sup") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  return [ESuper baseExp (EGrouped supExps)]
-- <m:r>: text run.  m:lit / m:nor force literal-text rendering;
-- otherwise the text is interpreted char-by-char, optionally styled.
elemToExps' element | isElem "m" "r" element = do
  let mrPr = filterChildName (hasElemName "m" "rPr") element
      lit = mrPr >>= filterChildName (hasElemName "m" "lit")
      nor = mrPr >>= filterChildName (hasElemName "m" "nor")
      txtSty = oMathRunTextStyleToTextType $ elemToOMathRunTextStyle element
  mrElems <- elemToOMathRunElems element
  return $
    if null lit && null nor
    then case txtSty of
      Nothing ->
        interpretText $ oMathRunElemsToText mrElems
      Just textSty ->
        [EStyled textSty $ interpretText $ oMathRunElemsToText mrElems]
    else [EText (fromMaybe TextNormal txtSty) $ oMathRunElemsToText mrElems]
elemToExps' _ = Nothing
-- | Classify a single character as a TeXMath expression: digits become
-- 'ENumber'; otherwise dispatch on the character's symbol type, with
-- Ord characters additionally checked for a known space width.
interpretChar :: Char -> Exp
interpretChar c | isDigit c = ENumber $ T.singleton c
interpretChar c = case getSymbolType c of
  Alpha -> EIdentifier c'
  -- The 'isDigit' guard below is unreachable: all digits are caught by
  -- the first clause above.  Kept as a safeguard.
  Ord | isDigit c -> ENumber c'
      | otherwise -> case getSpaceWidth c of
        Just x -> ESpace x
        Nothing -> ESymbol Ord c'
  symType -> ESymbol symType c'
  where
    c' = T.singleton c
-- | Turn the raw text of a run into TeXMath expressions: a single char
-- is classified directly, an all-digit string becomes one number, a
-- recognized operator name becomes 'EMathOperator', and anything else
-- is interpreted character by character.
interpretText :: T.Text -> [Exp]
interpretText t
  | T.length t == 1 = [interpretChar (T.head t)]
  | T.all isDigit t = [ENumber t]
  | isJust (getOperator (EMathOperator t)) = [EMathOperator t]
  | otherwise = map interpretChar (T.unpack t)
-- The char attribute is a hex string.
-- | Resolve a <w:sym> element to its Unicode text: read the w:char hex
-- codepoint, look the glyph up in the symbol font named by w:font, and
-- return the mapped character ("" when anything is missing or unmapped).
getSymChar :: Element -> T.Text
getSymChar element
  | Just s <- lowerFromPrivate <$> getCodepoint
  , Just font <- getFont =
    case readLitChar ("\\x" ++ s) of
      [(char, _)] -> maybe "" T.singleton $ getUnicode font char
      _ -> ""
  where
    getCodepoint = findAttrBy (hasElemName "w" "char") element
    getFont = (textToFont . T.pack) =<< findAttrBy (hasElemName "w" "font") element
    -- Rewrites a leading 'F' to '0' -- presumably folding F0xx
    -- private-use codepoints (as used by symbol fonts) down to 00xx
    -- before the font-table lookup; confirm against the font tables.
    lowerFromPrivate ('F':xs) = '0':xs
    lowerFromPrivate xs = xs
getSymChar _ = ""
| null | https://raw.githubusercontent.com/jgm/texmath/0f95c7d61a76491d9dce219763323d6f3173df21/src/Text/TeXMath/Readers/OMML.hs | haskell | # LANGUAGE OverloadedStrings #
documentation of this, so we just unwrap any w:tag immediately
beneath oMath. Note that this shouldn't affect the "w" tags in
elemToOMathRunElem(s) because, those are underneath an "m:r" tag.
--- And now the TeXMath Creation
default to wide hat.
see
default to overbrace
bot is default
default to integral
The char attribute is a hex string | # LANGUAGE PatternGuards #
Copyright ( C ) 2014 < >
This program is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
Copyright (C) 2014 Jesse Rosenthal <>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
|
Module : Text . TeXMath . Readers . OMML
Copyright : Copyright ( C ) 2014
License : GNU GPL , version 2 or above
Maintainer : < >
Stability : alpha
Portability : portable
Types and functions for conversion of OMML into TeXMath ' Exp 's .
Module : Text.TeXMath.Readers.OMML
Copyright : Copyright (C) 2014 Jesse Rosenthal
License : GNU GPL, version 2 or above
Maintainer : Jesse Rosenthal <>
Stability : alpha
Portability : portable
Types and functions for conversion of OMML into TeXMath 'Exp's.
-}
module Text.TeXMath.Readers.OMML (readOMML) where
import Text.XML.Light
import Data.Maybe (isJust, mapMaybe, fromMaybe)
import Data.List (intercalate)
import Data.Char (isDigit, readLitChar)
import qualified Data.Text as T
import Text.TeXMath.Types
import Text.TeXMath.Shared (fixTree, getSpaceWidth, getOperator)
import Text.TeXMath.Unicode.ToTeX (getSymbolType)
import Text.TeXMath.Unicode.Fonts (getUnicode, textToFont)
import Data.List.Split (splitWhen)
readOMML :: T.Text -> Either T.Text [Exp]
readOMML s | Just e <- parseXMLDoc s =
case elemToOMML e of
Just exs -> Right $ map fixTree $ unGroup exs
Nothing -> Left "xml file was not an <m:oMathPara> or <m:oMath> element."
readOMML _ = Left "Couldn't parse OMML file"
unGroup :: [Exp] -> [Exp]
unGroup [EGrouped exps] = exps
unGroup exps = exps
elemToOMML :: Element -> Maybe [Exp]
elemToOMML element | isElem "m" "oMathPara" element = do
let expList = mapMaybe elemToOMML (elChildren element)
return $ map (\l -> if length l == 1 then (head l) else EGrouped l) expList
elemToOMML element | isElem "m" "oMath" element =
Just $ concat $ mapMaybe elemToExps $ unwrapWTags $ elChildren element
elemToOMML _ = Nothing
oMath can contain w : hyperlink , w : sdt , etc . I ca n't find a complete
unwrapWTags :: [Element] -> [Element]
unwrapWTags elements = concatMap unwrapChild elements
where unwrapChild element = case qPrefix $ elName element of
Just "w" -> elChildren element
_ -> [element]
Kept as String because of Text . XML.Light
isElem :: String -> String -> Element -> Bool
isElem prefix name element =
let qp = fromMaybe "" (qPrefix (elName element))
in
qName (elName element) == name &&
qp == prefix
Kept as String because of Text . XML.Light
hasElemName :: String -> String -> QName -> Bool
hasElemName prefix name qn =
let qp = fromMaybe "" (qPrefix qn)
in
qName qn == name &&
qp == prefix
data OMathRunElem = TextRun T.Text
| LnBrk
| Tab
deriving Show
data OMathRunTextStyle = NoStyle
| Normal
| Styled { oMathScript :: Maybe OMathTextScript
, oMathStyle :: Maybe OMathTextStyle }
deriving Show
data OMathTextScript = ORoman
| OScript
| OFraktur
| ODoubleStruck
| OSansSerif
| OMonospace
deriving (Show, Eq)
data OMathTextStyle = OPlain
| OBold
| OItalic
| OBoldItalic
deriving (Show, Eq)
elemToBase :: Element -> Maybe Exp
elemToBase element | isElem "m" "e" element = do
bs <- elemToBases element
return $ case bs of
(e : []) -> e
exps -> EGrouped exps
elemToBase _ = Nothing
elemToBases :: Element -> Maybe [Exp]
elemToBases element | isElem "m" "e" element =
return $ concat $ mapMaybe elemToExps' (elChildren element)
elemToBases _ = Nothing
breakOnAmpersand :: [Exp] -> [[Exp]]
breakOnAmpersand = splitWhen isAmpersand
where
isAmpersand (ESymbol _ "&") = True
isAmpersand _ = False
elemToOMathRunTextStyle :: Element -> OMathRunTextStyle
elemToOMathRunTextStyle element
| Just mrPr <- filterChildName (hasElemName"m" "rPr") element
, Just _ <- filterChildName (hasElemName"m" "nor") mrPr =
Normal
| Just mrPr <- filterChildName (hasElemName"m" "rPr") element =
let scr =
case
filterChildName (hasElemName"m" "scr") mrPr >>=
findAttrBy (hasElemName"m" "val")
of
Just "roman" -> Just ORoman
Just "script" -> Just OScript
Just "fraktur" -> Just OFraktur
Just "double-struck" -> Just ODoubleStruck
Just "sans-serif" -> Just OSansSerif
Just "monospace" -> Just OMonospace
_ -> Nothing
sty =
case
filterChildName (hasElemName"m" "sty") mrPr >>=
findAttrBy (hasElemName"m" "val")
of
Just "p" -> Just OPlain
Just "b" -> Just OBold
Just "i" -> Just OItalic
Just "bi" -> Just OBoldItalic
_ -> Nothing
in
Styled { oMathScript = scr, oMathStyle = sty }
| otherwise = NoStyle
elemToOMathRunElem :: Element -> Maybe OMathRunElem
elemToOMathRunElem element
| isElem "w" "t" element
|| isElem "m" "t" element
|| isElem "w" "delText" element = Just $ TextRun $ T.pack $ strContent element
| isElem "w" "br" element = Just LnBrk
| isElem "w" "tab" element = Just Tab
| isElem "w" "sym" element = Just $ TextRun $ getSymChar element
| otherwise = Nothing
elemToOMathRunElems :: Element -> Maybe [OMathRunElem]
elemToOMathRunElems element
| isElem "w" "r" element
|| isElem "m" "r" element =
Just $ mapMaybe (elemToOMathRunElem) (elChildren element)
elemToOMathRunElems _ = Nothing
oMathRunElemToText :: OMathRunElem -> T.Text
oMathRunElemToText (TextRun s) = s
oMathRunElemToText (LnBrk) = "\n"
oMathRunElemToText (Tab) = "\t"
oMathRunElemsToText :: [OMathRunElem] -> T.Text
oMathRunElemsToText = T.concat . map oMathRunElemToText
oMathRunTextStyleToTextType :: OMathRunTextStyle -> Maybe TextType
oMathRunTextStyleToTextType (Normal) = Just $ TextNormal
oMathRunTextStyleToTextType (NoStyle) = Nothing
oMathRunTextStyleToTextType (Styled scr sty)
| Just OBold <- sty
, Just OSansSerif <- scr =
Just $ TextSansSerifBold
| Just OBoldItalic <- sty
, Just OSansSerif <- scr =
Just $ TextSansSerifBoldItalic
| Just OBold <- sty
, Just OScript <- scr =
Just $ TextBoldScript
| Just OBold <- sty
, Just OFraktur <- scr =
Just $ TextBoldFraktur
| Just OItalic <- sty
, Just OSansSerif <- scr =
Just $ TextSansSerifItalic
| Just OBold <- sty =
Just $ TextBold
| Just OItalic <- sty =
Just $ TextItalic
| Just OMonospace <- scr =
Just $ TextMonospace
| Just OSansSerif <- scr =
Just $ TextSansSerif
| Just ODoubleStruck <- scr =
Just $ TextDoubleStruck
| Just OScript <- scr =
Just $ TextScript
| Just OFraktur <- scr =
Just $ TextFraktur
| Just OBoldItalic <- sty =
Just $ TextBoldItalic
| otherwise = Nothing
elemToExps :: Element -> Maybe [Exp]
elemToExps element = unGroup <$> elemToExps' element
elemToExps' :: Element -> Maybe [Exp]
elemToExps' element | isElem "m" "acc" element = do
let chr = filterChildName (hasElemName "m" "accPr") element >>=
filterChildName (hasElemName "m" "chr") >>=
findAttrBy (hasElemName "m" "val") >>=
Just . head
chr' = case chr of
Just c -> T.singleton c
baseExp <- filterChildName (hasElemName "m" "e") element >>=
elemToBase
return $ [EOver False baseExp (ESymbol Accent chr')]
elemToExps' element | isElem "m" "bar" element = do
let pos = filterChildName (hasElemName "m" "barPr") element >>=
filterChildName (hasElemName "m" "pos") >>=
findAttrBy (hasElemName "m" "val")
baseExp <- filterChildName (hasElemName "m" "e") element >>=
elemToBase
According to OMML Specification , the default value of pos ( whether it exists or not ) is " bot "
case pos of
Just "top" -> Just [EOver False baseExp (ESymbol TOver "\773")]
_ -> Just [EUnder False baseExp (ESymbol TUnder "\818")]
elemToExps' element | isElem "m" "box" element = do
baseExp <- filterChildName (hasElemName "m" "e") element >>=
elemToBase
return [baseExp]
elemToExps' element | isElem "m" "borderBox" element = do
baseExp <- filterChildName (hasElemName "m" "e") element >>=
elemToBase
return [EBoxed baseExp]
elemToExps' element | isElem "m" "d" element =
let baseExps = mapMaybe
elemToBases
(elChildren element)
inDelimExps = map (map Right) baseExps
dPr = filterChildName (hasElemName "m" "dPr") element
begChr = dPr >>=
filterChildName (hasElemName "m" "begChr") >>=
findAttrBy (hasElemName "m" "val") >>=
(\c -> if null c then (Just ' ') else (Just $ head c))
sepChr = dPr >>=
filterChildName (hasElemName "m" "sepChr") >>=
findAttrBy (hasElemName "m" "val") >>=
(\c -> if null c then (Just ' ') else (Just $ head c))
endChr = dPr >>=
filterChildName (hasElemName "m" "endChr") >>=
findAttrBy (hasElemName "m" "val") >>=
(\c -> if null c then (Just ' ') else (Just $ head c))
beg = maybe "(" T.singleton begChr
end = maybe ")" T.singleton endChr
sep = maybe "|" T.singleton sepChr
exps = intercalate [Left sep] inDelimExps
in
Just [EDelimited beg end exps]
elemToExps' element | isElem "m" "eqArr" element =
let expLst = mapMaybe elemToBases (elChildren element)
expLst' = map breakOnAmpersand expLst
cols = maximum (map length expLst')
colspecs = take cols $ cycle [AlignRight , AlignLeft]
in
return [EArray colspecs expLst']
elemToExps' element | isElem "m" "f" element = do
num <- filterChildName (hasElemName "m" "num") element
den <- filterChildName (hasElemName "m" "den") element
let barType = filterChildName (hasElemName "m" "fPr") element >>=
filterChildName (hasElemName "m" "type") >>=
findAttrBy (hasElemName "m" "val")
let numExp = EGrouped $ concat $ mapMaybe (elemToExps) (elChildren num)
denExp = EGrouped $ concat $ mapMaybe (elemToExps) (elChildren den)
case barType of
Just "noBar" -> Just [EFraction NoLineFrac numExp denExp]
_ -> Just [EFraction NormalFrac numExp denExp]
-- <m:func>: a named function applied to a base (e.g. sin x); the function
-- name is rendered before its argument.
elemToExps' element | isElem "m" "func" element = do
  fName <- filterChildName (hasElemName "m" "fName") element
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  -- collapse the name's sub-expressions into a single expression
  let fnameExp = case mconcat $ mapMaybe (elemToExps') (elChildren fName) of
                   [x] -> x
                   xs -> EGrouped xs
  return [fnameExp, baseExp]
-- <m:groupChr>: a grouping character drawn over or under the base.
-- <m:pos> selects over vs. under placement and <m:vertJC> which side the
-- base sits on.  When no <m:chr> is given, fall back to the Unicode
-- over/underbrace (the spans at the original lines had their default
-- branches and comment markers stripped; restored here).
elemToExps' element | isElem "m" "groupChr" element = do
  let gPr = filterChildName (hasElemName "m" "groupChrPr") element
      chr = gPr >>=
            filterChildName (hasElemName "m" "chr") >>=
            findAttrBy (hasElemName "m" "val")
      pos = gPr >>=
            filterChildName (hasElemName "m" "pos") >>=
            findAttrBy (hasElemName "m" "val")
      justif = gPr >>=
               filterChildName (hasElemName "m" "vertJC") >>=
               findAttrBy (hasElemName "m" "val")
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  case pos of
    Just "top" ->
      let chr' = case chr of
                   Just (c:_) -> T.singleton c
                   _          -> T.singleton '\9182' -- default to overbrace
      in
        return $
        case justif of
          Just "top" -> [EUnder False (ESymbol TOver chr') baseExp]
          _ -> [EOver False baseExp (ESymbol TOver chr')]
    _ ->
      let chr' = case chr of
                   Just (c:_) -> T.singleton c
                   _          -> T.singleton '\9183' -- default to underbrace
      in
        return $
        case justif of
          Just "top" -> [EUnder False baseExp (ESymbol TUnder chr')]
          _ -> [EOver False (ESymbol TUnder chr') baseExp]
-- <m:limLow>: a limit expression placed below its base.
elemToExps' element | isElem "m" "limLow" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element
             >>= elemToBase
  limExp <- filterChildName (hasElemName "m" "lim") element
            >>= (\e -> Just $ concat $ mapMaybe (elemToExps) (elChildren e))
            >>= (return . EGrouped)
  return [EUnder True baseExp limExp]
-- <m:limUpp>: a limit expression placed above its base.
elemToExps' element | isElem "m" "limUpp" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element
             >>= elemToBase
  limExp <- filterChildName (hasElemName "m" "lim") element
            >>= (\e -> Just $ concat $ mapMaybe (elemToExps) (elChildren e))
            >>= (return . EGrouped)
  return [EOver True baseExp limExp]
-- <m:m>: a matrix; one <m:mr> child per row, every column centre-aligned.
elemToExps' element | isElem "m" "m" element =
  return [EArray [AlignCenter] rowExps]
  where
    rowExps = [ mapMaybe elemToBases (elChildren mr)
              | mr <- filterChildrenName (hasElemName "m" "mr") element ]
-- <m:nary>: an n-ary operator (sum, product, integral, ...) with optional
-- sub/superscript limits.  The case on the operator character was missing
-- its default branch (stripped with the comments); a missing <m:chr>
-- defaults to the integral sign.  <m:limLoc> "undOvr" puts the limits
-- under/over the operator, anything else renders them as sub/superscripts.
elemToExps' element | isElem "m" "nary" element = do
  let naryPr = filterChildName (hasElemName "m" "naryPr") element
      naryChr = naryPr >>=
                filterChildName (hasElemName "m" "chr") >>=
                findAttrBy (hasElemName "m" "val")
      opChr = case naryChr of
                Just (c:_) -> T.singleton c
                _          -> T.singleton '\8747' -- default to integral
      limLoc = naryPr >>=
               filterChildName (hasElemName "m" "limLoc") >>=
               findAttrBy (hasElemName "m" "val")
  subExps <- filterChildName (hasElemName "m" "sub") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  supExps <- filterChildName (hasElemName "m" "sup") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  case limLoc of
    Just "undOvr" -> return [EUnderover True
                              (ESymbol Op opChr)
                              (EGrouped subExps)
                              (EGrouped supExps)
                            , baseExp]
    _ -> return [ESubsup
                  (ESymbol Op opChr)
                  (EGrouped subExps)
                  (EGrouped supExps)
                , baseExp]
-- <m:phant>: a phantom; occupies the space of its base without rendering it.
elemToExps' element | isElem "m" "phant" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  return [EPhantom baseExp]
-- <m:rad>: a radical; an empty degree yields a square root, otherwise an
-- n-th root.
elemToExps' element | isElem "m" "rad" element = do
  degExps <- filterChildName (hasElemName "m" "deg") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  return $ case degExps of
    [] -> [ESqrt baseExp]
    ds -> [ERoot (EGrouped ds) baseExp]
-- <m:sPre>: pre-sub/superscripts, attached to an empty identifier so they
-- render before the base.
elemToExps' element | isElem "m" "sPre" element = do
  subExps <- filterChildName (hasElemName "m" "sub") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  supExps <- filterChildName (hasElemName "m" "sup") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  return [ESubsup
          (EIdentifier "")
          (EGrouped subExps)
          (EGrouped supExps)
         , baseExp]
-- <m:sSub>: a subscripted base.
elemToExps' element | isElem "m" "sSub" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  subExps <- filterChildName (hasElemName "m" "sub") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  return [ESub baseExp (EGrouped subExps)]
-- <m:sSubSup>: a base with both subscript and superscript.
elemToExps' element | isElem "m" "sSubSup" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  subExps <- filterChildName (hasElemName "m" "sub") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  supExps <- filterChildName (hasElemName "m" "sup") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  return [ESubsup baseExp (EGrouped subExps) (EGrouped supExps)]
-- <m:sSup>: a superscripted base.
elemToExps' element | isElem "m" "sSup" element = do
  baseExp <- filterChildName (hasElemName "m" "e") element >>=
             elemToBase
  supExps <- filterChildName (hasElemName "m" "sup") element >>=
             (\e -> return $ concat $ mapMaybe (elemToExps) (elChildren e))
  return [ESuper baseExp (EGrouped supExps)]
-- <m:r>: a literal text run.  The run properties decide whether the text is
-- kept verbatim (<m:lit>/<m:nor> present), styled, or interpreted
-- character by character.
elemToExps' element | isElem "m" "r" element = do
  let mrPr = filterChildName (hasElemName "m" "rPr") element
      lit = mrPr >>= filterChildName (hasElemName "m" "lit")
      nor = mrPr >>= filterChildName (hasElemName "m" "nor")
      txtSty = oMathRunTextStyleToTextType $ elemToOMathRunTextStyle element
  mrElems <- elemToOMathRunElems element
  return $
    -- 'null' on a Maybe: True when the property element is absent
    if null lit && null nor
    then case txtSty of
      Nothing ->
        interpretText $ oMathRunElemsToText mrElems
      Just textSty ->
        [EStyled textSty $ interpretText $ oMathRunElemsToText mrElems]
    else [EText (fromMaybe TextNormal txtSty) $ oMathRunElemsToText mrElems]
-- any other element is not a recognised OMML construct
elemToExps' _ = Nothing
-- | Interpret a single character: digits become numbers, alphabetic
-- characters identifiers, spacing characters spaces, and everything else a
-- symbol tagged with its symbol type.
interpretChar :: Char -> Exp
interpretChar c | isDigit c = ENumber $ T.singleton c
interpretChar c = case getSymbolType c of
  Alpha -> EIdentifier c'
  -- digit 'Ord's are unreachable here (handled by the first equation);
  -- the guard is kept for totality
  Ord | isDigit c -> ENumber c'
      | otherwise -> case getSpaceWidth c of
                       Just x -> ESpace x
                       Nothing -> ESymbol Ord c'
  symType -> ESymbol symType c'
  where
    c' = T.singleton c
-- | Interpret a text run: a single character goes through 'interpretChar',
-- an all-digit string becomes one number, a known operator name becomes an
-- 'EMathOperator', and anything else is interpreted character by character.
interpretText :: T.Text -> [Exp]
interpretText s =
  case T.unpack s of
    [c] -> [interpretChar c]
    cs
      | T.all isDigit s -> [ENumber s]
      | isJust (getOperator (EMathOperator s)) -> [EMathOperator s]
      | otherwise -> map interpretChar cs
-- | Resolve a <w:sym> element to text: read the hex code point from the
-- "char" attribute, map it through the symbol font named by the "font"
-- attribute, and return the resulting character ("" on any failure).
getSymChar :: Element -> T.Text
getSymChar element
  | Just s <- lowerFromPrivate <$> getCodepoint
  , Just font <- getFont =
    case readLitChar ("\\x" ++ s) of
      [(char, _)] -> maybe "" T.singleton $ getUnicode font char
      _ -> ""
  where
    getCodepoint = findAttrBy (hasElemName "w" "char") element
    getFont = (textToFont . T.pack) =<< findAttrBy (hasElemName "w" "font") element
    -- symbol codes stored in the private-use area (F0xx) are lowered to the
    -- plain code point before reading
    lowerFromPrivate ('F':xs) = '0':xs
    lowerFromPrivate xs = xs
getSymChar _ = ""
|
f7b07fa195c226046f164d5c2c39611b50ed5ad7b28cbf40d6bc4e1d53d6ddda | haskell-repa/repa | JSON.hs | # OPTIONS_GHC -fno - warn - orphans #
-- | Conversion of Repa queries to JSON format for interfacing with
--   client systems.
--
--   Conversion is done by providing instances for the `ToJSON` and `FromJSON`
--   classes from the @aeson@ package.
--
module Data.Repa.Query.Graph.JSON
(encode, toJSON, fromJSON, Result(..))
where
import Control.Monad
import Data.Char
import Data.List as L
import Data.Repa.Query.Graph
import Data.Repa.Query.Graph.Exp
import Data.Aeson as Aeson
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.HashMap.Strict as H
--------------------------------------------------------------------------------------------- Graph
-- Serialise a query graph as an object tagged "_type":"graph" holding its
-- node list.
instance (ToJSON nF, ToJSON bV, ToJSON nV)
      => (ToJSON (Graph a nF bV nV)) where
 toJSON (Graph ns)
  = object [ "_type" .= text "graph"
           , "nodes" .= toJSON ns ]
-- Parse a graph; accepts only objects tagged "_type":"graph".
instance (FromJSON nF, FromJSON bV, FromJSON nV)
      => (FromJSON (Graph () nF bV nV)) where
 parseJSON (Object hh)
        | Just (String "graph") <- H.lookup "_type" hh
        , Just jNodes <- H.lookup "nodes" hh
        = do nodes <- parseJSON jNodes
             return $ Graph nodes
 parseJSON _ = mzero
---------------------------------------------------------------------------------------------- Node
-- Serialise a graph node; the "node" field tags which constructor it is.
instance (ToJSON nF, ToJSON bV, ToJSON nV)
      => (ToJSON (Node a nF bV nV)) where
 toJSON xx
  = case xx of
     -- a source node introduces external data into the graph
     NodeSource s
      -> object [ "_type" .= text "node"
                , "node" .= text "source"
                , "source" .= toJSON s ]
     -- an operator node transforms flows
     NodeOp op
      -> object [ "_type" .= text "node"
                , "node" .= text "op"
                , "op" .= toJSON op ]
-- Parse a graph node, dispatching on the "node" tag written by 'toJSON'.
instance (FromJSON nF, FromJSON bV, FromJSON nV)
      => (FromJSON (Node () nF bV nV)) where
 parseJSON (Object hh)
        -- source node
        | Just (String "node") <- H.lookup "_type" hh
        , Just (String "source") <- H.lookup "node" hh
        , Just jSource <- H.lookup "source" hh
        = do source <- parseJSON jSource
             return $ NodeSource source
        -- operator node
        | Just (String "node") <- H.lookup "_type" hh
        , Just (String "op") <- H.lookup "node" hh
        , Just jOp <- H.lookup "op" hh
        = do op <- parseJSON jOp
             return $ NodeOp op
 parseJSON _ = mzero
-------------------------------------------------------------------------------------------- Source
-- Serialise a data source; the "source" field tags the constructor so the
-- FromJSON instance can dispatch on it.
instance (ToJSON nF)
      => ToJSON (Source a nF) where
 toJSON xx
  = case xx of
     -- a flat file of delimited records
     SourceFile _ name delim fields fOut
      -> object [ "_type" .= text "source"
                , "source" .= text "file"
                , "name" .= T.pack name
                , "delim" .= toJSON delim
                , "fields" .= toJSON fields
                , "output" .= toJSON fOut ]
     -- a whole table
     SourceTable _ name delim fields fOut
      -> object [ "_type" .= text "source"
                , "source" .= text "table"
                , "name" .= T.pack name
                , "delim" .= toJSON delim
                , "fields" .= toJSON fields
                , "output" .= toJSON fOut ]
     -- a single column of a table
     SourceTableColumn _ name delim fields column fOut
      -> object [ "_type" .= text "source"
                , "source" .= text "table_column"
                , "name" .= T.pack name
                , "delim" .= toJSON delim
                , "fields" .= toJSON fields
                , "column" .= toJSON column
                , "output" .= toJSON fOut ]
     -- several columns of a table
     SourceTableColumns _ name delim fields columns fOut
      -> object [ "_type" .= text "source"
                , "source" .= text "table_columns"
                , "name" .= T.pack name
                , "delim" .= toJSON delim
                , "fields" .= toJSON fields
                , "columns" .= toJSON columns
                , "output" .= toJSON fOut ]
     -- named columns of a column family
     SourceFamilyColumns _
        nameFamily nameColumns
        formatKey formatColumns
        flowOut
      -> object [ "_type" .= text "source"
                , "source" .= text "family_column"
                , "name_fam" .= T.pack nameFamily
                , "name_cols" .= map T.pack nameColumns
                , "format_key" .= toJSON formatKey
                , "format_cols" .= toJSON formatColumns
                , "output" .= toJSON flowOut ]
-- Parse a data source, dispatching on the "source" tag written by 'toJSON'.
-- (The per-constructor section markers at some guards had lost their
-- comment markers, leaving bare identifiers in the code; restored here.)
instance FromJSON nF
      => FromJSON (Source () nF) where
 parseJSON (Object hh)
        -- SourceFile
        | Just (String "source") <- H.lookup "_type" hh
        , Just (String "file") <- H.lookup "source" hh
        , Just (String name) <- H.lookup "name" hh
        , Just jDelim <- H.lookup "delim" hh
        , Just jFields <- H.lookup "fields" hh
        , Just jOut <- H.lookup "output" hh
        = do delim <- parseJSON jDelim
             fields <- parseJSON jFields
             out <- parseJSON jOut
             return $ SourceFile () (T.unpack name) delim fields out

        -- SourceTable
        | Just (String "source") <- H.lookup "_type" hh
        , Just (String "table") <- H.lookup "source" hh
        , Just (String name) <- H.lookup "name" hh
        , Just jDelim <- H.lookup "delim" hh
        , Just jFields <- H.lookup "fields" hh
        , Just jOut <- H.lookup "output" hh
        = do delim <- parseJSON jDelim
             fields <- parseJSON jFields
             out <- parseJSON jOut
             return $ SourceTable () (T.unpack name) delim fields out

        -- SourceTableColumn
        | Just (String "source") <- H.lookup "_type" hh
        , Just (String "table_column")
                                 <- H.lookup "source" hh
        , Just (String name) <- H.lookup "name" hh
        , Just jDelim <- H.lookup "delim" hh
        , Just jFields <- H.lookup "fields" hh
        , Just jColumn <- H.lookup "column" hh
        , Just jOut <- H.lookup "output" hh
        = do delim <- parseJSON jDelim
             fields <- parseJSON jFields
             column <- parseJSON jColumn
             out <- parseJSON jOut
             return $ SourceTableColumn () (T.unpack name) delim fields column out

        -- SourceTableColumns
        | Just (String "source") <- H.lookup "_type" hh
        , Just (String "table_columns")
                                 <- H.lookup "source" hh
        , Just (String name) <- H.lookup "name" hh
        , Just jDelim <- H.lookup "delim" hh
        , Just jFields <- H.lookup "fields" hh
        , Just jColumns <- H.lookup "columns" hh
        , Just jOut <- H.lookup "output" hh
        = do delim <- parseJSON jDelim
             fields <- parseJSON jFields
             columns <- parseJSON jColumns
             out <- parseJSON jOut
             return $ SourceTableColumns () (T.unpack name) delim fields columns out

        -- SourceFamilyColumns
        | Just (String "source") <- H.lookup "_type" hh
        , Just (String "family_column")
                                 <- H.lookup "source" hh
        , Just (String nameFam) <- H.lookup "name_fam" hh
        , Just jNameCols <- H.lookup "name_cols" hh
        , Just jFormatKey <- H.lookup "format_key" hh
        , Just jFormatCols <- H.lookup "format_cols" hh
        , Just jOut <- H.lookup "output" hh
        = do
             formatKey <- parseJSON jFormatKey
             formatCols <- parseJSON jFormatCols
             nameCols <- parseJSON jNameCols
             out <- parseJSON jOut
             return $ SourceFamilyColumns ()
                        (T.unpack nameFam) nameCols
                        formatKey formatCols out

 parseJSON _ = mzero
-------------------------------------------------------------------------------------------- FlowOp
-- Serialise a flow operator; the "fop" field tags the constructor.
instance (ToJSON nF, ToJSON bV, ToJSON nV)
      => (ToJSON (FlowOp a nF bV nV)) where
 toJSON xx
  = case xx of
     -- map over several inputs
     FopMapI fIns fOut fun
      -> object [ "_type" .= text "fop"
                , "fop" .= text "mapi"
                , "ins" .= toJSON fIns
                , "out" .= toJSON fOut
                , "fun" .= toJSON fun ]
     -- filter by a predicate
     FopFilterI fIn fOut fun
      -> object [ "_type" .= text "fop"
                , "fop" .= text "filteri"
                , "in" .= toJSON fIn
                , "out" .= toJSON fOut
                , "fun" .= toJSON fun ]
     -- fold with an operator and neutral element
     FopFoldI fIn fOut fun z
      -> object [ "_type" .= text "fop"
                , "fop" .= text "foldi"
                , "in" .= toJSON fIn
                , "out" .= toJSON fOut
                , "fun" .= toJSON fun
                , "neutral" .= toJSON z ]
     -- segmented fold: segment lengths and element flow
     FopFoldsI fLens fElems fOut fun z
      -> object [ "_type" .= text "fop"
                , "fop" .= text "foldsi"
                , "lens" .= toJSON fLens
                , "elems" .= toJSON fElems
                , "out" .= toJSON fOut
                , "fun" .= toJSON fun
                , "neutral" .= toJSON z ]
     -- group successive equal elements
     FopGroupsI fIn fOut fun
      -> object [ "_type" .= text "fop"
                , "fop" .= text "groupsi"
                , "in" .= toJSON fIn
                , "out" .= toJSON fOut
                , "fun" .= toJSON fun ]
-- Parse a flow operator, dispatching on the "fop" tag written by 'toJSON'.
-- (The "mapi" section marker had lost its comment marker, leaving a bare
-- identifier in the code; restored here.)
instance (FromJSON nF, FromJSON bV, FromJSON nV)
      => FromJSON (FlowOp () nF bV nV) where
 parseJSON (Object hh)
        -- mapi
        | Just (String "fop") <- H.lookup "_type" hh
        , Just (String "mapi") <- H.lookup "fop" hh
        , Just jIn <- H.lookup "ins" hh
        , Just jOut <- H.lookup "out" hh
        , Just jFun <- H.lookup "fun" hh
        = do fin <- parseJSON jIn
             fout <- parseJSON jOut
             fun <- parseJSON jFun
             return $ FopMapI fin fout fun

        -- filteri
        | Just (String "fop") <- H.lookup "_type" hh
        , Just (String "filteri") <- H.lookup "fop" hh
        , Just jIn <- H.lookup "in" hh
        , Just jOut <- H.lookup "out" hh
        , Just jFun <- H.lookup "fun" hh
        = do fin <- parseJSON jIn
             fout <- parseJSON jOut
             fun <- parseJSON jFun
             return $ FopFilterI fin fout fun

        -- foldi
        | Just (String "fop") <- H.lookup "_type" hh
        , Just (String "foldi") <- H.lookup "fop" hh
        , Just jIn <- H.lookup "in" hh
        , Just jOut <- H.lookup "out" hh
        , Just jFun <- H.lookup "fun" hh
        , Just jNeutral <- H.lookup "neutral" hh
        = do fin <- parseJSON jIn
             fout <- parseJSON jOut
             fun <- parseJSON jFun
             neutral <- parseJSON jNeutral
             return $ FopFoldI fin fout fun neutral

        -- foldsi
        | Just (String "fop") <- H.lookup "_type" hh
        , Just (String "foldsi") <- H.lookup "fop" hh
        , Just jLens <- H.lookup "lens" hh
        , Just jElems <- H.lookup "elems" hh
        , Just jOut <- H.lookup "out" hh
        , Just jFun <- H.lookup "fun" hh
        , Just jNeutral <- H.lookup "neutral" hh
        = do flens <- parseJSON jLens
             felems <- parseJSON jElems
             fout <- parseJSON jOut
             fun <- parseJSON jFun
             neutral <- parseJSON jNeutral
             return $ FopFoldsI flens felems fout fun neutral

        -- groupsi
        | Just (String "fop") <- H.lookup "_type" hh
        , Just (String "groupsi") <- H.lookup "fop" hh
        , Just jIn <- H.lookup "in" hh
        , Just jOut <- H.lookup "out" hh
        , Just jFun <- H.lookup "fun" hh
        = do fin <- parseJSON jIn
             fout <- parseJSON jOut
             fun <- parseJSON jFun
             return $ FopGroupsI fin fout fun

 parseJSON _ = mzero
----------------------------------------------------------------------------------------------- Exp
-- Serialise an expression; the "exp" field tags the constructor.
instance (ToJSON bV, ToJSON nV)
      => ToJSON (Exp a bV nV) where
 toJSON xx
  = case xx of
     -- literals: the literal kind and its value are written as strings
     XVal _ (VLit _ lit)
      -> let (name :: Text, val)
              = case lit of
                 LBool b -> ("bool", T.pack $ show b)
                 LWord w -> ("word", T.pack $ show w)
                 LInt i -> ("int", T.pack $ show i)
                 LFloat f -> ("float", T.pack $ show f)
                 LDouble d -> ("double", T.pack $ show d)
                 LString s -> ("string", T.pack s)
         in object
                [ "_type" .= text "exp"
                , "exp" .= text "lit"
                , "lit" .= name
                , "value" .= val ]
     -- lambdas
     XVal _ (VLam _ bV x)
      -> object [ "_type" .= text "exp"
                , "exp" .= text "lam"
                , "binder" .= toJSON bV
                , "body" .= toJSON x ]
     -- variables
     XVar _ v
      -> object [ "_type" .= text "exp"
                , "exp" .= text "var"
                , "var" .= toJSON v ]
     -- applications
     XApp _ xFun xArg
      -> object [ "_type" .= text "exp"
                , "exp" .= text "app"
                , "fun" .= toJSON xFun
                , "arg" .= toJSON xArg ]
     -- scalar operators applied to argument expressions
     XOp _ sOp xsArgs
      -> object [ "_type" .= text "exp"
                , "exp" .= text "sop"
                , "sop" .= nameOfScalarOp sOp
                , "args" .= toJSON xsArgs ]
-- Parse an expression, dispatching on the "exp" tag written by 'toJSON'.
-- Fix: the original instance had no case for "app", although the ToJSON
-- instance emits "exp":"app" objects for 'XApp' — serialised applications
-- could never be decoded.  The missing case is added below.
instance (FromJSON bV, FromJSON nV)
      => FromJSON (Exp () bV nV) where
 parseJSON (Object hh)
        -- literals
        | Just (String "exp") <- H.lookup "_type" hh
        , Just (String "lit") <- H.lookup "exp" hh
        , Just (String lit) <- H.lookup "lit" hh
        , Just (String value) <- H.lookup "value" hh
        = case T.unpack lit of
            "bool" -> return $ xBool () $ read $ T.unpack value
            "word" -> return $ xWord () $ read $ T.unpack value
            "int" -> return $ xInt () $ read $ T.unpack value
            "float" -> return $ xFloat () $ read $ T.unpack value
            "double" -> return $ xDouble () $ read $ T.unpack value
            "string" -> return $ xString () $ T.unpack value
            _ -> mzero

        -- variables
        | Just (String "exp") <- H.lookup "_type" hh
        , Just (String "var") <- H.lookup "exp" hh
        , Just jName <- H.lookup "var" hh
        = do name <- parseJSON jName
             return $ XVar () name

        -- lambdas
        | Just (String "exp") <- H.lookup "_type" hh
        , Just (String "lam") <- H.lookup "exp" hh
        , Just jBinder <- H.lookup "binder" hh
        , Just jBody <- H.lookup "body" hh
        = do binder <- parseJSON jBinder
             body <- parseJSON jBody
             return $ XVal () (VLam () binder body)

        -- applications (mirrors the "app" objects produced by toJSON)
        | Just (String "exp") <- H.lookup "_type" hh
        , Just (String "app") <- H.lookup "exp" hh
        , Just jFun <- H.lookup "fun" hh
        , Just jArg <- H.lookup "arg" hh
        = do fun <- parseJSON jFun
             arg <- parseJSON jArg
             return $ XApp () fun arg

        -- operators
        | Just (String "exp") <- H.lookup "_type" hh
        , Just (String "sop") <- H.lookup "exp" hh
        , Just (String ssop) <- H.lookup "sop" hh
        , Just jArgs <- H.lookup "args" hh
        , Just sop <- scalarOpOfName (T.unpack ssop)
        = do args <- parseJSON jArgs
             return $ XOp () sop args

 parseJSON _ = mzero
------------------------------------------------------------------------------------------ ScalarOp
-- | External (JSON) name of a scalar operator; inverse of 'scalarOpOfName'.
nameOfScalarOp :: ScalarOp -> String
nameOfScalarOp sop
        -- Atomic names
        | [name] <- [ name | (sop', name) <- sopNames
                           , sop == sop' ]
        = name

        -- Tupling
        | SopRow i <- sop
        = "row" ++ show i

        -- Projection
        | SopGet i j <- sop
        = "get" ++ show i ++ "_" ++ show j

        -- If this fails then the 'sopNames' table is probably incomplete.
        | otherwise
        = error "repa-query.nameOfScalarOp: no match"
-- | Parse an operator name back to a 'ScalarOp'; inverse of 'nameOfScalarOp'.
scalarOpOfName :: String -> Maybe ScalarOp
scalarOpOfName ss
        -- Atomic names: succeed only on a unique table entry.
        | [op] <- [ op | (op, name) <- sopNames, name == ss ]
        = Just op

        -- Tupling: "row<i>".
        | Just digits <- L.stripPrefix "row" ss
        , not (null digits), all isDigit digits
        = Just (SopRow (read digits))

        -- Projection: "get<i>_<j>".
        | Just rest <- L.stripPrefix "get" ss
        , (digits1, '_' : digits2) <- L.span isDigit rest
        , not (null digits1), all isDigit digits1
        , not (null digits2), all isDigit digits2
        = Just (SopGet (read digits1) (read digits2))

        | otherwise
        = Nothing
-- | Table mapping atomic scalar operators to their external (JSON) names.
-- 'SopRow' and 'SopGet' are handled separately, as their names carry indices.
sopNames :: [(ScalarOp, String)]
sopNames
 =      -- Arithmetic
        [ (SopNeg, "neg")
        , (SopAbs, "abs")
        , (SopSignum, "signum")
        , (SopAdd, "add")
        , (SopSub, "sub")
        , (SopMul, "mul")
        , (SopDiv, "div")
        , (SopEq, "eq")
        , (SopNeq, "neq")
        , (SopGt, "gt")
        , (SopGe, "ge")
        , (SopLt, "lt")
        , (SopLe, "le")

          -- Dates
        , (SopStringOfDate, "stringOfDate")
        , (SopYearOfDate, "yearOfDate")
        , (SopMonthOfDate, "monthOfDate")
        , (SopDayOfDate, "dayOfDate")]
---------------------------------------------------------------------------------------------------
-- | Identity restricted to 'Text'; fixes the type of string literals passed
--   to '.=' so the instances above stay unambiguous.
text :: Text -> Text
text = id
| null | https://raw.githubusercontent.com/haskell-repa/repa/c867025e99fd008f094a5b18ce4dabd29bed00ba/repa-query/Data/Repa/Query/Graph/JSON.hs | haskell | client systems.
------------------------------------------------------------------------------------------- Graph
-------------------------------------------------------------------------------------------- Node
------------------------------------------------------------------------------------------ Source
SourceFile
SourceTableColumn
------------------------------------------------------------------------------------------ FlowOp
filteri
foldi
foldsi
groupsi
--------------------------------------------------------------------------------------------- Exp
literals
lambdas
variables
applications
operators
literals
variables
lambdas
operators
---------------------------------------------------------------------------------------- ScalarOp
Atomic names
Tupling
Projection
If this fails then the 'sopNames' table is probably incomplete.
Atomic names
Tupling
Projection
Arithmetic
Dates
------------------------------------------------------------------------------------------------- | # OPTIONS_GHC -fno - warn - orphans #
| Conversion of Repa queries to JSON format for interfacing with
Conversion is done by providing instances for the ` ToJSON ` and ` FromJSON `
classes form the package .
module Data.Repa.Query.Graph.JSON
(encode, toJSON, fromJSON, Result(..))
where
import Control.Monad
import Data.Char
import Data.List as L
import Data.Repa.Query.Graph
import Data.Repa.Query.Graph.Exp
import Data.Aeson as Aeson
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.HashMap.Strict as H
instance (ToJSON nF, ToJSON bV, ToJSON nV)
=> (ToJSON (Graph a nF bV nV)) where
toJSON xx
= case xx of
Graph ns
-> object [ "_type" .= text "graph"
, "nodes" .= toJSON ns ]
instance (FromJSON nF, FromJSON bV, FromJSON nV)
=> (FromJSON (Graph () nF bV nV)) where
parseJSON (Object hh)
| Just (String "graph") <- H.lookup "_type" hh
, Just jNodes <- H.lookup "nodes" hh
= do nodes <- parseJSON jNodes
return $ Graph nodes
parseJSON _ = mzero
instance (ToJSON nF, ToJSON bV, ToJSON nV)
=> (ToJSON (Node a nF bV nV)) where
toJSON xx
= case xx of
NodeSource s
-> object [ "_type" .= text "node"
, "node" .= text "source"
, "source" .= toJSON s ]
NodeOp op
-> object [ "_type" .= text "node"
, "node" .= text "op"
, "op" .= toJSON op ]
instance (FromJSON nF, FromJSON bV, FromJSON nV)
=> (FromJSON (Node () nF bV nV)) where
parseJSON (Object hh)
| Just (String "node") <- H.lookup "_type" hh
, Just (String "source") <- H.lookup "node" hh
, Just jSource <- H.lookup "source" hh
= do source <- parseJSON jSource
return $ NodeSource source
| Just (String "node") <- H.lookup "_type" hh
, Just (String "op") <- H.lookup "node" hh
, Just jOp <- H.lookup "op" hh
= do op <- parseJSON jOp
return $ NodeOp op
parseJSON _ = mzero
instance (ToJSON nF)
=> ToJSON (Source a nF) where
toJSON xx
= case xx of
SourceFile _ name delim fields fOut
-> object [ "_type" .= text "source"
, "source" .= text "file"
, "name" .= T.pack name
, "delim" .= toJSON delim
, "fields" .= toJSON fields
, "output" .= toJSON fOut ]
SourceTable _ name delim fields fOut
-> object [ "_type" .= text "source"
, "source" .= text "table"
, "name" .= T.pack name
, "delim" .= toJSON delim
, "fields" .= toJSON fields
, "output" .= toJSON fOut ]
SourceTableColumn _ name delim fields column fOut
-> object [ "_type" .= text "source"
, "source" .= text "table_column"
, "name" .= T.pack name
, "delim" .= toJSON delim
, "fields" .= toJSON fields
, "column" .= toJSON column
, "output" .= toJSON fOut ]
SourceTableColumns _ name delim fields columns fOut
-> object [ "_type" .= text "source"
, "source" .= text "table_columns"
, "name" .= T.pack name
, "delim" .= toJSON delim
, "fields" .= toJSON fields
, "columns" .= toJSON columns
, "output" .= toJSON fOut ]
SourceFamilyColumns _
nameFamily nameColumns
formatKey formatColumns
flowOut
-> object [ "_type" .= text "source"
, "source" .= text "family_column"
, "name_fam" .= T.pack nameFamily
, "name_cols" .= map T.pack nameColumns
, "format_key" .= toJSON formatKey
, "format_cols" .= toJSON formatColumns
, "output" .= toJSON flowOut ]
instance FromJSON nF
=> FromJSON (Source () nF) where
parseJSON (Object hh)
| Just (String "source") <- H.lookup "_type" hh
, Just (String "file") <- H.lookup "source" hh
, Just (String name) <- H.lookup "name" hh
, Just jDelim <- H.lookup "delim" hh
, Just jFields <- H.lookup "fields" hh
, Just jOut <- H.lookup "output" hh
= do delim <- parseJSON jDelim
fields <- parseJSON jFields
out <- parseJSON jOut
return $ SourceFile () (T.unpack name) delim fields out
SourceTable
| Just (String "source") <- H.lookup "_type" hh
, Just (String "table") <- H.lookup "source" hh
, Just (String name) <- H.lookup "name" hh
, Just jDelim <- H.lookup "delim" hh
, Just jFields <- H.lookup "fields" hh
, Just jOut <- H.lookup "output" hh
= do delim <- parseJSON jDelim
fields <- parseJSON jFields
out <- parseJSON jOut
return $ SourceTable () (T.unpack name) delim fields out
| Just (String "source") <- H.lookup "_type" hh
, Just (String "table_column")
<- H.lookup "source" hh
, Just (String name) <- H.lookup "name" hh
, Just jDelim <- H.lookup "delim" hh
, Just jFields <- H.lookup "fields" hh
, Just jColumn <- H.lookup "column" hh
, Just jOut <- H.lookup "output" hh
= do delim <- parseJSON jDelim
fields <- parseJSON jFields
column <- parseJSON jColumn
out <- parseJSON jOut
return $ SourceTableColumn () (T.unpack name) delim fields column out
SourceTableColumns
| Just (String "source") <- H.lookup "_type" hh
, Just (String "table_columns")
<- H.lookup "source" hh
, Just (String name) <- H.lookup "name" hh
, Just jDelim <- H.lookup "delim" hh
, Just jFields <- H.lookup "fields" hh
, Just jColumns <- H.lookup "columns" hh
, Just jOut <- H.lookup "output" hh
= do delim <- parseJSON jDelim
fields <- parseJSON jFields
columns <- parseJSON jColumns
out <- parseJSON jOut
return $ SourceTableColumns () (T.unpack name) delim fields columns out
SourceFamilyColumn
| Just (String "source") <- H.lookup "_type" hh
, Just (String "family_column")
<- H.lookup "source" hh
, Just (String nameFam) <- H.lookup "name_fam" hh
, Just jNameCols <- H.lookup "name_cols" hh
, Just jFormatKey <- H.lookup "format_key" hh
, Just jFormatCols <- H.lookup "format_cols" hh
, Just jOut <- H.lookup "output" hh
= do
formatKey <- parseJSON jFormatKey
formatCols <- parseJSON jFormatCols
nameCols <- parseJSON jNameCols
out <- parseJSON jOut
return $ SourceFamilyColumns ()
(T.unpack nameFam) nameCols
formatKey formatCols out
parseJSON _ = mzero
instance (ToJSON nF, ToJSON bV, ToJSON nV)
=> (ToJSON (FlowOp a nF bV nV)) where
toJSON xx
= case xx of
FopMapI fIns fOut fun
-> object [ "_type" .= text "fop"
, "fop" .= text "mapi"
, "ins" .= toJSON fIns
, "out" .= toJSON fOut
, "fun" .= toJSON fun ]
FopFilterI fIn fOut fun
-> object [ "_type" .= text "fop"
, "fop" .= text "filteri"
, "in" .= toJSON fIn
, "out" .= toJSON fOut
, "fun" .= toJSON fun ]
FopFoldI fIn fOut fun z
-> object [ "_type" .= text "fop"
, "fop" .= text "foldi"
, "in" .= toJSON fIn
, "out" .= toJSON fOut
, "fun" .= toJSON fun
, "neutral" .= toJSON z ]
FopFoldsI fLens fElems fOut fun z
-> object [ "_type" .= text "fop"
, "fop" .= text "foldsi"
, "lens" .= toJSON fLens
, "elems" .= toJSON fElems
, "out" .= toJSON fOut
, "fun" .= toJSON fun
, "neutral" .= toJSON z ]
FopGroupsI fIn fOut fun
-> object [ "_type" .= text "fop"
, "fop" .= text "groupsi"
, "in" .= toJSON fIn
, "out" .= toJSON fOut
, "fun" .= toJSON fun ]
instance (FromJSON nF, FromJSON bV, FromJSON nV)
=> FromJSON (FlowOp () nF bV nV) where
parseJSON (Object hh)
mapi
| Just (String "fop") <- H.lookup "_type" hh
, Just (String "mapi") <- H.lookup "fop" hh
, Just jIn <- H.lookup "ins" hh
, Just jOut <- H.lookup "out" hh
, Just jFun <- H.lookup "fun" hh
= do fin <- parseJSON jIn
fout <- parseJSON jOut
fun <- parseJSON jFun
return $ FopMapI fin fout fun
| Just (String "fop") <- H.lookup "_type" hh
, Just (String "filteri") <- H.lookup "fop" hh
, Just jIn <- H.lookup "in" hh
, Just jOut <- H.lookup "out" hh
, Just jFun <- H.lookup "fun" hh
= do fin <- parseJSON jIn
fout <- parseJSON jOut
fun <- parseJSON jFun
return $ FopFilterI fin fout fun
| Just (String "fop") <- H.lookup "_type" hh
, Just (String "foldi") <- H.lookup "fop" hh
, Just jIn <- H.lookup "in" hh
, Just jOut <- H.lookup "out" hh
, Just jFun <- H.lookup "fun" hh
, Just jNeutral <- H.lookup "neutral" hh
= do fin <- parseJSON jIn
fout <- parseJSON jOut
fun <- parseJSON jFun
neutral <- parseJSON jNeutral
return $ FopFoldI fin fout fun neutral
| Just (String "fop") <- H.lookup "_type" hh
, Just (String "foldsi") <- H.lookup "fop" hh
, Just jLens <- H.lookup "lens" hh
, Just jElems <- H.lookup "elems" hh
, Just jOut <- H.lookup "out" hh
, Just jFun <- H.lookup "fun" hh
, Just jNeutral <- H.lookup "neutral" hh
= do flens <- parseJSON jLens
felems <- parseJSON jElems
fout <- parseJSON jOut
fun <- parseJSON jFun
neutral <- parseJSON jNeutral
return $ FopFoldsI flens felems fout fun neutral
| Just (String "fop") <- H.lookup "_type" hh
, Just (String "groupsi") <- H.lookup "fop" hh
, Just jIn <- H.lookup "in" hh
, Just jOut <- H.lookup "out" hh
, Just jFun <- H.lookup "fun" hh
= do fin <- parseJSON jIn
fout <- parseJSON jOut
fun <- parseJSON jFun
return $ FopGroupsI fin fout fun
parseJSON _ = mzero
instance (ToJSON bV, ToJSON nV)
=> ToJSON (Exp a bV nV) where
toJSON xx
= case xx of
XVal _ (VLit _ lit)
-> let (name :: Text, val)
= case lit of
LBool b -> ("bool", T.pack $ show b)
LWord w -> ("word", T.pack $ show w)
LInt i -> ("int", T.pack $ show i)
LFloat f -> ("float", T.pack $ show f)
LDouble d -> ("double", T.pack $ show d)
LString s -> ("string", T.pack s)
in object
[ "_type" .= text "exp"
, "exp" .= text "lit"
, "lit" .= name
, "value" .= val ]
XVal _ (VLam _ bV x)
-> object [ "_type" .= text "exp"
, "exp" .= text "lam"
, "binder" .= toJSON bV
, "body" .= toJSON x ]
XVar _ v
-> object [ "_type" .= text "exp"
, "exp" .= text "var"
, "var" .= toJSON v ]
XApp _ xFun xArg
-> object [ "_type" .= text "exp"
, "exp" .= text "app"
, "fun" .= toJSON xFun
, "arg" .= toJSON xArg ]
XOp _ sOp xsArgs
-> object [ "_type" .= text "exp"
, "exp" .= text "sop"
, "sop" .= nameOfScalarOp sOp
, "args" .= toJSON xsArgs ]
instance (FromJSON bV, FromJSON nV)
=> FromJSON (Exp () bV nV) where
parseJSON (Object hh)
| Just (String "exp") <- H.lookup "_type" hh
, Just (String "lit") <- H.lookup "exp" hh
, Just (String lit) <- H.lookup "lit" hh
, Just (String value) <- H.lookup "value" hh
= case T.unpack lit of
"bool" -> return $ xBool () $ read $ T.unpack value
"word" -> return $ xWord () $ read $ T.unpack value
"int" -> return $ xInt () $ read $ T.unpack value
"float" -> return $ xFloat () $ read $ T.unpack value
"double" -> return $ xDouble () $ read $ T.unpack value
"string" -> return $ xString () $ T.unpack value
_ -> mzero
| Just (String "exp") <- H.lookup "_type" hh
, Just (String "var") <- H.lookup "exp" hh
, Just jName <- H.lookup "var" hh
= do name <- parseJSON jName
return $ XVar () name
| Just (String "exp") <- H.lookup "_type" hh
, Just (String "lam") <- H.lookup "exp" hh
, Just jBinder <- H.lookup "binder" hh
, Just jBody <- H.lookup "body" hh
= do binder <- parseJSON jBinder
body <- parseJSON jBody
return $ XVal () (VLam () binder body)
| Just (String "exp") <- H.lookup "_type" hh
, Just (String "sop") <- H.lookup "exp" hh
, Just (String ssop) <- H.lookup "sop" hh
, Just jArgs <- H.lookup "args" hh
, Just sop <- scalarOpOfName (T.unpack ssop)
= do args <- parseJSON jArgs
return $ XOp () sop args
parseJSON _ = mzero
nameOfScalarOp :: ScalarOp -> String
nameOfScalarOp sop
| [name] <- [ name | (sop', name) <- sopNames
, sop == sop' ]
= name
| SopRow i <- sop
= "row" ++ show i
| SopGet i j <- sop
= "get" ++ show i ++ "_" ++ show j
| otherwise
= error "repa-query.nameOfScalarOp: no match"
scalarOpOfName :: String -> Maybe ScalarOp
scalarOpOfName ss
| [sop] <- [ sop | (sop, name') <- sopNames
, ss == name' ]
= Just sop
| Just ds <- L.stripPrefix "row" ss
, all isDigit ds, length ds > 0
= Just $ SopRow (read ds)
| Just ds <- L.stripPrefix "get" ss
, (ds1, '_' : ds2) <- L.span isDigit ds
, all isDigit ds1, length ds1 > 0
, all isDigit ds2, length ds2 > 0
= Just $ SopGet (read ds1) (read ds2)
| otherwise
= Nothing
sopNames :: [(ScalarOp, String)]
sopNames
[ (SopNeg, "neg")
, (SopAbs, "abs")
, (SopSignum, "signum")
, (SopAdd, "add")
, (SopSub, "sub")
, (SopMul, "mul")
, (SopDiv, "div")
, (SopEq, "eq")
, (SopNeq, "neq")
, (SopGt, "gt")
, (SopGe, "ge")
, (SopLt, "lt")
, (SopLe, "le")
, (SopStringOfDate, "stringOfDate")
, (SopYearOfDate, "yearOfDate")
, (SopMonthOfDate, "monthOfDate")
, (SopDayOfDate, "dayOfDate")]
text :: Text -> Text
text x = x
|
3330158fd22d219d4f0fd66a9dd9bb83ab38301ff3f396b98a221c246abbc753 | mejgun/haskell-tdlib | InlineQueryResult.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Data.InlineQueryResult where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.Animation as Animation
import qualified TD.Data.Audio as Audio
import qualified TD.Data.Contact as Contact
import qualified TD.Data.Document as Document
import qualified TD.Data.Game as Game
import qualified TD.Data.Location as Location
import qualified TD.Data.Photo as Photo
import qualified TD.Data.Sticker as Sticker
import qualified TD.Data.Thumbnail as Thumbnail
import qualified TD.Data.Venue as Venue
import qualified TD.Data.Video as Video
import qualified TD.Data.VoiceNote as VoiceNote
import qualified Utils as U
-- | Represents a single result of an inline query
--
-- NOTE: the constructors deliberately share record field names
-- (@thumbnail@, @title@, @description@, @_id@, ...); every field is
-- 'Maybe' because TDLib may omit any of them on the wire.
data InlineQueryResult
  = -- | Represents a link to an article or web page
    InlineQueryResultArticle
      { -- | Result thumbnail in JPEG format; may be null
        thumbnail :: Maybe Thumbnail.Thumbnail,
        -- |
        description :: Maybe String,
        -- | Title of the result
        title :: Maybe String,
        -- | True, if the URL must be not shown
        hide_url :: Maybe Bool,
        -- | URL of the result, if it exists
        url :: Maybe String,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents a user contact
    InlineQueryResultContact
      { -- | Result thumbnail in JPEG format; may be null
        thumbnail :: Maybe Thumbnail.Thumbnail,
        -- | A user contact
        contact :: Maybe Contact.Contact,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents a point on the map
    InlineQueryResultLocation
      { -- | Result thumbnail in JPEG format; may be null
        thumbnail :: Maybe Thumbnail.Thumbnail,
        -- | Title of the result
        title :: Maybe String,
        -- | Location result
        location :: Maybe Location.Location,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents information about a venue
    InlineQueryResultVenue
      { -- | Result thumbnail in JPEG format; may be null
        thumbnail :: Maybe Thumbnail.Thumbnail,
        -- | Venue result
        venue :: Maybe Venue.Venue,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents information about a game
    InlineQueryResultGame
      { -- | Game result
        game :: Maybe Game.Game,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents an animation file
    InlineQueryResultAnimation
      { -- | Animation title
        title :: Maybe String,
        -- | Animation file
        animation :: Maybe Animation.Animation,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents an audio file
    InlineQueryResultAudio
      { -- | Audio file
        audio :: Maybe Audio.Audio,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents a document
    InlineQueryResultDocument
      { -- |
        description :: Maybe String,
        -- | Document title
        title :: Maybe String,
        -- | Document
        document :: Maybe Document.Document,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents a photo
    InlineQueryResultPhoto
      { -- |
        description :: Maybe String,
        -- | Title of the result, if known
        title :: Maybe String,
        -- | Photo
        photo :: Maybe Photo.Photo,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents a sticker
    InlineQueryResultSticker
      { -- | Sticker
        sticker :: Maybe Sticker.Sticker,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents a video
    InlineQueryResultVideo
      { -- |
        description :: Maybe String,
        -- | Title of the video
        title :: Maybe String,
        -- | Video
        video :: Maybe Video.Video,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  | -- | Represents a voice note
    InlineQueryResultVoiceNote
      { -- | Title of the voice note
        title :: Maybe String,
        -- | Voice note
        voice_note :: Maybe VoiceNote.VoiceNote,
        -- | Unique identifier of the query result
        _id :: Maybe String
      }
  deriving (Eq)
-- Custom Show: renders the constructor name followed by its fields,
-- formatted with the U.cc / U.p helpers from Utils (one clause per
-- constructor, all mechanical).
instance Show InlineQueryResult where
  show
    InlineQueryResultArticle
      { thumbnail = thumbnail_,
        description = description_,
        title = title_,
        hide_url = hide_url_,
        url = url_,
        _id = _id_
      } =
      "InlineQueryResultArticle"
        ++ U.cc
          [ U.p "thumbnail" thumbnail_,
            U.p "description" description_,
            U.p "title" title_,
            U.p "hide_url" hide_url_,
            U.p "url" url_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultContact
      { thumbnail = thumbnail_,
        contact = contact_,
        _id = _id_
      } =
      "InlineQueryResultContact"
        ++ U.cc
          [ U.p "thumbnail" thumbnail_,
            U.p "contact" contact_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultLocation
      { thumbnail = thumbnail_,
        title = title_,
        location = location_,
        _id = _id_
      } =
      "InlineQueryResultLocation"
        ++ U.cc
          [ U.p "thumbnail" thumbnail_,
            U.p "title" title_,
            U.p "location" location_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultVenue
      { thumbnail = thumbnail_,
        venue = venue_,
        _id = _id_
      } =
      "InlineQueryResultVenue"
        ++ U.cc
          [ U.p "thumbnail" thumbnail_,
            U.p "venue" venue_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultGame
      { game = game_,
        _id = _id_
      } =
      "InlineQueryResultGame"
        ++ U.cc
          [ U.p "game" game_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultAnimation
      { title = title_,
        animation = animation_,
        _id = _id_
      } =
      "InlineQueryResultAnimation"
        ++ U.cc
          [ U.p "title" title_,
            U.p "animation" animation_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultAudio
      { audio = audio_,
        _id = _id_
      } =
      "InlineQueryResultAudio"
        ++ U.cc
          [ U.p "audio" audio_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultDocument
      { description = description_,
        title = title_,
        document = document_,
        _id = _id_
      } =
      "InlineQueryResultDocument"
        ++ U.cc
          [ U.p "description" description_,
            U.p "title" title_,
            U.p "document" document_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultPhoto
      { description = description_,
        title = title_,
        photo = photo_,
        _id = _id_
      } =
      "InlineQueryResultPhoto"
        ++ U.cc
          [ U.p "description" description_,
            U.p "title" title_,
            U.p "photo" photo_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultSticker
      { sticker = sticker_,
        _id = _id_
      } =
      "InlineQueryResultSticker"
        ++ U.cc
          [ U.p "sticker" sticker_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultVideo
      { description = description_,
        title = title_,
        video = video_,
        _id = _id_
      } =
      "InlineQueryResultVideo"
        ++ U.cc
          [ U.p "description" description_,
            U.p "title" title_,
            U.p "video" video_,
            U.p "_id" _id_
          ]
  show
    InlineQueryResultVoiceNote
      { title = title_,
        voice_note = voice_note_,
        _id = _id_
      } =
      "InlineQueryResultVoiceNote"
        ++ U.cc
          [ U.p "title" title_,
            U.p "voice_note" voice_note_,
            U.p "_id" _id_
          ]
-- Dispatches on the TDLib "@type" tag; every field is read with the
-- optional accessor (A..:?), so missing JSON fields become Nothing.
-- Unknown tags and non-object values fall through to mempty.
-- Note: the JSON key "id" maps to the Haskell field _id.
instance T.FromJSON InlineQueryResult where
  parseJSON v@(T.Object obj) = do
    t <- obj A..: "@type" :: T.Parser String

    case t of
      "inlineQueryResultArticle" -> parseInlineQueryResultArticle v
      "inlineQueryResultContact" -> parseInlineQueryResultContact v
      "inlineQueryResultLocation" -> parseInlineQueryResultLocation v
      "inlineQueryResultVenue" -> parseInlineQueryResultVenue v
      "inlineQueryResultGame" -> parseInlineQueryResultGame v
      "inlineQueryResultAnimation" -> parseInlineQueryResultAnimation v
      "inlineQueryResultAudio" -> parseInlineQueryResultAudio v
      "inlineQueryResultDocument" -> parseInlineQueryResultDocument v
      "inlineQueryResultPhoto" -> parseInlineQueryResultPhoto v
      "inlineQueryResultSticker" -> parseInlineQueryResultSticker v
      "inlineQueryResultVideo" -> parseInlineQueryResultVideo v
      "inlineQueryResultVoiceNote" -> parseInlineQueryResultVoiceNote v
      _ -> mempty
    where
      parseInlineQueryResultArticle :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultArticle = A.withObject "InlineQueryResultArticle" $ \o -> do
        thumbnail_ <- o A..:? "thumbnail"
        description_ <- o A..:? "description"
        title_ <- o A..:? "title"
        hide_url_ <- o A..:? "hide_url"
        url_ <- o A..:? "url"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultArticle {thumbnail = thumbnail_, description = description_, title = title_, hide_url = hide_url_, url = url_, _id = _id_}

      parseInlineQueryResultContact :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultContact = A.withObject "InlineQueryResultContact" $ \o -> do
        thumbnail_ <- o A..:? "thumbnail"
        contact_ <- o A..:? "contact"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultContact {thumbnail = thumbnail_, contact = contact_, _id = _id_}

      parseInlineQueryResultLocation :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultLocation = A.withObject "InlineQueryResultLocation" $ \o -> do
        thumbnail_ <- o A..:? "thumbnail"
        title_ <- o A..:? "title"
        location_ <- o A..:? "location"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultLocation {thumbnail = thumbnail_, title = title_, location = location_, _id = _id_}

      parseInlineQueryResultVenue :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultVenue = A.withObject "InlineQueryResultVenue" $ \o -> do
        thumbnail_ <- o A..:? "thumbnail"
        venue_ <- o A..:? "venue"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultVenue {thumbnail = thumbnail_, venue = venue_, _id = _id_}

      parseInlineQueryResultGame :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultGame = A.withObject "InlineQueryResultGame" $ \o -> do
        game_ <- o A..:? "game"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultGame {game = game_, _id = _id_}

      parseInlineQueryResultAnimation :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultAnimation = A.withObject "InlineQueryResultAnimation" $ \o -> do
        title_ <- o A..:? "title"
        animation_ <- o A..:? "animation"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultAnimation {title = title_, animation = animation_, _id = _id_}

      parseInlineQueryResultAudio :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultAudio = A.withObject "InlineQueryResultAudio" $ \o -> do
        audio_ <- o A..:? "audio"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultAudio {audio = audio_, _id = _id_}

      parseInlineQueryResultDocument :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultDocument = A.withObject "InlineQueryResultDocument" $ \o -> do
        description_ <- o A..:? "description"
        title_ <- o A..:? "title"
        document_ <- o A..:? "document"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultDocument {description = description_, title = title_, document = document_, _id = _id_}

      parseInlineQueryResultPhoto :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultPhoto = A.withObject "InlineQueryResultPhoto" $ \o -> do
        description_ <- o A..:? "description"
        title_ <- o A..:? "title"
        photo_ <- o A..:? "photo"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultPhoto {description = description_, title = title_, photo = photo_, _id = _id_}

      parseInlineQueryResultSticker :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultSticker = A.withObject "InlineQueryResultSticker" $ \o -> do
        sticker_ <- o A..:? "sticker"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultSticker {sticker = sticker_, _id = _id_}

      parseInlineQueryResultVideo :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultVideo = A.withObject "InlineQueryResultVideo" $ \o -> do
        description_ <- o A..:? "description"
        title_ <- o A..:? "title"
        video_ <- o A..:? "video"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultVideo {description = description_, title = title_, video = video_, _id = _id_}

      parseInlineQueryResultVoiceNote :: A.Value -> T.Parser InlineQueryResult
      parseInlineQueryResultVoiceNote = A.withObject "InlineQueryResultVoiceNote" $ \o -> do
        title_ <- o A..:? "title"
        voice_note_ <- o A..:? "voice_note"
        _id_ <- o A..:? "id"
        return $ InlineQueryResultVoiceNote {title = title_, voice_note = voice_note_, _id = _id_}
  parseJSON _ = mempty
-- Serialises back to a TDLib object, re-adding the "@type" tag that
-- FromJSON dispatched on. Note the Haskell field _id is written out
-- under the JSON key "id" (mirror of the parser above).
instance T.ToJSON InlineQueryResult where
  toJSON
    InlineQueryResultArticle
      { thumbnail = thumbnail_,
        description = description_,
        title = title_,
        hide_url = hide_url_,
        url = url_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultArticle",
          "thumbnail" A..= thumbnail_,
          "description" A..= description_,
          "title" A..= title_,
          "hide_url" A..= hide_url_,
          "url" A..= url_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultContact
      { thumbnail = thumbnail_,
        contact = contact_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultContact",
          "thumbnail" A..= thumbnail_,
          "contact" A..= contact_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultLocation
      { thumbnail = thumbnail_,
        title = title_,
        location = location_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultLocation",
          "thumbnail" A..= thumbnail_,
          "title" A..= title_,
          "location" A..= location_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultVenue
      { thumbnail = thumbnail_,
        venue = venue_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultVenue",
          "thumbnail" A..= thumbnail_,
          "venue" A..= venue_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultGame
      { game = game_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultGame",
          "game" A..= game_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultAnimation
      { title = title_,
        animation = animation_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultAnimation",
          "title" A..= title_,
          "animation" A..= animation_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultAudio
      { audio = audio_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultAudio",
          "audio" A..= audio_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultDocument
      { description = description_,
        title = title_,
        document = document_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultDocument",
          "description" A..= description_,
          "title" A..= title_,
          "document" A..= document_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultPhoto
      { description = description_,
        title = title_,
        photo = photo_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultPhoto",
          "description" A..= description_,
          "title" A..= title_,
          "photo" A..= photo_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultSticker
      { sticker = sticker_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultSticker",
          "sticker" A..= sticker_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultVideo
      { description = description_,
        title = title_,
        video = video_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultVideo",
          "description" A..= description_,
          "title" A..= title_,
          "video" A..= video_,
          "id" A..= _id_
        ]
  toJSON
    InlineQueryResultVoiceNote
      { title = title_,
        voice_note = voice_note_,
        _id = _id_
      } =
      A.object
        [ "@type" A..= T.String "inlineQueryResultVoiceNote",
          "title" A..= title_,
          "voice_note" A..= voice_note_,
          "id" A..= _id_
        ]
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/dc380d18d49eaadc386a81dc98af2ce00f8797c2/src/TD/Data/InlineQueryResult.hs | haskell | # LANGUAGE OverloadedStrings #
|
| Represents a single result of an inline query
| Represents a link to an article or web page
| Result thumbnail in JPEG format; may be null
|
| Title of the result
| True, if the URL must be not shown
| URL of the result, if it exists
| Unique identifier of the query result
| Represents a user contact
| Result thumbnail in JPEG format; may be null
| A user contact
| Unique identifier of the query result
| Represents a point on the map
| Result thumbnail in JPEG format; may be null
| Title of the result
| Location result
| Unique identifier of the query result
| Represents information about a venue
| Result thumbnail in JPEG format; may be null
| Venue result
| Unique identifier of the query result
| Represents information about a game
| Game result
| Unique identifier of the query result
| Represents an animation file
| Animation title
| Animation file
| Unique identifier of the query result
| Represents an audio file
| Audio file
| Unique identifier of the query result
| Represents a document
|
| Document title
| Document
| Unique identifier of the query result
| Represents a photo
|
| Title of the result, if known
| Photo
| Unique identifier of the query result
| Represents a sticker
| Sticker
| Unique identifier of the query result
| Represents a video
|
| Title of the video
| Video
| Unique identifier of the query result
| Represents a voice note
| Title of the voice note
| Voice note
| Unique identifier of the query result |
module TD.Data.InlineQueryResult where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.Animation as Animation
import qualified TD.Data.Audio as Audio
import qualified TD.Data.Contact as Contact
import qualified TD.Data.Document as Document
import qualified TD.Data.Game as Game
import qualified TD.Data.Location as Location
import qualified TD.Data.Photo as Photo
import qualified TD.Data.Sticker as Sticker
import qualified TD.Data.Thumbnail as Thumbnail
import qualified TD.Data.Venue as Venue
import qualified TD.Data.Video as Video
import qualified TD.Data.VoiceNote as VoiceNote
import qualified Utils as U
data InlineQueryResult
InlineQueryResultArticle
thumbnail :: Maybe Thumbnail.Thumbnail,
description :: Maybe String,
title :: Maybe String,
hide_url :: Maybe Bool,
url :: Maybe String,
_id :: Maybe String
}
InlineQueryResultContact
thumbnail :: Maybe Thumbnail.Thumbnail,
contact :: Maybe Contact.Contact,
_id :: Maybe String
}
InlineQueryResultLocation
thumbnail :: Maybe Thumbnail.Thumbnail,
title :: Maybe String,
location :: Maybe Location.Location,
_id :: Maybe String
}
InlineQueryResultVenue
thumbnail :: Maybe Thumbnail.Thumbnail,
venue :: Maybe Venue.Venue,
_id :: Maybe String
}
InlineQueryResultGame
game :: Maybe Game.Game,
_id :: Maybe String
}
InlineQueryResultAnimation
title :: Maybe String,
animation :: Maybe Animation.Animation,
_id :: Maybe String
}
InlineQueryResultAudio
audio :: Maybe Audio.Audio,
_id :: Maybe String
}
InlineQueryResultDocument
description :: Maybe String,
title :: Maybe String,
document :: Maybe Document.Document,
_id :: Maybe String
}
InlineQueryResultPhoto
description :: Maybe String,
title :: Maybe String,
photo :: Maybe Photo.Photo,
_id :: Maybe String
}
InlineQueryResultSticker
sticker :: Maybe Sticker.Sticker,
_id :: Maybe String
}
InlineQueryResultVideo
description :: Maybe String,
title :: Maybe String,
video :: Maybe Video.Video,
_id :: Maybe String
}
InlineQueryResultVoiceNote
title :: Maybe String,
voice_note :: Maybe VoiceNote.VoiceNote,
_id :: Maybe String
}
deriving (Eq)
instance Show InlineQueryResult where
show
InlineQueryResultArticle
{ thumbnail = thumbnail_,
description = description_,
title = title_,
hide_url = hide_url_,
url = url_,
_id = _id_
} =
"InlineQueryResultArticle"
++ U.cc
[ U.p "thumbnail" thumbnail_,
U.p "description" description_,
U.p "title" title_,
U.p "hide_url" hide_url_,
U.p "url" url_,
U.p "_id" _id_
]
show
InlineQueryResultContact
{ thumbnail = thumbnail_,
contact = contact_,
_id = _id_
} =
"InlineQueryResultContact"
++ U.cc
[ U.p "thumbnail" thumbnail_,
U.p "contact" contact_,
U.p "_id" _id_
]
show
InlineQueryResultLocation
{ thumbnail = thumbnail_,
title = title_,
location = location_,
_id = _id_
} =
"InlineQueryResultLocation"
++ U.cc
[ U.p "thumbnail" thumbnail_,
U.p "title" title_,
U.p "location" location_,
U.p "_id" _id_
]
show
InlineQueryResultVenue
{ thumbnail = thumbnail_,
venue = venue_,
_id = _id_
} =
"InlineQueryResultVenue"
++ U.cc
[ U.p "thumbnail" thumbnail_,
U.p "venue" venue_,
U.p "_id" _id_
]
show
InlineQueryResultGame
{ game = game_,
_id = _id_
} =
"InlineQueryResultGame"
++ U.cc
[ U.p "game" game_,
U.p "_id" _id_
]
show
InlineQueryResultAnimation
{ title = title_,
animation = animation_,
_id = _id_
} =
"InlineQueryResultAnimation"
++ U.cc
[ U.p "title" title_,
U.p "animation" animation_,
U.p "_id" _id_
]
show
InlineQueryResultAudio
{ audio = audio_,
_id = _id_
} =
"InlineQueryResultAudio"
++ U.cc
[ U.p "audio" audio_,
U.p "_id" _id_
]
show
InlineQueryResultDocument
{ description = description_,
title = title_,
document = document_,
_id = _id_
} =
"InlineQueryResultDocument"
++ U.cc
[ U.p "description" description_,
U.p "title" title_,
U.p "document" document_,
U.p "_id" _id_
]
show
InlineQueryResultPhoto
{ description = description_,
title = title_,
photo = photo_,
_id = _id_
} =
"InlineQueryResultPhoto"
++ U.cc
[ U.p "description" description_,
U.p "title" title_,
U.p "photo" photo_,
U.p "_id" _id_
]
show
InlineQueryResultSticker
{ sticker = sticker_,
_id = _id_
} =
"InlineQueryResultSticker"
++ U.cc
[ U.p "sticker" sticker_,
U.p "_id" _id_
]
show
InlineQueryResultVideo
{ description = description_,
title = title_,
video = video_,
_id = _id_
} =
"InlineQueryResultVideo"
++ U.cc
[ U.p "description" description_,
U.p "title" title_,
U.p "video" video_,
U.p "_id" _id_
]
show
InlineQueryResultVoiceNote
{ title = title_,
voice_note = voice_note_,
_id = _id_
} =
"InlineQueryResultVoiceNote"
++ U.cc
[ U.p "title" title_,
U.p "voice_note" voice_note_,
U.p "_id" _id_
]
instance T.FromJSON InlineQueryResult where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"inlineQueryResultArticle" -> parseInlineQueryResultArticle v
"inlineQueryResultContact" -> parseInlineQueryResultContact v
"inlineQueryResultLocation" -> parseInlineQueryResultLocation v
"inlineQueryResultVenue" -> parseInlineQueryResultVenue v
"inlineQueryResultGame" -> parseInlineQueryResultGame v
"inlineQueryResultAnimation" -> parseInlineQueryResultAnimation v
"inlineQueryResultAudio" -> parseInlineQueryResultAudio v
"inlineQueryResultDocument" -> parseInlineQueryResultDocument v
"inlineQueryResultPhoto" -> parseInlineQueryResultPhoto v
"inlineQueryResultSticker" -> parseInlineQueryResultSticker v
"inlineQueryResultVideo" -> parseInlineQueryResultVideo v
"inlineQueryResultVoiceNote" -> parseInlineQueryResultVoiceNote v
_ -> mempty
where
parseInlineQueryResultArticle :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultArticle = A.withObject "InlineQueryResultArticle" $ \o -> do
thumbnail_ <- o A..:? "thumbnail"
description_ <- o A..:? "description"
title_ <- o A..:? "title"
hide_url_ <- o A..:? "hide_url"
url_ <- o A..:? "url"
_id_ <- o A..:? "id"
return $ InlineQueryResultArticle {thumbnail = thumbnail_, description = description_, title = title_, hide_url = hide_url_, url = url_, _id = _id_}
parseInlineQueryResultContact :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultContact = A.withObject "InlineQueryResultContact" $ \o -> do
thumbnail_ <- o A..:? "thumbnail"
contact_ <- o A..:? "contact"
_id_ <- o A..:? "id"
return $ InlineQueryResultContact {thumbnail = thumbnail_, contact = contact_, _id = _id_}
parseInlineQueryResultLocation :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultLocation = A.withObject "InlineQueryResultLocation" $ \o -> do
thumbnail_ <- o A..:? "thumbnail"
title_ <- o A..:? "title"
location_ <- o A..:? "location"
_id_ <- o A..:? "id"
return $ InlineQueryResultLocation {thumbnail = thumbnail_, title = title_, location = location_, _id = _id_}
parseInlineQueryResultVenue :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultVenue = A.withObject "InlineQueryResultVenue" $ \o -> do
thumbnail_ <- o A..:? "thumbnail"
venue_ <- o A..:? "venue"
_id_ <- o A..:? "id"
return $ InlineQueryResultVenue {thumbnail = thumbnail_, venue = venue_, _id = _id_}
parseInlineQueryResultGame :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultGame = A.withObject "InlineQueryResultGame" $ \o -> do
game_ <- o A..:? "game"
_id_ <- o A..:? "id"
return $ InlineQueryResultGame {game = game_, _id = _id_}
parseInlineQueryResultAnimation :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultAnimation = A.withObject "InlineQueryResultAnimation" $ \o -> do
title_ <- o A..:? "title"
animation_ <- o A..:? "animation"
_id_ <- o A..:? "id"
return $ InlineQueryResultAnimation {title = title_, animation = animation_, _id = _id_}
parseInlineQueryResultAudio :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultAudio = A.withObject "InlineQueryResultAudio" $ \o -> do
audio_ <- o A..:? "audio"
_id_ <- o A..:? "id"
return $ InlineQueryResultAudio {audio = audio_, _id = _id_}
parseInlineQueryResultDocument :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultDocument = A.withObject "InlineQueryResultDocument" $ \o -> do
description_ <- o A..:? "description"
title_ <- o A..:? "title"
document_ <- o A..:? "document"
_id_ <- o A..:? "id"
return $ InlineQueryResultDocument {description = description_, title = title_, document = document_, _id = _id_}
parseInlineQueryResultPhoto :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultPhoto = A.withObject "InlineQueryResultPhoto" $ \o -> do
description_ <- o A..:? "description"
title_ <- o A..:? "title"
photo_ <- o A..:? "photo"
_id_ <- o A..:? "id"
return $ InlineQueryResultPhoto {description = description_, title = title_, photo = photo_, _id = _id_}
parseInlineQueryResultSticker :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultSticker = A.withObject "InlineQueryResultSticker" $ \o -> do
sticker_ <- o A..:? "sticker"
_id_ <- o A..:? "id"
return $ InlineQueryResultSticker {sticker = sticker_, _id = _id_}
parseInlineQueryResultVideo :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultVideo = A.withObject "InlineQueryResultVideo" $ \o -> do
description_ <- o A..:? "description"
title_ <- o A..:? "title"
video_ <- o A..:? "video"
_id_ <- o A..:? "id"
return $ InlineQueryResultVideo {description = description_, title = title_, video = video_, _id = _id_}
parseInlineQueryResultVoiceNote :: A.Value -> T.Parser InlineQueryResult
parseInlineQueryResultVoiceNote = A.withObject "InlineQueryResultVoiceNote" $ \o -> do
title_ <- o A..:? "title"
voice_note_ <- o A..:? "voice_note"
_id_ <- o A..:? "id"
return $ InlineQueryResultVoiceNote {title = title_, voice_note = voice_note_, _id = _id_}
parseJSON _ = mempty
instance T.ToJSON InlineQueryResult where
toJSON
InlineQueryResultArticle
{ thumbnail = thumbnail_,
description = description_,
title = title_,
hide_url = hide_url_,
url = url_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultArticle",
"thumbnail" A..= thumbnail_,
"description" A..= description_,
"title" A..= title_,
"hide_url" A..= hide_url_,
"url" A..= url_,
"id" A..= _id_
]
toJSON
InlineQueryResultContact
{ thumbnail = thumbnail_,
contact = contact_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultContact",
"thumbnail" A..= thumbnail_,
"contact" A..= contact_,
"id" A..= _id_
]
toJSON
InlineQueryResultLocation
{ thumbnail = thumbnail_,
title = title_,
location = location_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultLocation",
"thumbnail" A..= thumbnail_,
"title" A..= title_,
"location" A..= location_,
"id" A..= _id_
]
toJSON
InlineQueryResultVenue
{ thumbnail = thumbnail_,
venue = venue_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultVenue",
"thumbnail" A..= thumbnail_,
"venue" A..= venue_,
"id" A..= _id_
]
toJSON
InlineQueryResultGame
{ game = game_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultGame",
"game" A..= game_,
"id" A..= _id_
]
toJSON
InlineQueryResultAnimation
{ title = title_,
animation = animation_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultAnimation",
"title" A..= title_,
"animation" A..= animation_,
"id" A..= _id_
]
toJSON
InlineQueryResultAudio
{ audio = audio_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultAudio",
"audio" A..= audio_,
"id" A..= _id_
]
toJSON
InlineQueryResultDocument
{ description = description_,
title = title_,
document = document_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultDocument",
"description" A..= description_,
"title" A..= title_,
"document" A..= document_,
"id" A..= _id_
]
toJSON
InlineQueryResultPhoto
{ description = description_,
title = title_,
photo = photo_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultPhoto",
"description" A..= description_,
"title" A..= title_,
"photo" A..= photo_,
"id" A..= _id_
]
toJSON
InlineQueryResultSticker
{ sticker = sticker_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultSticker",
"sticker" A..= sticker_,
"id" A..= _id_
]
toJSON
InlineQueryResultVideo
{ description = description_,
title = title_,
video = video_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultVideo",
"description" A..= description_,
"title" A..= title_,
"video" A..= video_,
"id" A..= _id_
]
toJSON
InlineQueryResultVoiceNote
{ title = title_,
voice_note = voice_note_,
_id = _id_
} =
A.object
[ "@type" A..= T.String "inlineQueryResultVoiceNote",
"title" A..= title_,
"voice_note" A..= voice_note_,
"id" A..= _id_
]
|
9158a360be6c19dd124c15536fac8d9258da37be8f0a21c2fd57713d562a1b6d | plumatic/grab-bag | logging.clj | (ns service.logging
(:use plumbing.core)
(:require
[clojure.pprint :as pprint]
[clojure.string :as str]
[clojure.tools.logging :as clj-log]
[plumbing.html-gen :as html-gen]
[plumbing.logging :as log]
[plumbing.parallel :as parallel]
[plumbing.resource :as resource]
[store.mongo :as mongo]
[service.observer :as observer]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Private: helpers
;; TODO: way to configure target email, either service-wide and/or via :to in log entry.
;; Composite key fields for aggregated log rows in mongo; order must stay in
;; sync with `log-agg-vals`, which produces the matching values.
(def +log-agg-keys+ [:date :service :level :location :type :ex-location])
(defn location
  "Human-readable source location \"<ns-or-file>_<line>\" for map `e`
   (a :log or :exception sub-map of a log entry); nil when `e` is nil."
  [e]
  (when-not (nil? e)
    (str (or (:ns e) (:file e)) "_" (:line e))))
(defn log-agg-vals
  "Aggregation-key values for `log-entry`, in the same order as
   +log-agg-keys+: [date service level location type ex-location].
   The :type is taken from the :log map, falling back to the :exception map."
  [log-entry]
  [(observer/today)
   (:service log-entry)
   (:level log-entry)
   (location (:log log-entry))
   (or (:type (:log log-entry)) (:type (:exception log-entry)))
   (location (:exception log-entry))])
(defn entry-recorder
  "Return a one-argument function that conjes its argument onto `entry-atom`."
  [entry-atom]
  (partial swap! entry-atom conj))
;; Pulled out as a named fn (per the name) so the catch clause in
;; `process-entries!` can delegate here; presumably works around Clojure's
;; restriction on recur inside try/catch — confirm against callers.
(defn work-around-recur-in-catch [t e]
  (log/errorf t "Error recording log entries: %s" (pr-str e)))
(defn trim-entry
  "Stringify and cap (at 10k chars) the unbounded string fields of a log
   entry map, leaving everything else untouched. Paths that are absent or
   nil are left alone."
  [e]
  (letfn [(trim-str [m ks]
            (if-let [s (get-in m ks)]
              (let [s (str s)]
                (assoc-in m ks (subs s 0 (min (count s) 10000))))
              m))]
    (reduce trim-str
            e
            ;; the only unbounded string fields, see plumbing.logging/throwable->map
            [[:log :message]
             [:exception :message]
             [:exception :data]
             [:exception :data-chain]])))
(let [max-entries 10000]
  (defn trim-entries
    "Cap a batch of log entries at 10k, logging an error (not throwing) when
     entries had to be dropped."
    [es]
    (when (> (count es) max-entries)
      ;; FIX: typo "Declaing" -> "Declaring" in the log message
      (log/errorf "Declaring log bankruptcy; %d errors since last round" (count es)))
    (take max-entries es)))
(defn process-entries!
  "Grab entries from the provided source and apply the provided
   function to their trimmed forms."
  ;; `raw?` selects the error path on failure: true logs straight through
  ;; clojure.tools.logging, false goes through our own logging helper
  ;; (avoiding infinite recursion when the logging pipeline itself fails).
  [entry-atom raw? f]
  (let [e (get-and-set! entry-atom nil)] ;; atomically drain pending entries
    (try (when (seq e)
           ;; entries were conj'd newest-first; reverse to chronological order
           (f (trim-entries (reverse e))))
         (catch Throwable t
           (if raw?
             (clj-log/errorf t "Error sending email about errors: %s" (pr-str e))
             (work-around-recur-in-catch t e))))))
TODO : log bankruptcy , long logs .
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Private: Writing to mongo in aggregate and stream
(defn update-agg-logs
"Update count and latest in the agg-logs, given these new entries"
[agg-logs log-entries]
(doseq [[agg entries] (group-by log-agg-vals log-entries)]
(assert (= (count agg) (count +log-agg-keys+)))
(mongo/update agg-logs (zipmap +log-agg-keys+ agg)
{:latest (trim-entry (last entries))}
{:count (count entries)})))
(defn add-to-stream-log [stream-logs log-entries]
"Add these entries to our running stream / capped collection"
(doseq [entry log-entries]
(mongo/append stream-logs (trim-entry entry))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Private: github urls
(defn- ns->file
"Partially cribbed from clojure.core/root-resource"
[^String n]
(str (.. n
(replace \- \_)
(replace \. \/)) ".clj"))
(defn- class->ns [^String n]
(when n
(let [parts (.split n "\\$")]
(when (and (> (count parts) 1)
(= (first parts) (.toLowerCase ^String (first parts))))
(first parts)))))
(defn base-service-name [^String service-name]
(.replaceAll service-name "-i-.*" ""))
(defn- deploy-branch [service-name]
(str "DEPLOYED-" (base-service-name service-name)))
(defn- project-path [^String service-name ^String namespace]
(let [first-segment (first (.split namespace "\\."))]
(str
(cond (.startsWith service-name first-segment) "service"
(#{"schema" "plumbing" "hiphip" "dommy" "fnhouse"} first-segment) "open-source"
:else "grabbag")
"/" first-segment)))
(defn- namespace-path [service-name namespace]
(when namespace
(when-let [project-path (project-path service-name namespace)]
(str project-path "/src/" (ns->file namespace)))))
(defn github-link [service-name namespace line]
(when-let [path (namespace-path service-name namespace)]
(format "-bag/blob/%s/%s%s"
(deploy-branch service-name) path (if line (str "#L" line) ""))))
(defn github-linkify [service-name namespace line]
(if-let [link (github-link service-name namespace line)]
[:a {:href link} line]
line))
(defn github-linkify-frame [service-name frame]
(assoc frame :line
(github-linkify service-name (-> frame :class class->ns) (:line frame))))
(defn- split-last [^String s ^String sub]
(let [i (.lastIndexOf s sub)]
(if (>= i 0)
(remove empty? [(subs s 0 i) (subs s (inc i))])
[sub])))
(defn github-linkify-location [service-name location]
(let [[ns line] (split-last location "_")]
(if-let [link (github-link service-name ns line)]
(html-gen/render [:a {:href link} location])
location)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Private: Error emails
(defn best-frame
"Try to extract the most informative exception stack trace frame"
[stack-trace]
(or (first
(filter
(fnk [^String file ^String class]
(and file (.contains file "clj")
(not (.startsWith class "clojure."))
(not (.startsWith class "plumbing.core"))))
stack-trace))
(first stack-trace)))
(defn extract-message
"Extract the best message from a log entry"
[e]
(or (not-empty (-> e :exception :message))
(-> e :log :message)
""))
(defn flatten-entry [e]
(concat (dissoc e :log :exception)
(for-map [[k v] (:log e)]
(keyword (str "log_" (name k))) v)
(for-map [[k v] (dissoc (:exception e) :stack-trace)]
(keyword (str "ex_" (name k))) v)))
(defn inbox-teaser [sub-entries]
(str (count sub-entries) ": "
(->> sub-entries
(map extract-message)
(map #(subs % 0 (min 50 (count %))))
frequencies
(sort-by (comp - second))
(map (fn [[m c]] (format "%sx[%s]" c m)))
(str/join ", "))))
(defn entry-info [e service-name link]
(let [best-frame (best-frame (-> e :exception :stack-trace))]
{:log-ns (-> e :log :ns)
:log-line (github-linkify service-name (-> e :log :ns) (-> e :log :line))
:ex-class (-> best-frame :class)
:ex-line (:line (github-linkify-frame service-name best-frame))
:message [:a {:href link} (extract-message e)]}))
(defn summary-table [service-name indexed-entries]
[:table
{:border "1" :cellpadding "5" :cellspacing "0"}
(html-gen/table-rows
[:log-ns :log-line :ex-class :ex-line :message]
(for [[i e] indexed-entries]
(entry-info e service-name (str "#anchor" i))))])
(defn entry-details [service-name index entry]
[:div
(concat
[[:hr]
[:a {:name (str "anchor" index)} [:h3 (extract-message entry)]]
[:table
{:border "1" :cellpadding "3" :cellspacing "0" :font-size "8px"}
(html-gen/row-major-rows
(for [[k v] (sort-by first (flatten-entry entry))]
[k (if (coll? v) (with-out-str (pprint/pprint v)) v)]))]
[:br]]
(when-let [st (-> entry :exception :stack-trace)]
[[:h3 "stack trace"]
[:table
{:border "1" :cellpadding "3" :cellspacing "0" :font-size "6px"}
(html-gen/table-rows [:class :line :file :method] (map (partial github-linkify-frame service-name) st))]]))])
(defn email-entries
"Roll these entries together and send emails."
[service-name send-email error-email entries]
(doseq [[[email subject] sub-entries]
(group-by (comp (juxt :error-email :error-email-subject) :log) entries)
:let [indexed-entries (indexed (sort-by extract-message sub-entries))]]
(send-email
{:to (or email error-email)
:subject (format "%s: %s" service-name (or subject "exceptions"))
:html? true
:text (html-gen/render
[:div
[:div {:font-size "4px" :font-color "#dddddd"}
(inbox-teaser sub-entries)]
[:h2 "Summary"]
(summary-table service-name indexed-entries)
[:h2 "Entries"]
[:div
(for [[i e] indexed-entries]
(entry-details service-name i e))]])})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Public
(defnk logging-resource [[:instance service-name] log-data-store send-email {flush-freq 60}
{error-email "backend+"}
{agg-level :info} {log-level :warn} {email-level :error}]
(let [agg-logs (mongo/aggregate-collection log-data-store "agg-log" {:agg-keys +log-agg-keys+})
stream-logs (mongo/capped-collection log-data-store "log" {:max-mb 5000})
agg-atom (atom nil) log-atom (atom nil) email-atom (atom nil)
process (fn []
(process-entries! agg-atom false #(update-agg-logs agg-logs %))
(process-entries! log-atom false #(add-to-stream-log stream-logs %))
(process-entries! email-atom true #(email-entries
service-name send-email error-email %)))
exec (parallel/schedule-limited-work
{:f process
:submit-rate-ms (* flush-freq 1000)
:time-limit-ms (* flush-freq 1000)})]
(log/init!
service-name
[[(entry-recorder agg-atom) agg-level]
[(entry-recorder log-atom) log-level]
[(entry-recorder email-atom) email-level]])
(reify resource/PCloseable
(close [this]
(try (process) (catch Throwable e (clj-log/errorf e "Error processing final logs")))
(resource/close exec)))))
| null | https://raw.githubusercontent.com/plumatic/grab-bag/a15e943322fbbf6f00790ce5614ba6f90de1a9b5/lib/service/src/service/logging.clj | clojure |
Private: helpers
TODO: way to configure target email, either service-wide and/or via :to in log entry.
the only unbounded string fields, see plumbing.logging/throwable->map
Private: Writing to mongo in aggregate and stream
Private: github urls
Private: Error emails
Public | (ns service.logging
(:use plumbing.core)
(:require
[clojure.pprint :as pprint]
[clojure.string :as str]
[clojure.tools.logging :as clj-log]
[plumbing.html-gen :as html-gen]
[plumbing.logging :as log]
[plumbing.parallel :as parallel]
[plumbing.resource :as resource]
[store.mongo :as mongo]
[service.observer :as observer]))
(def +log-agg-keys+ [:date :service :level :location :type :ex-location])
(defn location [e]
(when e
(str (or (:ns e) (:file e)) "_" (:line e))))
(defn log-agg-vals [log-entry]
[(observer/today)
(:service log-entry)
(:level log-entry)
(location (:log log-entry))
(or (:type (:log log-entry)) (:type (:exception log-entry)))
(location (:exception log-entry))])
(defn entry-recorder [entry-atom]
(fn [e] (swap! entry-atom conj e)))
(defn work-around-recur-in-catch [t e]
(log/errorf t "Error recording log entries: %s" (pr-str e)))
(defn trim-entry [e]
(let [trim-str (fn [m ks]
(if-let [s (get-in m ks)]
(let [s (str s)]
(assoc-in m ks (.substring s 0 (min (count s) 10000))))
m))]
(-> e
(trim-str [:log :message])
(trim-str [:exception :message])
(trim-str [:exception :data])
(trim-str [:exception :data-chain]))))
(let [max-entries 10000]
(defn trim-entries [es]
(when (> (count es) max-entries)
(log/errorf "Declaing log bankruptcy; %d errors since last round" (count es)))
(take max-entries es)))
(defn process-entries!
"Grab entries from the provided source and apply the provided
function to their trimmed forms."
[entry-atom raw? f]
(let [e (get-and-set! entry-atom nil)]
(try (when (seq e)
(f (trim-entries (reverse e))))
(catch Throwable t
(if raw?
(clj-log/errorf t "Error sending email about errors: %s" (pr-str e))
(work-around-recur-in-catch t e))))))
TODO : log bankruptcy , long logs .
(defn update-agg-logs
"Update count and latest in the agg-logs, given these new entries"
[agg-logs log-entries]
(doseq [[agg entries] (group-by log-agg-vals log-entries)]
(assert (= (count agg) (count +log-agg-keys+)))
(mongo/update agg-logs (zipmap +log-agg-keys+ agg)
{:latest (trim-entry (last entries))}
{:count (count entries)})))
(defn add-to-stream-log [stream-logs log-entries]
"Add these entries to our running stream / capped collection"
(doseq [entry log-entries]
(mongo/append stream-logs (trim-entry entry))))
(defn- ns->file
"Partially cribbed from clojure.core/root-resource"
[^String n]
(str (.. n
(replace \- \_)
(replace \. \/)) ".clj"))
(defn- class->ns [^String n]
(when n
(let [parts (.split n "\\$")]
(when (and (> (count parts) 1)
(= (first parts) (.toLowerCase ^String (first parts))))
(first parts)))))
(defn base-service-name [^String service-name]
(.replaceAll service-name "-i-.*" ""))
(defn- deploy-branch [service-name]
(str "DEPLOYED-" (base-service-name service-name)))
(defn- project-path [^String service-name ^String namespace]
(let [first-segment (first (.split namespace "\\."))]
(str
(cond (.startsWith service-name first-segment) "service"
(#{"schema" "plumbing" "hiphip" "dommy" "fnhouse"} first-segment) "open-source"
:else "grabbag")
"/" first-segment)))
(defn- namespace-path [service-name namespace]
(when namespace
(when-let [project-path (project-path service-name namespace)]
(str project-path "/src/" (ns->file namespace)))))
(defn github-link [service-name namespace line]
(when-let [path (namespace-path service-name namespace)]
(format "-bag/blob/%s/%s%s"
(deploy-branch service-name) path (if line (str "#L" line) ""))))
(defn github-linkify [service-name namespace line]
(if-let [link (github-link service-name namespace line)]
[:a {:href link} line]
line))
(defn github-linkify-frame [service-name frame]
(assoc frame :line
(github-linkify service-name (-> frame :class class->ns) (:line frame))))
(defn- split-last [^String s ^String sub]
(let [i (.lastIndexOf s sub)]
(if (>= i 0)
(remove empty? [(subs s 0 i) (subs s (inc i))])
[sub])))
(defn github-linkify-location [service-name location]
(let [[ns line] (split-last location "_")]
(if-let [link (github-link service-name ns line)]
(html-gen/render [:a {:href link} location])
location)))
(defn best-frame
"Try to extract the most informative exception stack trace frame"
[stack-trace]
(or (first
(filter
(fnk [^String file ^String class]
(and file (.contains file "clj")
(not (.startsWith class "clojure."))
(not (.startsWith class "plumbing.core"))))
stack-trace))
(first stack-trace)))
(defn extract-message
"Extract the best message from a log entry"
[e]
(or (not-empty (-> e :exception :message))
(-> e :log :message)
""))
(defn flatten-entry [e]
(concat (dissoc e :log :exception)
(for-map [[k v] (:log e)]
(keyword (str "log_" (name k))) v)
(for-map [[k v] (dissoc (:exception e) :stack-trace)]
(keyword (str "ex_" (name k))) v)))
(defn inbox-teaser [sub-entries]
(str (count sub-entries) ": "
(->> sub-entries
(map extract-message)
(map #(subs % 0 (min 50 (count %))))
frequencies
(sort-by (comp - second))
(map (fn [[m c]] (format "%sx[%s]" c m)))
(str/join ", "))))
(defn entry-info [e service-name link]
(let [best-frame (best-frame (-> e :exception :stack-trace))]
{:log-ns (-> e :log :ns)
:log-line (github-linkify service-name (-> e :log :ns) (-> e :log :line))
:ex-class (-> best-frame :class)
:ex-line (:line (github-linkify-frame service-name best-frame))
:message [:a {:href link} (extract-message e)]}))
(defn summary-table [service-name indexed-entries]
[:table
{:border "1" :cellpadding "5" :cellspacing "0"}
(html-gen/table-rows
[:log-ns :log-line :ex-class :ex-line :message]
(for [[i e] indexed-entries]
(entry-info e service-name (str "#anchor" i))))])
(defn entry-details [service-name index entry]
[:div
(concat
[[:hr]
[:a {:name (str "anchor" index)} [:h3 (extract-message entry)]]
[:table
{:border "1" :cellpadding "3" :cellspacing "0" :font-size "8px"}
(html-gen/row-major-rows
(for [[k v] (sort-by first (flatten-entry entry))]
[k (if (coll? v) (with-out-str (pprint/pprint v)) v)]))]
[:br]]
(when-let [st (-> entry :exception :stack-trace)]
[[:h3 "stack trace"]
[:table
{:border "1" :cellpadding "3" :cellspacing "0" :font-size "6px"}
(html-gen/table-rows [:class :line :file :method] (map (partial github-linkify-frame service-name) st))]]))])
(defn email-entries
"Roll these entries together and send emails."
[service-name send-email error-email entries]
(doseq [[[email subject] sub-entries]
(group-by (comp (juxt :error-email :error-email-subject) :log) entries)
:let [indexed-entries (indexed (sort-by extract-message sub-entries))]]
(send-email
{:to (or email error-email)
:subject (format "%s: %s" service-name (or subject "exceptions"))
:html? true
:text (html-gen/render
[:div
[:div {:font-size "4px" :font-color "#dddddd"}
(inbox-teaser sub-entries)]
[:h2 "Summary"]
(summary-table service-name indexed-entries)
[:h2 "Entries"]
[:div
(for [[i e] indexed-entries]
(entry-details service-name i e))]])})))
(defnk logging-resource [[:instance service-name] log-data-store send-email {flush-freq 60}
{error-email "backend+"}
{agg-level :info} {log-level :warn} {email-level :error}]
(let [agg-logs (mongo/aggregate-collection log-data-store "agg-log" {:agg-keys +log-agg-keys+})
stream-logs (mongo/capped-collection log-data-store "log" {:max-mb 5000})
agg-atom (atom nil) log-atom (atom nil) email-atom (atom nil)
process (fn []
(process-entries! agg-atom false #(update-agg-logs agg-logs %))
(process-entries! log-atom false #(add-to-stream-log stream-logs %))
(process-entries! email-atom true #(email-entries
service-name send-email error-email %)))
exec (parallel/schedule-limited-work
{:f process
:submit-rate-ms (* flush-freq 1000)
:time-limit-ms (* flush-freq 1000)})]
(log/init!
service-name
[[(entry-recorder agg-atom) agg-level]
[(entry-recorder log-atom) log-level]
[(entry-recorder email-atom) email-level]])
(reify resource/PCloseable
(close [this]
(try (process) (catch Throwable e (clj-log/errorf e "Error processing final logs")))
(resource/close exec)))))
|
512da20aa684b2ac7573fc5de868df8dded439a10b8c7297894145217c4ed828 | softlab-ntua/bencherl | basic_utils_test.erl | Copyright ( C ) 2003 - 2014
%
This file is part of the Ceylan Erlang library .
%
% This library is free software: you can redistribute it and/or modify
% it under the terms of the GNU Lesser General Public License or
the GNU General Public License , as they are published by the Free Software
Foundation , either version 3 of these Licenses , or ( at your option )
% any later version.
% You can also redistribute it and/or modify it under the terms of the
Mozilla Public License , version 1.1 or later .
%
% This library is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License and the GNU General Public License
% for more details.
%
You should have received a copy of the GNU Lesser General Public
License , of the GNU General Public License and of the Mozilla Public License
% along with this library.
% If not, see </> and
% </>.
%
Author : ( )
% Unit tests for the basic utils toolbox.
%
% See the basic_utils.erl tested module.
%
-module(basic_utils_test).
% For run/0 export and al:
-include("test_facilities.hrl").
-spec check_process_specific_values( integer(), integer() ) ->
basic_utils:void().
check_process_specific_values( Min, Max ) ->
Self = self(),
F = fun() -> Self ! basic_utils:get_process_specific_value( Min, Max ) end,
[ spawn( F ) || _X <- lists:seq( 1, 10 ) ],
G = fun() ->
receive V ->
V
end
end,
[ test_facilities:display(
"Generating a process-specific value in [~B;~B[: ~p.",
[ Min, Max, G() ] ) || _Y <- lists:seq( 1, 10 ) ].
-spec run() -> no_return().
run() ->
test_facilities:start( ?MODULE ),
test_facilities:display( "Testing the display of a static test message." ),
test_facilities:display( "Testing the display of a ~s test message.",
[ dynamic ] ),
InitialTimestamp = basic_utils:get_timestamp(),
InitialPreciseTimestamp = basic_utils:get_precise_timestamp(),
test_facilities:display( "Timestamp is ~s.", [
basic_utils:get_textual_timestamp( InitialTimestamp ) ] ),
test_facilities:display( "Timestamp for path is ~s.", [
basic_utils:get_textual_timestamp_for_path( InitialTimestamp ) ] ),
TextualTimeStamp = "14/4/2011 18:48:51",
test_facilities:display( "Parsed timestamp for '~s' is ~p.", [
TextualTimeStamp,
basic_utils:string_to_timestamp( TextualTimeStamp ) ] ),
basic_utils:checkpoint( 1 ),
basic_utils:checkpoint( 2 ),
basic_utils:display( "standalone display" ),
basic_utils:display( "display ~s", [ "with a format string" ] ),
UnregisteredName = test_non_registered,
try basic_utils:get_registered_pid_for( UnregisteredName ) of
_Anything ->
throw( test_should_have_failed )
catch
{ neither_registered_locally_nor_globally, UnregisteredName } ->
ok
end,
not_registered = basic_utils:is_registered( UnregisteredName ),
RegisteredName = test_registered,
PidToRegister = self(),
basic_utils:register_as( PidToRegister, RegisteredName, global_only ),
try basic_utils:get_registered_pid_for( RegisteredName ) of
PidToRegister ->
ok
catch
Exception ->
throw( { test_should_have_succeeded, Exception } )
end,
case basic_utils:is_registered( RegisteredName ) of
not_registered ->
throw( { neither_registered_locally_nor_globally,
RegisteredName } );
Pid when is_pid(Pid) ->
ok
end,
FirstVersion = { 0, 0, 0 },
SecondVersion = { 0, 0, 1 },
ThirdVersion = { 0, 1, 0 },
FourthVersion = { 1, 0, 0 },
FifthVersion = { 1, 1, 1 },
first_bigger = basic_utils:compare_versions( SecondVersion, FirstVersion ),
first_bigger = basic_utils:compare_versions( ThirdVersion, SecondVersion ),
first_bigger = basic_utils:compare_versions( FifthVersion, FirstVersion ),
second_bigger = basic_utils:compare_versions( FirstVersion, FourthVersion ),
second_bigger = basic_utils:compare_versions( ThirdVersion, FourthVersion ),
second_bigger = basic_utils:compare_versions( SecondVersion, ThirdVersion ),
equal = basic_utils:compare_versions( FirstVersion, FirstVersion ),
equal = basic_utils:compare_versions( ThirdVersion, ThirdVersion ),
equal = basic_utils:compare_versions( FifthVersion, FifthVersion ),
test_facilities:display( "Comparisons of versions like ~s succeeded.",
[ text_utils:version_to_string(ThirdVersion) ] ),
FirstShortVersion = { 0, 0 },
SecondShortVersion = { 0, 1 },
ThirdShortVersion = { 1, 0 },
first_bigger = basic_utils:compare_versions( SecondShortVersion,
FirstShortVersion ),
first_bigger = basic_utils:compare_versions( ThirdShortVersion,
SecondShortVersion ),
first_bigger = basic_utils:compare_versions( ThirdShortVersion,
FirstShortVersion ),
second_bigger = basic_utils:compare_versions( FirstShortVersion,
SecondShortVersion ),
second_bigger = basic_utils:compare_versions( SecondShortVersion,
ThirdShortVersion ),
second_bigger = basic_utils:compare_versions( FirstShortVersion,
ThirdShortVersion ),
equal = basic_utils:compare_versions( FirstShortVersion,
FirstShortVersion ),
equal = basic_utils:compare_versions( SecondShortVersion,
SecondShortVersion ),
equal = basic_utils:compare_versions( ThirdShortVersion,
ThirdShortVersion ),
test_facilities:display( "Comparisons of versions like ~s succeeded.",
[ text_utils:version_to_string(ThirdVersion) ] ),
{ 4, 22, 11 } = basic_utils:parse_version( "4.22.11" ),
test_facilities:display( "Generating a new UUID:"
" '~s'.", [ basic_utils:generate_uuid() ] ),
test_facilities:display( "Testing typing information." ),
boolean = basic_utils:get_type_of( true ),
atom = basic_utils:get_type_of( 'an atom' ),
binary = basic_utils:get_type_of( list_to_binary( "1" ) ),
float = basic_utils:get_type_of( 1.0 ),
function = basic_utils:get_type_of( fun(X) -> X + 1 end ),
integer = basic_utils:get_type_of( 42 ),
pid = basic_utils:get_type_of( self() ),
list = basic_utils:get_type_of( [ 1, 2 ] ),
port = basic_utils : get_type_of ( APort ) ,
tuple = basic_utils:get_type_of( { a, b } ),
reference = basic_utils:get_type_of( make_ref() ),
test_facilities:display( "Testing term recursive transformation." ),
% This term transformer does not change anything in the terms it scans, and
% just comment the traversal it does:
%
IdTermTransformer = fun( Term, UserData ) ->
NewUserData = [
io_lib:format( "Inspected '~p', ", [ Term ] ) | UserData ],
{ Term, NewUserData }
end,
TermToTraverse = { pseudo_record, [], { a, 1.0},
[ { b, 42 }, "hello", [ <<"foo">> ] ], self() },
{ TraversedTerm, InspectData } = basic_utils:traverse_term( TermToTraverse,
_Type=atom, IdTermTransformer, _UserData=[] ),
test_facilities:display( "Traversal of term:~n'~p' with "
"id term transformer "
"yielded:~n'~p', producing user data '~s'",
[ TermToTraverse, TraversedTerm,
lists:reverse( InspectData ) ] ),
% This term transformer changes a term into a textual representation, and
% does not do anything with user data:
TextTermTransformer = fun( Term, UserData ) ->
{ io_lib:format( "~w", [ Term ] ), UserData }
end,
% Requested to operate only on PIDs:
{ NewTraversedTerm, _UselessData } = basic_utils:traverse_term(
TermToTraverse, _OtherType=pid, TextTermTransformer,
_OtherUserData=undefined ),
test_facilities:display( "Traversal of term:~n'~p' with "
"text term transformer yielded:~n'~p'.",
[ TermToTraverse, NewTraversedTerm ] ),
test_facilities:display( "Generating a process-specific value: ~w.",
[ basic_utils:get_process_specific_value() ] ),
{ Min, Max } = { 3, 16 },
check_process_specific_values( Min, Max ),
basic_utils:display_process_info( self() ),
test_facilities:display( "This test was compiled with the execution target "
"set to '~s', and debug mode is ~s.",
[ basic_utils:get_execution_target(),
basic_utils:is_debug_mode_enabled() ] ),
FinalPreciseTimestamp = basic_utils:get_precise_timestamp(),
test_facilities:display( "Precise duration in test is ~p ms.", [
basic_utils:get_precise_duration( InitialPreciseTimestamp,
FinalPreciseTimestamp ) ] ),
test_facilities:stop().
| null | https://raw.githubusercontent.com/softlab-ntua/bencherl/317bdbf348def0b2f9ed32cb6621e21083b7e0ca/app/sim-diasca/common/src/utils/basic_utils_test.erl | erlang |
This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License or
any later version.
You can also redistribute it and/or modify it under the terms of the
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
for more details.
along with this library.
If not, see </> and
</>.
Unit tests for the basic utils toolbox.
See the basic_utils.erl tested module.
For run/0 export and al:
This term transformer does not change anything in the terms it scans, and
just comment the traversal it does:
This term transformer changes a term into a textual representation, and
does not do anything with user data:
Requested to operate only on PIDs: | Copyright ( C ) 2003 - 2014
This file is part of the Ceylan Erlang library .
the GNU General Public License , as they are published by the Free Software
Foundation , either version 3 of these Licenses , or ( at your option )
Mozilla Public License , version 1.1 or later .
GNU Lesser General Public License and the GNU General Public License
You should have received a copy of the GNU Lesser General Public
License , of the GNU General Public License and of the Mozilla Public License
Author : ( )
-module(basic_utils_test).
-include("test_facilities.hrl").
-spec check_process_specific_values( integer(), integer() ) ->
basic_utils:void().
check_process_specific_values( Min, Max ) ->
Self = self(),
F = fun() -> Self ! basic_utils:get_process_specific_value( Min, Max ) end,
[ spawn( F ) || _X <- lists:seq( 1, 10 ) ],
G = fun() ->
receive V ->
V
end
end,
[ test_facilities:display(
"Generating a process-specific value in [~B;~B[: ~p.",
[ Min, Max, G() ] ) || _Y <- lists:seq( 1, 10 ) ].
-spec run() -> no_return().
run() ->
test_facilities:start( ?MODULE ),
test_facilities:display( "Testing the display of a static test message." ),
test_facilities:display( "Testing the display of a ~s test message.",
[ dynamic ] ),
InitialTimestamp = basic_utils:get_timestamp(),
InitialPreciseTimestamp = basic_utils:get_precise_timestamp(),
test_facilities:display( "Timestamp is ~s.", [
basic_utils:get_textual_timestamp( InitialTimestamp ) ] ),
test_facilities:display( "Timestamp for path is ~s.", [
basic_utils:get_textual_timestamp_for_path( InitialTimestamp ) ] ),
TextualTimeStamp = "14/4/2011 18:48:51",
test_facilities:display( "Parsed timestamp for '~s' is ~p.", [
TextualTimeStamp,
basic_utils:string_to_timestamp( TextualTimeStamp ) ] ),
basic_utils:checkpoint( 1 ),
basic_utils:checkpoint( 2 ),
basic_utils:display( "standalone display" ),
basic_utils:display( "display ~s", [ "with a format string" ] ),
UnregisteredName = test_non_registered,
try basic_utils:get_registered_pid_for( UnregisteredName ) of
_Anything ->
throw( test_should_have_failed )
catch
{ neither_registered_locally_nor_globally, UnregisteredName } ->
ok
end,
not_registered = basic_utils:is_registered( UnregisteredName ),
RegisteredName = test_registered,
PidToRegister = self(),
basic_utils:register_as( PidToRegister, RegisteredName, global_only ),
try basic_utils:get_registered_pid_for( RegisteredName ) of
PidToRegister ->
ok
catch
Exception ->
throw( { test_should_have_succeeded, Exception } )
end,
case basic_utils:is_registered( RegisteredName ) of
not_registered ->
throw( { neither_registered_locally_nor_globally,
RegisteredName } );
Pid when is_pid(Pid) ->
ok
end,
FirstVersion = { 0, 0, 0 },
SecondVersion = { 0, 0, 1 },
ThirdVersion = { 0, 1, 0 },
FourthVersion = { 1, 0, 0 },
FifthVersion = { 1, 1, 1 },
first_bigger = basic_utils:compare_versions( SecondVersion, FirstVersion ),
first_bigger = basic_utils:compare_versions( ThirdVersion, SecondVersion ),
first_bigger = basic_utils:compare_versions( FifthVersion, FirstVersion ),
second_bigger = basic_utils:compare_versions( FirstVersion, FourthVersion ),
second_bigger = basic_utils:compare_versions( ThirdVersion, FourthVersion ),
second_bigger = basic_utils:compare_versions( SecondVersion, ThirdVersion ),
equal = basic_utils:compare_versions( FirstVersion, FirstVersion ),
equal = basic_utils:compare_versions( ThirdVersion, ThirdVersion ),
equal = basic_utils:compare_versions( FifthVersion, FifthVersion ),
test_facilities:display( "Comparisons of versions like ~s succeeded.",
[ text_utils:version_to_string(ThirdVersion) ] ),
FirstShortVersion = { 0, 0 },
SecondShortVersion = { 0, 1 },
ThirdShortVersion = { 1, 0 },
first_bigger = basic_utils:compare_versions( SecondShortVersion,
FirstShortVersion ),
first_bigger = basic_utils:compare_versions( ThirdShortVersion,
SecondShortVersion ),
first_bigger = basic_utils:compare_versions( ThirdShortVersion,
FirstShortVersion ),
second_bigger = basic_utils:compare_versions( FirstShortVersion,
SecondShortVersion ),
second_bigger = basic_utils:compare_versions( SecondShortVersion,
ThirdShortVersion ),
second_bigger = basic_utils:compare_versions( FirstShortVersion,
ThirdShortVersion ),
equal = basic_utils:compare_versions( FirstShortVersion,
FirstShortVersion ),
equal = basic_utils:compare_versions( SecondShortVersion,
SecondShortVersion ),
equal = basic_utils:compare_versions( ThirdShortVersion,
ThirdShortVersion ),
test_facilities:display( "Comparisons of versions like ~s succeeded.",
[ text_utils:version_to_string(ThirdVersion) ] ),
{ 4, 22, 11 } = basic_utils:parse_version( "4.22.11" ),
test_facilities:display( "Generating a new UUID:"
" '~s'.", [ basic_utils:generate_uuid() ] ),
test_facilities:display( "Testing typing information." ),
boolean = basic_utils:get_type_of( true ),
atom = basic_utils:get_type_of( 'an atom' ),
binary = basic_utils:get_type_of( list_to_binary( "1" ) ),
float = basic_utils:get_type_of( 1.0 ),
function = basic_utils:get_type_of( fun(X) -> X + 1 end ),
integer = basic_utils:get_type_of( 42 ),
pid = basic_utils:get_type_of( self() ),
list = basic_utils:get_type_of( [ 1, 2 ] ),
port = basic_utils : get_type_of ( APort ) ,
tuple = basic_utils:get_type_of( { a, b } ),
reference = basic_utils:get_type_of( make_ref() ),
test_facilities:display( "Testing term recursive transformation." ),
IdTermTransformer = fun( Term, UserData ) ->
NewUserData = [
io_lib:format( "Inspected '~p', ", [ Term ] ) | UserData ],
{ Term, NewUserData }
end,
TermToTraverse = { pseudo_record, [], { a, 1.0},
[ { b, 42 }, "hello", [ <<"foo">> ] ], self() },
{ TraversedTerm, InspectData } = basic_utils:traverse_term( TermToTraverse,
_Type=atom, IdTermTransformer, _UserData=[] ),
test_facilities:display( "Traversal of term:~n'~p' with "
"id term transformer "
"yielded:~n'~p', producing user data '~s'",
[ TermToTraverse, TraversedTerm,
lists:reverse( InspectData ) ] ),
TextTermTransformer = fun( Term, UserData ) ->
{ io_lib:format( "~w", [ Term ] ), UserData }
end,
{ NewTraversedTerm, _UselessData } = basic_utils:traverse_term(
TermToTraverse, _OtherType=pid, TextTermTransformer,
_OtherUserData=undefined ),
test_facilities:display( "Traversal of term:~n'~p' with "
"text term transformer yielded:~n'~p'.",
[ TermToTraverse, NewTraversedTerm ] ),
test_facilities:display( "Generating a process-specific value: ~w.",
[ basic_utils:get_process_specific_value() ] ),
{ Min, Max } = { 3, 16 },
check_process_specific_values( Min, Max ),
basic_utils:display_process_info( self() ),
test_facilities:display( "This test was compiled with the execution target "
"set to '~s', and debug mode is ~s.",
[ basic_utils:get_execution_target(),
basic_utils:is_debug_mode_enabled() ] ),
FinalPreciseTimestamp = basic_utils:get_precise_timestamp(),
test_facilities:display( "Precise duration in test is ~p ms.", [
basic_utils:get_precise_duration( InitialPreciseTimestamp,
FinalPreciseTimestamp ) ] ),
test_facilities:stop().
|
8272782f2c91bca7f30a5c2d5241c558c043bb77f9c7084bf1979cd65060bed8 | alexandergunnarson/quantum | core.cljc | (ns quantum.apis.google.places.core
#_(:require-quantum [:lib conv http auth]))
(def search-url "")
(defrecord GeoCoordinate [lat long])
( GeoCoordinate . 0 0 # _ lat # _ long )
(def valid-place-types
#{"accounting"
"airport"
"amusement_park"
"aquarium"
"art_gallery"
"atm"
"bakery"
"bank"
"bar"
"beauty_salon"
"bicycle_store"
"book_store"
"bowling_alley"
"bus_station"
"cafe"
"campground"
"car_dealer"
"car_rental"
"car_repair"
"car_wash"
"casino"
"cemetery"
"church"
"city_hall"
"clothing_store"
"convenience_store"
"courthouse"
"dentist"
"department_store"
"doctor"
"electrician"
"electronics_store"
"embassy"
"establishment"
"finance"
"fire_station"
"florist"
"food"
"funeral_home"
"furniture_store"
"gas_station"
"general_contractor"
"grocery_or_supermarket"
"gym"
"hair_care"
"hardware_store"
"health"
"hindu_temple"
"home_goods_store"
"hospital"
"insurance_agency"
"jewelry_store"
"laundry"
"lawyer"
"library"
"liquor_store"
"local_government_office"
"locksmith"
"lodging"
"meal_delivery"
"meal_takeaway"
"mosque"
"movie_rental"
"movie_theater"
"moving_company"
"museum"
"night_club"
"painter"
"park"
"parking"
"pet_store"
"pharmacy"
"physiotherapist"
"place_of_worship"
"plumber"
"police"
"post_office"
"real_estate_agency"
"restaurant"
"roofing_contractor"
"rv_park"
"school"
"shoe_store"
"shopping_mall"
"spa"
"stadium"
"storage"
"store"
"subway_station"
"synagogue"
"taxi_stand"
"train_station"
"travel_agency"
"university"
"veterinary_care"
"zoo"})
;; NOTE(review): the whole form below is reader-discarded (#_) — `search` is
;; disabled, presumably until the commented-out http/auth/conv requires in the
;; ns form are restored. Confirm dependencies before re-enabling.
;;
;; Implements the Places API "Nearby Search":
;;   :fuzzy — prominence ranking within `radius` meters (API max 50000);
;;   :exact — rankby=distance, which per the API requires a `types` filter
;;            (defaults to all of `valid-place-types`) and forbids `radius`.
;; `coord` is a map (or GeoCoordinate) with :lat/:long; `api-key` is the
;; Google API key sent as the "key" query parameter.
;; NOTE(review): `search-type` has no :or default, so the second assert fails
;; unless the caller passes :fuzzy or :exact explicitly — verify intended.
#_(defn search
  "Called 'Nearby Search / Place Search'"
  {:usage `(search {:long -111.90948486 :lat 40.56180797}
             (auth/datum :google "" :api-key))
   :info ""}
  [coord api-key & [{:keys [radius search-type place-types parse?]
                     :or {radius 50000}}]]
  (assert (val? api-key))
  (assert (in? search-type #{:fuzzy :exact}))
  ;; 50000 m is the documented maximum radius for Nearby Search.
  (assert (and (<= radius 50000) (> radius 0)) #{radius})
  ;; API expects location as "lat,long".
  (let [location (str (:lat coord) "," (:long coord))
        search-opts (condp = search-type
                      :fuzzy {"radius" radius}
                      :exact {"rankby" "distance"
                              ;; types are pipe-separated per the API spec
                              "types" (if place-types
                                        (str/join "|" place-types)
                                        (extern (str/join "|" valid-place-types)))})]
    (http/request!
      {:url search-url
       :parse? parse?
       :query-params
         (mergel search-opts
           {"key" api-key
            "location" location
            ; Defines the distance (in meters) within which to return place results.
            })})))
; ...&types=food&name=cruise&key=API_KEY
If rankby = distance ( described under Optional parameters below ) is specified , then one or more of keyword , name , or types is required .
; Optional parameters
keyword — A term to be matched against all content that Google has indexed for this place , including but not limited to name , type , and address , as well as customer reviews and other third - party content .
; language — The language code, indicating in which language the results should be returned, if possible. See the list of supported languages and their codes. Note that we often update supported languages so this list may not be exhaustive.
; minprice and maxprice (optional) — Restricts results to only those places within the specified price range. Valid values range between 0 (most affordable) to 4 (most expensive), inclusive. The exact amount indicated by a specific value will vary from region to region.
name — One or more terms to be matched against the names of places , separated with a space character . Results will be restricted to those containing the passed name values . Note that a place may have additional names associated with it , beyond its listed name . The API will try to match the passed name value against all of these names . As a result , places may be returned in the results whose listed names do not match the search term , but whose associated names do .
opennow — Returns only those places that are open for business at the time the query is sent . Places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query .
; rankby — Specifies the order in which results are listed. Possible values are:
prominence ( default ) . This option sorts results based on their importance . Ranking will favor prominent places within the specified area . Prominence can be affected by a place 's ranking in Google 's index , global popularity , and other factors .
distance . This option sorts results in ascending order by their distance from the specified location . When distance is specified , one or more of keyword , name , or types is required .
types — Restricts the results to places matching at least one of the specified types . Types should be separated with a pipe symbol ( type1|type2|etc ) . See the list of supported types .
pagetoken — Returns the next 20 results from a previously run search . Setting a pagetoken parameter will execute a search with the same parameters used previously — all parameters other than pagetoken will be ignored .
zagatselected — Add this parameter ( just the parameter name , with no associated value ) to restrict your search to locations that are Zagat selected businesses . This parameter must not include a true or false value . The zagatselected parameter is experimental , and is only available to Google Places API for Work customers .
; Maps API for Work customers should not include a client or signature parameter with their requests.
The following example is a search request for places of type ' food ' within a 500 m radius of a point in Sydney , Australia , containing the word ' cruise ' in their name :
; Note that you'll need to replace the key in this example with your own key in order for the request to work in your application.
; Text Search Requests
The Google Places API Text Search Service is a web service that returns information about a set of places based on a string — for example " pizza in New York " or " shoe stores near Ottawa " . The service responds with a list of places matching the text string and any location bias that has been set . The search response will include a list of places , you can send a Place Details request for more information about any of the places in the response .
The Google Places search services share the same usage limits . However , the Text Search service is subject to a 10 - times multiplier . That is , each Text Search request that you make will count as 10 requests against your quota . If you 've purchased the Google Places API as part of your Google Maps API for Work contract , the multiplier may be different . Please refer to the Google Maps API for Work documentation for details .
; A Text Search request is an HTTP URL of the following form:
;
; where output may be either of the following values:
json ( recommended ) indicates output in JavaScript Object Notation ( JSON )
; xml indicates output as XML
; Certain parameters are required to initiate a search request. As is standard in URLs, all parameters are separated using the ampersand (&) character.
; Required parameters
query — The text string on which to search , for example : " restaurant " . The Google Places service will return candidate matches based on this string and order the results based on their perceived relevance .
key — Your application 's API key . This key identifies your application for purposes of quota management and so that places added from your application are made immediately available to your app . Visit the Google Developers Console to create an API Project and obtain your key .
; Optional parameters
; location — The latitude/longitude around which to retrieve place information. This must be specified as latitude,longitude. If you specify a location parameter, you must also specify a radius parameter.
radius — Defines the distance ( in meters ) within which to bias place results . The maximum allowed radius is 50000 meters . Results inside of this region will be ranked higher than results outside of the search circle ; however , prominent results from outside of the search radius may be included .
; language — The language code, indicating in which language the results should be returned, if possible. See the list of supported languages and their codes. Note that we often update supported languages so this list may not be exhaustive.
minprice and ( optional ) — Restricts results to only those places within the specified price level . Valid values are in the range from 0 ( most affordable ) to 4 ( most expensive ) , inclusive . The exact amount indicated by a specific value will vary from region to region .
opennow — Returns only those places that are open for business at the time the query is sent . places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query .
types — Restricts the results to places matching at least one of the specified types . Types should be separated with a pipe symbol ( type1|type2|etc ) . See the list of supported types .
pagetoken — Returns the next 20 results from a previously run search . Setting a pagetoken parameter will execute a search with the same parameters used previously — all parameters other than pagetoken will be ignored .
zagatselected — Add this parameter ( just the parameter name , with no associated value ) to restrict your search to locations that are Zagat selected businesses . This parameter must not include a true or false value . The zagatselected parameter is experimental , and is only available to Google Places API for Work customers .
You may bias results to a specified circle by passing a location and a radius parameter . This will instruct the Google Places service to prefer showing results within that circle . Results outside the defined area may still be displayed .
; Maps API for Work customers should not include a client or signature parameter with their requests.
The below example shows a search for restaurants near Sydney .
; +in+Sydney&key=API_KEY
; Note that you'll need to replace the key in this example with your own key in order for the request to work in your application.
; Place Search
The Google Places API Web Service is for use in server applications . If you 're building a client - side application , take a look at the Google Places API for Android and the Places Library in the Google Maps JavaScript API .
; Note: The id and reference fields are deprecated as of June 24, 2014. They are replaced by the new place ID, a textual identifier that uniquely identifies a place and can be used to retrieve information about the place. The usual deprecation period of one year has been extended, as we're looking into ways of ensuring this change will not break existing code. We'll update this page with a final notice at least 90 days before we change the way the API handles the id and reference fields. We recommend that you update your code to use the new place ID instead of id and reference as soon as possible.
The Google Places API Web Service allows you to query for place information on a variety of categories , such as : establishments , prominent points of interest , geographic locations , and more . You can search for places either by proximity or a text string . A Place Search returns a list of places along with summary information about each place ; additional information is available via a Place Details query .
; Nearby Search Requests
; Earlier versions of the Places API referred to Nearby Search as Place Search.
; A Nearby Search lets you search for places within a specified area. You can refine your search request by supplying keywords or specifying the type of place you are searching for.
; A Nearby Search request is an HTTP URL of the following form:
;
; where output may be either of the following values:
json ( recommended ) indicates output in JavaScript Object Notation ( JSON )
; xml indicates output as XML
; Certain parameters are required to initiate a Nearby Search request. As is standard in URLs, all parameters are separated using the ampersand (&) character.
; Required parameters
key — Your application 's API key . This key identifies your application for purposes of quota management and so that places added from your application are made immediately available to your app . Visit the Google Developers Console to create an API Project and obtain your key .
; location — The latitude/longitude around which to retrieve place information. This must be specified as latitude,longitude.
radius — Defines the distance ( in meters ) within which to return place results . The maximum allowed radius is 50000 meters . Note that radius must not be included if rankby = distance ( described under Optional parameters below ) is specified .
If rankby = distance ( described under Optional parameters below ) is specified , then one or more of keyword , name , or types is required .
; Optional parameters
keyword — A term to be matched against all content that Google has indexed for this place , including but not limited to name , type , and address , as well as customer reviews and other third - party content .
; language — The language code, indicating in which language the results should be returned, if possible. See the list of supported languages and their codes. Note that we often update supported languages so this list may not be exhaustive.
minprice and ( optional ) — Restricts results to only those places within the specified range . Valid values range between 0 ( most affordable ) to 4 ( most expensive ) , inclusive . The exact amount indicated by a specific value will vary from region to region .
name — One or more terms to be matched against the names of places , separated with a space character . Results will be restricted to those containing the passed name values . Note that a place may have additional names associated with it , beyond its listed name . The API will try to match the passed name value against all of these names . As a result , places may be returned in the results whose listed names do not match the search term , but whose associated names do .
opennow — Returns only those places that are open for business at the time the query is sent . Places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query .
; rankby — Specifies the order in which results are listed. Possible values are:
prominence ( default ) . This option sorts results based on their importance . Ranking will favor prominent places within the specified area . Prominence can be affected by a place 's ranking in Google 's index , global popularity , and other factors .
distance . This option sorts results in ascending order by their distance from the specified location . When distance is specified , one or more of keyword , name , or types is required .
types — Restricts the results to places matching at least one of the specified types . Types should be separated with a pipe symbol ( type1|type2|etc ) . See the list of supported types .
pagetoken — Returns the next 20 results from a previously run search . Setting a pagetoken parameter will execute a search with the same parameters used previously — all parameters other than pagetoken will be ignored .
zagatselected — Add this parameter ( just the parameter name , with no associated value ) to restrict your search to locations that are Zagat selected businesses . This parameter must not include a true or false value . The zagatselected parameter is experimental , and is only available to Google Places API for Work customers .
; Maps API for Work customers should not include a client or signature parameter with their requests.
The following example is a search request for places of type ' food ' within a 500 m radius of a point in Sydney , Australia , containing the word ' cruise ' in their name :
; =-33.8670522,151.1957362&radius=500&types=food&name=cruise&key=API_KEY
; Note that you'll need to replace the key in this example with your own key in order for the request to work in your application.
; Text Search Requests
The Google Places API Text Search Service is a web service that returns information about a set of places based on a string — for example " pizza in New York " or " shoe stores near Ottawa " . The service responds with a list of places matching the text string and any location bias that has been set . The search response will include a list of places , you can send a Place Details request for more information about any of the places in the response .
The Google Places search services share the same usage limits . However , the Text Search service is subject to a 10 - times multiplier . That is , each Text Search request that you make will count as 10 requests against your quota . If you 've purchased the Google Places API as part of your Google Maps API for Work contract , the multiplier may be different . Please refer to the Google Maps API for Work documentation for details .
; A Text Search request is an HTTP URL of the following form:
;
; where output may be either of the following values:
json ( recommended ) indicates output in JavaScript Object Notation ( JSON )
; xml indicates output as XML
; Certain parameters are required to initiate a search request. As is standard in URLs, all parameters are separated using the ampersand (&) character.
; Required parameters
query — The text string on which to search , for example : " restaurant " . The Google Places service will return candidate matches based on this string and order the results based on their perceived relevance .
key — Your application 's API key . This key identifies your application for purposes of quota management and so that places added from your application are made immediately available to your app . Visit the Google Developers Console to create an API Project and obtain your key .
; Optional parameters
; location — The latitude/longitude around which to retrieve place information. This must be specified as latitude,longitude. If you specify a location parameter, you must also specify a radius parameter.
radius — Defines the distance ( in meters ) within which to bias place results . The maximum allowed radius is 50000 meters . Results inside of this region will be ranked higher than results outside of the search circle ; however , prominent results from outside of the search radius may be included .
; language — The language code, indicating in which language the results should be returned, if possible. See the list of supported languages and their codes. Note that we often update supported languages so this list may not be exhaustive.
minprice and ( optional ) — Restricts results to only those places within the specified price level . Valid values are in the range from 0 ( most affordable ) to 4 ( most expensive ) , inclusive . The exact amount indicated by a specific value will vary from region to region .
opennow — Returns only those places that are open for business at the time the query is sent . places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query .
types — Restricts the results to places matching at least one of the specified types . Types should be separated with a pipe symbol ( type1|type2|etc ) . See the list of supported types .
pagetoken — Returns the next 20 results from a previously run search . Setting a pagetoken parameter will execute a search with the same parameters used previously — all parameters other than pagetoken will be ignored .
zagatselected — Add this parameter ( just the parameter name , with no associated value ) to restrict your search to locations that are Zagat selected businesses . This parameter must not include a true or false value . The zagatselected parameter is experimental , and is only available to Google Places API for Work customers .
You may bias results to a specified circle by passing a location and a radius parameter . This will instruct the Google Places service to prefer showing results within that circle . Results outside the defined area may still be displayed .
; Maps API for Work customers should not include a client or signature parameter with their requests.
The below example shows a search for restaurants near Sydney .
; +in+Sydney&key=API_KEY
; Note that you'll need to replace the key in this example with your own key in order for the request to work in your application.
Radar Search Requests
The Google Places API Radar Search Service allows you to search for up to 200 places at once , but with less detail than is typically returned from a Text Search or Nearby Search request . With Radar Search , you can create applications that help users identify specific areas of interest within a geographic area .
The search response will include up to 200 places , and will include only the following information about each place :
; The geometry field containing geographic coordinates.
; The place_id, which you can use in a Place Details request to get more information about the place. For more information about place IDs, see the place ID overview.
; The deprecated reference field. See the deprecation notice on this page.
A Radar Search request is an HTTP URL of the following form :
;
; where output may be either of the following values:
json ( recommended ) indicates output in JavaScript Object Notation ( JSON )
; xml indicates output as XML
; Certain parameters are required to initiate a search request. As is standard in URLs, all parameters are separated using the ampersand (&) character.
; Required parameters
key — Your application 's API key . This key identifies your application for purposes of quota management and so that places added from your application are made immediately available to your app . Visit the Google Developers Console to create an API Project and obtain your key .
; location — The latitude/longitude around which to retrieve place information. This must be specified as latitude,longitude.
radius — Defines the distance ( in meters ) within which to return place results . The maximum allowed radius is 50000 meters .
A Radar Search request must include at least one of keyword , name , or types .
; Optional parameters
keyword — A term to be matched against all content that Google has indexed for this place , including but not limited to name , type , and address , as well as customer reviews and other third - party content .
minprice and ( optional ) — Restricts results to only those places within the specified price level . Valid values are in the range from 0 ( most affordable ) to 4 ( most expensive ) , inclusive . The exact amount indicated by a specific value will vary from region to region .
name — One or more terms to be matched against the names of places , separated by a space character . Results will be restricted to those containing the passed name values . Note that a place may have additional names associated with it , beyond its listed name . The API will try to match the passed name value against all of these names . As a result , places may be returned in the results whose listed names do not match the search term , but whose associated names do .
opennow — Returns only those places that are open for business at the time the query is sent . Places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query .
types — Restricts the results to places matching at least one of the specified types . Types should be separated with a pipe symbol ( type1|type2|etc ) . See the list of supported types .
zagatselected — Add this parameter ( just the parameter name , with no associated value ) to restrict your search to locations that are Zagat selected businesses . This parameter must not include a true or false value . The zagatselected parameter is experimental , and is only available to Google Places API for Work customers .
; Maps API for Work customers should not include a client or signature parameter with their requests.
The below example returns a list of museums near London , England .
; ,-0.126446&radius=5000&types=museum&key=API_KEY
Using a combination of the keyword , name and types parameters , you can perform more precise queries . The below example shows restaurants and cafes in Paris that users have described as vegetarian .
; ,2.347589&radius=5000&types=food|cafe&keyword=vegetarian&key=API_KEY
; Note that you'll need to replace the key in these examples with your own key in order for the request to work in your application.
; Search Responses
; Search responses are returned in the format indicated by the output flag within the URL request's path.
; The following example shows a Nearby Search response. A Text Search response is similar, except that it returns a formatted_address instead of a vicinity property.
; JSONXML
; {
" " : [ ] ,
; "results" : [
; {
; "geometry" : {
; "location" : {
; "lat" : -33.870775,
" " : 151.199025
; }
; },
; "icon" : "-71.png",
" i d " : " 21a0b251c9b8392186142c798263e289fe45b4aa " ,
; "name" : "Rhythmboat Cruises",
; "opening_hours" : {
" open_now " : true
; },
; "photos" : [
; {
" height " : 270 ,
" " : [ ] ,
; "photo_reference" : "CnRnAAAAF-LjFR1ZV93eawe1cU_3QNMCNmaGkowY7CnOf-kcNmPhNnPEG9W979jOuJJ1sGr75rhD5hqKzjD8vbMbSsRnq_Ni3ZIGfY6hKWmsOf3qHKJInkm4h55lzvLAXJVc-Rr4kI9O1tmIblblUpg2oqoq8RIQRMQJhFsTr5s9haxQ07EQHxoUO0ICubVFGYfJiMUPor1GnIWb5i8",
" width " : 519
; }
; ],
; "place_id" : "ChIJyWEHuEmuEmsRm9hTkapTCrk",
" scope " : " GOOGLE " ,
; "alt_ids" : [
; {
; "place_id" : "D9iJyWEHuEmuEmsRm9hTkapTCrk",
; "scope" : "APP"
; }
; ],
" reference " : " - cP1dWvp_QGS4SNCBMk_fB06YRsfMrNkINtPez22p5lRIlj5ty_HmcNwcl6GZXbD2RdXsVfLYlQwnZQcnu7ihkjZp_2gk1 - fWXql3GQ8 - 1BEGwgCxG - eaSnIJIBPuIpihEhAY1WYdxPvOWsPnb2 - nGb6QGhTipN0lgaLpQTnkcMeAIEvCsSa0Ww " ,
; "types" : [ "travel_agency", "restaurant", "food", "establishment" ],
" vicinity " : " Pyrmont Bay Wharf Darling Dr , Sydney "
; },
; {
; "geometry" : {
; "location" : {
; "lat" : -33.866891,
" " : 151.200814
; }
; },
; "icon" : "-71.png",
" i d " : " 45a27fd8d56c56dc62afc9b49e1d850440d5c403 " ,
" name " : " Private Charter Sydney Habour Cruise " ,
; "photos" : [
; {
" height " : 426 ,
" " : [ ] ,
; "photo_reference" : "CnRnAAAAL3n0Zu3U6fseyPl8URGKD49aGB2Wka7CKDZfamoGX2ZTLMBYgTUshjr-MXc0_O2BbvlUAZWtQTBHUVZ-5Sxb1-P-VX2Fx0sZF87q-9vUt19VDwQQmAX_mjQe7UWmU5lJGCOXSgxp2fu1b5VR_PF31RIQTKZLfqm8TA1eynnN4M1XShoU8adzJCcOWK0er14h8SqOIDZctvU",
" width " : 640
; }
; ],
; "place_id" : "ChIJqwS6fjiuEmsRJAMiOY9MSms",
" scope " : " GOOGLE " ,
" reference " : " CpQBhgAAAFN27qR_t5oSDKPUzjQIeQa3lrRpFTm5alW3ZYbMFm8k10ETbISfK9S1nwcJVfrP - bjra7NSPuhaRulxoonSPQklDyB - xGvcJncq6qDXIUQ3hlI - bx4AxYckAOX74LkupHq7bcaREgrSBE - U6GbA1C3U7I - HnweO4IPtztSEcgW09y03v1hgHzL8xSDElmkQtRIQzLbyBfj3e0FhJzABXjM2QBoUE2EnL - DzWrzpgmMEulUBLGrtu2Y " ,
; "types" : [ "restaurant", "food", "establishment" ],
" vicinity " : " Australia "
; },
; {
; "geometry" : {
; "location" : {
; "lat" : -33.870943,
" " : 151.190311
; }
; },
; "icon" : "-71.png",
" i d " : " 30bee58f819b6c47bd24151802f25ecf11df8943 " ,
" name " : " Bucks Party Cruise " ,
; "opening_hours" : {
" open_now " : true
; },
; "photos" : [
; {
" height " : 600 ,
" " : [ ] ,
; "photo_reference" : "CnRnAAAA48AX5MsHIMiuipON_Lgh97hPiYDFkxx_vnaZQMOcvcQwYN92o33t5RwjRpOue5R47AjfMltntoz71hto40zqo7vFyxhDuuqhAChKGRQ5mdO5jv5CKWlzi182PICiOb37PiBtiFt7lSLe1SedoyrD-xIQD8xqSOaejWejYHCN4Ye2XBoUT3q2IXJQpMkmffJiBNftv8QSwF4",
" width " : 800
; }
; ],
" place_id " : " ChIJLfySpTOuEmsRsc_JfJtljdc " ,
" scope " : " GOOGLE " ,
; "reference" : "CoQBdQAAANQSThnTekt-UokiTiX3oUFT6YDfdQJIG0ljlQnkLfWefcKmjxax0xmUpWjmpWdOsScl9zSyBNImmrTO9AE9DnWTdQ2hY7n-OOU4UgCfX7U0TE1Vf7jyODRISbK-u86TBJij0b2i7oUWq2bGr0cQSj8CV97U5q8SJR3AFDYi3ogqEhCMXjNLR1k8fiXTkG2BxGJmGhTqwE8C4grdjvJ0w5UsAVoOH7v8HQ",
; "types" : [ "restaurant", "food", "establishment" ],
" vicinity " : " 37 Bank St , Pyrmont "
; },
; {
; "geometry" : {
; "location" : {
; "lat" : -33.867591,
" " : 151.201196
; }
; },
; "icon" : "-71.png",
; "id" : "a97f9fb468bcd26b68a23072a55af82d4b325e0d",
" name " : " Australian Cruise Group " ,
; "opening_hours" : {
" open_now " : true
; },
; "photos" : [
; {
" height " : 242 ,
" " : [ ] ,
" photo_reference " : " CnRnAAAABjeoPQ7NUU3pDitV4Vs0BgP1FLhf_iCgStUZUr4ZuNqQnc5k43jbvjKC2hTGM8SrmdJYyOyxRO3D2yutoJwVC4Vp_dzckkjG35L6LfMm5sjrOr6uyOtr2PNCp1xQylx6vhdcpW8yZjBZCvVsjNajLBIQ - z4ttAMIc8EjEZV7LsoFgRoU6OrqxvKCnkJGb9F16W57iIV4LuM " ,
" width " : 200
; }
; ],
; "place_id" : "ChIJrTLr-GyuEmsRBfy61i59si0",
" scope " : " GOOGLE " ,
; "reference" : "CoQBeQAAAFvf12y8veSQMdIMmAXQmus1zqkgKQ-O2KEX0Kr47rIRTy6HNsyosVl0CjvEBulIu_cujrSOgICdcxNioFDHtAxXBhqeR-8xXtm52Bp0lVwnO3LzLFY3jeo8WrsyIwNE1kQlGuWA4xklpOknHJuRXSQJVheRlYijOHSgsBQ35mOcEhC5IpbpqCMe82yR136087wZGhSziPEbooYkHLn9e5njOTuBprcfVw",
; "types" : [ "travel_agency", "restaurant", "food", "establishment" ],
" vicinity " : " 32 The Promenade , King Street Wharf 5 , Sydney "
; }
; ],
; "status" : "OK"
; }
A JSON response contains up to four root elements :
; "status" contains metadata on the request. See Status Codes below.
" results " contains an array of places , with information about each . See Search Results for information about these results . The Places API returns up to 20 establishment results per query . Additionally , political results may be returned which serve to identify the area of the request .
; html_attributions contain a set of attributions about this listing which must be displayed to the user.
next_page_token contains a token that can be used to return up to 20 additional results . A next_page_token will not be returned if there are no additional results to display . The maximum number of results that can be returned is 60 . There is a short delay between when a next_page_token is issued , and when it will become valid .
; See Processing JSON with Javascript for help parsing JSON responses.
; Status Codes
; The "status" field within the search response object contains the status of the request, and may contain debugging information to help you track down why the request failed. The "status" field may contain the following values:
OK indicates that no errors occurred ; the place was successfully detected and at least one result was returned .
; ZERO_RESULTS indicates that the search was successful but returned no results. This may occur if the search was passed a latlng in a remote location.
; OVER_QUERY_LIMIT indicates that you are over your quota.
; REQUEST_DENIED indicates that your request was denied, generally because of lack of a valid key parameter.
; INVALID_REQUEST generally indicates that a required query parameter (location or radius) is missing.
| null | https://raw.githubusercontent.com/alexandergunnarson/quantum/0c655af439734709566110949f9f2f482e468509/src/quantum/apis/google/places/core.cljc | clojure | Defines the distance (in meters) within which to return place results.
Optional parameters
language — The language code, indicating in which language the results should be returned, if possible. See the list of supported languages and their codes. Note that we often update supported languages so this list may not be exhaustive.
rankby — Specifies the order in which results are listed. Possible values are:
Maps API for Work customers should not include a client or signature parameter with their requests.
Note that you'll need to replace the key in this example with your own key in order for the request to work in your application.
Text Search Requests
A Text Search request is an HTTP URL of the following form:
where output may be either of the following values:
xml indicates output as XML
Certain parameters are required to initiate a search request. As is standard in URLs, all parameters are separated using the ampersand (&) character.
Required parameters
Optional parameters
location — The latitude/longitude around which to retrieve place information. This must be specified as latitude,longitude. If you specify a location parameter, you must also specify a radius parameter.
however , prominent results from outside of the search radius may be included .
language — The language code, indicating in which language the results should be returned, if possible. See the list of supported languages and their codes. Note that we often update supported languages so this list may not be exhaustive.
Maps API for Work customers should not include a client or signature parameter with their requests.
+in+Sydney&key=API_KEY
Note that you'll need to replace the key in this example with your own key in order for the request to work in your application.
Place Search
additional information is available via a Place Details query .
Nearby Search Requests
Earlier versions of the Places API referred to Nearby Search as Place Search.
A Nearby Search lets you search for places within a specified area. You can refine your search request by supplying keywords or specifying the type of place you are searching for.
A Nearby Search request is an HTTP URL of the following form:
where output may be either of the following values:
xml indicates output as XML
Certain parameters are required to initiate a Nearby Search request. As is standard in URLs, all parameters are separated using the ampersand (&) character.
Required parameters
location — The latitude/longitude around which to retrieve place information. This must be specified as latitude,longitude.
Optional parameters
language — The language code, indicating in which language the results should be returned, if possible. See the list of supported languages and their codes. Note that we often update supported languages so this list may not be exhaustive.
rankby — Specifies the order in which results are listed. Possible values are:
Maps API for Work customers should not include a client or signature parameter with their requests.
=-33.8670522,151.1957362&radius=500&types=food&name=cruise&key=API_KEY
Note that you'll need to replace the key in this example with your own key in order for the request to work in your application.
Text Search Requests
A Text Search request is an HTTP URL of the following form:
where output may be either of the following values:
xml indicates output as XML
Certain parameters are required to initiate a search request. As is standard in URLs, all parameters are separated using the ampersand (&) character.
Required parameters
Optional parameters
location — The latitude/longitude around which to retrieve place information. This must be specified as latitude,longitude. If you specify a location parameter, you must also specify a radius parameter.
however , prominent results from outside of the search radius may be included .
language — The language code, indicating in which language the results should be returned, if possible. See the list of supported languages and their codes. Note that we often update supported languages so this list may not be exhaustive.
Maps API for Work customers should not include a client or signature parameter with their requests.
+in+Sydney&key=API_KEY
Note that you'll need to replace the key in this example with your own key in order for the request to work in your application.
The geometry field containing geographic coordinates.
The place_id, which you can use in a Place Details request to get more information about the place. For more information about place IDs, see the place ID overview.
The deprecated reference field. See the deprecation notice on this page.
where output may be either of the following values:
xml indicates output as XML
Certain parameters are required to initiate a search request. As is standard in URLs, all parameters are separated using the ampersand (&) character.
Required parameters
location — The latitude/longitude around which to retrieve place information. This must be specified as latitude,longitude.
Optional parameters
Maps API for Work customers should not include a client or signature parameter with their requests.
,-0.126446&radius=5000&types=museum&key=API_KEY
,2.347589&radius=5000&types=food|cafe&keyword=vegetarian&key=API_KEY
Note that you'll need to replace the key in these examples with your own key in order for the request to work in your application.
Search Responses
Search responses are returned in the format indicated by the output flag within the URL request's path.
The following example shows a Nearby Search response. A Text Search response is similar, except that it returns a formatted_address instead of a vicinity property.
JSONXML
{
"results" : [
{
"geometry" : {
"location" : {
"lat" : -33.870775,
}
},
"icon" : "-71.png",
"name" : "Rhythmboat Cruises",
"opening_hours" : {
},
"photos" : [
{
"photo_reference" : "CnRnAAAAF-LjFR1ZV93eawe1cU_3QNMCNmaGkowY7CnOf-kcNmPhNnPEG9W979jOuJJ1sGr75rhD5hqKzjD8vbMbSsRnq_Ni3ZIGfY6hKWmsOf3qHKJInkm4h55lzvLAXJVc-Rr4kI9O1tmIblblUpg2oqoq8RIQRMQJhFsTr5s9haxQ07EQHxoUO0ICubVFGYfJiMUPor1GnIWb5i8",
}
],
"place_id" : "ChIJyWEHuEmuEmsRm9hTkapTCrk",
"alt_ids" : [
{
"place_id" : "D9iJyWEHuEmuEmsRm9hTkapTCrk",
"scope" : "APP"
}
],
"types" : [ "travel_agency", "restaurant", "food", "establishment" ],
},
{
"geometry" : {
"location" : {
"lat" : -33.866891,
}
},
"icon" : "-71.png",
"photos" : [
{
"photo_reference" : "CnRnAAAAL3n0Zu3U6fseyPl8URGKD49aGB2Wka7CKDZfamoGX2ZTLMBYgTUshjr-MXc0_O2BbvlUAZWtQTBHUVZ-5Sxb1-P-VX2Fx0sZF87q-9vUt19VDwQQmAX_mjQe7UWmU5lJGCOXSgxp2fu1b5VR_PF31RIQTKZLfqm8TA1eynnN4M1XShoU8adzJCcOWK0er14h8SqOIDZctvU",
}
],
"place_id" : "ChIJqwS6fjiuEmsRJAMiOY9MSms",
"types" : [ "restaurant", "food", "establishment" ],
},
{
"geometry" : {
"location" : {
"lat" : -33.870943,
}
},
"icon" : "-71.png",
"opening_hours" : {
},
"photos" : [
{
"photo_reference" : "CnRnAAAA48AX5MsHIMiuipON_Lgh97hPiYDFkxx_vnaZQMOcvcQwYN92o33t5RwjRpOue5R47AjfMltntoz71hto40zqo7vFyxhDuuqhAChKGRQ5mdO5jv5CKWlzi182PICiOb37PiBtiFt7lSLe1SedoyrD-xIQD8xqSOaejWejYHCN4Ye2XBoUT3q2IXJQpMkmffJiBNftv8QSwF4",
}
],
"reference" : "CoQBdQAAANQSThnTekt-UokiTiX3oUFT6YDfdQJIG0ljlQnkLfWefcKmjxax0xmUpWjmpWdOsScl9zSyBNImmrTO9AE9DnWTdQ2hY7n-OOU4UgCfX7U0TE1Vf7jyODRISbK-u86TBJij0b2i7oUWq2bGr0cQSj8CV97U5q8SJR3AFDYi3ogqEhCMXjNLR1k8fiXTkG2BxGJmGhTqwE8C4grdjvJ0w5UsAVoOH7v8HQ",
"types" : [ "restaurant", "food", "establishment" ],
},
{
"geometry" : {
"location" : {
"lat" : -33.867591,
}
},
"icon" : "-71.png",
"id" : "a97f9fb468bcd26b68a23072a55af82d4b325e0d",
"opening_hours" : {
},
"photos" : [
{
}
],
"place_id" : "ChIJrTLr-GyuEmsRBfy61i59si0",
"reference" : "CoQBeQAAAFvf12y8veSQMdIMmAXQmus1zqkgKQ-O2KEX0Kr47rIRTy6HNsyosVl0CjvEBulIu_cujrSOgICdcxNioFDHtAxXBhqeR-8xXtm52Bp0lVwnO3LzLFY3jeo8WrsyIwNE1kQlGuWA4xklpOknHJuRXSQJVheRlYijOHSgsBQ35mOcEhC5IpbpqCMe82yR136087wZGhSziPEbooYkHLn9e5njOTuBprcfVw",
"types" : [ "travel_agency", "restaurant", "food", "establishment" ],
}
],
"status" : "OK"
}
"status" contains metadata on the request. See Status Codes below.
html_attributions contain a set of attributions about this listing which must be displayed to the user.
See Processing JSON with Javascript for help parsing JSON responses.
Status Codes
The "status" field within the search response object contains the status of the request, and may contain debugging information to help you track down why the request failed. The "status" field may contain the following values:
the place was successfully detected and at least one result was returned .
ZERO_RESULTS indicates that the search was successful but returned no results. This may occur if the search was passed a latlng in a remote location.
OVER_QUERY_LIMIT indicates that you are over your quota.
REQUEST_DENIED indicates that your request was denied, generally because of lack of an invalid key parameter.
INVALID_REQUEST generally indicates that a required query parameter (location or radius) is missing. | (ns quantum.apis.google.places.core
#_(:require-quantum [:lib conv http auth]))
(def search-url "")
(defrecord GeoCoordinate [lat long])
( GeoCoordinate . 0 0 # _ lat # _ long )
(def valid-place-types
#{"accounting"
"airport"
"amusement_park"
"aquarium"
"art_gallery"
"atm"
"bakery"
"bank"
"bar"
"beauty_salon"
"bicycle_store"
"book_store"
"bowling_alley"
"bus_station"
"cafe"
"campground"
"car_dealer"
"car_rental"
"car_repair"
"car_wash"
"casino"
"cemetery"
"church"
"city_hall"
"clothing_store"
"convenience_store"
"courthouse"
"dentist"
"department_store"
"doctor"
"electrician"
"electronics_store"
"embassy"
"establishment"
"finance"
"fire_station"
"florist"
"food"
"funeral_home"
"furniture_store"
"gas_station"
"general_contractor"
"grocery_or_supermarket"
"gym"
"hair_care"
"hardware_store"
"health"
"hindu_temple"
"home_goods_store"
"hospital"
"insurance_agency"
"jewelry_store"
"laundry"
"lawyer"
"library"
"liquor_store"
"local_government_office"
"locksmith"
"lodging"
"meal_delivery"
"meal_takeaway"
"mosque"
"movie_rental"
"movie_theater"
"moving_company"
"museum"
"night_club"
"painter"
"park"
"parking"
"pet_store"
"pharmacy"
"physiotherapist"
"place_of_worship"
"plumber"
"police"
"post_office"
"real_estate_agency"
"restaurant"
"roofing_contractor"
"rv_park"
"school"
"shoe_store"
"shopping_mall"
"spa"
"stadium"
"storage"
"store"
"subway_station"
"synagogue"
"taxi_stand"
"train_station"
"travel_agency"
"university"
"veterinary_care"
"zoo"})
#_(defn search
"Called 'Nearby Search / Place Search'"
{:usage `(search {:long -111.90948486 :lat 40.56180797}
(auth/datum :google "" :api-key))
:info ""}
[coord api-key & [{:keys [radius search-type place-types parse?]
:or {radius 50000}}]]
(assert (val? api-key))
(assert (in? search-type #{:fuzzy :exact}))
(assert (and (<= radius 50000) (> radius 0)) #{radius})
(let [location (str (:lat coord) "," (:long coord))
search-opts (condp = search-type
:fuzzy {"radius" radius}
:exact {"rankby" "distance"
"types" (if place-types
(str/join "|" place-types)
(extern (str/join "|" valid-place-types)))})]
(http/request!
{:url search-url
:parse? parse?
:query-params
(mergel search-opts
{"key" api-key
"location" location
})})))
types = food&name = cruise&key = API_KEY
If rankby = distance ( described under Optional parameters below ) is specified , then one or more of keyword , name , or types is required .
keyword — A term to be matched against all content that Google has indexed for this place , including but not limited to name , type , and address , as well as customer reviews and other third - party content .
minprice and ( optional ) — Restricts results to only those places within the specified range . Valid values range between 0 ( most affordable ) to 4 ( most expensive ) , inclusive . The exact amount indicated by a specific value will vary from region to region .
name — One or more terms to be matched against the names of places , separated with a space character . Results will be restricted to those containing the passed name values . Note that a place may have additional names associated with it , beyond its listed name . The API will try to match the passed name value against all of these names . As a result , places may be returned in the results whose listed names do not match the search term , but whose associated names do .
opennow — Returns only those places that are open for business at the time the query is sent . Places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query .
prominence ( default ) . This option sorts results based on their importance . Ranking will favor prominent places within the specified area . Prominence can be affected by a place 's ranking in Google 's index , global popularity , and other factors .
distance . This option sorts results in ascending order by their distance from the specified location . When distance is specified , one or more of keyword , name , or types is required .
types — Restricts the results to places matching at least one of the specified types . Types should be separated with a pipe symbol ( type1|type2|etc ) . See the list of supported types .
pagetoken — Returns the next 20 results from a previously run search . Setting a pagetoken parameter will execute a search with the same parameters used previously — all parameters other than pagetoken will be ignored .
zagatselected — Add this parameter ( just the parameter name , with no associated value ) to restrict your search to locations that are Zagat selected businesses . This parameter must not include a true or false value . The zagatselected parameter is experimental , and is only available to Google Places API for Work customers .
The following example is a search request for places of type ' food ' within a 500 m radius of a point in Sydney , Australia , containing the word ' cruise ' in their name :
The Google Places API Text Search Service is a web service that returns information about a set of places based on a string — for example " pizza in New York " or " shoe stores near Ottawa " . The service responds with a list of places matching the text string and any location bias that has been set . The search response will include a list of places , you can send a Place Details request for more information about any of the places in the response .
The Google Places search services share the same usage limits . However , the Text Search service is subject to a 10 - times multiplier . That is , each Text Search request that you make will count as 10 requests against your quota . If you 've purchased the Google Places API as part of your Google Maps API for Work contract , the multiplier may be different . Please refer to the Google Maps API for Work documentation for details .
json ( recommended ) indicates output in JavaScript Object Notation ( JSON )
query — The text string on which to search , for example : " restaurant " . The Google Places service will return candidate matches based on this string and order the results based on their perceived relevance .
key — Your application 's API key . This key identifies your application for purposes of quota management and so that places added from your application are made immediately available to your app . Visit the Google Developers Console to create an API Project and obtain your key .
minprice and ( optional ) — Restricts results to only those places within the specified price level . Valid values are in the range from 0 ( most affordable ) to 4 ( most expensive ) , inclusive . The exact amount indicated by a specific value will vary from region to region .
opennow — Returns only those places that are open for business at the time the query is sent . places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query .
types — Restricts the results to places matching at least one of the specified types . Types should be separated with a pipe symbol ( type1|type2|etc ) . See the list of supported types .
pagetoken — Returns the next 20 results from a previously run search . Setting a pagetoken parameter will execute a search with the same parameters used previously — all parameters other than pagetoken will be ignored .
zagatselected — Add this parameter ( just the parameter name , with no associated value ) to restrict your search to locations that are Zagat selected businesses . This parameter must not include a true or false value . The zagatselected parameter is experimental , and is only available to Google Places API for Work customers .
You may bias results to a specified circle by passing a location and a radius parameter . This will instruct the Google Places service to prefer showing results within that circle . Results outside the defined area may still be displayed .
The below example shows a search for restaurants near Sydney .
The Google Places API Web Service is for use in server applications . If you 're building a client - side application , take a look at the Google Places API for Android and the Places Library in the Google Maps JavaScript API .
Note : The i d and reference fields are deprecated as of June 24 , 2014 . They are replaced by the new place ID , a textual identifier that uniquely identifies a place and can be used to retrieve information about the place . The usual deprecation period of one year has been extended , as we 're looking into ways of ensuring this change will not break existing code . We 'll update this page with a final notice at least 90 days before we change the way the API handles the i d and reference fields . We recommend that you update your code to use the new place ID instead of i d and reference as soon as possible .
json ( recommended ) indicates output in JavaScript Object Notation ( JSON )
key — Your application 's API key . This key identifies your application for purposes of quota management and so that places added from your application are made immediately available to your app . Visit the Google Developers Console to create an API Project and obtain your key .
radius — Defines the distance ( in meters ) within which to return place results . The maximum allowed radius is 50000 meters . Note that radius must not be included if rankby = distance ( described under Optional parameters below ) is specified .
If rankby = distance ( described under Optional parameters below ) is specified , then one or more of keyword , name , or types is required .
keyword — A term to be matched against all content that Google has indexed for this place , including but not limited to name , type , and address , as well as customer reviews and other third - party content .
minprice and ( optional ) — Restricts results to only those places within the specified range . Valid values range between 0 ( most affordable ) to 4 ( most expensive ) , inclusive . The exact amount indicated by a specific value will vary from region to region .
name — One or more terms to be matched against the names of places , separated with a space character . Results will be restricted to those containing the passed name values . Note that a place may have additional names associated with it , beyond its listed name . The API will try to match the passed name value against all of these names . As a result , places may be returned in the results whose listed names do not match the search term , but whose associated names do .
opennow — Returns only those places that are open for business at the time the query is sent . Places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query .
prominence ( default ) . This option sorts results based on their importance . Ranking will favor prominent places within the specified area . Prominence can be affected by a place 's ranking in Google 's index , global popularity , and other factors .
distance . This option sorts results in ascending order by their distance from the specified location . When distance is specified , one or more of keyword , name , or types is required .
types — Restricts the results to places matching at least one of the specified types . Types should be separated with a pipe symbol ( type1|type2|etc ) . See the list of supported types .
pagetoken — Returns the next 20 results from a previously run search . Setting a pagetoken parameter will execute a search with the same parameters used previously — all parameters other than pagetoken will be ignored .
zagatselected — Add this parameter ( just the parameter name , with no associated value ) to restrict your search to locations that are Zagat selected businesses . This parameter must not include a true or false value . The zagatselected parameter is experimental , and is only available to Google Places API for Work customers .
The following example is a search request for places of type ' food ' within a 500 m radius of a point in Sydney , Australia , containing the word ' cruise ' in their name :
The Google Places API Text Search Service is a web service that returns information about a set of places based on a string — for example " pizza in New York " or " shoe stores near Ottawa " . The service responds with a list of places matching the text string and any location bias that has been set . The search response will include a list of places , you can send a Place Details request for more information about any of the places in the response .
The Google Places search services share the same usage limits . However , the Text Search service is subject to a 10 - times multiplier . That is , each Text Search request that you make will count as 10 requests against your quota . If you 've purchased the Google Places API as part of your Google Maps API for Work contract , the multiplier may be different . Please refer to the Google Maps API for Work documentation for details .
json ( recommended ) indicates output in JavaScript Object Notation ( JSON )
query — The text string on which to search , for example : " restaurant " . The Google Places service will return candidate matches based on this string and order the results based on their perceived relevance .
key — Your application 's API key . This key identifies your application for purposes of quota management and so that places added from your application are made immediately available to your app . Visit the Google Developers Console to create an API Project and obtain your key .
minprice and ( optional ) — Restricts results to only those places within the specified price level . Valid values are in the range from 0 ( most affordable ) to 4 ( most expensive ) , inclusive . The exact amount indicated by a specific value will vary from region to region .
opennow — Returns only those places that are open for business at the time the query is sent . places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query .
types — Restricts the results to places matching at least one of the specified types . Types should be separated with a pipe symbol ( type1|type2|etc ) . See the list of supported types .
pagetoken — Returns the next 20 results from a previously run search . Setting a pagetoken parameter will execute a search with the same parameters used previously — all parameters other than pagetoken will be ignored .
zagatselected — Add this parameter ( just the parameter name , with no associated value ) to restrict your search to locations that are Zagat selected businesses . This parameter must not include a true or false value . The zagatselected parameter is experimental , and is only available to Google Places API for Work customers .
You may bias results to a specified circle by passing a location and a radius parameter . This will instruct the Google Places service to prefer showing results within that circle . Results outside the defined area may still be displayed .
The below example shows a search for restaurants near Sydney .
Radar Search Requests
The Google Places API Radar Search Service allows you to search for up to 200 places at once , but with less detail than is typically returned from a Text Search or Nearby Search request . With Radar Search , you can create applications that help users identify specific areas of interest within a geographic area .
The search response will include up to 200 places , and will include only the following information about each place :
A Radar Search request is an HTTP URL of the following form :
json ( recommended ) indicates output in JavaScript Object Notation ( JSON )
key — Your application 's API key . This key identifies your application for purposes of quota management and so that places added from your application are made immediately available to your app . Visit the Google Developers Console to create an API Project and obtain your key .
radius — Defines the distance ( in meters ) within which to return place results . The maximum allowed radius is 50000 meters .
A Radar Search request must include at least one of keyword , name , or types .
keyword — A term to be matched against all content that Google has indexed for this place , including but not limited to name , type , and address , as well as customer reviews and other third - party content .
minprice and ( optional ) — Restricts results to only those places within the specified price level . Valid values are in the range from 0 ( most affordable ) to 4 ( most expensive ) , inclusive . The exact amount indicated by a specific value will vary from region to region .
name — One or more terms to be matched against the names of places , separated by a space character . Results will be restricted to those containing the passed name values . Note that a place may have additional names associated with it , beyond its listed name . The API will try to match the passed name value against all of these names . As a result , places may be returned in the results whose listed names do not match the search term , but whose associated names do .
opennow — Returns only those places that are open for business at the time the query is sent . Places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query .
types — Restricts the results to places matching at least one of the specified types . Types should be separated with a pipe symbol ( type1|type2|etc ) . See the list of supported types .
zagatselected — Add this parameter ( just the parameter name , with no associated value ) to restrict your search to locations that are Zagat selected businesses . This parameter must not include a true or false value . The zagatselected parameter is experimental , and is only available to Google Places API for Work customers .
The below example returns a list of museums near London , England .
Using a combination of the keyword , name and types parameters , you can perform more precise queries . The below example shows restaurants and cafes in Paris that users have described as vegetarian .
" " : [ ] ,
" " : 151.199025
" i d " : " 21a0b251c9b8392186142c798263e289fe45b4aa " ,
" open_now " : true
" height " : 270 ,
" " : [ ] ,
" width " : 519
" scope " : " GOOGLE " ,
" reference " : " - cP1dWvp_QGS4SNCBMk_fB06YRsfMrNkINtPez22p5lRIlj5ty_HmcNwcl6GZXbD2RdXsVfLYlQwnZQcnu7ihkjZp_2gk1 - fWXql3GQ8 - 1BEGwgCxG - eaSnIJIBPuIpihEhAY1WYdxPvOWsPnb2 - nGb6QGhTipN0lgaLpQTnkcMeAIEvCsSa0Ww " ,
" vicinity " : " Pyrmont Bay Wharf Darling Dr , Sydney "
" " : 151.200814
" i d " : " 45a27fd8d56c56dc62afc9b49e1d850440d5c403 " ,
" name " : " Private Charter Sydney Habour Cruise " ,
" height " : 426 ,
" " : [ ] ,
" width " : 640
" scope " : " GOOGLE " ,
" reference " : " CpQBhgAAAFN27qR_t5oSDKPUzjQIeQa3lrRpFTm5alW3ZYbMFm8k10ETbISfK9S1nwcJVfrP - bjra7NSPuhaRulxoonSPQklDyB - xGvcJncq6qDXIUQ3hlI - bx4AxYckAOX74LkupHq7bcaREgrSBE - U6GbA1C3U7I - HnweO4IPtztSEcgW09y03v1hgHzL8xSDElmkQtRIQzLbyBfj3e0FhJzABXjM2QBoUE2EnL - DzWrzpgmMEulUBLGrtu2Y " ,
" vicinity " : " Australia "
" " : 151.190311
" i d " : " 30bee58f819b6c47bd24151802f25ecf11df8943 " ,
" name " : " Bucks Party Cruise " ,
" open_now " : true
" height " : 600 ,
" " : [ ] ,
" width " : 800
" place_id " : " ChIJLfySpTOuEmsRsc_JfJtljdc " ,
" scope " : " GOOGLE " ,
" vicinity " : " 37 Bank St , Pyrmont "
" " : 151.201196
" name " : " Australian Cruise Group " ,
" open_now " : true
" height " : 242 ,
" " : [ ] ,
" photo_reference " : " CnRnAAAABjeoPQ7NUU3pDitV4Vs0BgP1FLhf_iCgStUZUr4ZuNqQnc5k43jbvjKC2hTGM8SrmdJYyOyxRO3D2yutoJwVC4Vp_dzckkjG35L6LfMm5sjrOr6uyOtr2PNCp1xQylx6vhdcpW8yZjBZCvVsjNajLBIQ - z4ttAMIc8EjEZV7LsoFgRoU6OrqxvKCnkJGb9F16W57iIV4LuM " ,
" width " : 200
" scope " : " GOOGLE " ,
" vicinity " : " 32 The Promenade , King Street Wharf 5 , Sydney "
A JSON response contains up to four root elements :
" results " contains an array of places , with information about each . See Search Results for information about these results . The Places API returns up to 20 establishment results per query . Additionally , political results may be returned which serve to identify the area of the request .
next_page_token contains a token that can be used to return up to 20 additional results . A next_page_token will not be returned if there are no additional results to display . The maximum number of results that can be returned is 60 . There is a short delay between when a next_page_token is issued , and when it will become valid .
|
e714557ec2b72231d14dcd43884604123ccd95ee75e1b77d5482d0796040ba40 | theodormoroianu/SecondYearCourses | lab6-sol_20210115144738.hs | -- /
import Data.Char
import Data.List
import Test.QuickCheck
1 .
rotate :: Int -> [Char] -> [Char]
rotate n l
| n > 0
, n < length l
= suf ++ pre
where
(pre, suf) = splitAt n l
rotate _ _ = error "număr negativ sau prea mare"
2 .
prop_rotate :: Int -> String -> Bool
prop_rotate k str = rotate (l + 2 - m) (rotate m str') == str'
where
str' = "ab" ++ str
l = length str + 1
m = 1 + if l == 0 then 0 else k `mod` l
3 .
makeKey :: Int -> [(Char, Char)]
makeKey = undefined
4 .
lookUp :: Char -> [(Char, Char)] -> Char
lookUp = undefined
5 .
encipher :: Int -> Char -> Char
encipher = undefined
6 .
normalize :: String -> String
normalize = undefined
7 .
encipherStr :: Int -> String -> String
encipherStr = undefined
8 .
reverseKey :: [(Char, Char)] -> [(Char, Char)]
reverseKey = undefined
9 .
decipher :: Int -> Char -> Char
decipher = undefined
decipherStr :: Int -> String -> String
decipherStr = undefined
| null | https://raw.githubusercontent.com/theodormoroianu/SecondYearCourses/99185b0e97119135e7301c2c7be0f07ae7258006/Haskell/l/.history/lab6/lab6-sol_20210115144738.hs | haskell | / |
import Data.Char
import Data.List
import Test.QuickCheck
1 .
rotate :: Int -> [Char] -> [Char]
rotate n l
| n > 0
, n < length l
= suf ++ pre
where
(pre, suf) = splitAt n l
rotate _ _ = error "număr negativ sau prea mare"
2 .
prop_rotate :: Int -> String -> Bool
prop_rotate k str = rotate (l + 2 - m) (rotate m str') == str'
where
str' = "ab" ++ str
l = length str + 1
m = 1 + if l == 0 then 0 else k `mod` l
3 .
makeKey :: Int -> [(Char, Char)]
makeKey = undefined
4 .
lookUp :: Char -> [(Char, Char)] -> Char
lookUp = undefined
5 .
encipher :: Int -> Char -> Char
encipher = undefined
6 .
normalize :: String -> String
normalize = undefined
7 .
encipherStr :: Int -> String -> String
encipherStr = undefined
8 .
reverseKey :: [(Char, Char)] -> [(Char, Char)]
reverseKey = undefined
9 .
decipher :: Int -> Char -> Char
decipher = undefined
decipherStr :: Int -> String -> String
decipherStr = undefined
|
879ec1a79a6dee4834b775676234155d1e0df26e340c2393f879d25b93ce1910 | scalaris-team/scalaris | db_util.erl | 2015 Zuse Institute Berlin ,
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
@author < >
@author < >
%% @doc DB back-end utilities.
%% @end
%% @version $Id$
-module(db_util).
-author('').
-author('').
-include("scalaris.hrl").
-export([get_name/1, get_subscriber_name/1,
get_recoverable_dbs/0, parse_table_name/1]).
%% @doc Initializes a new database.
-spec get_name(DBName::nonempty_string() | atom() | tuple()) -> nonempty_string().
get_name(DBName) when is_atom(DBName) ->
get_name(erlang:atom_to_list(DBName));
get_name(DBName = {Name, Id}) when is_tuple(DBName) ->
get_name(erlang:atom_to_list(Name) ++ "-" ++ erlang:integer_to_list(Id));
get_name(DBName) ->
?DBG_ASSERT(not lists:member($+, DBName)),
RandomName = randoms:getRandomString(),
DBName ++ "+" ++ pid_groups:group_to_filename(pid_groups:my_groupname())
++ "+" ++ RandomName.
-spec get_subscriber_name(DBName::nonempty_string()) -> nonempty_string().
get_subscriber_name(DBName) ->
?DBG_ASSERT(not lists:member($#, DBName)),
DBName ++ "#subscribers".
-spec get_recoverable_dbs()
-> [{DB_type::nonempty_string(), PID_group::pid_groups:groupname(), DB_name::nonempty_string()}].
get_recoverable_dbs() ->
Tables = (config:read(db_backend)):get_persisted_tables(),
io : list : ~w ~ n " , [ Tables ] ) ,
%% creating tuples with DB_names different parts : {DB_type, PID_group, DB_name}
[parse_table_name(Table) || Table <- Tables].
-spec parse_table_name(Table::nonempty_string() | atom())
-> {DB_type::nonempty_string(), PID_group::pid_groups:groupname(), Table::nonempty_string()}.
parse_table_name(Table) when is_atom(Table) ->
parse_table_name(erlang:atom_to_list(Table));
parse_table_name(Table) ->
{string:sub_word(Table, 1, $+),
pid_groups:filename_to_group(string:sub_word(Table, 2, $+)),
Table}.
| null | https://raw.githubusercontent.com/scalaris-team/scalaris/feb894d54e642bb3530e709e730156b0ecc1635f/src/db_util.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc DB back-end utilities.
@end
@version $Id$
@doc Initializes a new database.
creating tuples with DB_names different parts : {DB_type, PID_group, DB_name} | 2015 Zuse Institute Berlin ,
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
@author < >
-module(db_util).
-author('').
-author('').
-include("scalaris.hrl").
-export([get_name/1, get_subscriber_name/1,
get_recoverable_dbs/0, parse_table_name/1]).
-spec get_name(DBName::nonempty_string() | atom() | tuple()) -> nonempty_string().
get_name(DBName) when is_atom(DBName) ->
get_name(erlang:atom_to_list(DBName));
get_name(DBName = {Name, Id}) when is_tuple(DBName) ->
get_name(erlang:atom_to_list(Name) ++ "-" ++ erlang:integer_to_list(Id));
get_name(DBName) ->
?DBG_ASSERT(not lists:member($+, DBName)),
RandomName = randoms:getRandomString(),
DBName ++ "+" ++ pid_groups:group_to_filename(pid_groups:my_groupname())
++ "+" ++ RandomName.
-spec get_subscriber_name(DBName::nonempty_string()) -> nonempty_string().
get_subscriber_name(DBName) ->
?DBG_ASSERT(not lists:member($#, DBName)),
DBName ++ "#subscribers".
-spec get_recoverable_dbs()
-> [{DB_type::nonempty_string(), PID_group::pid_groups:groupname(), DB_name::nonempty_string()}].
get_recoverable_dbs() ->
Tables = (config:read(db_backend)):get_persisted_tables(),
io : list : ~w ~ n " , [ Tables ] ) ,
[parse_table_name(Table) || Table <- Tables].
-spec parse_table_name(Table::nonempty_string() | atom())
-> {DB_type::nonempty_string(), PID_group::pid_groups:groupname(), Table::nonempty_string()}.
parse_table_name(Table) when is_atom(Table) ->
parse_table_name(erlang:atom_to_list(Table));
parse_table_name(Table) ->
{string:sub_word(Table, 1, $+),
pid_groups:filename_to_group(string:sub_word(Table, 2, $+)),
Table}.
|
d17633debc0bb2768b19b2b9d272e64d8b4e576ba7a55cae4787b35c94966d05 | morpheusgraphql/morpheus-graphql | App.hs | {-# LANGUAGE ConstraintKinds #-}
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TupleSections #
# LANGUAGE UndecidableInstances #
# LANGUAGE NoImplicitPrelude #
module Data.Morpheus.Server.Deriving.App
( RootResolverConstraint,
deriveSchema,
deriveApp,
)
where
import Data.Morpheus.App
( App (..),
mkApp,
)
import Data.Morpheus.App.Internal.Resolving
( resultOr,
)
import Data.Morpheus.Server.Deriving.Resolvers
( DERIVE_NAMED_RESOLVERS,
DERIVE_RESOLVERS,
deriveNamedResolvers,
deriveResolvers,
)
import Data.Morpheus.Server.Deriving.Schema
( SCHEMA,
deriveSchema,
)
import Data.Morpheus.Server.Resolvers
( NamedResolvers,
)
import Data.Morpheus.Server.Types
import Relude
type RootResolverConstraint m e query mutation subscription =
( DERIVE_RESOLVERS (Resolver QUERY e m) query mutation subscription,
SCHEMA query mutation subscription,
Monad m
)
type NamedResolversConstraint m e query mutation subscription =
( DERIVE_NAMED_RESOLVERS (Resolver QUERY e m) query,
SCHEMA query mutation subscription,
Monad m
)
class
DeriveApp
f
m
(event :: Type)
(qu :: (Type -> Type) -> Type)
(mu :: (Type -> Type) -> Type)
(su :: (Type -> Type) -> Type)
where
deriveApp :: f m event qu mu su -> App event m
instance RootResolverConstraint m e query mut sub => DeriveApp RootResolver m e query mut sub where
deriveApp root =
resultOr FailApp (uncurry mkApp) $ (,) <$> deriveSchema (Identity root) <*> deriveResolvers root
instance NamedResolversConstraint m e query mut sub => DeriveApp NamedResolvers m e query mut sub where
deriveApp root =
resultOr FailApp (uncurry mkApp) $ (,deriveNamedResolvers root) <$> deriveSchema (Identity root)
| null | https://raw.githubusercontent.com/morpheusgraphql/morpheus-graphql/c176cc86e590ae5cecadb8ee02bd608be8b88c5f/morpheus-graphql-server/src/Data/Morpheus/Server/Deriving/App.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE GADTs # | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TupleSections #
# LANGUAGE UndecidableInstances #
# LANGUAGE NoImplicitPrelude #
module Data.Morpheus.Server.Deriving.App
( RootResolverConstraint,
deriveSchema,
deriveApp,
)
where
import Data.Morpheus.App
( App (..),
mkApp,
)
import Data.Morpheus.App.Internal.Resolving
( resultOr,
)
import Data.Morpheus.Server.Deriving.Resolvers
( DERIVE_NAMED_RESOLVERS,
DERIVE_RESOLVERS,
deriveNamedResolvers,
deriveResolvers,
)
import Data.Morpheus.Server.Deriving.Schema
( SCHEMA,
deriveSchema,
)
import Data.Morpheus.Server.Resolvers
( NamedResolvers,
)
import Data.Morpheus.Server.Types
import Relude
type RootResolverConstraint m e query mutation subscription =
( DERIVE_RESOLVERS (Resolver QUERY e m) query mutation subscription,
SCHEMA query mutation subscription,
Monad m
)
type NamedResolversConstraint m e query mutation subscription =
( DERIVE_NAMED_RESOLVERS (Resolver QUERY e m) query,
SCHEMA query mutation subscription,
Monad m
)
class
DeriveApp
f
m
(event :: Type)
(qu :: (Type -> Type) -> Type)
(mu :: (Type -> Type) -> Type)
(su :: (Type -> Type) -> Type)
where
deriveApp :: f m event qu mu su -> App event m
instance RootResolverConstraint m e query mut sub => DeriveApp RootResolver m e query mut sub where
deriveApp root =
resultOr FailApp (uncurry mkApp) $ (,) <$> deriveSchema (Identity root) <*> deriveResolvers root
instance NamedResolversConstraint m e query mut sub => DeriveApp NamedResolvers m e query mut sub where
deriveApp root =
resultOr FailApp (uncurry mkApp) $ (,deriveNamedResolvers root) <$> deriveSchema (Identity root)
|
0c285dd865949743fb60807ae6a0c87a84a558542e050cde9c9913674b381b98 | cloudamqp/rabbitmq-vshovel | rabbit_vshovel_parameters.erl | The contents of this file are subject to the Mozilla Public License
Version 1.1 ( the " License " ) ; you may not use this file except in
%% compliance with the License. You may obtain a copy of the License
%% at /
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and
%% limitations under the License.
%%
The Original Code is RabbitMQ .
%%
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2007 - 2017 Pivotal Software , Inc. All rights reserved .
%%
-module(rabbit_vshovel_parameters).
-behaviour(rabbit_runtime_parameter).
-include_lib("amqp_client/include/amqp_client.hrl").
-include("rabbit_vshovel.hrl").
-export([validate/5, notify/5, notify/4,
notify_clear/3, notify_clear/4]).
-export([register/0, unregister/0, parse/2]).
-import(rabbit_misc, [pget/2, pget/3]).
-rabbit_boot_step({?MODULE,
[{description, "vShovel parameters"},
{mfa, {rabbit_vshovel_parameters, register, []}},
{cleanup, {?MODULE, unregister, []}},
{requires, rabbit_registry},
{enables, recovery}]}).
register() ->
rabbit_registry:register(runtime_parameter, <<"vshovel">>, ?MODULE).
unregister() ->
rabbit_registry:unregister(runtime_parameter, <<"vshovel">>).
validate(_VHost, <<"vshovel">>, Name, Def, User) ->
{ok, DestType} = rabbit_vshovel_endpoint:ensure_protocol(pget(<<"dest-type">>, Def, ?AMQP_PROTOCOL)),
{ok, Version} = rabbit_vshovel_endpoint:ensure_version(pget(<<"dest-vsn">>, Def, ?DEFAULT_AMQP_VERSION)),
Mod = rabbit_vshovel_config:get_endpoint_module(DestType, Version),
[case Mod of
Mod when is_atom(Mod) -> ok;
_Other -> {error, "Unsupported protocol: \"~s\"", [DestType]}
end,
case pget2(<<"src-exchange">>, <<"src-queue">>, Def) of
zero -> {error, "Must specify 'src-exchange' or 'src-queue'", []};
one -> ok;
both -> {error, "Cannot specify 'src-exchange' and 'src-queue'", []}
end,
if DestType =:= ?AMQP_PROTOCOL ->
case pget2(<<"dest-exchange">>, <<"dest-queue">>, Def) of
zero -> ok;
one -> ok;
both -> {error, "Cannot specify 'dest-exchange' and 'dest-queue'", []}
end;
true ->
ok
end,
case {pget(<<"delete-after">>, Def), pget(<<"ack-mode">>, Def)} of
{N, <<"no-ack">>} when is_integer(N) ->
{error, "Cannot specify 'no-ack' and numerical 'delete-after'", []};
_ ->
ok
end | rabbit_parameter_validation:proplist(Name, validation(User, Mod, Def), Def)];
validate(_VHost, _Component, Name, _Term, _User) ->
{error, "name not recognised: ~p", [Name]}.
pget2(K1, K2, Defs) -> case {pget(K1, Defs), pget(K2, Defs)} of
{undefined, undefined} -> zero;
{undefined, _} -> one;
{_, undefined} -> one;
{_, _} -> both
end.
notify(VHost, <<"vshovel">>, Name, Definition) ->
rabbit_vshovel_dyn_worker_sup_sup:adjust({VHost, Name}, Definition).
notify(VHost, <<"vshovel">>, Name, Definition, _Username) ->
notify(VHost, <<"vshovel">>, Name, Definition).
notify_clear(VHost, <<"vshovel">>, Name) ->
rabbit_vshovel_dyn_worker_sup_sup:stop_child({VHost, Name}).
notify_clear(VHost, <<"vshovel">>, Name, _Username) ->
notify_clear(VHost, <<"vshovel">>, Name).
%%----------------------------------------------------------------------------
validation(User, CB, Def) ->
[{<<"src-uri">>, validate_uri_fun(User), mandatory},
{<<"dest-uri">>, validate_address_fun(User, CB), mandatory},
{<<"dest-type">>, fun validate_dest_type_fun/2, optional},
{<<"dest-args">>, validate_dest_args_fun(CB), optional},
{<<"dest-vsn">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"dest-port">>, fun rabbit_parameter_validation:number/2,optional},
{<<"src-exchange">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"src-exchange-key">>,fun rabbit_parameter_validation:binary/2,optional},
{<<"src-queue">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"dest-exchange">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"dest-exchange-key">>,fun rabbit_parameter_validation:binary/2,optional},
{<<"dest-queue">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"prefetch-count">>, fun rabbit_parameter_validation:number/2,optional},
{<<"reconnect-delay">>, fun rabbit_parameter_validation:number/2,optional},
{<<"add-forward-headers">>, fun rabbit_parameter_validation:boolean/2,optional},
{<<"add-timestamp-header">>, fun rabbit_parameter_validation:boolean/2,optional},
{<<"publish-properties">>, fun validate_properties/2, optional},
{<<"ack-mode">>, rabbit_parameter_validation:enum(
['no-ack', 'on-publish', 'on-confirm']), optional},
{<<"delete-after">>, fun validate_delete_after/2, optional}
].
validate_uri_fun(User) ->
fun (Name, Term) -> validate_uri(Name, Term, User) end.
validate_uri(Name, Term, User) when is_binary(Term) ->
case rabbit_parameter_validation:binary(Name, Term) of
ok -> case amqp_uri:parse(binary_to_list(Term)) of
{ok, P} -> validate_params_user(P, User);
{error, E} -> {error, "\"~s\" not a valid URI: ~p", [Term, E]}
end;
E -> E
end;
validate_uri(Name, Term, User) ->
case rabbit_parameter_validation:list(Name, Term) of
ok -> case [V || URI <- Term,
V <- [validate_uri(Name, URI, User)],
element(1, V) =:= error] of
[] -> ok;
[E | _] -> E
end;
E -> E
end.
validate_params_user(#amqp_params_direct{}, none) ->
ok;
validate_params_user(#amqp_params_direct{virtual_host = VHost},
User = #user{username = Username}) ->
case rabbit_vhost:exists(VHost) andalso
(catch rabbit_access_control:check_vhost_access(
User, VHost, undefined)) of
ok -> ok;
_ -> {error, "user \"~s\" may not connect to vhost \"~s\"",
[Username, VHost]}
end;
validate_params_user(#amqp_params_network{}, _User) ->
ok.
validate_delete_after(_Name, <<"never">>) -> ok;
validate_delete_after(_Name, <<"queue-length">>) -> ok;
validate_delete_after(_Name, N) when is_integer(N) -> ok;
validate_delete_after(Name, Term) ->
{error, "~s should be number, \"never\" or \"queue-length\", actually was "
"~p", [Name, Term]}.
TODO headers ?
validate_properties(Name, Term) ->
Str = fun rabbit_parameter_validation:binary/2,
Num = fun rabbit_parameter_validation:number/2,
rabbit_parameter_validation:proplist(
Name, [{<<"content_type">>, Str, optional},
{<<"content_encoding">>, Str, optional},
{<<"delivery_mode">>, Num, optional},
{<<"priority">>, Num, optional},
{<<"correlation_id">>, Str, optional},
{<<"reply_to">>, Str, optional},
{<<"expiration">>, Str, optional},
{<<"message_id">>, Str, optional},
{<<"timestamp">>, Num, optional},
{<<"type">>, Str, optional},
{<<"user_id">>, Str, optional},
{<<"app_id">>, Str, optional},
{<<"cluster_id">>, Str, optional}], Term).
validate_dest_type_fun(_Name, Term) ->
{ok, Term0} = rabbit_vshovel_endpoint:ensure_protocol(Term),
case rabbit_vshovel_config:get_endpoint_module(Term0) of
M when is_atom(M) -> ok;
_Error -> {error, "Unsupported protocol: \"~s\"", [Term]}
end.
validate_dest_args_fun(CB) ->
fun (Name, Term) -> validate_dest_args_fun(Name, Term, CB) end.
validate_dest_args_fun(_Name, Term, CB) ->
CB:validate_arguments(Term).
validate_address_fun(User, rabbit_vshovel_worker) -> validate_uri_fun(User);
validate_address_fun(_User, CB) ->
fun (Name, Term) -> validate_address_fun(Name, Term, CB) end.
validate_address_fun(_Name, Term, CB) ->
CB:validate_address(Term).
%%----------------------------------------------------------------------------
parse({VHost, Name}, Def) ->
SrcURIs = get_uris(<<"src-uri">>, Def),
DestURIs = get_uris(<<"dest-uri">>, Def),
DestType = pget(<<"dest-type">>, Def, <<"amqp">>),
Version = pget(<<"dest-vsn">>, Def, <<"0.9.1">>),
DestArgs = pget(<<"dest-args">>, Def, <<"{}">>),
SrcX = pget(<<"src-exchange">>, Def, none),
[ 1 ]
SrcQ = pget(<<"src-queue">>, Def, none),
DestX = pget(<<"dest-exchange">>, Def, none),
DestXKey = pget(<<"dest-exchange-key">>, Def, none),
DestQ = pget(<<"dest-queue">>, Def, none),
[ 1 ] src - exchange - key is never ignored if src - exchange is set
{SrcFun, Queue, Table1} =
case SrcQ of
none -> {fun (_Conn, Ch) ->
Ms = [#'queue.declare'{exclusive = true},
#'queue.bind'{routing_key = SrcXKey,
exchange = SrcX}],
[amqp_channel:call(Ch, M) || M <- Ms]
end, <<>>, [{<<"src-exchange">>, SrcX},
{<<"src-exchange-key">>, SrcXKey}]};
_ -> {fun (Conn, _Ch) ->
ensure_queue(Conn, SrcQ)
end, SrcQ, [{<<"src-queue">>, SrcQ}]}
end,
{ok, DestType0} = rabbit_vshovel_endpoint:ensure_protocol(DestType),
CB = rabbit_vshovel_config:get_endpoint_module(DestType0, Version),
{ok, DestArgs0} =
CB:validate_arguments(DestArgs),
%% Update state
VShovelStateInit =
#vshovel{
sources = #endpoint{address = SrcURIs,
resource_declaration = SrcFun},
destinations = #endpoint{protocol = DestType0,
address = DestURIs,
arguments = DestArgs0},
prefetch_count = pget(<<"prefetch-count">>, Def, 1000),
ack_mode = translate_ack_mode(
pget(<<"ack-mode">>, Def, <<"on-confirm">>)),
queue = Queue,
reconnect_delay = pget(<<"reconnect-delay">>, Def, 1),
delete_after = opt_b2a(pget(<<"delete-after">>, Def, <<"never">>))
},
If protocol is ' ' , treat as current implementation
{ok, if DestType0 =:= ?AMQP_PROTOCOL ->
DestFun = fun (Conn, _Ch) ->
case DestQ of
none -> ok;
_ -> ensure_queue(Conn, DestQ)
end
end,
{X, Key} = case DestQ of
none -> {DestX, DestXKey};
_ -> {<<>>, DestQ}
end,
Table2 = [{K, V} || {K, V} <- [{<<"dest-exchange">>, DestX},
{<<"dest-exchange-key">>, DestXKey},
{<<"dest-queue">>, DestQ}],
V =/= none],
PubFun = fun (_SrcURI, _DestURI, P0) ->
P1 = case X of
none -> P0;
_ -> P0#'basic.publish'{exchange = X}
end,
case Key of
none -> P1;
_ -> P1#'basic.publish'{routing_key = Key}
end
end,
AddHeaders = pget(<<"add-forward-headers">>, Def, false),
Table0 = [{<<"vshovelled-by">>, rabbit_nodes:cluster_name()},
{<<"vshovel-type">>, <<"dynamic">>},
{<<"vshovel-name">>, Name},
{<<"vshovel-vhost">>, VHost}],
SetProps = lookup_indices(pget(<<"publish-properties">>, Def, []),
record_info(fields, 'P_basic')),
AddTimestampHeader = pget(<<"add-timestamp-header">>, Def, false),
PubPropsFun =
fun (SrcURI, DestURI, P0) ->
P = set_properties(P0, SetProps),
P1 = case AddHeaders of
true -> rabbit_vshovel_util:update_headers(
Table0, Table1 ++ Table2,
SrcURI, DestURI, P);
false -> P
end,
case AddTimestampHeader of
true -> rabbit_vshovel_util:add_timestamp_header(P1);
false -> P1
end
end,
VShovelStateInit#vshovel{
destinations = #endpoint{address = DestURIs,
resource_declaration = DestFun},
publish_fields = PubFun,
publish_properties = PubPropsFun
};
true ->
VShovelStateInit
end}.
get_uris(Key, Def) ->
URIs = case pget(Key, Def) of
B when is_binary(B) -> [B];
L when is_list(L) -> L
end,
[binary_to_list(URI) || URI <- URIs].
translate_ack_mode(<<"on-confirm">>) -> on_confirm;
translate_ack_mode(<<"on-publish">>) -> on_publish;
translate_ack_mode(<<"no-ack">>) -> no_ack.
ensure_queue(Conn, Queue) ->
{ok, Ch} = amqp_connection:open_channel(Conn),
try
amqp_channel:call(Ch, #'queue.declare'{queue = Queue,
passive = true})
catch exit:{{shutdown, {server_initiated_close, ?NOT_FOUND, _Text}}, _} ->
{ok, Ch2} = amqp_connection:open_channel(Conn),
amqp_channel:call(Ch2, #'queue.declare'{queue = Queue,
durable = true}),
catch amqp_channel:close(Ch2)
after
catch amqp_channel:close(Ch)
end.
opt_b2a(B) when is_binary(B) -> list_to_atom(binary_to_list(B));
opt_b2a(N) -> N.
set_properties(Props, []) ->
Props;
set_properties(Props, [{Ix, V} | Rest]) ->
set_properties(setelement(Ix, Props, V), Rest).
lookup_indices(KVs, L) ->
[{1 + list_find(list_to_atom(binary_to_list(K)), L), V} || {K, V} <- KVs].
list_find(K, L) -> list_find(K, L, 1).
list_find(K, [K|_], N) -> N;
list_find(K, [], _N) -> exit({not_found, K});
list_find(K, [_|L], N) -> list_find(K, L, N + 1). | null | https://raw.githubusercontent.com/cloudamqp/rabbitmq-vshovel/c439a10efdc518eb930dcccdd0a7334f1ca859ce/src/rabbit_vshovel_parameters.erl | erlang | compliance with the License. You may obtain a copy of the License
at /
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and
limitations under the License.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Update state | The contents of this file are subject to the Mozilla Public License
Version 1.1 ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
The Original Code is RabbitMQ .
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2007 - 2017 Pivotal Software , Inc. All rights reserved .
-module(rabbit_vshovel_parameters).
-behaviour(rabbit_runtime_parameter).
-include_lib("amqp_client/include/amqp_client.hrl").
-include("rabbit_vshovel.hrl").
-export([validate/5, notify/5, notify/4,
notify_clear/3, notify_clear/4]).
-export([register/0, unregister/0, parse/2]).
-import(rabbit_misc, [pget/2, pget/3]).
-rabbit_boot_step({?MODULE,
[{description, "vShovel parameters"},
{mfa, {rabbit_vshovel_parameters, register, []}},
{cleanup, {?MODULE, unregister, []}},
{requires, rabbit_registry},
{enables, recovery}]}).
register() ->
rabbit_registry:register(runtime_parameter, <<"vshovel">>, ?MODULE).
unregister() ->
rabbit_registry:unregister(runtime_parameter, <<"vshovel">>).
validate(_VHost, <<"vshovel">>, Name, Def, User) ->
{ok, DestType} = rabbit_vshovel_endpoint:ensure_protocol(pget(<<"dest-type">>, Def, ?AMQP_PROTOCOL)),
{ok, Version} = rabbit_vshovel_endpoint:ensure_version(pget(<<"dest-vsn">>, Def, ?DEFAULT_AMQP_VERSION)),
Mod = rabbit_vshovel_config:get_endpoint_module(DestType, Version),
[case Mod of
Mod when is_atom(Mod) -> ok;
_Other -> {error, "Unsupported protocol: \"~s\"", [DestType]}
end,
case pget2(<<"src-exchange">>, <<"src-queue">>, Def) of
zero -> {error, "Must specify 'src-exchange' or 'src-queue'", []};
one -> ok;
both -> {error, "Cannot specify 'src-exchange' and 'src-queue'", []}
end,
if DestType =:= ?AMQP_PROTOCOL ->
case pget2(<<"dest-exchange">>, <<"dest-queue">>, Def) of
zero -> ok;
one -> ok;
both -> {error, "Cannot specify 'dest-exchange' and 'dest-queue'", []}
end;
true ->
ok
end,
case {pget(<<"delete-after">>, Def), pget(<<"ack-mode">>, Def)} of
{N, <<"no-ack">>} when is_integer(N) ->
{error, "Cannot specify 'no-ack' and numerical 'delete-after'", []};
_ ->
ok
end | rabbit_parameter_validation:proplist(Name, validation(User, Mod, Def), Def)];
validate(_VHost, _Component, Name, _Term, _User) ->
{error, "name not recognised: ~p", [Name]}.
pget2(K1, K2, Defs) -> case {pget(K1, Defs), pget(K2, Defs)} of
{undefined, undefined} -> zero;
{undefined, _} -> one;
{_, undefined} -> one;
{_, _} -> both
end.
notify(VHost, <<"vshovel">>, Name, Definition) ->
rabbit_vshovel_dyn_worker_sup_sup:adjust({VHost, Name}, Definition).
notify(VHost, <<"vshovel">>, Name, Definition, _Username) ->
notify(VHost, <<"vshovel">>, Name, Definition).
notify_clear(VHost, <<"vshovel">>, Name) ->
rabbit_vshovel_dyn_worker_sup_sup:stop_child({VHost, Name}).
notify_clear(VHost, <<"vshovel">>, Name, _Username) ->
notify_clear(VHost, <<"vshovel">>, Name).
validation(User, CB, Def) ->
[{<<"src-uri">>, validate_uri_fun(User), mandatory},
{<<"dest-uri">>, validate_address_fun(User, CB), mandatory},
{<<"dest-type">>, fun validate_dest_type_fun/2, optional},
{<<"dest-args">>, validate_dest_args_fun(CB), optional},
{<<"dest-vsn">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"dest-port">>, fun rabbit_parameter_validation:number/2,optional},
{<<"src-exchange">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"src-exchange-key">>,fun rabbit_parameter_validation:binary/2,optional},
{<<"src-queue">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"dest-exchange">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"dest-exchange-key">>,fun rabbit_parameter_validation:binary/2,optional},
{<<"dest-queue">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"prefetch-count">>, fun rabbit_parameter_validation:number/2,optional},
{<<"reconnect-delay">>, fun rabbit_parameter_validation:number/2,optional},
{<<"add-forward-headers">>, fun rabbit_parameter_validation:boolean/2,optional},
{<<"add-timestamp-header">>, fun rabbit_parameter_validation:boolean/2,optional},
{<<"publish-properties">>, fun validate_properties/2, optional},
{<<"ack-mode">>, rabbit_parameter_validation:enum(
['no-ack', 'on-publish', 'on-confirm']), optional},
{<<"delete-after">>, fun validate_delete_after/2, optional}
].
validate_uri_fun(User) ->
fun (Name, Term) -> validate_uri(Name, Term, User) end.
validate_uri(Name, Term, User) when is_binary(Term) ->
case rabbit_parameter_validation:binary(Name, Term) of
ok -> case amqp_uri:parse(binary_to_list(Term)) of
{ok, P} -> validate_params_user(P, User);
{error, E} -> {error, "\"~s\" not a valid URI: ~p", [Term, E]}
end;
E -> E
end;
validate_uri(Name, Term, User) ->
case rabbit_parameter_validation:list(Name, Term) of
ok -> case [V || URI <- Term,
V <- [validate_uri(Name, URI, User)],
element(1, V) =:= error] of
[] -> ok;
[E | _] -> E
end;
E -> E
end.
validate_params_user(#amqp_params_direct{}, none) ->
ok;
validate_params_user(#amqp_params_direct{virtual_host = VHost},
User = #user{username = Username}) ->
case rabbit_vhost:exists(VHost) andalso
(catch rabbit_access_control:check_vhost_access(
User, VHost, undefined)) of
ok -> ok;
_ -> {error, "user \"~s\" may not connect to vhost \"~s\"",
[Username, VHost]}
end;
validate_params_user(#amqp_params_network{}, _User) ->
ok.
validate_delete_after(_Name, <<"never">>) -> ok;
validate_delete_after(_Name, <<"queue-length">>) -> ok;
validate_delete_after(_Name, N) when is_integer(N) -> ok;
validate_delete_after(Name, Term) ->
{error, "~s should be number, \"never\" or \"queue-length\", actually was "
"~p", [Name, Term]}.
TODO headers ?
validate_properties(Name, Term) ->
Str = fun rabbit_parameter_validation:binary/2,
Num = fun rabbit_parameter_validation:number/2,
rabbit_parameter_validation:proplist(
Name, [{<<"content_type">>, Str, optional},
{<<"content_encoding">>, Str, optional},
{<<"delivery_mode">>, Num, optional},
{<<"priority">>, Num, optional},
{<<"correlation_id">>, Str, optional},
{<<"reply_to">>, Str, optional},
{<<"expiration">>, Str, optional},
{<<"message_id">>, Str, optional},
{<<"timestamp">>, Num, optional},
{<<"type">>, Str, optional},
{<<"user_id">>, Str, optional},
{<<"app_id">>, Str, optional},
{<<"cluster_id">>, Str, optional}], Term).
validate_dest_type_fun(_Name, Term) ->
{ok, Term0} = rabbit_vshovel_endpoint:ensure_protocol(Term),
case rabbit_vshovel_config:get_endpoint_module(Term0) of
M when is_atom(M) -> ok;
_Error -> {error, "Unsupported protocol: \"~s\"", [Term]}
end.
validate_dest_args_fun(CB) ->
fun (Name, Term) -> validate_dest_args_fun(Name, Term, CB) end.
validate_dest_args_fun(_Name, Term, CB) ->
CB:validate_arguments(Term).
validate_address_fun(User, rabbit_vshovel_worker) -> validate_uri_fun(User);
validate_address_fun(_User, CB) ->
fun (Name, Term) -> validate_address_fun(Name, Term, CB) end.
validate_address_fun(_Name, Term, CB) ->
CB:validate_address(Term).
parse({VHost, Name}, Def) ->
SrcURIs = get_uris(<<"src-uri">>, Def),
DestURIs = get_uris(<<"dest-uri">>, Def),
DestType = pget(<<"dest-type">>, Def, <<"amqp">>),
Version = pget(<<"dest-vsn">>, Def, <<"0.9.1">>),
DestArgs = pget(<<"dest-args">>, Def, <<"{}">>),
SrcX = pget(<<"src-exchange">>, Def, none),
[ 1 ]
SrcQ = pget(<<"src-queue">>, Def, none),
DestX = pget(<<"dest-exchange">>, Def, none),
DestXKey = pget(<<"dest-exchange-key">>, Def, none),
DestQ = pget(<<"dest-queue">>, Def, none),
[ 1 ] src - exchange - key is never ignored if src - exchange is set
{SrcFun, Queue, Table1} =
case SrcQ of
none -> {fun (_Conn, Ch) ->
Ms = [#'queue.declare'{exclusive = true},
#'queue.bind'{routing_key = SrcXKey,
exchange = SrcX}],
[amqp_channel:call(Ch, M) || M <- Ms]
end, <<>>, [{<<"src-exchange">>, SrcX},
{<<"src-exchange-key">>, SrcXKey}]};
_ -> {fun (Conn, _Ch) ->
ensure_queue(Conn, SrcQ)
end, SrcQ, [{<<"src-queue">>, SrcQ}]}
end,
{ok, DestType0} = rabbit_vshovel_endpoint:ensure_protocol(DestType),
CB = rabbit_vshovel_config:get_endpoint_module(DestType0, Version),
{ok, DestArgs0} =
CB:validate_arguments(DestArgs),
VShovelStateInit =
#vshovel{
sources = #endpoint{address = SrcURIs,
resource_declaration = SrcFun},
destinations = #endpoint{protocol = DestType0,
address = DestURIs,
arguments = DestArgs0},
prefetch_count = pget(<<"prefetch-count">>, Def, 1000),
ack_mode = translate_ack_mode(
pget(<<"ack-mode">>, Def, <<"on-confirm">>)),
queue = Queue,
reconnect_delay = pget(<<"reconnect-delay">>, Def, 1),
delete_after = opt_b2a(pget(<<"delete-after">>, Def, <<"never">>))
},
If protocol is ' ' , treat as current implementation
{ok, if DestType0 =:= ?AMQP_PROTOCOL ->
DestFun = fun (Conn, _Ch) ->
case DestQ of
none -> ok;
_ -> ensure_queue(Conn, DestQ)
end
end,
{X, Key} = case DestQ of
none -> {DestX, DestXKey};
_ -> {<<>>, DestQ}
end,
Table2 = [{K, V} || {K, V} <- [{<<"dest-exchange">>, DestX},
{<<"dest-exchange-key">>, DestXKey},
{<<"dest-queue">>, DestQ}],
V =/= none],
PubFun = fun (_SrcURI, _DestURI, P0) ->
P1 = case X of
none -> P0;
_ -> P0#'basic.publish'{exchange = X}
end,
case Key of
none -> P1;
_ -> P1#'basic.publish'{routing_key = Key}
end
end,
AddHeaders = pget(<<"add-forward-headers">>, Def, false),
Table0 = [{<<"vshovelled-by">>, rabbit_nodes:cluster_name()},
{<<"vshovel-type">>, <<"dynamic">>},
{<<"vshovel-name">>, Name},
{<<"vshovel-vhost">>, VHost}],
SetProps = lookup_indices(pget(<<"publish-properties">>, Def, []),
record_info(fields, 'P_basic')),
AddTimestampHeader = pget(<<"add-timestamp-header">>, Def, false),
PubPropsFun =
fun (SrcURI, DestURI, P0) ->
P = set_properties(P0, SetProps),
P1 = case AddHeaders of
true -> rabbit_vshovel_util:update_headers(
Table0, Table1 ++ Table2,
SrcURI, DestURI, P);
false -> P
end,
case AddTimestampHeader of
true -> rabbit_vshovel_util:add_timestamp_header(P1);
false -> P1
end
end,
VShovelStateInit#vshovel{
destinations = #endpoint{address = DestURIs,
resource_declaration = DestFun},
publish_fields = PubFun,
publish_properties = PubPropsFun
};
true ->
VShovelStateInit
end}.
get_uris(Key, Def) ->
URIs = case pget(Key, Def) of
B when is_binary(B) -> [B];
L when is_list(L) -> L
end,
[binary_to_list(URI) || URI <- URIs].
translate_ack_mode(<<"on-confirm">>) -> on_confirm;
translate_ack_mode(<<"on-publish">>) -> on_publish;
translate_ack_mode(<<"no-ack">>) -> no_ack.
ensure_queue(Conn, Queue) ->
{ok, Ch} = amqp_connection:open_channel(Conn),
try
amqp_channel:call(Ch, #'queue.declare'{queue = Queue,
passive = true})
catch exit:{{shutdown, {server_initiated_close, ?NOT_FOUND, _Text}}, _} ->
{ok, Ch2} = amqp_connection:open_channel(Conn),
amqp_channel:call(Ch2, #'queue.declare'{queue = Queue,
durable = true}),
catch amqp_channel:close(Ch2)
after
catch amqp_channel:close(Ch)
end.
opt_b2a(B) when is_binary(B) -> list_to_atom(binary_to_list(B));
opt_b2a(N) -> N.
set_properties(Props, []) ->
Props;
set_properties(Props, [{Ix, V} | Rest]) ->
set_properties(setelement(Ix, Props, V), Rest).
lookup_indices(KVs, L) ->
[{1 + list_find(list_to_atom(binary_to_list(K)), L), V} || {K, V} <- KVs].
list_find(K, L) -> list_find(K, L, 1).
list_find(K, [K|_], N) -> N;
list_find(K, [], _N) -> exit({not_found, K});
list_find(K, [_|L], N) -> list_find(K, L, N + 1). |
fe89ad7c8c0dfd6462bac09afb3c062e54a5e0d5f06e15cc63b298a40f1aae09 | nikodemus/SBCL | parse-defmacro-errors.lisp | error - handling machinery for PARSE - DEFMACRO , separated from
PARSE - DEFMACRO code itself because the happy path can be handled
;;;; earlier in the bootstrap sequence than DEFINE-CONDITION can be
This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!KERNEL")
(define-condition defmacro-lambda-list-bind-error (error)
((kind :reader defmacro-lambda-list-bind-error-kind
:initarg :kind)
(name :reader defmacro-lambda-list-bind-error-name
:initarg :name
:initform nil)))
shared logic for REPORTing variants of DEFMACRO - LAMBDA - LIST - BIND - ERROR :
;;; Set up appropriate prettying and indentation on STREAM, print some
;;; boilerplate related to CONDITION (an instance of
;;; DEFMACRO-LAMBDA-LIST-BIND-ERROR), then execute BODY.
(defmacro !printing-defmacro-lambda-list-bind-error ((condition stream)
&body body)
`(%printing-defmacro-lambda-list-bind-error ,condition
,stream
(lambda (,stream)
(declare (type stream ,stream))
,@body)))
(defun %printing-defmacro-lambda-list-bind-error (condition stream fun)
(declare (type stream stream) (type function fun))
(pprint-logical-block (stream nil)
(format stream
"error while parsing arguments to ~A~@[ ~S~]:~2I~:@_"
(defmacro-lambda-list-bind-error-kind condition)
(defmacro-lambda-list-bind-error-name condition))
(pprint-logical-block (stream nil)
(funcall fun stream))))
(define-condition defmacro-bogus-sublist-error
(defmacro-lambda-list-bind-error)
((object :reader defmacro-bogus-sublist-error-object :initarg :object)
(lambda-list :reader defmacro-bogus-sublist-error-lambda-list
:initarg :lambda-list))
(:report
(lambda (condition stream)
(!printing-defmacro-lambda-list-bind-error (condition stream)
(format stream
"bogus sublist ~2I~_~S ~I~_to satisfy lambda-list ~2I~_~:S"
(defmacro-bogus-sublist-error-object condition)
(defmacro-bogus-sublist-error-lambda-list condition))))))
(define-condition arg-count-error (defmacro-lambda-list-bind-error)
((args :reader arg-count-error-args :initarg :args)
(lambda-list :reader arg-count-error-lambda-list
:initarg :lambda-list)
(minimum :reader arg-count-error-minimum :initarg :minimum)
(maximum :reader arg-count-error-maximum :initarg :maximum))
(:report
(lambda (condition stream)
(!printing-defmacro-lambda-list-bind-error (condition stream)
(format stream
"invalid number of elements in ~2I~_~:S ~
~I~_to satisfy lambda list ~2I~_~:S: ~I~_"
(arg-count-error-args condition)
(arg-count-error-lambda-list condition))
(cond ((null (arg-count-error-maximum condition))
(format stream "at least ~W expected"
(arg-count-error-minimum condition)))
((= (arg-count-error-minimum condition)
(arg-count-error-maximum condition))
(format stream "exactly ~W expected"
(arg-count-error-minimum condition)))
(t
(format stream "between ~W and ~W expected"
(arg-count-error-minimum condition)
(arg-count-error-maximum condition))))
(format stream ", but ~a found"
(if (null (cdr (last (arg-count-error-args condition))))
(length (arg-count-error-args condition))
"not a proper list"))))))
(define-condition defmacro-lambda-list-broken-key-list-error
(defmacro-lambda-list-bind-error)
((problem :reader defmacro-lambda-list-broken-key-list-error-problem
:initarg :problem)
(info :reader defmacro-lambda-list-broken-key-list-error-info
:initarg :info))
(:report (lambda (condition stream)
(!printing-defmacro-lambda-list-bind-error (condition stream)
(format stream
FIXME : These should probably just be three
;; subclasses of the base class, so that we don't
;; need to maintain the set of tags both here and
;; implicitly wherever this macro is used. (This
might get easier once CLOS is initialized in
;; cold init.)
(ecase
(defmacro-lambda-list-broken-key-list-error-problem
condition)
(:dotted-list
"dotted keyword/value list: ~S")
(:odd-length
"odd number of elements in keyword/value list: ~S")
(:unknown-keyword
expected one of ~
~{~S~^, ~}~}"))
(defmacro-lambda-list-broken-key-list-error-info
condition))))))
| null | https://raw.githubusercontent.com/nikodemus/SBCL/3c11847d1e12db89b24a7887b18a137c45ed4661/src/code/parse-defmacro-errors.lisp | lisp | earlier in the bootstrap sequence than DEFINE-CONDITION can be
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
Set up appropriate prettying and indentation on STREAM, print some
boilerplate related to CONDITION (an instance of
DEFMACRO-LAMBDA-LIST-BIND-ERROR), then execute BODY.
subclasses of the base class, so that we don't
need to maintain the set of tags both here and
implicitly wherever this macro is used. (This
cold init.) | error - handling machinery for PARSE - DEFMACRO , separated from
PARSE - DEFMACRO code itself because the happy path can be handled
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!KERNEL")
(define-condition defmacro-lambda-list-bind-error (error)
((kind :reader defmacro-lambda-list-bind-error-kind
:initarg :kind)
(name :reader defmacro-lambda-list-bind-error-name
:initarg :name
:initform nil)))
shared logic for REPORTing variants of DEFMACRO - LAMBDA - LIST - BIND - ERROR :
(defmacro !printing-defmacro-lambda-list-bind-error ((condition stream)
&body body)
`(%printing-defmacro-lambda-list-bind-error ,condition
,stream
(lambda (,stream)
(declare (type stream ,stream))
,@body)))
(defun %printing-defmacro-lambda-list-bind-error (condition stream fun)
(declare (type stream stream) (type function fun))
(pprint-logical-block (stream nil)
(format stream
"error while parsing arguments to ~A~@[ ~S~]:~2I~:@_"
(defmacro-lambda-list-bind-error-kind condition)
(defmacro-lambda-list-bind-error-name condition))
(pprint-logical-block (stream nil)
(funcall fun stream))))
(define-condition defmacro-bogus-sublist-error
(defmacro-lambda-list-bind-error)
((object :reader defmacro-bogus-sublist-error-object :initarg :object)
(lambda-list :reader defmacro-bogus-sublist-error-lambda-list
:initarg :lambda-list))
(:report
(lambda (condition stream)
(!printing-defmacro-lambda-list-bind-error (condition stream)
(format stream
"bogus sublist ~2I~_~S ~I~_to satisfy lambda-list ~2I~_~:S"
(defmacro-bogus-sublist-error-object condition)
(defmacro-bogus-sublist-error-lambda-list condition))))))
(define-condition arg-count-error (defmacro-lambda-list-bind-error)
((args :reader arg-count-error-args :initarg :args)
(lambda-list :reader arg-count-error-lambda-list
:initarg :lambda-list)
(minimum :reader arg-count-error-minimum :initarg :minimum)
(maximum :reader arg-count-error-maximum :initarg :maximum))
(:report
(lambda (condition stream)
(!printing-defmacro-lambda-list-bind-error (condition stream)
(format stream
"invalid number of elements in ~2I~_~:S ~
~I~_to satisfy lambda list ~2I~_~:S: ~I~_"
(arg-count-error-args condition)
(arg-count-error-lambda-list condition))
(cond ((null (arg-count-error-maximum condition))
(format stream "at least ~W expected"
(arg-count-error-minimum condition)))
((= (arg-count-error-minimum condition)
(arg-count-error-maximum condition))
(format stream "exactly ~W expected"
(arg-count-error-minimum condition)))
(t
(format stream "between ~W and ~W expected"
(arg-count-error-minimum condition)
(arg-count-error-maximum condition))))
(format stream ", but ~a found"
(if (null (cdr (last (arg-count-error-args condition))))
(length (arg-count-error-args condition))
"not a proper list"))))))
(define-condition defmacro-lambda-list-broken-key-list-error
(defmacro-lambda-list-bind-error)
((problem :reader defmacro-lambda-list-broken-key-list-error-problem
:initarg :problem)
(info :reader defmacro-lambda-list-broken-key-list-error-info
:initarg :info))
(:report (lambda (condition stream)
(!printing-defmacro-lambda-list-bind-error (condition stream)
(format stream
FIXME : These should probably just be three
might get easier once CLOS is initialized in
(ecase
(defmacro-lambda-list-broken-key-list-error-problem
condition)
(:dotted-list
"dotted keyword/value list: ~S")
(:odd-length
"odd number of elements in keyword/value list: ~S")
(:unknown-keyword
expected one of ~
~{~S~^, ~}~}"))
(defmacro-lambda-list-broken-key-list-error-info
condition))))))
|
14eb34543773ccfb63eb23ad945f238c5aeec499aa83ae3f934b70bdf229d518 | adventuring/tootsville.net | lil-mc.lisp | ;;;; -*- lisp -*-
;;;
src / characters / named / lil - mc.lisp is part of
;;;
Copyright © 2008 - 2017 Bruce - Robert Pocock ; © 2018 - 2021 The
Corporation for Inter - World Tourism and Adventuring ( ciwta.org ) .
;;;
This program is Free Software : you can redistribute it and/or
modify it under the terms of the GNU Affero General Public License
as published by the Free Software Foundation ; either version 3 of
the License , or ( at your option ) any later version .
;;;
;;; This program is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;; Affero General Public License for more details.
;;;
You should have received a copy of the GNU Affero General Public
;;; License along with this program. If not, see
;;; </>.
;;;
;;; You can reach CIWTA at /, or write to us at:
;;;
PO Box 23095
Oakland Park , FL 33307 - 3095
USA
(in-package :Tootsville)
(define-character Lil-MC Basic-8)
| null | https://raw.githubusercontent.com/adventuring/tootsville.net/985c11a91dd1a21b77d7378362d86cf1c031b22c/src/characters/named/lil-mc.lisp | lisp | -*- lisp -*-
© 2018 - 2021 The
either version 3 of
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Affero General Public License for more details.
License along with this program. If not, see
</>.
You can reach CIWTA at /, or write to us at:
| src / characters / named / lil - mc.lisp is part of
Corporation for Inter - World Tourism and Adventuring ( ciwta.org ) .
This program is Free Software : you can redistribute it and/or
modify it under the terms of the GNU Affero General Public License
the License , or ( at your option ) any later version .
You should have received a copy of the GNU Affero General Public
PO Box 23095
Oakland Park , FL 33307 - 3095
USA
(in-package :Tootsville)
(define-character Lil-MC Basic-8)
|
508d01268f2e6eadbda4bf939555fa3d3049fa3656724f98aa4e7c45ba4152d3 | racket/macro-debugger | reductions-util.rkt | #lang racket/base
(require (for-syntax racket/base
syntax/parse)
racket/stxparam
racket/contract/base
racket/list
syntax/stx
racket/match
racket/pretty
"deriv-util.rkt"
"stx-util.rkt"
"pattern.rkt"
"context.rkt"
"tracking.rkt"
"steps.rkt")
(provide STRICT-CHECKS
DEBUG)
(define-syntax-rule (STRICT-CHECKS form ...)
(when #t form ... (void)))
(define-syntax-rule (DEBUG form ...)
(when #f form ... (void)))
;; hash-set-list : (Hash K V) (Listof K) V -> (Hash K V)
;; Functionally binds every key in ks to the single value v.
(define (hash-set-list h ks v)
  (foldl (lambda (key acc) (hash-set acc key v)) h ks))
;; hash-remove-list : (Hash K V) (Listof K) -> (Hash K V)
;; Functionally removes every key in ks; absent keys are ignored.
(define (hash-remove-list h ks)
  (foldl (lambda (key acc) (hash-remove acc key)) h ks))
(define state/c (or/c state? #f))
;; ============================================================
;; Hiding configuration
(provide
(contract-out
[macro-policy (parameter/c (-> identifier? any/c))]))
(define macro-policy (make-parameter (lambda (id) #t)))
;; ============================================================
;; Expansion Context
(provide
(contract-out
[the-phase (parameter/c exact-nonnegative-integer?)]
[the-context (parameter/c list?)]
[the-big-context (parameter/c (listof bigframe?))]
[call-with-initial-context (-> (-> any) #:xstate xstate? any)])
honest?
not-complete-fiction?)
(define the-phase (make-parameter 0))
(define the-context (make-parameter null))
(define the-big-context (make-parameter null))
( )
(define honesty (make-parameter 'T)) ;; (Parameterof HonestyMask)
;; call-with-initial-context : (-> X) #:xstate XState -> X
;; Runs proc with every expansion-context parameter reset to its
;; top-of-expansion value: phase 0, empty local and big contexts, full
;; honesty ('T), and no visible-term tracker (the-vt = #f, preserving
;; the invariant noted at set-honesty). The given xstate object carries
;; the mutable threaded state (steps, lifts, seqno) for the whole run.
(define (call-with-initial-context proc #:xstate xst)
  (parameterize ((the-xstate xst)
                 (the-phase 0)
                 (the-context null)
                 (the-big-context null)
                 (the-vt #f)
                 (honesty 'T))
    (proc)))
set - honesty : HonestyMask Stx - > Void
;; PRE: hm <= (honesty) -- that is, honesty is only decreased or left unchanged
;; Invariant: (honesty) = 'T iff (the-vt) = #f
;; set-honesty : HonestyMask Stx -> Void
;; Lowers the current honesty mask to hm (per the PRE above, hm is never
;; greater than the current mask). On the first transition away from 'T
;; it seeds the visible-term tracker from the real term f, maintaining
;; the invariant that (the-vt) is #f exactly when honesty is 'T.
(define (set-honesty hm f)
  (define current-hm (honesty))
  (DEBUG (unless (eq? (honesty) hm) (eprintf "set-honesty : ~s => ~s\n" (honesty) hm)))
  (unless (equal? current-hm hm)
    ;; leaving full honesty: start tracking the visible term from f
    (when (eq? current-hm 'T) (the-vt (vt-base f)))
    (honesty hm)))

;; honest? : -> Boolean
;; True iff the visible term corresponds exactly to the actual term.
(define (honest?) (eq? (honesty) 'T))

;; not-complete-fiction? : -> Boolean
;; True unless the visible term is (potentially) entirely fictional.
(define (not-complete-fiction?) (not (eq? (honesty) 'F)))
;; ============================================================
;; Expansion State
(provide
(struct-out xstate)
(contract-out
[the-xstate (parameter/c (or/c xstate? #f))]
[new-xstate (-> xstate?)]
[next-seqno (-> exact-nonnegative-integer?)]
[add-step (->* [protostep?] [any/c] void?)])
FIXME
learn-binders
learn-definites
add-lift
add-endlift
get/clear-lifts
get/clear-endlifts)
;; An XState is:
(struct xstate
binders ;; ImmutableHasheq[Identifier => Phase]
definites ;; ImmutableHasheq[Identifier => Phase]
( )
( )
frontier ;; ImmutableHashEq[Syntax => #t]
ReductionSequence
( AnnotatedStep ) or # f
) #:transparent #:mutable)
;; where Lift = (list 'def Ids Syntax) | (list 'req Syntax) | (list 'mod Syntax)
An AnnotatedStep is ( annotated Boolean HonestyMask Step )
(struct annotated (shown? hm step) #:transparent)
the - xstate : ( )
(define the-xstate (make-parameter #f))

;; new-xstate : -> XState
;; Fresh threaded state: seqno 0, empty binder/definite tables, no lifts,
;; empty frontier, no recorded steps, and all-steps tracking disabled (#f).
(define (new-xstate)
  (xstate 0 '#hasheq() '#hasheq() null null '#hasheq() null #f))

;; next-seqno : -> Nat
;; Returns the current sequence number and post-increments it.
(define (next-seqno #:xstate [xst (the-xstate)])
  (let ([n (xstate-seqno xst)]) (set-xstate-seqno! xst (add1 n)) n))
learn-{binders , definites } : Id / s - > Void
;; learn-binders : Id/s -> Void
;; Records each identifier in ids as a binder at the current phase.
;; NOTE(review): binders are flattened with flatten-identifiers (errors on
;; non-identifier atoms) while definites use plain flatten — presumably
;; definite lists may contain non-syntax entries; confirm against callers.
(define (learn-binders ids #:xstate [xst (the-xstate)])
  (set-xstate-binders! xst (hash-set-list (xstate-binders xst) (flatten-identifiers ids) (the-phase))))
;; learn-definites : Id/s -> Void
;; Records each identifier in ids as a definite reference at the current phase.
(define (learn-definites ids #:xstate [xst (the-xstate)])
  (set-xstate-definites! xst (hash-set-list (xstate-definites xst) (flatten ids) (the-phase))))
;; add-lift : Lift -> Void
;; add-endlift : Syntax -> Void
;; add-lift : Lift -> Void
;; Pushes lift onto the accumulated lifts, capturing the honesty mask and
;; visible-term tracker in effect at the time of the lift.
(define (add-lift lift #:xstate [xst (the-xstate)])
  (let ([li (liftinfo lift (honesty) (the-vt))])
    (set-xstate-lifts! xst (cons li (xstate-lifts xst)))))
;; add-endlift : Syntax -> Void
;; Like add-lift, but for lifts destined for the end of the module body.
(define (add-endlift lift #:xstate [xst (the-xstate)])
  (let ([li (liftinfo lift (honesty) (the-vt))])
    (set-xstate-endlifts! xst (cons li (xstate-endlifts xst)))))
get / clear - lifts : - > ( Lift )
get / clear - endlifts : - > ( )
;; get/clear-lifts : -> (Listof LiftInfo)
;; Returns the lifts accumulated so far (most recent first) and resets
;; the accumulator. BUG FIX: the previous body only cleared the field
;; and returned void, silently discarding the lifts despite the
;; "get/clear" contract above.
(define (get/clear-lifts #:xstate [xst (the-xstate)])
  (begin0 (xstate-lifts xst)
    (set-xstate-lifts! xst null)))
;; get/clear-endlifts : -> (Listof LiftInfo)
;; Same fix as get/clear-lifts, for end-of-module lifts.
(define (get/clear-endlifts #:xstate [xst (the-xstate)])
  (begin0 (xstate-endlifts xst)
    (set-xstate-endlifts! xst null)))
;; add-step : Step -> Void
;; add-step : Step [Boolean] -> Void
;; Records step in the visible reduction sequence when add? is true
;; (default: only when the current term is fully honest). When the
;; all-steps debug list is enabled (non-#f), the step is always recorded
;; there, annotated with whether it was shown and the honesty mask.
(define (add-step step [add? (honest?)] #:xstate [xst (the-xstate)])
  (when add? (set-xstate-steps! xst (cons step (xstate-steps xst))))
  (let ([all-steps (xstate-all-steps xst)])
    (when all-steps (set-xstate-all-steps! xst (cons (annotated add? (honesty) step) all-steps)))))
;; ----------------------------------------
;; Lifts
(struct liftinfo (lift hm vt) #:prefab)
;; ============================================================
;; Creating steps
(provide
immediate-frame
(contract-out
[current-state-with
(-> syntaxish? syntaxish?
state?)]
[walk
(->* [syntaxish? syntaxish? step-type?]
[#:foci1 syntaxish? #:foci2 syntaxish?]
step?)]
[stumble
(->* [syntaxish? exn?]
[#:focus syntaxish?]
misstep?)]
[walk/talk
(-> step-type? (listof (or/c syntax? string? 'arrow))
protostep?)]
[foci
(-> any/c (listof syntax?))]))
;; current-state-with : Stx Stx -> State
;; Snapshots the current expansion state with visible term e and foci fs.
;; Any immediate-frames at the top of the context are applied to e (they
;; only adjust properties, never structure — see the note below), and a
;; focus that was eq? to e is replaced by the adjusted term so the
;; term/focus identity is preserved for whole-term steps.
(define (current-state-with e fs)
  (define xst (the-xstate))
  (let loop ([e e] [fs (foci fs)] [ctx (the-context)])
    (cond [(and (pair? ctx) (immediate-frame? (car ctx)))
           (define e* ((car ctx) e))
           (loop e*
                 ;; keep eq? link between the term and whole-term foci
                 (for/list ([f (in-list fs)])
                   (if (eq? f e) e* f))
                 (cdr ctx))]
          [else
           (make state e fs ctx (the-big-context)
                 (xstate-binders xst) (xstate-definites xst)
                 (xstate-frontier xst) (xstate-seqno xst))])))
;; An immediate-frame's procedure should not add syntactic structure; it should
;; only add/adjust syntax properties, rearm terms, etc. When the current state
;; is captured, any immediate frames at the top of the context are applied to
;; the visible term and any focus identical to the visible term. This preserves
;; eq? connections between term and foci; without it, whole-term steps lose
;; their focus highlighting. See add-rearm-frame in reductions.rkt
(struct immediate-frame (f)
#:property prop:procedure (lambda (self x) ((immediate-frame-f self) x)))
;; walk : Stx Stx StepType -> Step
;; Builds a normal reduction step from e1 to e2 of the given type, with
;; foci defaulting to the whole terms.
(define (walk e1 e2 type
              #:foci1 [foci1 e1]
              #:foci2 [foci2 e2])
  (make step type
        (current-state-with e1 foci1)
        (current-state-with e2 foci2)))

;; stumble : Stx Exn -> Misstep
;; Builds an error step at stx, recording the raised exception.
(define (stumble stx exn #:focus [focus stx])
  (make misstep 'error (current-state-with stx focus) exn))

;; walk/talk : StepType (Listof (U Syntax String 'arrow)) -> Remarkstep
;; Builds a commentary-only step (no term change) with the given contents.
(define (walk/talk type contents)
  (make remarkstep type (current-state-with #f null) contents))
(define (foci x) (filter syntax? (flatten x)))
;; ============================================================
RS : the reduction monad
;; This monad acts like an Exception monad whose success type is always
specialized to a 4 - tuple containing the * local * reduction state : real term ,
visible term , pattern , and state ( cf step.rkt ) .
;; Contextual state and threaded state are handled by parameters and a
;; parameter-held "xstate" mutable object (see later sections).
(provide
RS/c
(contract-out
[RSunit
(-> syntaxish? syntaxish? pattern/c state/c RS/c)]
[RSfail
(-> exn? RS/c)]
[RSbind
(-> RS/c (-> any/c any/c state/c RS/c) RS/c)]
[RScase
(-> RS/c
(-> any/c any/c any/c state/c any)
(-> exn? any)
any)]
[RSreset
(->* [RS/c] [#:pattern (or/c pattern/c #f)] RS/c)]))
RS = ( rsok Stx Stx Pattern State )
;; | (rsfailed Exn)
(struct rsok (f v p s))
(struct rsfailed (exn))
(define RS/c (or/c rsok? rsfailed?))
(define pattern/c any/c)
(define RST/c
First two args are any / c instead of syntaxish ? because of
# : new - local - context that initially sets syntax to # f.
(-> (or/c syntaxish? #f) (or/c syntaxish? #f) pattern/c state/c
RS/c))
;; RSunit : Stx Stx Pattern State -> RS -- wrap a successful 4-tuple
(define (RSunit f v p s) (rsok f v p s))
;; RSfail : Exn -> RS -- wrap a failure
(define (RSfail exn) (rsfailed exn))

;; RSbind : RS (Stx Stx Pattern State -> RS) -> RS
;; Sequences fun after a successful a; failures pass through unchanged.
(define (RSbind a fun)
  (if (rsfailed? a)
      a
      (fun (rsok-f a) (rsok-v a) (rsok-p a) (rsok-s a))))

;; RScase : RS (Stx Stx Pattern State -> X) (Exn -> X) -> X
;; Eliminator: dispatches to ok on success, fail on failure.
(define (RScase a ok fail)
  (if (rsfailed? a)
      (fail (rsfailed-exn a))
      (ok (rsok-f a) (rsok-v a) (rsok-p a) (rsok-s a))))

;; RSreset : RS [#:pattern (U Pattern #f)] -> RS
;; Restores the pattern of a successful result to reset-p, if given.
(define (RSreset a #:pattern [reset-p #f])
  (RSbind a (lambda (f v p s) (RSunit f v (if reset-p reset-p p) s))))
;; ============================================================
;; Implicit match from #:pattern
;; In a subexpression of an R-clause (see below), the % and %e macros have
;; access to the pattern variables bound by matching the current term (f)
;; against the current pattern (p).
;; Unlike with-syntax and #', pattern-match and % never create syntax
objects . Also , patterns here are first - class values , which simplifies
;; continuation management.
(provide % %e)
(define-syntax-parameter the-match-result
(lambda (stx)
(raise-syntax-error #f "no match result; used outside of with-pattern-match" stx)))
(define-syntax-rule (% p) (%e (quote-template-pattern p)))
(define-syntax-rule (%e p) (pattern-template p the-match-result))
(define-syntax with-pattern-match
(syntax-parser
[(_ [f p] expr:expr)
#'(let ([mv (pattern-match p f)])
(syntax-parameterize ((the-match-result (make-rename-transformer #'mv)))
expr))]))
;; ============================================================
The Reduction Language
(provide R !)
(define-syntax ! (syntax-rules ()))
;; (R R-clause ...) : RST
(begin-for-syntax
(define clause-kw->macro
(hash '#:set-syntax #'R/set-syntax
'#:pattern #'R/pattern
'#:do #'R/do
'#:let #'R/let
'#:parameterize #'R/parameterize
'#:walk #'R/walk
'#:rename #'R/rename
'#:rename/mark #'R/rename/mark
'#:rename/unmark #'R/rename/unmark
'#:with-marking #'R/with-marking
'#:new-local-context #'R/new-local-context
'#:if #'R/if
'#:when #'R/when
'#:in-hole #'R/in-hole
'#:hide-check #'R/hide-check
'#:seek-check #'R/seek-check
))
(define-syntax-class RClause #:attributes (macro)
#:literals (!)
(pattern [! . _]
#:with macro #'R/!)
(pattern [e:expr . _]
#:with macro #'R/run)
(pattern [kw:keyword . _]
#:attr macro (hash-ref clause-kw->macro (syntax-e #'kw) #f)
#:fail-when (and (not (attribute macro)) #'kw) "unknown keyword")))
syntax ( R RClause ... ) : RST
(define-syntax-rule (R . clauses)
(lambda (f v p s) (R** f v p s . clauses)))
syntax ( R * * f v p s RClause ... ) : RS
(define-syntax R**
(syntax-parser
#:literals (=>)
[(R** f v p s)
#'(RSunit f v p s)]
[(R** f v p s => k . more)
#:declare k (expr/c #'RST/c)
#'(RSbind (RSreset (k.c f v p s) #:pattern p)
(R . more))]
[(R** f v p s c:RClause . more)
#'(begin
(DEBUG (do-debug-clause (quote c) (quote-syntax c) v))
(c.macro f v p s c (R . more)))]))
;; do-debug-clause : Sexpr Syntax Stx -> Void
;; Debug trace for one R-clause: prints the clause's source location,
;; its (abbreviated) quoted form, and the current honesty/visible term.
(define (do-debug-clause c cstx v)
  (define where (format "[~s:~s]" (syntax-line cstx) (syntax-column cstx)))
  (eprintf "doing ~a ~.s, honesty = ~s, v = ~.s\n"
           where (trim-quoted-clause c) (honesty) (stx->datum v)))
;; trim-quoted-clause : Any -> Any
;; Abbreviates verbose clause forms for debug output: a clause headed by
;; one of a few body-carrying keywords is shown as (kw _); anything else
;; is returned unchanged.
(define (trim-quoted-clause c)
  (if (and (pair? c)
           (memq (car c) '(#:parameterize #:when #:if #:with-marking #:do)))
      (list (car c) '_)
      c))
;; A R/<Clause> macro has the form
( R/<Clause > f v p s < Clause > kexpr )
where f , v , p , w , ws are * variables * and kexpr is * expression *
;; - f is the "real" form -- it should never contain artificial syntax
;; - v is the "virtual/visible" form (used for steps)
;; - p is the current pattern
;; - s is the last marked state, or #f
- kexpr is the continuation ( RST )
(define-syntax R/!
(syntax-parser
#:literals (!)
;; Error-point case
[(_ f v p s [! maybe-exn] ke)
#:declare maybe-exn (expr/c #'(or/c exn? #f))
#'(let ([x maybe-exn.c])
(if x
(begin (add-step (stumble v x) #t)
(RSfail x))
(ke f v p s)))]))
(define-syntax R/pattern
(syntax-parser
;; Change patterns
[(_ f v p s [#:pattern p2] ke)
#'(ke f v (quote-pattern p2) s)]))
(define-syntax R/do
(syntax-parser
;; Execute expressions for effect
[(_ f v p s [#:do expr ...] ke)
#'(begin
(with-pattern-match [f p] (let () expr ... (void)))
(ke f v p s))]))
(define-syntax R/let
(syntax-parser
[(_ f v p s [#:let var:id expr] ke)
#'(let ([var (with-pattern-match [f p] expr)])
(ke f v p s))]))
(define-syntax R/parameterize
(syntax-parser
[(_ f v p s [#:parameterize ((param expr) ...) . clauses] ke)
#:declare param (expr/c #'parameter?)
#'(RSbind (parameterize ((param.c (with-pattern-match [f p] expr)) ...)
(R** f v p s . clauses))
ke)]))
(define-syntax R/set-syntax
(syntax-parser
;; Change syntax
[(_ f v p s [#:set-syntax form] ke)
#:declare form (expr/c #'syntaxish?)
#'(let ([f2 (with-pattern-match [f p] form.c)])
(ke f2 (change-visible-term f f2 v) p s))]))
;; change-visible-term : Stx Stx Stx -> Stx
;; Chooses the new visible term after the real term changes from f to f2.
;; When fully honest, the visible term tracks the real one (f2); otherwise
;; the old visible term v is kept and honesty drops to complete fiction,
;; seeding the tracker from f.
(define (change-visible-term f f2 v)
  (cond [(honest?) f2]
        [else (set-honesty 'F f) v]))
(begin-for-syntax
(define-syntax-class walk-clause
#:attributes (state1.c form1.c form2.c foci1.c foci2.c type)
(pattern [#:walk form2 type:expr
(~alt (~optional (~seq #:foci foci2))
(~optional (~seq #:from-state state1))
(~optional (~seq #:from form1))
(~optional (~seq #:from-foci foci1))) ...]
#:declare state1 (expr/c #'state/c)
#:declare form1 (expr/c #'syntaxish?)
#:declare foci1 (expr/c #'syntaxish?)
#:declare form2 (expr/c #'syntaxish?)
#:declare foci2 (expr/c #'syntaxish?))))
(define-syntax R/walk
(syntax-parser
[(_ f v p s w:walk-clause ke)
#'(let ()
(define-values (state1 f1 f2 type)
(with-pattern-match [f p]
(values (~? w.state1.c #f) (~? w.form1.c v) w.form2.c w.type)))
(define-values (fs1 fs2)
(with-pattern-match [f p]
(values (~? w.foci1.c f1) (~? w.foci2.c f2))))
(do-walk f v p s state1 f1 fs1 f2 fs2 type ke))]))
;; do-walk : Stx Stx Pattern State (U State #f) Stx Stx Stx Stx (U StepType #f) RST-cont -> RS
;; Runtime part of the #:walk clause: builds from/to states (reusing
;; state1 if supplied), records a step when type is non-#f, then
;; continues with the new real term and correspondingly updated visible
;; term. Note: add-step itself suppresses the step when not honest.
(define (do-walk f v p s state1 f1 fs1 f2 fs2 type k)
  (define s1 (or state1 (current-state-with f1 fs1)))
  (define s2 (current-state-with f2 fs2))
  (when type (add-step (make step type s1 s2)))
  (k f2 (change-visible-term f f2 v) p s2))
(define-syntax R/rename
(syntax-parser
;; Rename
[(_ f v p s [#:rename pattern renames] ke)
#'(RSbind (Rename f v p s pattern renames #f #f) ke)]
[(_ f v p s [#:rename pattern renames description] ke)
#'(RSbind (Rename f v p s pattern renames description #f) ke)]))
(define-syntax-rule (Rename f v p s pattern renames description mark-flag)
(let ()
(define-values (renames-var description-var)
(with-pattern-match [f p] (values renames description)))
(do-rename f v p s (quote-pattern pattern) renames-var description-var mark-flag)))
(define (do-rename f v p s ren-p renames description mode)
(DEBUG
(eprintf "do-rename(~s): ~.s at ~s\n" (or mode description) (stx->datum renames) ren-p)
(eprintf " v = ~.s\n" (stx->datum v)))
(define pre-renames (pattern-template ren-p (pattern-match p f)))
(cond [(equal? pre-renames renames)
(RSunit f v p s)]
[else
(do-rename* f v p s ren-p pre-renames renames description mode)]))
(define (do-rename* f v p s ren-p pre-renames renames description mode)
(STRICT-CHECKS
(unless (same-contour? pre-renames renames)
(error 'rename "different contours!\n contour-diff: ~s\n pre: ~s\n post: ~s"
(stx-contour-diff pre-renames renames) pre-renames renames)))
(define f2 (pattern-replace p f ren-p renames #:resyntax? #f))
;; renaming preserves honesty
(when (the-vt) (the-vt (vt-track pre-renames renames (the-vt) description)))
;; ----
;; Note: renames might have more structure than pre-renames, especially if arming!
(define-values (v2 foci1 foci2)
(cond [(honest?)
(values (pattern-replace p f ren-p renames #:resyntax? #t)
pre-renames renames)]
[(eq? mode 'mark)
;; FIXME: if honesty = (T . F), then what about mark visibility?
;; FIXME: if mode is mark or unmark, should honesty be strictly 'T or 'F ??
(values v null null)]
[else
(define-values (v2 foci1 foci2)
(do-rename-v v (the-vt) (honesty) pre-renames renames))
(values (honesty-composite (honesty) f2 v2)
Must include pre - renames , renames for true part ( FIXME : need narrowing ? )
(cons foci1 pre-renames) (cons foci2 renames))]))
(DEBUG
(eprintf " renamed: diff=~s, v2 = ~.s \n" (stx-eq-diff v2 v) (stx->datum v2)))
(when (not (memq description '(#f sync)))
;; FIXME: better condition/heuristic for when to add rename step?
(add-step (walk v v2 description #:foci1 foci1 #:foci2 foci2)
(not-complete-fiction?)))
(RSunit f2 v2 p s))
;; honesty-composite : HonestyMask Stx Stx -> Stx
;; Splices the real term f and the visible term v according to the
;; honesty mask: 'T picks the real subterm, 'F the visible one, and a
;; pair mask recurs into both car and cdr, rebuilding syntax wrappers
;; from v when resyntax? is true.
(define (honesty-composite hm f v #:resyntax? [resyntax? #t])
  (DEBUG (eprintf "honesty-composite: ~s\n f = ~.s\n v = ~.s\n" hm f v))
  (let loop ([hm hm] [f f] [v v])
    (match hm
      ['T f]
      ['F v]
      [(cons hma hmb)
       (define c (cons (loop hma (stxd-car f) (stxd-car v))
                       (loop hmb (stxd-cdr f) (stxd-cdr v))))
       (if resyntax? (restx c v) c)])))
(define (do-rename-v v vt hm pre post)
(DEBUG
(eprintf " do-rename-v\n")
(eprintf " vt-stx = ~.s\n" (stx->datum (vt->stx vt))))
;; Note: pre,post can have different shape because of rename-transformers
(STRICT-CHECKS
(unless (same-contour? pre post)
(eprintf "RENAME MISMATCH\npre = ~s\npost = ~s\n" (stx->datum pre) (stx->datum post))))
Recur through pre , post to find the largest sub - renames that apply to v.
(define (init-k v accren) (values v (map car accren) (map cdr accren)))
(let loop ([pre pre] [post post] [v v] [accren null] [k init-k])
(define (try-rename)
(match (vt-seek pre vt)
[(cons path _)
(DEBUG
(eprintf " found at ~s, pre = ~.s\n" path (stx->datum pre))
(eprintf " actually = ~.s\n" (stx->datum (path-get v path)))
(eprintf " do-rename-v : replace at ~s : ~.s => ~.s\n"
path (stx->datum v) (stx->datum (path-replace v path post #:resyntax? #f))))
(cons (path-replace v path post #:resyntax? #t)
(cons (cons pre post) accren))]
[else #f]))
(cond [(and (syntax? pre) (try-rename))
=> (match-lambda [(cons v accren) (k v accren)])]
[(stx-pair? pre)
(loop (stxd-car pre) (stxd-car post) v accren
(lambda (v accren)
(loop (stxd-cdr pre) (stxd-cdr post) v accren k)))]
[else (k v accren)])))
(define-syntax R/rename/mark
(syntax-parser
[(_ f v p s [#:rename/mark pvar to] ke)
#:declare to (expr/c #'syntaxish?)
#'(RSbind (Rename f v p s pvar to.c #f 'mark) ke)]))
(define-syntax R/rename/unmark
(syntax-parser
[(_ f v p s [#:rename/unmark pvar to] ke)
#:declare to (expr/c #'syntaxish?)
#'(RSbind (Rename f v p s pvar to.c #f 'unmark) ke)]))
;; - corresponds to the dynamic extent of a syntax-local-introduce bindings
(define-syntax-rule (R/with-marking f v p s [#:with-marking c ...] ke)
(RSbind ((R c ...) f v p s) ke))
(define-syntax R/if
(syntax-parser
;; Conditional (pattern changes lost afterwards ...)
[(_ f v p s [#:if test [consequent ...] [alternate ...]] ke)
#'(RSbind (RSreset (if (with-pattern-match [f p] test)
(R** f v p s consequent ...)
(R** f v p s alternate ...))
#:pattern p)
ke)]))
(define-syntax R/when
(syntax-parser
;; Conditional (pattern changes lost afterwards ...)
[(_ f v p s [#:when test consequent ...] ke)
#'(R/if f v p s [#:if test [consequent ...] []] ke)]))
(define-syntax R/new-local-context
(syntax-parser
[(_ f v p s [#:new-local-context clause ...] ke)
#'(do-local-context f v p s (R clause ...) ke)]))
;; do-local-context : Stx Stx Pattern State RST RST-cont -> RS
;; Runtime part of #:new-local-context. When honest, runs the nested
;; reduction in a fresh big-context frame (via call/local-context) and
;; discards its result, resuming with the original term/state. When not
;; honest, runs the nested reduction against the current visible term in
;; place and keeps the visible term it produces.
(define (do-local-context f v p s rst k)
  (cond [(honest?)
         (RSbind (call/local-context v (lambda () (rst #f #f (quote-pattern _) #f)))
                 (lambda (_f2 _v2 _p2 _s2)
                   (k f v p s)))]
        [else
         (RSbind (rst #f v (quote-pattern _) #f)
                 (lambda (_f2 v2 _p2 _s2)
                   (k f v2 p s)))]))
;; call/local-context : Stx (-> X) -> X
;; Runs proc in a new nested expansion context: the current local context
;; is pushed as a bigframe (with v as its term), the local context is
;; emptied, and honesty/tracking restart fresh for the nested expansion.
(define (call/local-context v proc)
  (define bf (bigframe (the-context) (list v) v))
  (parameterize ((the-big-context (cons bf (the-big-context)))
                 (the-context null)
                 (honesty 'T) ;; FIXME?
                 (the-vt #f))
    (proc)))
(define-syntax R/run
(syntax-parser
Subterm handling
[(R** f v p s [reducer hole fill] ke)
#:declare reducer (expr/c #'(-> any/c RST/c))
#'(RSbind (run reducer.c f v p s (quote hole) fill)
ke)]))
(define-syntax R/in-hole
(syntax-parser
[(_ f v p s [#:in-hole hole . clauses] ke)
#'(RSbind (let ([reducer (lambda (_) (R . clauses))])
(run reducer f v p s (quote hole) #f))
ke)]))
;; ============================================================
(define-syntax R/hide-check
(syntax-parser
[(_ f v p s [#:hide-check rs] ke)
#:declare rs (expr/c #'(listof identifier?))
#'(do-hide-check f v p s (with-pattern-match [f p] rs.c) ke)]))
;; do-hide-check : Stx Stx Pattern State (Listof Id) RST-cont -> RS
;; Runtime part of #:hide-check: if any of the macro names in ids is
;; rejected by the current macro-policy, drop honesty to complete
;; fiction (hiding the macro's steps). Already-fictional contexts are
;; left alone. Continues with the terms unchanged either way.
(define (do-hide-check f v p s ids k)
  (unless (or (eq? (honesty) 'F) (andmap (macro-policy) ids))
    (DEBUG
     (eprintf "hide-check: hiding with f=~.s, v=~.s\n" (stx->datum f) (stx->datum v)))
    (set-honesty 'F f))
  (k f v p s))
(define-syntax-rule (R/seek-check f v p s [#:seek-check] ke)
(do-seek-check f v p s ke))
(define (do-seek-check f v p s k)
(cond [(honest?) (k f v p s)]
[else
(match (vt-seek f (the-vt))
['()
(DEBUG (eprintf "seek-check: no paths found for ~.s\n" (stx->datum f))
#;(begin (eprintf " the-vt =\n") (pretty-print (the-vt)) (eprintf "\n")))
(k f v p s)]
[(cons path more-paths)
(DEBUG (eprintf "seek-check: found path ~s for ~.s within ~.s\n"
path (stx->datum f) (stx->datum v)))
(define vctx (path-replacer v path #:resyntax? #t))
((parameterize ((the-context (cons vctx (the-context)))
(honesty 'T)
(the-vt #f))
(RScase (k f f p s)
(lambda (f2 v2 p2 s2)
;; inside parameterize
(define end-vt (the-vt))
(lambda ()
;; outside parameterize
(the-vt (vt-merge-at-path (the-vt) path (or end-vt f2)))
;; note: returning a true term into a fictional
;; context does not increase honesty
(RSunit f2 (vctx v2) p s)))
(lambda (exn)
(lambda ()
(RSfail exn))))))])]))
;; ============================================================
;; Running reducers in a sub-context
;; Within a context created by run/path:
;; - Honesty (with respect to the context) never increases; the local honesty on
;; exit from the context is <= the local honesty when the context is entered.
;; - Global honesty does not change on entry to a context. It only changes on
;; hide decisions, steps (if local honesty is < T, then a step immediately
;; sets it to F), and exit from a context.
;; - A successful seek creates a locally honest context, but the honesty is lost
;; on exit from the context. Honesty is measured from the root of the current
;; term, so an honest term gets "lost" when returned into dishonest context.
run : ( X - > RST ) Stx Stx Pattern State Hole ( U X ( ) ) - > RS
;; where Hole = Symbol | (list Symbol '...) -- NOT a pattern value
;; Note: run restores pattern after running reducer
(define (run reducer f v p s hole fill)
(match hole
[(? symbol? hole)
(define path (subpattern-path p hole))
(run/path reducer f v p s path fill)]
[(list (? symbol? hole) '...)
(match-define (vector pre-path sub-path) (subpattern-path p hole #t))
(let loop ([fill fill] [k 0] [f f] [v v] [s s])
(match fill
[(cons fill0 fill*)
(define path (path-append pre-path (path-add-ref k sub-path)))
(RSbind (run/path reducer f v p s path fill0)
(lambda (f v _p s) (loop fill* (add1 k) f v s)))]
['() (RSunit f v p s)]))]))
(define (run/path reducer f v p s path fill)
(define fctx (path-replacer f path #:resyntax? #f))
(define sub-f (path-get f path))
(define sub-hm (honesty-at-path (honesty) path))
(DEBUG (eprintf "run/path: honesty ~s at path ~s => ~s\n" (honesty) path sub-hm))
(define-values (vctx sub-v sub-vt)
(cond [(eq? sub-hm 'F)
path might be out of bounds for v = > ca n't take vctx = > sub - v is meaningless
probably not much point in narrowing VT ( and nontrivial to do right )
;; FIXME: it would be slightly better to know whether we were *inside* an F,
;; because we care about whether the context is honest, not the term
(define sub-v v)
(define sub-vt (the-vt))
(values #f sub-v sub-vt)]
[else
can take vctx , but must also take narrowed VT ( when sub - hm ! = ' T )
(define vctx (path-replacer v path #:resyntax? #t))
(define sub-v (path-get v path))
(define sub-vt (if (eq? sub-hm 'T) #f (vt-zoom (the-vt) path)))
(values vctx sub-v sub-vt)]))
(DEBUG (eprintf "run/path: run ~s on f=~.s; v=~.s\n"
reducer (stx->datum sub-f) (stx->datum sub-v)))
((parameterize ((the-context (if vctx (cons vctx (the-context)) (the-context)))
(honesty sub-hm)
(the-vt sub-vt))
(RScase ((reducer fill) sub-f sub-v (quote-pattern _) s)
(lambda (f2 v2 _p2 _s2)
;; inside parameterize
(define end-hm (honesty))
(define end-vt (the-vt))
(lambda ()
;; outside of parameterize
(define merged-hm (honesty-merge-at-path (honesty) path end-hm))
(DEBUG
(eprintf "\n<< run/path merge old ~s and sub ~s => ~s\n"
(honesty) end-hm merged-hm)
(eprintf " v => ~.s\n" (stx->datum (if vctx (vctx v2) v2))))
(honesty merged-hm)
(the-vt (cond
;; Case: sub-hm = F
;; - then sub-vt was not zoomed, end-vt extends sub-vt, return as is
[(eq? sub-hm 'F) end-vt]
;; Case: sub-hm > F and sub-vt != #f
;; - then sub-vt was zoomed, end-vt extends it, so unzoom end-vt
[sub-vt (vt-unzoom end-vt path)]
;; Case: sub-hm = T and end-vt != #f -- honesty decreased during reducer
[end-vt
(if (the-vt)
(vt-merge-at-path (the-vt) path end-vt)
(vt-merge-at-path f path end-vt))]
;; Case: sub-hm = end-hm = T
[else (the-vt)]))
(DEBUG
(eprintf " vt => ~e\n" (the-vt))
(when (the-vt)
(eprintf " vt-stx => ~.s\n" (stx->datum (vt->stx (the-vt))))))
(RSunit (fctx f2) (if vctx (vctx v2) v2) p s)))
(lambda (exn)
(lambda () (RSfail exn)))))))
;; ------------------------------------
;; revappend : (Listof X) (Listof X) -> (Listof X)
;; Prepends the reverse of a onto b, tail-recursively.
(define (revappend a b)
  (let loop ([todo a] [acc b])
    (cond [(pair? todo) (loop (cdr todo) (cons (car todo) acc))]
          [(null? todo) acc])))
;; same-contour? : Stx Stx -> Boolean
;; True iff x and y have identical pair structure (same tree shape),
;; ignoring the atoms at the leaves. Used to check that a rename
;; replacement preserves the shape of the term it replaces.
(define (same-contour? x y)
  (let loop ([x (stx->datum x)] [y (stx->datum y)])
    (cond [(and (pair? x) (pair? y))
           (and (loop (car x) (car y)) (loop (cdr x) (cdr y)))]
          ;; leaves match only if neither side is still a pair
          [else (not (or (pair? x) (pair? y)))])))
;; stx-contour-diff : Stx Stx -> Sexpr
;; Diagnostic shape diff: returns '_ where the two terms agree, collapses
;; fully-matching subtrees to '_, and marks mismatched leaves/shapes with
;; a #s(DIFF x y) prefab record. Used in rename-mismatch error messages.
(define (stx-contour-diff x y)
  (let loop ([x (stx->datum x)] [y (stx->datum y)])
    (cond [(and (pair? x) (pair? y))
           (let ([d1 (loop (car x) (car y))]
                 [d2 (loop (cdr x) (cdr y))])
             ;; collapse subtrees that match everywhere
             (cond [(and (eq? d1 '_) (eq? d2 '_)) '_]
                   [else (cons d1 d2)]))]
          [(and (null? x) (null? y)) '()]
          [(equal? x y) '_]
          [else `#s(DIFF ,x ,y)])))
;; stx-eq-diff : Stx Stx -> Sexpr
;; Debug diff of two same-shaped terms: '_ where subterms are equal?,
;; otherwise recurs into pairs and shows the datum at differing leaves.
;; Raises if the terms differ in shape (it assumes b mirrors a's spine).
(define (stx-eq-diff a b)
  (let loop ([a a] [b b])
    (cond [(and (stx-null? a) (stx-null? b)) '()]
          [(equal? a b) '_]
          [(stx-pair? a)
           (cons (loop (stx-car a) (stx-car b))
                 (loop (stx-cdr a) (stx-cdr b)))]
          [else
           ;; leaf: only acceptable if the data agree even though the
           ;; syntax wrappers differ
           (unless (equal? (stx->datum a) (stx->datum b))
             (error 'stx-eq-diff "different shapes: ~.s, ~.s" a b))
           (stx->datum a)])))
;; flatten-identifiers : syntaxlike -> (list-of identifier)
;; flatten-identifiers : Syntaxish -> (Listof Identifier)
;; Collects the identifiers of a (possibly nested) syntax list, in order.
;; Raises if a leaf is neither an identifier nor a (syntax) null/pair.
(define (flatten-identifiers stx)
  (cond [(identifier? stx) (list stx)]
        [(stx-null? stx) null]
        [(stx-pair? stx)
         (append (flatten-identifiers (stx-car stx))
                 (flatten-identifiers (stx-cdr stx)))]
        [else (error 'flatten-identifiers "neither syntax list nor identifier: ~s"
                     (if (syntax? stx)
                         (syntax->datum stx)
                         stx))]))
;; ============================================================
;; Macro hiding
;; The behavior of the reductions generator is determined by the current
;; *honesty* level.
;;
;; - honesty: Does the current *visible* local term correspond to the current
;; *actual* local term? This depends on the history of macro hiding within the
;; current context and (partly) on the honesty of the parent context.
;; Honesty forms a lattice with top 'T and bottom 'F:
;;
;; - 'T = honest: The current local *actual* and *visible* terms correspond
;; exactly, although the actual term (f) may have plain pairs in some places
;; where the visible term has artificial syntax pairs.
;;
;; - 'F = fictional: The current visible term is (potentially) completely
;; fictional. It may originate from an outer context. For example:
;;
;;     (define x (let () 1)) -> (define-values (x) (let () 1))
;;
;; Suppose that define is hidden. Then when we recur into its right-hand
;; side, we will have f = (let () 1) and v = (define x (let () 1)). Then we
;; seek for (let () 1) and find it at vctx = (define x [ ]), and we create a
;; context for the (initially honest) reduction of (let () 1), but we put
;; that reduction sequence in the synthetic context vctx. (Note: see also
;; Visible Term Tracking.)
;;
;; - (cons hm1 hm2) = an honest pair with contents whose honesty is described by
;; hm1 and hm2, respectively. We consider (cons 'T 'T) = 'T.
;; The honesty level has the following consequences:
;;
;; On *steps*:
;; - 'T: the step can be shown, and it updates the visible term
;; - 'F: the step is not shown, and the visible term is not updated
;; - cons: the step cannot be shown, or it must be simulated
;; Consider the actual expansion (#%expression A) -> (#%expression A*) -> A*.
;; If hiding produces (#%expression A) -> (#%expression A**), then we cannot
;; apply the step (#%expression A*) -> A*. There are two options:
;; - drop the step (this is the current behavior)
;; - simulation: rewrite the step to (#%expression A**) -> A**; this
;; requires custom code for each step (?)
;;
;; On entering a new context for reduction:
;; - 'T: the mode of the new context is still 'T
;; - 'F: the mode of the new context is still 'F
;;
;; - cons: the honesty level of the new context is the subtree of the old level
;; at the path corresponding to the context. For example, if the old honesty
;; level is (cons 'F 'T), then if the new context is ([ ] e), then the new
;; local honesty level is 'F, but if the context is (e [ ]), then the new
;; local level is 'T. (See Honesty Masks.)
;;
;; On returning from a context:
;; - 'T: the parent context's mode is unchanged
;; - otherwise, we merge the final local honesty level into the parent's level
;; at the context's path
;; Why not simplify the lattice to just 'T and 'F?
;;
;; - It means there is no need to explicitly specify dependent vs independent
;; contexts. For example, the if form has three independent subexpressions,
;; and macro hiding in the first subexpression should not affect the second
;; subexpression. But in a block, hiding that occurs in pass1 should inhibit
;; the letrec transformation, and pass2 should start with the visible results
;; of pass1. That is, pass2's context depends on pass1. The old macro stepper
;; implementation addressed this with #:pass1/#:pass2 annotations that had to
;; be sprinkled everywhere, and there always seemed to be one missing
;; somewhere. Honesty masks subsume those annotations.
;;
;; - The current lattice also allow greater precision. For example, in the
;; expansion of (λ () (define x 1) (begin 2 3)), the honesty lattice allows us
;; to hide the define form but show the begin splicing.
;; ----------------------------------------
;; Visible Term Tracking
;; The difficult half of macro hiding is *seeking* visible subterms so their
;; expansions can be shown in synthetic contexts. The "visible term" (v) is not
;; sufficient for determining whether a term should be shown or for determining
;; in what context to show it, due to scopes-renames, syntax-arm/disarm, etc.
;; Instead, when the honesty level drops below 'T, a VT (visible terms tracker)
;; is created from the last visible honest term. The VT records scope-renames
;; and other syntax adjustments. When a term is searched in the VT, it applies
;; the history in reverse to see if the adjusted term existed in the last
;; visible honest term, and if so at what path. The synthetic context (vctx) is
;; created from the path and the current visible term.
;; Invariants:
;; - if (honesty) = 'T, then (the-vt) = #f
;; - if (honesty) < 'T, then
;;   - (the-vt) is a VT, and
;;   - (stx->datum (vt->stx (the-vt))) = (stx->datum v)
;; ----------------------------------------
;; Honesty Masks
;; An *honesty mask* indicates what parts of the current term may be fictional.
;; An HonestyMask is one of
;; - 'F -- (partly, potentially) fictional term
;; - 'T -- true term
;; - (cons HonestyMask HonestyMask) -- a true pair
;; Note: Since HonestyMask < Stx, can use path functions on HonestyMasks.

;; hmcons : HonestyMask HonestyMask -> HonestyMask
;; Note: (cons T T) = T --- pair might be artificial, but can sync? (FIXME)
;; Smart pair constructor for honesty masks: a pair of two fully-honest
;; masks collapses back to the atom 'T.
(define (hmcons hm1 hm2)
  (cond [(and (eq? 'T hm1) (eq? 'T hm2)) 'T]
        [else (cons hm1 hm2)]))
;; honesty-at-path : HonestyMask Path -> HonestyMask
;; Returns the honesty mask describing the subterm reached by path.
;; Relies on 'T/'F being "uniform": stopping early at an atom is safe
;; because all subterms of a 'T ('F) mask are themselves 'T ('F).
(define (honesty-at-path hm path)
  (define-values (hm* path*) (path-get-until hm path symbol?))
  ;; Either we used whole path, or we stopped short at 'T or 'F, and
  ;; the subterms of a 'T or 'F term is 'T or 'F, respectively.
  hm*)
;; honesty-merge : HonestyMask HonestyMask -> HonestyMask
;; Combines two honesty masks pointwise: 'T is the identity element,
;; pairs merge componentwise (re-collapsing via hmcons), and any other
;; combination bottoms out at 'F.
(define (honesty-merge hm1 hm2)
  (define (merge a b)
    (cond [(eq? a 'T) b]
          [(eq? b 'T) a]
          [(and (pair? a) (pair? b))
           (hmcons (merge (car a) (car b)) (merge (cdr a) (cdr b)))]
          [else 'F]))
  (merge hm1 hm2))
;; honesty-merge-at-path : HonestyMask Path HonestyMask -> HonestyMask
;; Merges the first hm's subtree at path with the second mask.
(define (honesty-merge-at-path hm1 path hm2)
  ;; loop : HonestyMask Path -> HonestyMask
  ;; Walks hm1 down the path. 'T is expanded into (T . T) on demand so the
  ;; path stays navigable; 'F stays 'F, since every subterm of 'F is 'F and
  ;; merging into it cannot raise it.
  (define (loop hm1 path)
    (match path
      [(empty-path) (honesty-merge hm1 hm2)]
      [(path-add-car path)
       (match hm1
         [(cons hm1a hm1b) (cons (loop hm1a path) hm1b)]
         ['T (hmcons (loop 'T path) 'T)]
         ['F 'F])]
      [(path-add-cdrs n path)
       ;; Skip n cdr steps, then continue with the remaining path.
       (let tailloop ([hm1 hm1] [n n])
         (cond [(zero? n) (loop hm1 path)]
               [else
                (match hm1
                  [(cons hm1a hm1b) (hmcons hm1a (tailloop hm1b (sub1 n)))]
                  ['T (hmcons 'T (tailloop 'T (sub1 n)))]
                  ['F 'F])]))]))
  (loop hm1 path))
;; An HonestyMaskSpec extends HonestyMask with
;; - #s(hmrep HonestyMask) -- a true list whose elements have the given honesty
(struct hmrep (hm) #:prefab)
;; honesty>=? : HonestyMask HonestyMaskSpec -> Boolean
;; Returns #t if hm1 is at least as honest as hm2.
(define (honesty>=? hm1 hm2)
  (let loop ([hm1 hm1] [hm2 hm2])
    (match* [hm1 hm2]
      ;; 'T is maximally honest; 'F demands nothing.
      [['T _] #t]
      [[_ 'F] #t]
      [[(cons hm1a hm1b) (cons hm2a hm2b)]
       (and (loop hm1a hm2a) (loop hm1b hm2b))]
      ;; #s(hmrep hm) requires each list element to satisfy hm: check the
      ;; head against the element mask, the tail against the rep itself.
      [[(cons hm1a hm1b) (hmrep hm2e)]
       (and (loop hm1a hm2e) (loop hm1b hm2))]
      [[_ _] #f])))
| null | https://raw.githubusercontent.com/racket/macro-debugger/d4e2325e6d8eced81badf315048ff54f515110d5/macro-debugger-text-lib/macro-debugger/model/reductions-util.rkt | racket | ============================================================
Hiding configuration
============================================================
Expansion Context
(Parameterof HonestyMask)
PRE: hm <= (honesty) -- that is, honesty is only decreased or left unchanged
Invariant: (honesty) = 'T iff (the-vt) = #f
honest? : -> Boolean
not-complete-fiction? : -> Boolean
============================================================
Expansion State
An XState is:
ImmutableHasheq[Identifier => Phase]
ImmutableHasheq[Identifier => Phase]
ImmutableHashEq[Syntax => #t]
where Lift = (list 'def Ids Syntax) | (list 'req Syntax) | (list 'mod Syntax)
new-xstate : -> XState
next-seqno : -> Nat
add-lift : Lift -> Void
add-endlift : Syntax -> Void
add-step : Step -> Void
----------------------------------------
Lifts
============================================================
Creating steps
An immediate-frame's procedure should not add syntactic structure; it should
only add/adjust syntax properties, rearm terms, etc. When the current state
is captured, any immediate frames at the top of the context are applied to
the visible term and any focus identical to the visible term. This preserves
eq? connections between term and foci; without it, whole-term steps lose
their focus highlighting. See add-rearm-frame in reductions.rkt
============================================================
This monad acts like an Exception monad whose success type is always
Contextual state and threaded state are handled by parameters and a
parameter-held "xstate" mutable object (see later sections).
| (rsfailed Exn)
============================================================
Implicit match from #:pattern
In a subexpression of an R-clause (see below), the % and %e macros have
access to the pattern variables bound by matching the current term (f)
against the current pattern (p).
Unlike with-syntax and #', pattern-match and % never create syntax
continuation management.
============================================================
(R R-clause ...) : RST
A R/<Clause> macro has the form
- f is the "real" form -- it should never contain artificial syntax
- v is the "virtual/visible" form (used for steps)
- p is the current pattern
- s is the last marked state, or #f
Error-point case
Change patterns
Execute expressions for effect
Change syntax
Rename
renaming preserves honesty
----
Note: renames might have more structure than pre-renames, especially if arming!
FIXME: if honesty = (T . F), then what about mark visibility?
FIXME: if mode is mark or unmark, should honesty be strictly 'T or 'F ??
FIXME: better condition/heuristic for when to add rename step?
Note: pre,post can have different shape because of rename-transformers
- corresponds to the dynamic extent of a syntax-local-introduce bindings
Conditional (pattern changes lost afterwards ...)
Conditional (pattern changes lost afterwards ...)
FIXME?
============================================================
(begin (eprintf " the-vt =\n") (pretty-print (the-vt)) (eprintf "\n")))
inside parameterize
outside parameterize
note: returning a true term into a fictional
context does not increase honesty
============================================================
Running reducers in a sub-context
Within a context created by run/path:
- Honesty (with respect to the context) never increases; the local honesty on
exit from the context is <= the local honesty when the context is entered.
- Global honesty does not change on entry to a context. It only changes on
hide decisions, steps (if local honesty is < T, then a step immediately
sets it to F), and exit from a context.
- A successful seek creates a locally honest context, but the honesty is lost
on exit from the context. Honesty is measured from the root of the current
term, so an honest term gets "lost" when returned into dishonest context.
where Hole = Symbol | (list Symbol '...) -- NOT a pattern value
Note: run restores pattern after running reducer
FIXME: it would be slightly better to know whether we were *inside* an F,
because we care about whether the context is honest, not the term
inside parameterize
outside of parameterize
Case: sub-hm = F
- then sub-vt was not zoomed, end-vt extends sub-vt, return as is
Case: sub-hm > F and sub-vt != #f
- then sub-vt was zoomed, end-vt extends it, so unzoom end-vt
Case: sub-hm = T and end-vt != #f -- honesty decreased during reducer
Case: sub-hm = end-hm = T
------------------------------------
flatten-identifiers : syntaxlike -> (list-of identifier)
============================================================
The behavior of the reductions generator is determined by the current
*honesty* level.
- honesty: Does the current *visible* local term correspond to the current
*actual* local term? This depends on the history of macro hiding within the
current context and (partly) on the honesty of the parent context.
Honesty forms a lattice with top 'T and bottom 'F:
- 'T = honest: The current local *actual* and *visible* terms correspond
exactly, although the actual term (f) may have plain pairs in some places
where the visible term has artificial syntax pairs.
- 'F = fictional: The current visible term is (potentially) completely
fictional. It may originate from an outer context. For example:
Suppose that define is hidden. Then when we recur into its right-hand
Visible Term Tracking.)
- (cons hm1 hm2) = an honest pair with contents whose honesty is described by
The honesty level has the following consequences:
On *steps*:
- 'T: the step can be shown, and it updates the visible term
- 'F: the step is not shown, and the visible term is not updated
- cons: the step cannot be shown, or it must be simulated
- drop the step (this is the current behavior)
this
requires custom code for each step (?)
On entering a new context for reduction:
- 'T: the mode of the new context is still 'T
- 'F: the mode of the new context is still 'F
- cons: the honesty level of the new context is the subtree of the old level
at the path corresponding to the context. For example, if the old honesty
level is (cons 'F 'T), then if the new context is ([ ] e), then the new
local honesty level is 'F, but if the context is (e [ ]), then the new
local level is 'T. (See Honesty Masks.)
On returning from a context:
- 'T: the parent context's mode is unchanged
- otherwise, we merge the final local honesty level into the parent's level
at the context's path
Why not simplify the lattice to just 'T and 'F?
- It means there is no need to explicitly specify dependent vs independent
subexpression. But in a block, hiding that occurs in pass1 should inhibit
implementation addressed this with #:pass1/#:pass2 annotations that had to
be sprinkled everywhere, and there always seemed to be one missing
somewhere. Honesty masks subsume those annotations.
- The current lattice also allow greater precision. For example, in the
to hide the define form but show the begin splicing.
----------------------------------------
Visible Term Tracking
expansions can be shown in synthetic contexts. The "visible term" (v) is not
sufficient for determining whether a term should be shown or for determining
in what context to show it, due to scopes-renames, syntax-arm/disarm, etc.
the history in reverse to see if the adjusted term existed in the last
created from the path and the current visible term.
Invariants:
- if (honesty) = 'T, then (the-vt) = #f
- if (honesty) < 'T, then
----------------------------------------
Honesty Masks
An *honesty mask* indicates what parts of the current term may be fictional.
- 'F -- (partly, potentially) fictional term
- 'T -- true term
Note: (cons T T) = T --- pair might be artificial, but can sync? (FIXME)
Either we used whole path, or we stopped short at 'T or 'F, and
the subterms of a 'T or 'F term is 'T or 'F, respectively.
honesty>=? : HonestyMask HonestyMaskSpec -> Boolean | #lang racket/base
(require (for-syntax racket/base
syntax/parse)
racket/stxparam
racket/contract/base
racket/list
syntax/stx
racket/match
racket/pretty
"deriv-util.rkt"
"stx-util.rkt"
"pattern.rkt"
"context.rkt"
"tracking.rkt"
"steps.rkt")
(provide STRICT-CHECKS
DEBUG)
(define-syntax-rule (STRICT-CHECKS form ...)
(when #t form ... (void)))
(define-syntax-rule (DEBUG form ...)
(when #f form ... (void)))
;; hash-set-list : (Hash K V) (Listof K) V -> (Hash K V)
;; Functionally binds every key in ks to the single value v.
(define (hash-set-list h ks v)
  (foldl (lambda (k acc) (hash-set acc k v)) h ks))

;; hash-remove-list : (Hash K V) (Listof K) -> (Hash K V)
;; Functionally removes every key in ks.
(define (hash-remove-list h ks)
  (foldl (lambda (k acc) (hash-remove acc k)) h ks))
(define state/c (or/c state? #f))
(provide
(contract-out
[macro-policy (parameter/c (-> identifier? any/c))]))
(define macro-policy (make-parameter (lambda (id) #t)))
(provide
(contract-out
[the-phase (parameter/c exact-nonnegative-integer?)]
[the-context (parameter/c list?)]
[the-big-context (parameter/c (listof bigframe?))]
[call-with-initial-context (-> (-> any) #:xstate xstate? any)])
honest?
not-complete-fiction?)
(define the-phase (make-parameter 0))
(define the-context (make-parameter null))
(define the-big-context (make-parameter null))
;; honesty : (Parameterof HonestyMask)
;; the-vt : (Parameterof (U VT #f))
;; NOTE(review): this line was corrupted in the source; these parameter
;; definitions are reconstructed from their uses -- call-with-initial-context
;; installs (honesty 'T) and (the-vt #f), and the documented invariant is
;; (honesty) = 'T iff (the-vt) = #f. Confirm against upstream.
(define honesty (make-parameter 'T))
(define the-vt (make-parameter #f))
(define (call-with-initial-context proc #:xstate xst)
(parameterize ((the-xstate xst)
(the-phase 0)
(the-context null)
(the-big-context null)
(the-vt #f)
(honesty 'T))
(proc)))
;; set-honesty : HonestyMask Stx -> Void
;; Lowers the current honesty level to hm. On the first transition away
;; from 'T, starts visible-term tracking by seeding the VT with f.
(define (set-honesty hm f)
  (define current-hm (honesty))
  (DEBUG (unless (eq? (honesty) hm) (eprintf "set-honesty : ~s => ~s\n" (honesty) hm)))
  (unless (equal? current-hm hm)
    ;; Invariant: (the-vt) is #f exactly while honesty is 'T.
    (when (eq? current-hm 'T) (the-vt (vt-base f)))
    (honesty hm)))

;; honest? : -> Boolean -- is the visible term exactly the actual term?
(define (honest?) (eq? (honesty) 'T))

;; not-complete-fiction? : -> Boolean -- is any part of the visible term real?
(define (not-complete-fiction?) (not (eq? (honesty) 'F)))
(provide
(struct-out xstate)
(contract-out
[the-xstate (parameter/c (or/c xstate? #f))]
[new-xstate (-> xstate?)]
[next-seqno (-> exact-nonnegative-integer?)]
[add-step (->* [protostep?] [any/c] void?)])
FIXME
learn-binders
learn-definites
add-lift
add-endlift
get/clear-lifts
get/clear-endlifts)
;; NOTE(review): the field list of this struct was lost to corruption; the
;; fields below are reconstructed from the accessors/mutators used in this
;; file (xstate-seqno, xstate-binders, xstate-definites, xstate-lifts,
;; xstate-endlifts, xstate-frontier, xstate-steps, xstate-all-steps and
;; their setters) and from the 8 arguments new-xstate passes -- confirm
;; against upstream source.
(struct xstate
  (seqno      ;; Nat -- step sequence counter
   binders    ;; ImmutableHasheq[Identifier => Phase]
   definites  ;; ImmutableHasheq[Identifier => Phase]
   lifts      ;; (Listof LiftInfo), newest first
   endlifts   ;; (Listof LiftInfo), newest first
   frontier   ;; ImmutableHashEq[Syntax => #t]
   steps      ;; ReductionSequence, newest first
   all-steps  ;; (Listof AnnotatedStep) or #f (disabled)
   ) #:transparent #:mutable)

;; An AnnotatedStep is (annotated Boolean HonestyMask Step)
(struct annotated (shown? hm step) #:transparent)
;; the-xstate : (Parameterof (U XState #f))
(define the-xstate (make-parameter #f))
;; new-xstate : -> XState
;; Fresh expansion state: zero seqno, empty identifier tables, no lifts or
;; steps, and all-steps tracking disabled (#f).
(define (new-xstate)
  (xstate 0 '#hasheq() '#hasheq() null null '#hasheq() null #f))

;; next-seqno : -> Nat -- returns the current seqno, then increments it.
(define (next-seqno #:xstate [xst (the-xstate)])
  (let ([n (xstate-seqno xst)]) (set-xstate-seqno! xst (add1 n)) n))
learn-{binders , definites } : Id / s - > Void
;; learn-binders : Id/s -> Void
;; Records the given identifiers as binders at the current phase.
(define (learn-binders ids #:xstate [xst (the-xstate)])
  (set-xstate-binders! xst (hash-set-list (xstate-binders xst) (flatten-identifiers ids) (the-phase))))

;; learn-definites : Id/s -> Void
;; Records the given identifiers as definite references at the current phase.
;; NOTE(review): uses plain flatten here, vs flatten-identifiers above --
;; presumably ids may be an ordinary nested list; confirm.
(define (learn-definites ids #:xstate [xst (the-xstate)])
  (set-xstate-definites! xst (hash-set-list (xstate-definites xst) (flatten ids) (the-phase))))

;; add-lift : Lift -> Void
;; Records a lift together with the current honesty/VT context.
(define (add-lift lift #:xstate [xst (the-xstate)])
  (let ([li (liftinfo lift (honesty) (the-vt))])
    (set-xstate-lifts! xst (cons li (xstate-lifts xst)))))

;; add-endlift : Syntax -> Void
;; Records an end-of-module lift together with the current honesty/VT context.
(define (add-endlift lift #:xstate [xst (the-xstate)])
  (let ([li (liftinfo lift (honesty) (the-vt))])
    (set-xstate-endlifts! xst (cons li (xstate-endlifts xst)))))
;; get/clear-lifts : -> (Listof Lift)
;; get/clear-endlifts : -> (Listof Lift)
;; Return the accumulated lifts/endlifts and reset the accumulators.
;; The previous bodies only cleared the lists and returned void, losing the
;; "get" half that the names (and the signature comments above) promise;
;; restored here with begin0.
;; NOTE(review): add-lift conses newest-first, so we reverse to return the
;; lifts in occurrence order -- confirm expected ordering against callers.
(define (get/clear-lifts #:xstate [xst (the-xstate)])
  (begin0 (reverse (xstate-lifts xst))
    (set-xstate-lifts! xst null)))
(define (get/clear-endlifts #:xstate [xst (the-xstate)])
  (begin0 (reverse (xstate-endlifts xst))
    (set-xstate-endlifts! xst null)))
;; add-step : Step [Boolean] -> Void
;; Records step in the visible reduction sequence when add? is true (by
;; default, only while fully honest). When the all-steps debugging log is
;; enabled, the step is always recorded there, annotated with its
;; visibility and the current honesty mask.
(define (add-step step [add? (honest?)] #:xstate [xst (the-xstate)])
  (when add? (set-xstate-steps! xst (cons step (xstate-steps xst))))
  (let ([all-steps (xstate-all-steps xst)])
    (when all-steps (set-xstate-all-steps! xst (cons (annotated add? (honesty) step) all-steps)))))

;; A LiftInfo pairs a lift with the honesty/VT context it occurred in.
(struct liftinfo (lift hm vt) #:prefab)
(provide
immediate-frame
(contract-out
[current-state-with
(-> syntaxish? syntaxish?
state?)]
[walk
(->* [syntaxish? syntaxish? step-type?]
[#:foci1 syntaxish? #:foci2 syntaxish?]
step?)]
[stumble
(->* [syntaxish? exn?]
[#:focus syntaxish?]
misstep?)]
[walk/talk
(-> step-type? (listof (or/c syntax? string? 'arrow))
protostep?)]
[foci
(-> any/c (listof syntax?))]))
;; current-state-with : Stx Stx -> State
;; Captures the current expansion state with e as the focused term and fs
;; as the highlighted foci. Immediate frames at the top of the context are
;; applied to e first (they only adjust properties/arming, not structure);
;; any focus that is eq? to e is updated in lockstep so whole-term steps
;; keep their focus highlighting.
(define (current-state-with e fs)
  (define xst (the-xstate))
  (let loop ([e e] [fs (foci fs)] [ctx (the-context)])
    (cond [(and (pair? ctx) (immediate-frame? (car ctx)))
           (define e* ((car ctx) e))
           (loop e*
                 (for/list ([f (in-list fs)])
                   (if (eq? f e) e* f))
                 (cdr ctx))]
          [else
           (make state e fs ctx (the-big-context)
                 (xstate-binders xst) (xstate-definites xst)
                 (xstate-frontier xst) (xstate-seqno xst))])))
;; An immediate-frame wraps a procedure that is applied directly to the
;; term when capturing state (see current-state-with). Applicable as a
;; function via prop:procedure.
(struct immediate-frame (f)
  #:property prop:procedure (lambda (self x) ((immediate-frame-f self) x)))

;; walk : Stx Stx StepType -> Step
;; Builds a rewrite step from e1 to e2; foci default to the whole terms.
(define (walk e1 e2 type
              #:foci1 [foci1 e1]
              #:foci2 [foci2 e2])
  (make step type
        (current-state-with e1 foci1)
        (current-state-with e2 foci2)))

;; stumble : Stx Exn -> Misstep -- an error step at stx.
(define (stumble stx exn #:focus [focus stx])
  (make misstep 'error (current-state-with stx focus) exn))

;; walk/talk : StepType (Listof (U Syntax String 'arrow)) -> Remarkstep
;; A purely explanatory step with no focused term.
(define (walk/talk type contents)
  (make remarkstep type (current-state-with #f null) contents))

;; foci : Any -> (Listof Syntax) -- all syntax objects in a nested structure.
(define (foci x) (filter syntax? (flatten x)))
;; RS: the reduction monad. This monad acts like an Exception monad
;; specialized to a 4-tuple containing the *local* reduction state: real term,
;; visible term, pattern, and state (cf steps.rkt).
(provide
RS/c
(contract-out
[RSunit
(-> syntaxish? syntaxish? pattern/c state/c RS/c)]
[RSfail
(-> exn? RS/c)]
[RSbind
(-> RS/c (-> any/c any/c state/c RS/c) RS/c)]
[RScase
(-> RS/c
(-> any/c any/c any/c state/c any)
(-> exn? any)
any)]
[RSreset
(->* [RS/c] [#:pattern (or/c pattern/c #f)] RS/c)]))
;; RS = (rsok Stx Stx Pattern State) | (rsfailed Exn)
(struct rsok (f v p s))
(struct rsfailed (exn))
(define RS/c (or/c rsok? rsfailed?))
(define pattern/c any/c)
(define RST/c
;; First two args are any/c instead of syntaxish? because of
;; #:new-local-context that initially sets syntax to #f.
(-> (or/c syntaxish? #f) (or/c syntaxish? #f) pattern/c state/c
RS/c))
;; RSunit : Stx Stx Pattern State -> RS -- successful result.
(define (RSunit f v p s) (rsok f v p s))

;; RSfail : Exn -> RS -- failed result.
(define (RSfail exn) (rsfailed exn))

;; RSbind : RS (Stx Stx Pattern State -> RS) -> RS
;; Applies fun to a successful result's components; a failure is
;; propagated unchanged.
(define (RSbind a fun)
  (if (rsok? a)
      (fun (rsok-f a) (rsok-v a) (rsok-p a) (rsok-s a))
      a))

;; RScase : RS (Stx Stx Pattern State -> X) (Exn -> X) -> X
;; Eliminator covering both RS cases.
(define (RScase a ok fail)
  (if (rsok? a)
      (ok (rsok-f a) (rsok-v a) (rsok-p a) (rsok-s a))
      (fail (rsfailed-exn a))))

;; RSreset : RS [#:pattern (U Pattern #f)] -> RS
;; Restores the given pattern (when non-#f) in a successful result.
(define (RSreset a #:pattern [reset-p #f])
  (RSbind a (lambda (f v p s) (RSunit f v (or reset-p p) s))))
;; objects. Also, patterns here are first-class values, which simplifies
;; continuation management.
(provide % %e)
(define-syntax-parameter the-match-result
(lambda (stx)
(raise-syntax-error #f "no match result; used outside of with-pattern-match" stx)))
(define-syntax-rule (% p) (%e (quote-template-pattern p)))
(define-syntax-rule (%e p) (pattern-template p the-match-result))
(define-syntax with-pattern-match
(syntax-parser
[(_ [f p] expr:expr)
#'(let ([mv (pattern-match p f)])
(syntax-parameterize ((the-match-result (make-rename-transformer #'mv)))
expr))]))
;; The Reduction Language
(provide R !)
(define-syntax ! (syntax-rules ()))
(begin-for-syntax
(define clause-kw->macro
(hash '#:set-syntax #'R/set-syntax
'#:pattern #'R/pattern
'#:do #'R/do
'#:let #'R/let
'#:parameterize #'R/parameterize
'#:walk #'R/walk
'#:rename #'R/rename
'#:rename/mark #'R/rename/mark
'#:rename/unmark #'R/rename/unmark
'#:with-marking #'R/with-marking
'#:new-local-context #'R/new-local-context
'#:if #'R/if
'#:when #'R/when
'#:in-hole #'R/in-hole
'#:hide-check #'R/hide-check
'#:seek-check #'R/seek-check
))
(define-syntax-class RClause #:attributes (macro)
#:literals (!)
(pattern [! . _]
#:with macro #'R/!)
(pattern [e:expr . _]
#:with macro #'R/run)
(pattern [kw:keyword . _]
#:attr macro (hash-ref clause-kw->macro (syntax-e #'kw) #f)
#:fail-when (and (not (attribute macro)) #'kw) "unknown keyword")))
;; syntax (R RClause ...) : RST
(define-syntax-rule (R . clauses)
(lambda (f v p s) (R** f v p s . clauses)))
;; syntax (R** f v p s RClause ...) : RS
(define-syntax R**
(syntax-parser
#:literals (=>)
[(R** f v p s)
#'(RSunit f v p s)]
[(R** f v p s => k . more)
#:declare k (expr/c #'RST/c)
#'(RSbind (RSreset (k.c f v p s) #:pattern p)
(R . more))]
[(R** f v p s c:RClause . more)
#'(begin
(DEBUG (do-debug-clause (quote c) (quote-syntax c) v))
(c.macro f v p s c (R . more)))]))
(define (do-debug-clause c cstx v)
(define where (format "[~s:~s]" (syntax-line cstx) (syntax-column cstx)))
(eprintf "doing ~a ~.s, honesty = ~s, v = ~.s\n"
where (trim-quoted-clause c) (honesty) (stx->datum v)))
(define (trim-quoted-clause c)
(define (abbrev-kw? x) (memq x '(#:parameterize #:when #:if #:with-marking #:do)))
(match c [(cons (? abbrev-kw? kw) _) `(,kw _)] [_ c]))
;; (R/<Clause> f v p s <Clause> kexpr)
;; where f, v, p, s are *variables* and kexpr is an *expression*
;; - kexpr is the continuation (RST)
(define-syntax R/!
(syntax-parser
#:literals (!)
[(_ f v p s [! maybe-exn] ke)
#:declare maybe-exn (expr/c #'(or/c exn? #f))
#'(let ([x maybe-exn.c])
(if x
(begin (add-step (stumble v x) #t)
(RSfail x))
(ke f v p s)))]))
(define-syntax R/pattern
(syntax-parser
[(_ f v p s [#:pattern p2] ke)
#'(ke f v (quote-pattern p2) s)]))
(define-syntax R/do
(syntax-parser
[(_ f v p s [#:do expr ...] ke)
#'(begin
(with-pattern-match [f p] (let () expr ... (void)))
(ke f v p s))]))
(define-syntax R/let
(syntax-parser
[(_ f v p s [#:let var:id expr] ke)
#'(let ([var (with-pattern-match [f p] expr)])
(ke f v p s))]))
(define-syntax R/parameterize
(syntax-parser
[(_ f v p s [#:parameterize ((param expr) ...) . clauses] ke)
#:declare param (expr/c #'parameter?)
#'(RSbind (parameterize ((param.c (with-pattern-match [f p] expr)) ...)
(R** f v p s . clauses))
ke)]))
(define-syntax R/set-syntax
(syntax-parser
[(_ f v p s [#:set-syntax form] ke)
#:declare form (expr/c #'syntaxish?)
#'(let ([f2 (with-pattern-match [f p] form.c)])
(ke f2 (change-visible-term f f2 v) p s))]))
;; change-visible-term : Stx Stx Stx -> Stx
;; After the real term f rewrites to f2: when honest, the visible term
;; follows along (f2); otherwise the rewrite is hidden, the visible term
;; stays v, and honesty drops to 'F (seeding the VT from f if needed).
(define (change-visible-term f f2 v)
  (cond [(honest?) f2]
        [else (set-honesty 'F f) v]))
(begin-for-syntax
(define-syntax-class walk-clause
#:attributes (state1.c form1.c form2.c foci1.c foci2.c type)
(pattern [#:walk form2 type:expr
(~alt (~optional (~seq #:foci foci2))
(~optional (~seq #:from-state state1))
(~optional (~seq #:from form1))
(~optional (~seq #:from-foci foci1))) ...]
#:declare state1 (expr/c #'state/c)
#:declare form1 (expr/c #'syntaxish?)
#:declare foci1 (expr/c #'syntaxish?)
#:declare form2 (expr/c #'syntaxish?)
#:declare foci2 (expr/c #'syntaxish?))))
(define-syntax R/walk
(syntax-parser
[(_ f v p s w:walk-clause ke)
#'(let ()
(define-values (state1 f1 f2 type)
(with-pattern-match [f p]
(values (~? w.state1.c #f) (~? w.form1.c v) w.form2.c w.type)))
(define-values (fs1 fs2)
(with-pattern-match [f p]
(values (~? w.foci1.c f1) (~? w.foci2.c f2))))
(do-walk f v p s state1 f1 fs1 f2 fs2 type ke))]))
;; do-walk : Stx Stx Pattern State (U State #f) Stx Stx Stx Stx StepType RST -> RS
;; Worker for the #:walk clause: captures from/to states, emits a step when
;; type is non-#f, then continues with f2 as the real term (the visible
;; term follows only when honest).
(define (do-walk f v p s state1 f1 fs1 f2 fs2 type k)
  (define s1 (or state1 (current-state-with f1 fs1)))
  (define s2 (current-state-with f2 fs2))
  (when type (add-step (make step type s1 s2)))
  (k f2 (change-visible-term f f2 v) p s2))
(define-syntax R/rename
(syntax-parser
[(_ f v p s [#:rename pattern renames] ke)
#'(RSbind (Rename f v p s pattern renames #f #f) ke)]
[(_ f v p s [#:rename pattern renames description] ke)
#'(RSbind (Rename f v p s pattern renames description #f) ke)]))
(define-syntax-rule (Rename f v p s pattern renames description mark-flag)
(let ()
(define-values (renames-var description-var)
(with-pattern-match [f p] (values renames description)))
(do-rename f v p s (quote-pattern pattern) renames-var description-var mark-flag)))
;; do-rename : Stx Stx Pattern State Pattern Stx Any (U 'mark 'unmark #f) -> RS
;; Applies a renaming: the subterm of f matched by ren-p is replaced with
;; renames. Short-circuits to a no-op when the renaming changes nothing.
(define (do-rename f v p s ren-p renames description mode)
  (DEBUG
   (eprintf "do-rename(~s): ~.s at ~s\n" (or mode description) (stx->datum renames) ren-p)
   (eprintf " v = ~.s\n" (stx->datum v)))
  ;; The pre-rename subterm, extracted via the same sub-pattern.
  (define pre-renames (pattern-template ren-p (pattern-match p f)))
  (cond [(equal? pre-renames renames)
         (RSunit f v p s)]
        [else
         (do-rename* f v p s ren-p pre-renames renames description mode)]))
;; do-rename* : Stx Stx Pattern State Pattern Stx Stx Any (U 'mark 'unmark #f) -> RS
;; Performs a non-trivial renaming: updates the real term f, records the
;; rename in the VT (when tracking), and computes the new visible term
;; according to the current honesty level. Emits a rename step unless the
;; description is suppressed (#f or 'sync).
;; Fix: the "Must include pre-renames..." line had lost its ";;" comment
;; marker, leaving bare identifiers inside the form; restored as a comment.
(define (do-rename* f v p s ren-p pre-renames renames description mode)
  (STRICT-CHECKS
   (unless (same-contour? pre-renames renames)
     (error 'rename "different contours!\n contour-diff: ~s\n pre: ~s\n post: ~s"
            (stx-contour-diff pre-renames renames) pre-renames renames)))
  (define f2 (pattern-replace p f ren-p renames #:resyntax? #f))
  (when (the-vt) (the-vt (vt-track pre-renames renames (the-vt) description)))
  (define-values (v2 foci1 foci2)
    (cond [(honest?)
           (values (pattern-replace p f ren-p renames #:resyntax? #t)
                   pre-renames renames)]
          [(eq? mode 'mark)
           ;; Marks are invisible while hiding: no visible change, no foci.
           (values v null null)]
          [else
           (define-values (v2 foci1 foci2)
             (do-rename-v v (the-vt) (honesty) pre-renames renames))
           (values (honesty-composite (honesty) f2 v2)
                   ;; Must include pre-renames, renames for true part (FIXME: need narrowing?)
                   (cons foci1 pre-renames) (cons foci2 renames))]))
  (DEBUG
   (eprintf " renamed: diff=~s, v2 = ~.s \n" (stx-eq-diff v2 v) (stx->datum v2)))
  (when (not (memq description '(#f sync)))
    (add-step (walk v v2 description #:foci1 foci1 #:foci2 foci2)
              (not-complete-fiction?)))
  (RSunit f2 v2 p s))
;; honesty-composite : HonestyMask Stx Stx -> Stx
;; Builds a composite term guided pointwise by the honesty mask: 'T parts
;; come from the real term f, 'F parts from the visible term v, and pair
;; masks recurse into both (resyntaxing against v by default).
(define (honesty-composite hm f v #:resyntax? [resyntax? #t])
  (DEBUG (eprintf "honesty-composite: ~s\n f = ~.s\n v = ~.s\n" hm f v))
  (let loop ([hm hm] [f f] [v v])
    (match hm
      ['T f]
      ['F v]
      [(cons hma hmb)
       (define c (cons (loop hma (stxd-car f) (stxd-car v))
                       (loop hmb (stxd-cdr f) (stxd-cdr v))))
       (if resyntax? (restx c v) c)])))
;; do-rename-v : Stx VT HonestyMask Stx Stx -> (values Stx (Listof Stx) (Listof Stx))
;; Applies a rename to the *visible* term while hiding: finds where the
;; renamed subterms occur within v (via the VT) and replaces them there.
;; Returns the updated visible term plus the pre/post foci actually used.
;; Fix: the "Recur through pre, post..." line had lost its ";;" comment
;; marker, leaving bare identifiers inside the form; restored as a comment.
(define (do-rename-v v vt hm pre post)
  (DEBUG
   (eprintf " do-rename-v\n")
   (eprintf " vt-stx = ~.s\n" (stx->datum (vt->stx vt))))
  (STRICT-CHECKS
   (unless (same-contour? pre post)
     (eprintf "RENAME MISMATCH\npre = ~s\npost = ~s\n" (stx->datum pre) (stx->datum post))))
  ;; Recur through pre, post to find the largest sub-renames that apply to v.
  (define (init-k v accren) (values v (map car accren) (map cdr accren)))
  (let loop ([pre pre] [post post] [v v] [accren null] [k init-k])
    ;; try-rename : -> (U (cons Stx (Listof (cons Stx Stx))) #f)
    ;; Attempts to locate pre in the VT; on success, replaces it in v at
    ;; the found path and records the (pre . post) pair.
    (define (try-rename)
      (match (vt-seek pre vt)
        [(cons path _)
         (DEBUG
          (eprintf " found at ~s, pre = ~.s\n" path (stx->datum pre))
          (eprintf " actually = ~.s\n" (stx->datum (path-get v path)))
          (eprintf " do-rename-v : replace at ~s : ~.s => ~.s\n"
                   path (stx->datum v) (stx->datum (path-replace v path post #:resyntax? #f))))
         (cons (path-replace v path post #:resyntax? #t)
               (cons (cons pre post) accren))]
        [else #f]))
    (cond [(and (syntax? pre) (try-rename))
           => (match-lambda [(cons v accren) (k v accren)])]
          [(stx-pair? pre)
           ;; Not found whole: split the rename and try each half (CPS so
           ;; the accumulated v/accren thread left-to-right).
           (loop (stxd-car pre) (stxd-car post) v accren
                 (lambda (v accren)
                   (loop (stxd-cdr pre) (stxd-cdr post) v accren k)))]
          [else (k v accren)])))
(define-syntax R/rename/mark
(syntax-parser
[(_ f v p s [#:rename/mark pvar to] ke)
#:declare to (expr/c #'syntaxish?)
#'(RSbind (Rename f v p s pvar to.c #f 'mark) ke)]))
(define-syntax R/rename/unmark
(syntax-parser
[(_ f v p s [#:rename/unmark pvar to] ke)
#:declare to (expr/c #'syntaxish?)
#'(RSbind (Rename f v p s pvar to.c #f 'unmark) ke)]))
(define-syntax-rule (R/with-marking f v p s [#:with-marking c ...] ke)
(RSbind ((R c ...) f v p s) ke))
(define-syntax R/if
(syntax-parser
[(_ f v p s [#:if test [consequent ...] [alternate ...]] ke)
#'(RSbind (RSreset (if (with-pattern-match [f p] test)
(R** f v p s consequent ...)
(R** f v p s alternate ...))
#:pattern p)
ke)]))
(define-syntax R/when
(syntax-parser
[(_ f v p s [#:when test consequent ...] ke)
#'(R/if f v p s [#:if test [consequent ...] []] ke)]))
(define-syntax R/new-local-context
(syntax-parser
[(_ f v p s [#:new-local-context clause ...] ke)
#'(do-local-context f v p s (R clause ...) ke)]))
;; do-local-context : Stx Stx Pattern State RST RST -> RS
;; Runs rst in a fresh local ("big-frame") context. When honest, the inner
;; reduction's results are discarded and the outer f/v/p/s resume; while
;; hiding, only the inner visible term (v2) is carried back.
(define (do-local-context f v p s rst k)
  (cond [(honest?)
         (RSbind (call/local-context v (lambda () (rst #f #f (quote-pattern _) #f)))
                 (lambda (_f2 _v2 _p2 _s2)
                   (k f v p s)))]
        [else
         (RSbind (rst #f v (quote-pattern _) #f)
                 (lambda (_f2 v2 _p2 _s2)
                   (k f v2 p s)))]))

;; call/local-context : Stx (-> RS) -> RS
;; Invokes proc with the current context pushed as a big-frame, an empty
;; local context, and VT tracking reset.
(define (call/local-context v proc)
  (define bf (bigframe (the-context) (list v) v))
  (parameterize ((the-big-context (cons bf (the-big-context)))
                 (the-context null)
                 (the-vt #f))
    (proc)))
(define-syntax R/run
(syntax-parser
;; Subterm handling
[(R** f v p s [reducer hole fill] ke)
#:declare reducer (expr/c #'(-> any/c RST/c))
#'(RSbind (run reducer.c f v p s (quote hole) fill)
ke)]))
(define-syntax R/in-hole
(syntax-parser
[(_ f v p s [#:in-hole hole . clauses] ke)
#'(RSbind (let ([reducer (lambda (_) (R . clauses))])
(run reducer f v p s (quote hole) #f))
ke)]))
(define-syntax R/hide-check
(syntax-parser
[(_ f v p s [#:hide-check rs] ke)
#:declare rs (expr/c #'(listof identifier?))
#'(do-hide-check f v p s (with-pattern-match [f p] rs.c) ke)]))
;; do-hide-check : Stx Stx Pattern State (Listof Id) RST -> RS
;; Consults the macro policy on the given macro identifiers; if any should
;; be hidden (and we are not already fully fictional), drop honesty to 'F
;; so subsequent steps are suppressed.
(define (do-hide-check f v p s ids k)
  (unless (or (eq? (honesty) 'F) (andmap (macro-policy) ids))
    (DEBUG
     (eprintf "hide-check: hiding with f=~.s, v=~.s\n" (stx->datum f) (stx->datum v)))
    (set-honesty 'F f))
  (k f v p s))
(define-syntax-rule (R/seek-check f v p s [#:seek-check] ke)
(do-seek-check f v p s ke))
;; do-seek-check : Stx Stx Pattern State RST -> RS
;; While hiding, seeks the real term f in the VT. If found, runs the
;; continuation honestly inside a synthetic visible context (vctx) built
;; from the found path, then merges the results back into the outer VT.
;; If not found (or already honest), proceeds unchanged.
;; Fix: the no-path branch was missing a close paren after the DEBUG form,
;; which unbalanced the definition and trapped (k f v p s) inside DEBUG
;; (a compiled-out form); restored so the continuation always runs.
(define (do-seek-check f v p s k)
  (cond [(honest?) (k f v p s)]
        [else
         (match (vt-seek f (the-vt))
           ['()
            (DEBUG (eprintf "seek-check: no paths found for ~.s\n" (stx->datum f)))
            (k f v p s)]
           [(cons path more-paths)
            (DEBUG (eprintf "seek-check: found path ~s for ~.s within ~.s\n"
                            path (stx->datum f) (stx->datum v)))
            (define vctx (path-replacer v path #:resyntax? #t))
            ;; Run honestly in the synthetic context; the thunk returned by
            ;; RScase re-installs the outer parameterization before merging.
            ((parameterize ((the-context (cons vctx (the-context)))
                            (honesty 'T)
                            (the-vt #f))
               (RScase (k f f p s)
                       (lambda (f2 v2 p2 s2)
                         (define end-vt (the-vt))
                         (lambda ()
                           (the-vt (vt-merge-at-path (the-vt) path (or end-vt f2)))
                           (RSunit f2 (vctx v2) p s)))
                       (lambda (exn)
                         (lambda ()
                           (RSfail exn))))))])]))
;; run : (X -> RST) Stx Stx Pattern State Hole (U X (Listof X)) -> RS
;; (signature comment above had lost its ";;" marker — restored; the exact
;; fill type was garbled in extraction, reconstructed from usage below)
;; A symbol hole names a single subterm; (hole ...) names an ellipsis
;; sequence, reduced element by element, one fill per element.
(define (run reducer f v p s hole fill)
  (match hole
    [(? symbol? hole)
     (define path (subpattern-path p hole))
     (run/path reducer f v p s path fill)]
    [(list (? symbol? hole) '...)
     (match-define (vector pre-path sub-path) (subpattern-path p hole #t))
     (let loop ([fill fill] [k 0] [f f] [v v] [s s])
       (match fill
         [(cons fill0 fill*)
          (define path (path-append pre-path (path-add-ref k sub-path)))
          (RSbind (run/path reducer f v p s path fill0)
                  (lambda (f v _p s) (loop fill* (add1 k) f v s)))]
         ['() (RSunit f v p s)]))]))
;; run/path : reduces the subterm of f at `path` with the given reducer,
;; narrowing honesty/VT to the subterm and merging the results back.
;; FIX: three comment lines inside the cond had lost their ";;" markers
;; (bare text broke the form) — restored below.
(define (run/path reducer f v p s path fill)
  (define fctx (path-replacer f path #:resyntax? #f))
  (define sub-f (path-get f path))
  (define sub-hm (honesty-at-path (honesty) path))
  (DEBUG (eprintf "run/path: honesty ~s at path ~s => ~s\n" (honesty) path sub-hm))
  (define-values (vctx sub-v sub-vt)
    (cond [(eq? sub-hm 'F)
           ;; path might be out of bounds for v => can't take vctx => sub-v is meaningless
           ;; probably not much point in narrowing VT (and nontrivial to do right)
           (define sub-v v)
           (define sub-vt (the-vt))
           (values #f sub-v sub-vt)]
          [else
           ;; can take vctx, but must also take narrowed VT (when sub-hm != 'T)
           (define vctx (path-replacer v path #:resyntax? #t))
           (define sub-v (path-get v path))
           (define sub-vt (if (eq? sub-hm 'T) #f (vt-zoom (the-vt) path)))
           (values vctx sub-v sub-vt)]))
  (DEBUG (eprintf "run/path: run ~s on f=~.s; v=~.s\n"
                  reducer (stx->datum sub-f) (stx->datum sub-v)))
  ((parameterize ((the-context (if vctx (cons vctx (the-context)) (the-context)))
                  (honesty sub-hm)
                  (the-vt sub-vt))
     (RScase ((reducer fill) sub-f sub-v (quote-pattern _) s)
             (lambda (f2 v2 _p2 _s2)
               (define end-hm (honesty))
               (define end-vt (the-vt))
               (lambda ()
                 (define merged-hm (honesty-merge-at-path (honesty) path end-hm))
                 (DEBUG
                  (eprintf "\n<< run/path merge old ~s and sub ~s => ~s\n"
                           (honesty) end-hm merged-hm)
                  (eprintf " v => ~.s\n" (stx->datum (if vctx (vctx v2) v2))))
                 (honesty merged-hm)
                 (the-vt (cond
                           [(eq? sub-hm 'F) end-vt]
                           [sub-vt (vt-unzoom end-vt path)]
                           [end-vt
                            (if (the-vt)
                                (vt-merge-at-path (the-vt) path end-vt)
                                (vt-merge-at-path f path end-vt))]
                           [else (the-vt)]))
                 (DEBUG
                  (eprintf " vt => ~e\n" (the-vt))
                  (when (the-vt)
                    (eprintf " vt-stx => ~.s\n" (stx->datum (vt->stx (the-vt))))))
                 (RSunit (fctx f2) (if vctx (vctx v2) v2) p s)))
             (lambda (exn)
               (lambda () (RSfail exn)))))))
;; revappend : (Listof X) (Listof Y) -> (Listof X ... . Y)
;; Reverse `a` onto the front of `b`; `a` must be a proper list.
(define (revappend a b)
  (foldl cons b a))
;; same-contour? : Stx Stx -> Boolean
;; True iff x and y have the same cons-structure after stripping syntax
;; (atom values are ignored; only pair shape is compared).
(define (same-contour? x y)
  (let loop ([x (stx->datum x)] [y (stx->datum y)])
    (cond [(and (pair? x) (pair? y))
           (and (loop (car x) (car y)) (loop (cdr x) (cdr y)))]
          [else (not (or (pair? x) (pair? y)))])))
;; stx-contour-diff : Stx Stx -> Datum
;; Structural diff of two terms (after stx->datum): subtrees that agree
;; collapse to '_, differing leaves become #s(DIFF x y) prefab records.
(define (stx-contour-diff x y)
  (let loop ([x (stx->datum x)] [y (stx->datum y)])
    (cond [(and (pair? x) (pair? y))
           (let ([d1 (loop (car x) (car y))]
                 [d2 (loop (cdr x) (cdr y))])
             ;; collapse (cons '_ '_) to '_ so equal regions stay compact
             (cond [(and (eq? d1 '_) (eq? d2 '_)) '_]
                   [else (cons d1 d2)]))]
          [(and (null? x) (null? y)) '()]
          [(equal? x y) '_]
          [else `#s(DIFF ,x ,y)])))
;; stx-eq-diff : Stx Stx -> Datum
;; Like stx-contour-diff but compares raw syntax objects with equal?;
;; errors if the two terms' shapes diverge with different datum content.
(define (stx-eq-diff a b)
  (let loop ([a a] [b b])
    (cond [(and (stx-null? a) (stx-null? b)) '()]
          [(equal? a b) '_]
          [(stx-pair? a)
           (cons (loop (stx-car a) (stx-car b))
                 (loop (stx-cdr a) (stx-cdr b)))]
          [else
           ;; atoms: only acceptable if the underlying data agree
           (unless (equal? (stx->datum a) (stx->datum b))
             (error 'stx-eq-diff "different shapes: ~.s, ~.s" a b))
           (stx->datum a)])))
;; flatten-identifiers : Stx -> (Listof Identifier)
;; Flattens a (possibly nested) syntax list of identifiers into a flat list.
;; NOTE: in syntax-case the [else ...] clause binds `else` as a pattern
;; variable matching anything, so it acts as the catch-all for atoms that
;; are neither identifiers nor list structure.
(define (flatten-identifiers stx)
  (syntax-case stx ()
    [id (identifier? #'id) (list #'id)]
    [() null]
    [(x . y) (append (flatten-identifiers #'x) (flatten-identifiers #'y))]
    [else (error 'flatten-identifiers "neither syntax list nor identifier: ~s"
                 (if (syntax? stx)
                     (syntax->datum stx)
                     stx))]))
;; ----------------------------------------
;; Macro hiding
;;
;; (The commentary below lost its ";;" markers — and some fragments — in
;; extraction; restored as comments, gaps marked with [...].)
;;
;; Example: (define x (let () 1)) -> (define-values (x) (let () 1)). On the
;; hidden side, we will have f = (let () 1) and v = (define x (let () 1)). Then we
;; seek for (let () 1) and find it at vctx = (define x []), and we create a
;; context for the (initially honest) reduction of (let () 1), but we put
;; that reduction sequence in the synthetic context vctx. (Note: see also
;; hm1 and hm2, respectively. We consider (cons 'T 'T) = 'T.)
;;
;; Consider the actual expansion (#%expression A) -> (#%expression A*) -> A*.
;; If hiding produces (#%expression A) -> (#%expression A**), then we cannot
;; apply the step (#%expression A*) -> A*. There are two options: [...]
;; contexts. For example, the if form has three independent subexpressions,
;; and macro hiding in the first subexpression should not affect the second.
;; [...] the letrec transformation, and should start with the visible results
;; of pass1. That is, [...]'s context depends on pass1. The old macro stepper
;; [...] expansion of (λ () (define x 1) (begin 2 3)), the honesty lattice allows us [...]
;;
;; The difficult half of macro hiding is *seeking* visible subterms so their
;; [...] Instead, when the honesty level drops below 'T, a VT (visible terms tracker)
;; is created from the last visible honest term. The VT records scope-renames
;; and other syntax adjustements. When a term is searched in the VT, it applies
;; [...] visible honest term, and if so at what path. The synthetic context (vctx) is [...]
;;
;; Invariants (when not fully honest):
;; - (the-vt) is a VT, and
;; - (stx->datum (vt->stx (the-vt))) = (stx->datum v)
;;
;; An HonestyMask is one of
;; - 'T                              -- a fully honest (true) term
;; - 'F                              -- a fully synthetic term
;; - (cons HonestyMask HonestyMask)  -- a true pair
;; Note: Since HonestyMask < Stx, can use path functions on HonestyMasks.
;; hmcons : HonestyMask HonestyMask -> HonestyMask
;; Smart constructor: (cons 'T 'T) collapses to 'T so fully honest
;; subtrees stay atomic.
(define (hmcons hm1 hm2)
  (cond [(and (eq? hm1 'T) (eq? hm2 'T)) 'T]
        [else (cons hm1 hm2)]))
;; honesty-at-path : HonestyMask Path -> HonestyMask
;; (signature comment above had lost its ";;" marker — restored)
;; Returns the honesty sub-mask at `path`, stopping early if the mask
;; bottoms out at an atom ('T or 'F) before the path is exhausted.
(define (honesty-at-path hm path)
  (define-values (hm* path*) (path-get-until hm path symbol?))
  hm*)
;; honesty-merge : HonestyMask HonestyMask -> HonestyMask
;; (signature comment above had lost its ";;" marker — restored)
;; Pointwise meet of two masks: 'T is the identity; any structural
;; disagreement collapses to 'F.
(define (honesty-merge hm1 hm2)
  (let loop ([hm1 hm1] [hm2 hm2])
    (match* [hm1 hm2]
      [['T hm] hm]
      [[hm 'T] hm]
      [[(cons hm1a hm1b) (cons hm2a hm2b)]
       (hmcons (loop hm1a hm2a) (loop hm1b hm2b))]
      [[_ _] 'F])))
;; honesty-merge-at-path : HonestyMask Path HonestyMask -> HonestyMask
;; Merges the first hm's subtree at path with second subtree.
;; (the two comment lines above had lost their ";;" markers — restored)
(define (honesty-merge-at-path hm1 path hm2)
  (define (loop hm1 path)
    (match path
      [(empty-path) (honesty-merge hm1 hm2)]
      [(path-add-car path)
       (match hm1
         [(cons hm1a hm1b) (cons (loop hm1a path) hm1b)]
         ;; 'T expands to (cons 'T 'T) on demand
         ['T (hmcons (loop 'T path) 'T)]
         ['F 'F])]
      [(path-add-cdrs n path)
       (let tailloop ([hm1 hm1] [n n])
         (cond [(zero? n) (loop hm1 path)]
               [else
                (match hm1
                  [(cons hm1a hm1b) (hmcons hm1a (tailloop hm1b (sub1 n)))]
                  ['T (hmcons 'T (tailloop 'T (sub1 n)))]
                  ['F 'F])]))]))
  (loop hm1 path))
;; An HonestyMaskSpec extends HonestyMask with
;; - #s(hmrep HonestyMask) -- a true list whose elements have the given honesty
;; (the two comment lines above had lost their ";;" markers — restored)
(struct hmrep (hm) #:prefab)
;; honesty>=? : HonestyMask HonestyMaskSpec -> Boolean
;; Returns #t if hm1 is at least as honest as hm2.
;; (comment above had lost its ";;" marker and misspelled "Retuns" — fixed)
(define (honesty>=? hm1 hm2)
  (let loop ([hm1 hm1] [hm2 hm2])
    (match* [hm1 hm2]
      [['T _] #t]
      [[_ 'F] #t]
      [[(cons hm1a hm1b) (cons hm2a hm2b)]
       (and (loop hm1a hm2a) (loop hm1b hm2b))]
      ;; against a repetition spec: head must satisfy the element mask,
      ;; tail must satisfy the same spec again
      [[(cons hm1a hm1b) (hmrep hm2e)]
       (and (loop hm1a hm2e) (loop hm1b hm2))]
      [[_ _] #f])))
|
4993001fb4a5f4f4b285b61847c907acda02d8444ecafd41db7710abb4d80d41 | buntine/Simply-Scheme-Exercises | 5-19.scm | ; Write a procedure insert-and that takes a sentence of items and returns a new
;sentence with an "and" in the right place:
;
;> (insert-and '(john bill wayne fred))
;(JOHN BILL WAYNE AND FRED)
; Everything but the last word (bl), then 'and, then the last word.
(define (insert-and sent)
  (sentence (bl sent) 'and (last sent)))
| null | https://raw.githubusercontent.com/buntine/Simply-Scheme-Exercises/c6cbf0bd60d6385b506b8df94c348ac5edc7f646/05-words-and-sentences/5-19.scm | scheme | Write a procedure insert-and that takes a sentence of items and returns a new
sentence with an “and” in the right place:
| > ( insert - and ’ ( ) )
( JOHN BILL WAYNE FRED AND )
(define (insert-and sent)
(sentence (bl sent) 'and (last sent)))
|
92253550386a84da3d53ea9560477a372bae3c7293a1bb3fdfa7def200897c22 | fpco/ide-backend | Update.hs | # LANGUAGE FlexibleContexts , , ScopedTypeVariables , TemplateHaskell , OverloadedStrings #
-- | IDE session updates
--
-- We should only be using internal types here (explicit strictness/sharing)
module IdeSession.Update (
-- * Starting and stopping
initSession
, initSessionWithCallbacks
, SessionInitParams(..)
, defaultSessionInitParams
, shutdownSession
, forceShutdownSession
, restartSession
-- * Session updates
, IdeSessionUpdate -- Abstract
, updateSession
, updateSourceFile
, updateSourceFileFromFile
, updateSourceFileDelete
, updateGhcOpts
, updateRtsOpts
, updateRelativeIncludes
, updateCodeGeneration
, updateDataFile
, updateDataFileFromFile
, updateDataFileDelete
, updateDeleteManagedFiles
, updateEnv
, updateArgs
, updateStdoutBufferMode
, updateStderrBufferMode
, updateTargets
, buildExe
, buildDoc
, buildLicenses
-- * Running code
, runStmt
, runStmtPty
, runExe
, resume
, setBreakpoint
, printVar
-- * Debugging
, crashGhcServer
, buildLicsFromPkgs
, LicenseArgs(..)
)
where
import Prelude hiding (mod, span)
import Control.Concurrent (threadDelay)
import Control.Monad (when, unless)
import Control.Monad.IO.Class (liftIO)
import Data.Accessor (Accessor, (^.))
import Data.List (elemIndices, isPrefixOf)
import Data.Maybe (fromMaybe, isJust)
import Data.Monoid (Monoid(..), (<>))
import Distribution.Simple (PackageDBStack, PackageDB(..))
import System.Environment (getEnvironment, unsetEnv, lookupEnv)
import System.Exit (ExitCode(..))
import System.FilePath ((</>))
import System.IO.Temp (createTempDirectory)
import System.IO (IOMode(..))
import System.Process (proc, CreateProcess(..), StdStream(..), createProcess, waitForProcess, interruptProcessGroupOf, terminateProcess)
import qualified Control.Exception as Ex
import qualified Data.ByteString as BSS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Lazy.UTF8 as BSL.UTF8
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified System.Directory as Dir
import qualified System.IO as IO
import IdeSession.Cabal
import IdeSession.Config
import IdeSession.GHC.API
import IdeSession.GHC.Client
import IdeSession.RPC.API (ExternalException(..))
import IdeSession.State
import IdeSession.Strict.Container
import IdeSession.Strict.MVar (newMVar, newEmptyMVar, StrictMVar)
import IdeSession.Types.Private hiding (RunResult(..))
import IdeSession.Types.Progress
import IdeSession.Types.Public (RunBufferMode(..))
import IdeSession.Update.ExecuteSessionUpdate
import IdeSession.Update.IdeSessionUpdate
import IdeSession.Util
import IdeSession.Util.BlockingOps
import IdeSession.Util.Logger
import IdeSession.Util.PortableIO
import IdeSession.Util.PortableFiles (moduleNameToExeName)
import qualified IdeSession.Query as Query
import qualified IdeSession.Strict.List as List
import qualified IdeSession.Strict.Map as Map
import qualified IdeSession.Strict.Maybe as Maybe
import qualified IdeSession.Types.Private as Private
import qualified IdeSession.Types.Public as Public
{-------------------------------------------------------------------------------
Session initialization
-------------------------------------------------------------------------------}
-- | How should the session be initialized?
--
-- Client code should use 'defaultSessionInitParams' to protect itself against
-- future extensions of this record.
data SessionInitParams = SessionInitParams {
    -- | Previously computed cabal macros,
    -- or 'Nothing' to compute them on startup
    sessionInitCabalMacros :: Maybe BSL.ByteString
    -- | Initial ghc options
  , sessionInitGhcOptions :: [String]
    -- | Include paths (equivalent of GHC's @-i@ parameter) relative to the
    -- temporary directory where we store the session's source files.
    --
    -- By default this is the singleton list @[""]@ -- i.e., we include the
    -- sources dir but nothing else.
  , sessionInitRelativeIncludes :: [FilePath]
    -- | Targets for compilation
    --
    -- Defaults to @TargetsExclude []@ -- i.e., compile all modules in the
    -- project.
  , sessionInitTargets :: Public.Targets
    -- | RTS options
    --
    -- Defaults to @["-K8M"]@
  , sessionInitRtsOpts :: [String]
    -- | dist/ directory.
  , sessionInitDistDir :: !(Maybe FilePath)
  }
  deriving Show
-- Values here must stay in sync with the defaults documented on
-- 'SessionInitParams'.
defaultSessionInitParams :: SessionInitParams
defaultSessionInitParams = SessionInitParams {
    sessionInitCabalMacros = Nothing
  , sessionInitGhcOptions = []
  , sessionInitRelativeIncludes = [""]
  , sessionInitTargets = Public.TargetsExclude []
  , sessionInitRtsOpts = ["-K8M"]
  , sessionInitDistDir = Nothing
  }
-- | Session initialization parameters for an existing session.
--
-- For internal use only (used in 'updateSession' when restarting the session).
--
-- We set 'sessionInitCabalMacros' to 'Nothing' because the cabal macros file
-- has already been written to disk, and we don't remove the project directory
-- on a session restart.
sessionRestartParams :: IdeIdleState -> IdeSessionUpdate -> SessionInitParams
sessionRestartParams st IdeSessionUpdate{..} = SessionInitParams {
    -- macros are already on disk (see comment on this function above)
    sessionInitCabalMacros = Nothing
    -- for each field: take the pending update's value if present,
    -- otherwise carry over the current idle-state value
  , sessionInitGhcOptions = fromMaybe (st ^. ideGhcOpts) ideUpdateGhcOpts
  , sessionInitRelativeIncludes = fromMaybe (st ^. ideRelativeIncludes) ideUpdateRelIncls
  , sessionInitTargets = fromMaybe (st ^. ideTargets) ideUpdateTargets
  , sessionInitRtsOpts = fromMaybe (st ^. ideRtsOpts) ideUpdateRtsOpts
  , sessionInitDistDir = Nothing
  }
-- | Set up the initial state of the session according to the given parameters
execInitParams :: IdeStaticInfo -> SessionInitParams -> IO ()
execInitParams staticInfo SessionInitParams{..} = do
  -- currently the only filesystem side effect is writing the cabal macros
  writeMacros staticInfo sessionInitCabalMacros
-- | Write per-package CPP macros.
writeMacros :: IdeStaticInfo -> Maybe BSL.ByteString -> IO ()
writeMacros IdeStaticInfo{ideConfig = SessionConfig {..}, ..}
            configCabalMacros = do
  -- use supplied macros verbatim if given, otherwise generate them from the
  -- configured package DB stack
  macros <- case configCabalMacros of
              Nothing -> generateMacros configPackageDBStack configExtraPathDirs
              Just macros -> return (BSL.UTF8.toString macros)
  writeFile (cabalMacrosLocation ideDistDir) macros
{-------------------------------------------------------------------------------
Session startup
-------------------------------------------------------------------------------}
-- | Create a fresh session, using some initial configuration.
--
-- Throws an exception if the configuration is invalid, or if GHC_PACKAGE_PATH
-- is set.
initSession :: SessionInitParams -> SessionConfig -> IO IdeSession
initSession = initSessionWithCallbacks defaultIdeCallbacks  -- default (no-op) callbacks
-- | Like 'initSession', but also takes an 'IdeCallbacks'.
--
-- Since 0.10.0
-- Three comment lines in this body had lost their "--" markers (bare text
-- inside the do-block) — restored; code is otherwise unchanged.
initSessionWithCallbacks :: IdeCallbacks -> SessionInitParams -> SessionConfig -> IO IdeSession
initSessionWithCallbacks ideCallbacks initParams@SessionInitParams{..} ideConfig@SessionConfig{..} = do
  let logFunc = ideCallbacksLogFunc ideCallbacks
  --FIXME: add version info here.
  $logInfo "Initializing ide-backend session"
  -- verifyConfig used to bail if GHC_PACKAGE_PATH was set. Instead,
  -- we just unset it so that cabal invocations are happy. It's up to
  -- the user of ide-backend to set 'configPackageDBStack' based on
  -- this environment variable.
  mpath <- lookupEnv "GHC_PACKAGE_PATH"
  when (isJust mpath) $ do
    $logWarn "ide-backend doesn't pay attention to GHC_PACKAGE_PATH, but it is set in the environment"
    unsetEnv "GHC_PACKAGE_PATH"
  verifyConfig ideConfig
  configDirCanon <- Dir.canonicalizePath configDir
  ideSessionDir <- createTempDirectory configDirCanon "session."
  $logDebug $ "Session dir = " <> Text.pack ideSessionDir
  let ideDistDir = fromMaybe (ideSessionDir </> "dist/") sessionInitDistDir
  $logDebug $ "Dist dir = " <> Text.pack ideDistDir
  let ideStaticInfo = IdeStaticInfo{..}
  -- Create the common subdirectories of session.nnnn so that we don't have to
  -- worry about creating these elsewhere
  case configLocalWorkingDir of
    Just dir -> $logDebug $ "Local working dir = " <> Text.pack dir
    Nothing -> do
      Dir.createDirectoryIfMissing True (ideSourceDir ideStaticInfo)
      Dir.createDirectoryIfMissing True (ideDataDir ideStaticInfo)
      Dir.createDirectoryIfMissing True ideDistDir
      Dir.createDirectoryIfMissing True (ideSessionObjDir ideSessionDir)
  -- Local initialization
  execInitParams ideStaticInfo initParams
  -- Start the GHC server (as a separate process)
  mServer <- forkGhcServer sessionInitGhcOptions
                           sessionInitRelativeIncludes
                           sessionInitRtsOpts
                           ideStaticInfo
                           ideCallbacks
  let (state, server, version) = case mServer of
        Right (s, v) -> (IdeSessionIdle, s, v)
        Left e -> (IdeSessionServerDied e, Ex.throw e, Ex.throw e)
  -- The value of _ideLogicalTimestamp field is a workaround for
  -- the problems with 'invalidateModSummaryCache', which itself is
  -- a workaround for an upstream GHC issue (ticket URL lost in extraction).
  -- We have to make sure that file times never reach 0, because this will
  -- trigger an exception.
  -- We rather arbitrarily start at Jan 2, 1970.
  let idleState = IdeIdleState {
          _ideLogicalTimestamp = 86400
        , _ideComputed = Maybe.nothing
        , _ideGenerateCode = False
        , _ideManagedFiles = ManagedFilesInternal [] []
        , _ideObjectFiles = []
        , _ideBuildExeStatus = Nothing
        , _ideBuildDocStatus = Nothing
        , _ideBuildLicensesStatus = Nothing
        , _ideEnv = []
        , _ideArgs = []
        , _ideStdoutBufferMode = RunNoBuffering
        , _ideStderrBufferMode = RunNoBuffering
        , _ideBreakInfo = Maybe.nothing
        , _ideGhcServer = server
        , _ideGhcVersion = version
        , _ideGhcOpts = sessionInitGhcOptions
        , _ideRelativeIncludes = sessionInitRelativeIncludes
        , _ideTargets = sessionInitTargets
        , _ideRtsOpts = sessionInitRtsOpts
        }
  ideState <- newMVar (state idleState)
  return IdeSession{..}
-- | Verify configuration, and throw an exception if configuration is invalid
-- | Verify configuration, and throw an exception if configuration is invalid
verifyConfig :: SessionConfig -> IO ()
verifyConfig SessionConfig{..} = do
  unless (isValidPackageDB configPackageDBStack) $
    Ex.throw . userError $ "Invalid package DB stack: "
                        ++ show configPackageDBStack
  where
    -- the global DB must be exactly the first element; the user DB, if
    -- present at all, must be exactly the second
    isValidPackageDB :: PackageDBStack -> Bool
    isValidPackageDB stack =
         elemIndices GlobalPackageDB stack == [0]
      && elemIndices UserPackageDB stack `elem` [[], [1]]
{-------------------------------------------------------------------------------
Session shutdown
-------------------------------------------------------------------------------}
-- | Close a session down, releasing the resources.
--
-- This operation is the only one that can be run after a shutdown was already
-- performed. This lets the API user execute an early shutdown, e.g., before
-- the @shutdownSession@ placed inside 'bracket' is triggered by a normal
-- program control flow.
--
-- If code is still running, it will be interrupted.
shutdownSession :: IdeSession -> IO ()
shutdownSession = shutdownSession' False  -- graceful (not forced) shutdown
-- | Like 'shutdownSession', but don't be nice about it (SIGKILL)
forceShutdownSession :: IdeSession -> IO ()
forceShutdownSession = shutdownSession' True  -- forced shutdown
-- | Internal generalization of 'shutdownSession' and 'forceShutdownSession'
shutdownSession' :: Bool -> IdeSession -> IO ()
shutdownSession' forceTerminate IdeSession{ideState, ideStaticInfo} = do
  $modifyStrictMVar_ ideState $ \state ->
    case state of
      IdeSessionIdle idleState -> do
        if forceTerminate
          then forceShutdownGhcServer $ _ideGhcServer idleState
          else shutdownGhcServer $ _ideGhcServer idleState
        cleanupDirs
        return IdeSessionShutdown
      -- shutting down twice is a no-op (documented behaviour)
      IdeSessionShutdown ->
        return IdeSessionShutdown
      -- server already dead: nothing to stop, just clean up
      IdeSessionServerDied _ _ -> do
        cleanupDirs
        return IdeSessionShutdown
  where
    -- remove the session directory, if configured to do so; tolerate it
    -- already being gone
    cleanupDirs :: IO ()
    cleanupDirs =
      when (configDeleteTempFiles . ideConfig $ ideStaticInfo) $
        ignoreDoesNotExist $
          Dir.removeDirectoryRecursive (ideSessionDir ideStaticInfo)
{-------------------------------------------------------------------------------
Session restart
-------------------------------------------------------------------------------}
-- | Restart a session
--
-- This puts the session in a "dead" state; it won't _actually_ be restarted
-- until the next call to 'updateSession'.
restartSession :: IdeSession -> IO ()
restartSession IdeSession{ideState} =
  $modifyStrictMVar_ ideState $ \state ->
    case state of
      -- mark as dead; the actual restart happens in the next 'updateSession'
      IdeSessionIdle idleState ->
        return $ IdeSessionServerDied forcedRestart idleState
      IdeSessionServerDied _ _ ->
        return state -- Leave Died state as is
      IdeSessionShutdown ->
        fail "Shutdown session cannot be restarted."
-- | Outcome of 'executeRestart'.
data RestartResult =
    -- | New server is up; the update replays env/args/code-gen settings.
    ServerRestarted IdeIdleState IdeSessionUpdate
    -- | Could not fork a new server; the state records the exception.
  | ServerRestartFailed IdeIdleState
-- Kill the current ide-backend-server, fork a fresh one, and reset the idle
-- state to match the given init params. On success also returns the update
-- that replays env/args/code generation on the new server.
executeRestart :: SessionInitParams
               -> IdeStaticInfo
               -> IdeCallbacks
               -> IdeIdleState
               -> IO RestartResult
executeRestart initParams@SessionInitParams{..} staticInfo ideCallbacks idleState = do
  let logFunc = ideCallbacksLogFunc ideCallbacks
  $logInfo "Restarting ide-backend-server"
  forceShutdownGhcServer $ _ideGhcServer idleState
  mServer <- forkGhcServer sessionInitGhcOptions
                           sessionInitRelativeIncludes
                           sessionInitRtsOpts
                           staticInfo
                           ideCallbacks
  case mServer of
    Right (server, version) -> do
      execInitParams staticInfo initParams
      -- Reset back to initial values ..
      let idleState' = idleState {
              _ideComputed = Maybe.nothing
            , _ideGhcOpts = sessionInitGhcOptions
            , _ideRelativeIncludes = sessionInitRelativeIncludes
            , _ideRtsOpts = sessionInitRtsOpts
            , _ideGenerateCode = False
            , _ideObjectFiles = []
            , _ideEnv = []
            , _ideArgs = []
            , _ideGhcServer = server
            , _ideGhcVersion = version
            , _ideTargets = sessionInitTargets
            }
      -- .. and let an update make sure we bring the state back to where it was
      let upd = mconcat [
              updateEnv (idleState ^. ideEnv)
            , updateArgs (idleState ^. ideArgs)
            , updateCodeGeneration (idleState ^. ideGenerateCode)
            ]
      return (ServerRestarted idleState' upd)
    Left e -> do
      -- record the failure lazily: touching the server fields re-throws
      let idleState' = idleState {
              _ideGhcServer = Ex.throw e
            , _ideGhcVersion = Ex.throw e
            }
      return (ServerRestartFailed idleState')
{-------------------------------------------------------------------------------
  Session update

  Here we deal only with the top-level logic: restart the session and then run
  the session update. The specifics of how to execute the individual parts
  of the session update are defined in IdeSession.Update.ExecuteSessionUpdate.
-------------------------------------------------------------------------------}
-- | Given the current IDE session state, go ahead and update the session,
-- eventually resulting in a new session state, with fully updated computed
-- information (typing, etc.).
--
-- The update can be a long running operation, so we support a callback which
-- can be used to monitor progress of the operation.
updateSession :: IdeSession -> IdeSessionUpdate -> (Public.UpdateStatus -> IO ()) -> IO ()
updateSession = flip . updateSession'  -- reorder args: progress callback goes last
updateSession' :: IdeSession -> (Public.UpdateStatus -> IO ()) -> IdeSessionUpdate -> IO ()
updateSession' IdeSession{ideStaticInfo, ideState, ideCallbacks} updateStatus = \update ->
    $modifyStrictMVar_ ideState $ go False update
  where
    logFunc = ideCallbacksLogFunc ideCallbacks
    -- The Bool tracks whether we just restarted: it prevents duplicate
    -- status callbacks and, via 'restart', an infinite restart loop.
    go :: Bool -> IdeSessionUpdate -> IdeSessionState -> IO IdeSessionState
    go justRestarted update (IdeSessionIdle idleState) =
      if not (requiresSessionRestart idleState update)
        then do
          (idleState', mex) <- runSessionUpdate justRestarted update ideStaticInfo updateStatus ideCallbacks idleState
          case mex of
            Nothing -> do
              updateStatus Public.UpdateStatusDone
              return $ IdeSessionIdle idleState'
            -- server-side exception during the update: mark session dead
            Just ex -> do
              updateStatus $ Public.UpdateStatusFailed (Text.pack (show ex))
              return $ IdeSessionServerDied ex idleState'
        else do
          $logInfo $ "Restarting session due to update requiring it."
          unless justRestarted $ updateStatus Public.UpdateStatusRequiredRestart
          let restartParams = sessionRestartParams idleState update
          restart justRestarted update restartParams idleState
    go justRestarted update (IdeSessionServerDied ex idleState) = do
      let msg = Text.pack (show ex)
      $logInfo $ "Restarting session due to server dieing: " <> msg
      unless justRestarted $ updateStatus (Public.UpdateStatusErrorRestart msg)
      let restartParams = sessionRestartParams idleState update
      restart justRestarted update restartParams idleState
    go _ _ IdeSessionShutdown =
      Ex.throwIO (userError "Session already shut down.")
    -- a restart immediately after a restart means we are looping: give up
    restart :: Bool -> IdeSessionUpdate -> SessionInitParams -> IdeIdleState -> IO IdeSessionState
    restart True _ _ idleState =
      return $ IdeSessionServerDied serverRestartLoop idleState
    restart False update restartParams idleState = do
      -- To avoid "<stdout> hPutChar: resource vanished (Broken pipe)":
      -- TODO: I wish I knew why this is necessary :(
      threadDelay 100000
      restartResult <- executeRestart restartParams ideStaticInfo ideCallbacks idleState
      case restartResult of
        ServerRestarted idleState' resetSession ->
          go True (resetSession <> update) (IdeSessionIdle idleState')
        ServerRestartFailed idleState' -> do
          updateStatus Public.UpdateStatusFailedToRestart
          return $ IdeSessionServerDied failedToRestart idleState'
-- | @requiresSessionRestart st upd@ returns true if update @upd@ requires a
-- session restart given current state @st@
--
-- See 'sessionRestartParams' to compute the session initialization parameters
-- for the new session.
-- (doc comment above and the "-l" note below had lost their "--" markers —
-- restored; code unchanged)
requiresSessionRestart :: IdeIdleState -> IdeSessionUpdate -> Bool
requiresSessionRestart st IdeSessionUpdate{..} =
     (ideUpdateRelIncls `changes` ideRelativeIncludes)
  || (ideUpdateTargets `changes` ideTargets)
  || (ideUpdateRtsOpts `changes` ideRtsOpts)
  || (any optRequiresRestart (listChanges' ideUpdateGhcOpts ideGhcOpts))
  where
    optRequiresRestart :: String -> Bool
    optRequiresRestart str =
      -- Library flags cannot be changed dynamically (#214)
      "-l" `isPrefixOf` str
    changes :: Eq a => Maybe a -> Accessor IdeIdleState a -> Bool
    changes Nothing _ = False
    changes (Just x) y = x /= st ^. y
    listChanges' :: Ord a => Maybe [a] -> Accessor IdeIdleState [a] -> [a]
    listChanges' Nothing _ = []
    listChanges' (Just xs) ys = listChanges xs (st ^. ys)
-- | @listChanges xs ys@ is the list of elements that appear in @xs@ but not
-- in @ys@ and the set of elements that appear in @ys@ but not in @xs@.
--
-- Considering the lists as sets, it is the complement of the intersection
-- between the two sets (i.e. the symmetric difference, in ascending order).
-- (two of the comment lines above had lost their "--" markers — restored)
listChanges :: Ord a => [a] -> [a] -> [a]
listChanges xs ys =
    Set.toList $ (a `Set.union` b) `Set.difference` (a `Set.intersection` b)
  where
    a = Set.fromList xs
    b = Set.fromList ys
{-------------------------------------------------------------------------------
Running code
-------------------------------------------------------------------------------}
-- | Run a given function in a given module (the name of the module
-- is the one between @module ... end@, which may differ from the file name).
-- The function resembles a query, but it's not instantaneous
-- and the running code can be interrupted or interacted with.
--
-- 'runStmt' will throw an exception if the code has not been compiled yet,
-- or when the server is in a dead state (i.e., when ghc has crashed). In the
-- latter case 'getSourceErrors' will report the ghc exception; it is the
-- responsibility of the client code to check for this.
runStmt :: IdeSession -> String -> String -> IO (RunActions Public.RunResult)
runStmt ideSession m fun = runCmd ideSession $ \idleState -> RunStmt {
    runCmdModule = m
  , runCmdFunction = fun
  , runCmdStdout = idleState ^. ideStdoutBufferMode
  , runCmdStderr = idleState ^. ideStderrBufferMode
  , runCmdPty = False -- plain pipes; see 'runStmtPty' for a pseudoterminal
  }
-- | Like 'runStmt', but runs the statement in a pseudoterminal.
runStmtPty :: IdeSession -> String -> String -> IO (RunActions Public.RunResult)
runStmtPty ideSession m fun = runCmd ideSession $ \idleState -> RunStmt {
    runCmdModule = m
  , runCmdFunction = fun
  , runCmdStdout = idleState ^. ideStdoutBufferMode
  , runCmdStderr = idleState ^. ideStderrBufferMode
  , runCmdPty = True -- only difference from 'runStmt': run in a pty
  }
-- | Run the main function from the last compiled executable.
--
-- 'runExe' will throw an exception if there were no executables
-- compiled since session init, or if the last compilation was not
-- successful (checked as in @getBuildExeStatus@)
-- or if none of the executables last compiled have the supplied name
-- or when the server is in a dead state (i.e., when ghc has crashed). In the
-- last case 'getSourceErrors' will report the ghc exception; it is the
-- responsibility of the client code to check for this.
-- One comment inside this body ("We don't need to close any handles ...")
-- had lost its "--" marker (bare text between the final expression and the
-- where clause) — restored; code is otherwise unchanged.
runExe :: IdeSession -> String -> IO (RunActions ExitCode)
runExe session m = do
 let handleQueriesExc (_ :: Query.InvalidSessionStateQueries) =
       fail $ "Wrong session state when trying to run an executable."
 Ex.handle handleQueriesExc $ do
  mstatus <- Query.getBuildExeStatus session
  case mstatus of
    Nothing ->
      fail $ "No executable compilation initiated since session init."
    (Just status@ExitFailure{}) ->
      fail $ "Last executable compilation failed with status "
             ++ show status ++ "."
    Just ExitSuccess -> do
      distDir <- Query.getDistDir session
      dataDir <- Query.getDataDir session
      args <- Query.getArgs session
      envInherited <- getEnvironment
      envOverride <- Query.getEnv session
      let overrideVar :: (String, Maybe String) -> Strict (Map String) String
                      -> Strict (Map String) String
          overrideVar (var, Just val) env = Map.insert var val env
          overrideVar (var, Nothing) env = Map.delete var env
          envMap = foldr overrideVar (Map.fromList envInherited) envOverride
      let exePath = distDir </> "build" </> m </> moduleNameToExeName m
      exeExists <- Dir.doesFileExist exePath
      unless exeExists $
        fail $ "No compiled executable file "
               ++ m ++ " exists at path "
               ++ exePath ++ "."
      (stdRd, stdWr) <- liftIO createPipe
      std_rd_hdl <- fdToHandle stdRd ReadMode
      std_wr_hdl <- fdToHandle stdWr WriteMode
      let cproc = (proc exePath args) { cwd = Just dataDir
                                      , env = Just $ Map.toList envMap
                                      , create_group = True
                                        -- for interruptProcessGroupOf
                                      , std_in = CreatePipe
                                      , std_out = UseHandle std_wr_hdl
                                      , std_err = UseHandle std_wr_hdl
                                      }
      (Just stdin_hdl, Nothing, Nothing, ph) <- createProcess cproc
      -- The runActionState holds 'Just' the result of the snippet, or 'Nothing' if
      -- it has not yet terminated.
      runActionsState <- newMVar Nothing
      return $ RunActions
        { runWait = $modifyStrictMVar runActionsState $ \st -> case st of
            Just outcome ->
              return (Just outcome, Right outcome)
            Nothing -> do
              bs <- BSS.hGetSome std_rd_hdl blockSize
              if BSS.null bs
                then do
                  res <- waitForProcess ph
                  return (Just res, Right res)
                else
                  return (Nothing, Left bs)
        , interrupt = interruptProcessGroupOf ph
        , supplyStdin = \bs -> BSS.hPut stdin_hdl bs >> IO.hFlush stdin_hdl
        , forceCancel = terminateProcess ph
        }
      -- We don't need to close any handles. At the latest GC closes them.
 where
  -- TODO: What is a good value here?
  blockSize :: Int
  blockSize = 4096
-- | Resume a previously interrupted statement
resume :: IdeSession -> IO (RunActions Public.RunResult)
resume ideSession = runCmd ideSession (const Resume)  -- idle state not consulted
-- | Internal generalization used in 'runStmt' and 'resume'
-- (typo "geneneralization" fixed; two comment lines inside the body had
-- lost their "--" markers — restored; code is otherwise unchanged)
runCmd :: IdeSession -> (IdeIdleState -> RunCmd) -> IO (RunActions Public.RunResult)
runCmd session mkCmd = modifyIdleState session $ \idleState ->
  case (toLazyMaybe (idleState ^. ideComputed), idleState ^. ideGenerateCode) of
    (Just comp, True) -> do
      let cmd = mkCmd idleState
      checkStateOk comp cmd
      isBreak <- newEmptyMVar
      runActions <- rpcRun (idleState ^. ideGhcServer)
                           cmd
                           (translateRunResult isBreak)
      -- TODO: We should register the runActions somewhere so we can do a
      -- clean session shutdown?
      return (IdeSessionIdle idleState, runActions)
    _ ->
      -- This 'fail' invocation is, in part, a workaround for
      -- an upstream bug which would otherwise lead to a hard GHC crash,
      -- instead of providing a sensible error message
      -- that we could show to the user.
      fail "Cannot run before the code is generated."
  where
    checkStateOk :: Computed -> RunCmd -> IO ()
    checkStateOk comp RunStmt{..} =
      -- ideManagedFiles is irrelevant, because only the module name inside
      -- 'module .. where' counts.
      unless (Text.pack runCmdModule `List.elem` computedLoadedModules comp) $
        fail $ "Module " ++ show runCmdModule
            ++ " not successfully loaded, when trying to run code."
    checkStateOk _comp Resume =
      -- TODO: should we check that there is anything to resume here?
      return ()
    translateRunResult :: StrictMVar (Strict Maybe BreakInfo)
                       -> Maybe Private.RunResult
                       -> IO Public.RunResult
    translateRunResult isBreak (Just Private.RunOk) = do
      $putStrictMVar isBreak Maybe.nothing
      return $ Public.RunOk
    translateRunResult isBreak (Just (Private.RunProgException str)) = do
      $putStrictMVar isBreak Maybe.nothing
      return $ Public.RunProgException str
    translateRunResult isBreak (Just (Private.RunGhcException str)) = do
      $putStrictMVar isBreak Maybe.nothing
      return $ Public.RunGhcException str
    translateRunResult isBreak (Just (Private.RunBreak breakInfo)) = do
      $putStrictMVar isBreak (Maybe.just breakInfo)
      return $ Public.RunBreak
    translateRunResult isBreak Nothing = do
      -- On a force cancellation we definitely didn't hit a breakpoint
      $putStrictMVar isBreak Maybe.nothing
      return $ Public.RunForceCancelled
-- | Breakpoint
--
-- Set a breakpoint at the specified location. Returns @Just@ the old value of the
-- breakpoint if successful, or @Nothing@ otherwise.
setBreakpoint :: IdeSession
              -> ModuleName        -- ^ Module where the breakpoint should be set
              -> Public.SourceSpan -- ^ Location of the breakpoint
              -> Bool              -- ^ New value for the breakpoint
              -> IO (Maybe Bool)   -- ^ Old value of the breakpoint (if valid)
setBreakpoint session breakMod breakSpan newValue =
    withIdleState session $ \st ->
      rpcBreakpoint (st ^. ideGhcServer) breakMod breakSpan newValue
-- | Print and/or force values during debugging
--
-- Only valid in breakpoint state.
printVar :: IdeSession
         -> Public.Name -- ^ Variable to print
         -> Bool        -- ^ Should printing bind new vars? (@:print@ vs. @:sprint@)
         -> Bool        -- ^ Should the value be forced? (@:print@ vs. @:force@)
         -> IO Public.VariableEnv
printVar session var bind forceEval = withBreakInfo session $ \idleState _ ->
  rpcPrint (idleState ^. ideGhcServer) var bind forceEval
-- TODO: We should do this translation only when we talk to ghc, and keep
-- the original Targets in the session state
-- ** IdeStaticInfo{..} <- asks ideSessionUpdateStaticInfo
-- ** let sourceDir = ideSessionSourceDir ideSessionDir
-- **     newTargets = case targets of
-- **       Public.TargetsInclude l ->
-- **         Public.TargetsInclude $ map (sourceDir </>) l
-- **       Public.TargetsExclude l ->
-- **         Public.TargetsExclude $ map (sourceDir </>) l
-- **
-- ** oldTargets <- get ideTargets
-- ** when (oldTargets /= newTargets) $ do
-- **   set ideTargets newTargets
-- **   restartSession'
{------------------------------------------------------------------------------
Debugging of ide-backend itself
------------------------------------------------------------------------------}
-- | Crash the GHC server. For debugging only. If the specified delay is
-- @Nothing@, crash immediately; otherwise, set up a thread that throws
-- an exception to the main thread after the delay.
crashGhcServer :: IdeSession -> Maybe Int -> IO ()
crashGhcServer IdeSession{..} delay = $withStrictMVar ideState $ \state ->
  case state of
    IdeSessionIdle idleState ->
      rpcCrash (idleState ^. ideGhcServer) delay
    _ ->
      -- Crashing only makes sense (and is only safe) while idle.
      Ex.throwIO $ userError "State not idle"
{-------------------------------------------------------------------------------
Auxiliary (ide-backend specific)
-------------------------------------------------------------------------------}
-- Run an action that requires the session to be stopped at a breakpoint;
-- throws a user error otherwise.
withBreakInfo :: IdeSession -> (IdeIdleState -> Public.BreakInfo -> IO a) -> IO a
withBreakInfo session act = withIdleState session $ \st ->
  maybe (Ex.throwIO (userError "Not in breakpoint state"))
        (act st)
        (toLazyMaybe (st ^. ideBreakInfo))
-- Run an action against the idle state, leaving the session idle afterwards.
withIdleState :: IdeSession -> (IdeIdleState -> IO a) -> IO a
withIdleState session act = modifyIdleState session $ \st -> do
  r <- act st
  return (IdeSessionIdle st, r)
-- Atomically transform the idle session state; throws a user error if the
-- session is not currently idle.
modifyIdleState :: IdeSession -> (IdeIdleState -> IO (IdeSessionState, a)) -> IO a
modifyIdleState IdeSession{..} act = $modifyStrictMVar ideState $ \st ->
  case st of
    IdeSessionIdle idle -> act idle
    _                   -> Ex.throwIO $ userError "State not idle"
-- Exception recorded when the server could not be restarted.
failedToRestart :: ExternalException
failedToRestart =
  ExternalException { externalException = Nothing
                    , externalStdErr    = "Failed to restart server"
                    }
-- Exception recorded when the user explicitly restarted the session.
forcedRestart :: ExternalException
forcedRestart =
  ExternalException { externalException = Nothing
                    , externalStdErr    = "Session manually restarted"
                    }
-- Exception recorded when restarting keeps failing in a loop.
serverRestartLoop :: ExternalException
serverRestartLoop =
  ExternalException { externalException = Nothing
                    , externalStdErr    = "Server restart loop"
                    }
| null | https://raw.githubusercontent.com/fpco/ide-backend/860636f2d0e872e9481569236bce690637e0016e/ide-backend/IdeSession/Update.hs | haskell | | IDE session updates
We should only be using internal types here (explicit strictness/sharing)
* Starting and stopping
* Session updates
Abstract
* Running code
* Debugging
------------------------------------------------------------------------------
Session initialization
------------------------------------------------------------------------------
| How should the session be initialized?
Client code should use 'defaultSessionInitParams' to protect itself against
future extensions of this record.
| Previously computed cabal macros,
or 'Nothing' to compute them on startup
temporary directory where we store the session's source files.
By default this is the singleton list @[""]@ -- i.e., we include the
sources dir but nothing else.
| Targets for compilation
Defaults to @TargetsExclude []@ -- i.e., compile all modules in the
project.
| RTS options
| dist/ directory.
| Session initialization parameters for an existing session.
For internal use only (used in 'updateSession' when restarting the session).
We set 'sessionInitCabalMacros' to 'Nothing' because the cabal macros file
has already been written to disk, and we don't remove the project directory
on a session restart.
| Set up the initial state of the session according to the given parameters
------------------------------------------------------------------------------
Session startup
------------------------------------------------------------------------------
| Create a fresh session, using some initial configuration.
Throws an exception if the configuration is invalid, or if GHC_PACKAGE_PATH
is set.
FIXME: add version info here.
verifyConfig used to bail if GHC_PACKAGE_PATH was set. Instead,
we just unset it so that cabal invocations are happy. It's up to
the user of ide-backend to set 'configPackageDBStack' based on
this environment variable.
Create the common subdirectories of session.nnnn so that we don't have to
worry about creating these elsewhere
Local initialization
The value of _ideLogicalTimestamp field is a workaround for
the problems with 'invalidateModSummaryCache', which itself is
We have to make sure that file times never reach 0, because this will
trigger an exception ().
| Verify configuration, and throw an exception if configuration is invalid
------------------------------------------------------------------------------
Session shutdown
------------------------------------------------------------------------------
| Close a session down, releasing the resources.
This operation is the only one that can be run after a shutdown was already
performed. This lets the API user execute an early shutdown, e.g., before
the @shutdownSession@ placed inside 'bracket' is triggered by a normal
program control flow.
If code is still running, it will be interrupted.
| Internal generalization of 'shutdownSession' and 'forceShutdownSession'
------------------------------------------------------------------------------
Session restart
------------------------------------------------------------------------------
| Restart a session
This puts the session in a "dead" state; it won't _actually_ be restarted
until the next call to 'updateSession'.
Leave Died state as is
Reset back to initial values ..
.. and let an update make sure we bring the state back to where it was
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
| Given the current IDE session state, go ahead and update the session,
eventually resulting in a new session state, with fully updated computed
information (typing, etc.).
The update can be a long running operation, so we support a callback which
can be used to monitor progress of the operation.
To avoid "<stdout> hPutChar: resource vanished (Broken pipe)":
TODO: I wish I knew why this is necessary :(
See 'sessionRestartParams' to compute the session initialization parameters
for the new session.
| @listChanges xs ys@ is the list of elements that appear in @xs@ but not
Considering the lists as sets, it is the complement of the intersection
------------------------------------------------------------------------------
Running code
------------------------------------------------------------------------------
| Run a given function in a given module (the name of the module
is the one between @module ... end@, which may differ from the file name).
The function resembles a query, but it's not instantaneous
and the running code can be interrupted or interacted with.
'runStmt' will throw an exception if the code has not been compiled yet,
responsibility of the client code to check for this.
| Like 'runStmt', but runs the statement in a pseudoterminal.
| Run the main function from the last compiled executable.
'runExe' will throw an exception if there were no executables
compiled since session init, or if the last compilation was not
successful (checked as in @getBuildExeStatus@)
or if none of the executables last compiled have the supplied name
responsibility of the client code to check for this.
for interruptProcessGroupOf
The runActionState holds 'Just' the result of the snippet, or 'Nothing' if
it has not yet terminated.
TODO: What is a good value here?
| Resume a previously interrupted statement
| Internal geneneralization used in 'runStmt' and 'resume'
clean session shutdown?
This 'fail' invocation is, in part, a workaround for
instead of providing a sensible error message
that we could show to the user.
ideManagedFiles is irrelevant, because only the module name inside
'module .. where' counts.
TODO: should we check that there is anything to resume here?
On a force cancellation we definitely didn't hit a breakpoint
| Breakpoint
Set a breakpoint at the specified location. Returns @Just@ the old value of the
breakpoint if successful, or @Nothing@ otherwise.
^ Module where the breakshould should be set
^ Location of the breakpoint
^ New value for the breakpoint
^ Old value of the breakpoint (if valid)
| Print and/or force values during debugging
Only valid in breakpoint state.
^ Variable to print
^ Should the value be forced? (@:print@ vs. @:force@)
the original Targets in the session state
** IdeStaticInfo{..} <- asks ideSessionUpdateStaticInfo
** let sourceDir = ideSessionSourceDir ideSessionDir
** Public.TargetsInclude l ->
** Public.TargetsInclude $ map (sourceDir </>) l
** Public.TargetsExclude l ->
** Public.TargetsExclude $ map (sourceDir </>) l
**
** restartSession'
-----------------------------------------------------------------------------
Debugging of ide-backend itself
-----------------------------------------------------------------------------
@Nothing@, crash immediately; otherwise, set up a thread that throws
an exception to the main thread after the delay.
------------------------------------------------------------------------------
Auxiliary (ide-backend specific)
------------------------------------------------------------------------------ | # LANGUAGE FlexibleContexts , , ScopedTypeVariables , TemplateHaskell , OverloadedStrings #
module IdeSession.Update (
initSession
, initSessionWithCallbacks
, SessionInitParams(..)
, defaultSessionInitParams
, shutdownSession
, forceShutdownSession
, restartSession
, updateSession
, updateSourceFile
, updateSourceFileFromFile
, updateSourceFileDelete
, updateGhcOpts
, updateRtsOpts
, updateRelativeIncludes
, updateCodeGeneration
, updateDataFile
, updateDataFileFromFile
, updateDataFileDelete
, updateDeleteManagedFiles
, updateEnv
, updateArgs
, updateStdoutBufferMode
, updateStderrBufferMode
, updateTargets
, buildExe
, buildDoc
, buildLicenses
, runStmt
, runStmtPty
, runExe
, resume
, setBreakpoint
, printVar
, crashGhcServer
, buildLicsFromPkgs
, LicenseArgs(..)
)
where
import Prelude hiding (mod, span)
import Control.Concurrent (threadDelay)
import Control.Monad (when, unless)
import Control.Monad.IO.Class (liftIO)
import Data.Accessor (Accessor, (^.))
import Data.List (elemIndices, isPrefixOf)
import Data.Maybe (fromMaybe, isJust)
import Data.Monoid (Monoid(..), (<>))
import Distribution.Simple (PackageDBStack, PackageDB(..))
import System.Environment (getEnvironment, unsetEnv, lookupEnv)
import System.Exit (ExitCode(..))
import System.FilePath ((</>))
import System.IO.Temp (createTempDirectory)
import System.IO (IOMode(..))
import System.Process (proc, CreateProcess(..), StdStream(..), createProcess, waitForProcess, interruptProcessGroupOf, terminateProcess)
import qualified Control.Exception as Ex
import qualified Data.ByteString as BSS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Lazy.UTF8 as BSL.UTF8
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified System.Directory as Dir
import qualified System.IO as IO
import IdeSession.Cabal
import IdeSession.Config
import IdeSession.GHC.API
import IdeSession.GHC.Client
import IdeSession.RPC.API (ExternalException(..))
import IdeSession.State
import IdeSession.Strict.Container
import IdeSession.Strict.MVar (newMVar, newEmptyMVar, StrictMVar)
import IdeSession.Types.Private hiding (RunResult(..))
import IdeSession.Types.Progress
import IdeSession.Types.Public (RunBufferMode(..))
import IdeSession.Update.ExecuteSessionUpdate
import IdeSession.Update.IdeSessionUpdate
import IdeSession.Util
import IdeSession.Util.BlockingOps
import IdeSession.Util.Logger
import IdeSession.Util.PortableIO
import IdeSession.Util.PortableFiles (moduleNameToExeName)
import qualified IdeSession.Query as Query
import qualified IdeSession.Strict.List as List
import qualified IdeSession.Strict.Map as Map
import qualified IdeSession.Strict.Maybe as Maybe
import qualified IdeSession.Types.Private as Private
import qualified IdeSession.Types.Public as Public
data SessionInitParams = SessionInitParams {
sessionInitCabalMacros :: Maybe BSL.ByteString
| Initial ghc options
, sessionInitGhcOptions :: [String]
| Include paths ( equivalent of GHC 's @-i@ parameter ) relative to the
, sessionInitRelativeIncludes :: [FilePath]
, sessionInitTargets :: Public.Targets
Defaults to
, sessionInitRtsOpts :: [String]
, sessionInitDistDir :: !(Maybe FilePath)
}
deriving Show
defaultSessionInitParams :: SessionInitParams
defaultSessionInitParams = SessionInitParams {
sessionInitCabalMacros = Nothing
, sessionInitGhcOptions = []
, sessionInitRelativeIncludes = [""]
, sessionInitTargets = Public.TargetsExclude []
, sessionInitRtsOpts = ["-K8M"]
, sessionInitDistDir = Nothing
}
sessionRestartParams :: IdeIdleState -> IdeSessionUpdate -> SessionInitParams
sessionRestartParams st IdeSessionUpdate{..} = SessionInitParams {
sessionInitCabalMacros = Nothing
, sessionInitGhcOptions = fromMaybe (st ^. ideGhcOpts) ideUpdateGhcOpts
, sessionInitRelativeIncludes = fromMaybe (st ^. ideRelativeIncludes) ideUpdateRelIncls
, sessionInitTargets = fromMaybe (st ^. ideTargets) ideUpdateTargets
, sessionInitRtsOpts = fromMaybe (st ^. ideRtsOpts) ideUpdateRtsOpts
, sessionInitDistDir = Nothing
}
execInitParams :: IdeStaticInfo -> SessionInitParams -> IO ()
execInitParams staticInfo SessionInitParams{..} = do
writeMacros staticInfo sessionInitCabalMacros
| Write per - package CPP macros .
writeMacros :: IdeStaticInfo -> Maybe BSL.ByteString -> IO ()
writeMacros IdeStaticInfo{ideConfig = SessionConfig {..}, ..}
configCabalMacros = do
macros <- case configCabalMacros of
Nothing -> generateMacros configPackageDBStack configExtraPathDirs
Just macros -> return (BSL.UTF8.toString macros)
writeFile (cabalMacrosLocation ideDistDir) macros
initSession :: SessionInitParams -> SessionConfig -> IO IdeSession
initSession = initSessionWithCallbacks defaultIdeCallbacks
| Like ' initSession ' , but also takes a ' IdeCallbacks ' .
Since 0.10.0
initSessionWithCallbacks :: IdeCallbacks -> SessionInitParams -> SessionConfig -> IO IdeSession
initSessionWithCallbacks ideCallbacks initParams@SessionInitParams{..} ideConfig@SessionConfig{..} = do
let logFunc = ideCallbacksLogFunc ideCallbacks
$logInfo "Initializing ide-backend session"
mpath <- lookupEnv "GHC_PACKAGE_PATH"
when (isJust mpath) $ do
$logWarn "ide-backend doesn't pay attention to GHC_PACKAGE_PATH, but it is set in the environment"
unsetEnv "GHC_PACKAGE_PATH"
verifyConfig ideConfig
configDirCanon <- Dir.canonicalizePath configDir
ideSessionDir <- createTempDirectory configDirCanon "session."
$logDebug $ "Session dir = " <> Text.pack ideSessionDir
let ideDistDir = fromMaybe (ideSessionDir </> "dist/") sessionInitDistDir
$logDebug $ "Dist dir = " <> Text.pack ideDistDir
let ideStaticInfo = IdeStaticInfo{..}
case configLocalWorkingDir of
Just dir -> $logDebug $ "Local working dir = " <> Text.pack dir
Nothing -> do
Dir.createDirectoryIfMissing True (ideSourceDir ideStaticInfo)
Dir.createDirectoryIfMissing True (ideDataDir ideStaticInfo)
Dir.createDirectoryIfMissing True ideDistDir
Dir.createDirectoryIfMissing True (ideSessionObjDir ideSessionDir)
execInitParams ideStaticInfo initParams
Start the GHC server ( as a separate process )
mServer <- forkGhcServer sessionInitGhcOptions
sessionInitRelativeIncludes
sessionInitRtsOpts
ideStaticInfo
ideCallbacks
let (state, server, version) = case mServer of
Right (s, v) -> (IdeSessionIdle, s, v)
Left e -> (IdeSessionServerDied e, Ex.throw e, Ex.throw e)
a workaround for .
We rather arbitrary start at Jan 2 , 1970 .
let idleState = IdeIdleState {
_ideLogicalTimestamp = 86400
, _ideComputed = Maybe.nothing
, _ideGenerateCode = False
, _ideManagedFiles = ManagedFilesInternal [] []
, _ideObjectFiles = []
, _ideBuildExeStatus = Nothing
, _ideBuildDocStatus = Nothing
, _ideBuildLicensesStatus = Nothing
, _ideEnv = []
, _ideArgs = []
, _ideStdoutBufferMode = RunNoBuffering
, _ideStderrBufferMode = RunNoBuffering
, _ideBreakInfo = Maybe.nothing
, _ideGhcServer = server
, _ideGhcVersion = version
, _ideGhcOpts = sessionInitGhcOptions
, _ideRelativeIncludes = sessionInitRelativeIncludes
, _ideTargets = sessionInitTargets
, _ideRtsOpts = sessionInitRtsOpts
}
ideState <- newMVar (state idleState)
return IdeSession{..}
verifyConfig :: SessionConfig -> IO ()
verifyConfig SessionConfig{..} = do
unless (isValidPackageDB configPackageDBStack) $
Ex.throw . userError $ "Invalid package DB stack: "
++ show configPackageDBStack
where
isValidPackageDB :: PackageDBStack -> Bool
isValidPackageDB stack =
elemIndices GlobalPackageDB stack == [0]
&& elemIndices UserPackageDB stack `elem` [[], [1]]
shutdownSession :: IdeSession -> IO ()
shutdownSession = shutdownSession' False
| Like shutdownSession , but do n't be nice about it ( SIGKILL )
forceShutdownSession :: IdeSession -> IO ()
forceShutdownSession = shutdownSession' True
shutdownSession' :: Bool -> IdeSession -> IO ()
shutdownSession' forceTerminate IdeSession{ideState, ideStaticInfo} = do
$modifyStrictMVar_ ideState $ \state ->
case state of
IdeSessionIdle idleState -> do
if forceTerminate
then forceShutdownGhcServer $ _ideGhcServer idleState
else shutdownGhcServer $ _ideGhcServer idleState
cleanupDirs
return IdeSessionShutdown
IdeSessionShutdown ->
return IdeSessionShutdown
IdeSessionServerDied _ _ -> do
cleanupDirs
return IdeSessionShutdown
where
cleanupDirs :: IO ()
cleanupDirs =
when (configDeleteTempFiles . ideConfig $ ideStaticInfo) $
ignoreDoesNotExist $
Dir.removeDirectoryRecursive (ideSessionDir ideStaticInfo)
restartSession :: IdeSession -> IO ()
restartSession IdeSession{ideState} =
$modifyStrictMVar_ ideState $ \state ->
case state of
IdeSessionIdle idleState ->
return $ IdeSessionServerDied forcedRestart idleState
IdeSessionServerDied _ _ ->
IdeSessionShutdown ->
fail "Shutdown session cannot be restarted."
data RestartResult =
ServerRestarted IdeIdleState IdeSessionUpdate
| ServerRestartFailed IdeIdleState
executeRestart :: SessionInitParams
-> IdeStaticInfo
-> IdeCallbacks
-> IdeIdleState
-> IO RestartResult
executeRestart initParams@SessionInitParams{..} staticInfo ideCallbacks idleState = do
let logFunc = ideCallbacksLogFunc ideCallbacks
$logInfo "Restarting ide-backend-server"
forceShutdownGhcServer $ _ideGhcServer idleState
mServer <- forkGhcServer sessionInitGhcOptions
sessionInitRelativeIncludes
sessionInitRtsOpts
staticInfo
ideCallbacks
case mServer of
Right (server, version) -> do
execInitParams staticInfo initParams
let idleState' = idleState {
_ideComputed = Maybe.nothing
, _ideGhcOpts = sessionInitGhcOptions
, _ideRelativeIncludes = sessionInitRelativeIncludes
, _ideRtsOpts = sessionInitRtsOpts
, _ideGenerateCode = False
, _ideObjectFiles = []
, _ideEnv = []
, _ideArgs = []
, _ideGhcServer = server
, _ideGhcVersion = version
, _ideTargets = sessionInitTargets
}
let upd = mconcat [
updateEnv (idleState ^. ideEnv)
, updateArgs (idleState ^. ideArgs)
, updateCodeGeneration (idleState ^. ideGenerateCode)
]
return (ServerRestarted idleState' upd)
Left e -> do
let idleState' = idleState {
_ideGhcServer = Ex.throw e
, _ideGhcVersion = Ex.throw e
}
return (ServerRestartFailed idleState')
Session update
Here we deal only with the top - level logic : restart the session and then run
the session update . The specifics of how to execute the individual parts
of the session update are defined in IdeSession . Update . ExecuteSessionUpdate .
Session update
Here we deal only with the top-level logic: restart the session and then run
the session update. The specifics of how to execute the individual parts
of the session update are defined in IdeSession.Update.ExecuteSessionUpdate.
updateSession :: IdeSession -> IdeSessionUpdate -> (Public.UpdateStatus -> IO ()) -> IO ()
updateSession = flip . updateSession'
updateSession' :: IdeSession -> (Public.UpdateStatus -> IO ()) -> IdeSessionUpdate -> IO ()
updateSession' IdeSession{ideStaticInfo, ideState, ideCallbacks} updateStatus = \update ->
$modifyStrictMVar_ ideState $ go False update
where
logFunc = ideCallbacksLogFunc ideCallbacks
go :: Bool -> IdeSessionUpdate -> IdeSessionState -> IO IdeSessionState
go justRestarted update (IdeSessionIdle idleState) =
if not (requiresSessionRestart idleState update)
then do
(idleState', mex) <- runSessionUpdate justRestarted update ideStaticInfo updateStatus ideCallbacks idleState
case mex of
Nothing -> do
updateStatus Public.UpdateStatusDone
return $ IdeSessionIdle idleState'
Just ex -> do
updateStatus $ Public.UpdateStatusFailed (Text.pack (show ex))
return $ IdeSessionServerDied ex idleState'
else do
$logInfo $ "Restarting session due to update requiring it."
unless justRestarted $ updateStatus Public.UpdateStatusRequiredRestart
let restartParams = sessionRestartParams idleState update
restart justRestarted update restartParams idleState
go justRestarted update (IdeSessionServerDied ex idleState) = do
let msg = Text.pack (show ex)
$logInfo $ "Restarting session due to server dieing: " <> msg
unless justRestarted $ updateStatus (Public.UpdateStatusErrorRestart msg)
let restartParams = sessionRestartParams idleState update
restart justRestarted update restartParams idleState
go _ _ IdeSessionShutdown =
Ex.throwIO (userError "Session already shut down.")
restart :: Bool -> IdeSessionUpdate -> SessionInitParams -> IdeIdleState -> IO IdeSessionState
restart True _ _ idleState =
return $ IdeSessionServerDied serverRestartLoop idleState
restart False update restartParams idleState = do
threadDelay 100000
restartResult <- executeRestart restartParams ideStaticInfo ideCallbacks idleState
case restartResult of
ServerRestarted idleState' resetSession ->
go True (resetSession <> update) (IdeSessionIdle idleState')
ServerRestartFailed idleState' -> do
updateStatus Public.UpdateStatusFailedToRestart
return $ IdeSessionServerDied failedToRestart idleState'
| @requiresSessionRestart returns true if update @upd@ requires a
session restart given current state
requiresSessionRestart :: IdeIdleState -> IdeSessionUpdate -> Bool
requiresSessionRestart st IdeSessionUpdate{..} =
(ideUpdateRelIncls `changes` ideRelativeIncludes)
|| (ideUpdateTargets `changes` ideTargets)
|| (ideUpdateRtsOpts `changes` ideRtsOpts)
|| (any optRequiresRestart (listChanges' ideUpdateGhcOpts ideGhcOpts))
where
optRequiresRestart :: String -> Bool
optRequiresRestart str =
Library flags can not be changed dynamically ( # 214 )
"-l" `isPrefixOf` str
changes :: Eq a => Maybe a -> Accessor IdeIdleState a -> Bool
changes Nothing _ = False
changes (Just x) y = x /= st ^. y
listChanges' :: Ord a => Maybe [a] -> Accessor IdeIdleState [a] -> [a]
listChanges' Nothing _ = []
listChanges' (Just xs) ys = listChanges xs (st ^. ys)
in @ys@ and the set of elements that appear in @ys@ but not in @xs@.
between the two sets .
listChanges :: Ord a => [a] -> [a] -> [a]
listChanges xs ys =
Set.toList $ (a `Set.union` b) `Set.difference` (a `Set.intersection` b)
where
a = Set.fromList xs
b = Set.fromList ys
or when the server is in a dead state ( i.e. , when ghc has crashed ) . In the
latter case ' getSourceErrors ' will report the ghc exception ; it is the
runStmt :: IdeSession -> String -> String -> IO (RunActions Public.RunResult)
runStmt ideSession m fun = runCmd ideSession $ \idleState -> RunStmt {
runCmdModule = m
, runCmdFunction = fun
, runCmdStdout = idleState ^. ideStdoutBufferMode
, runCmdStderr = idleState ^. ideStderrBufferMode
, runCmdPty = False
}
runStmtPty :: IdeSession -> String -> String -> IO (RunActions Public.RunResult)
runStmtPty ideSession m fun = runCmd ideSession $ \idleState -> RunStmt {
runCmdModule = m
, runCmdFunction = fun
, runCmdStdout = idleState ^. ideStdoutBufferMode
, runCmdStderr = idleState ^. ideStderrBufferMode
, runCmdPty = True
}
or when the server is in a dead state ( i.e. , when ghc has crashed ) . In the
last case ' getSourceErrors ' will report the ghc exception ; it is the
runExe :: IdeSession -> String -> IO (RunActions ExitCode)
runExe session m = do
let handleQueriesExc (_ :: Query.InvalidSessionStateQueries) =
fail $ "Wrong session state when trying to run an executable."
Ex.handle handleQueriesExc $ do
mstatus <- Query.getBuildExeStatus session
case mstatus of
Nothing ->
fail $ "No executable compilation initiated since session init."
(Just status@ExitFailure{}) ->
fail $ "Last executable compilation failed with status "
++ show status ++ "."
Just ExitSuccess -> do
distDir <- Query.getDistDir session
dataDir <- Query.getDataDir session
args <- Query.getArgs session
envInherited <- getEnvironment
envOverride <- Query.getEnv session
let overrideVar :: (String, Maybe String) -> Strict (Map String) String
-> Strict (Map String) String
overrideVar (var, Just val) env = Map.insert var val env
overrideVar (var, Nothing) env = Map.delete var env
envMap = foldr overrideVar (Map.fromList envInherited) envOverride
let exePath = distDir </> "build" </> m </> moduleNameToExeName m
exeExists <- Dir.doesFileExist exePath
unless exeExists $
fail $ "No compiled executable file "
++ m ++ " exists at path "
++ exePath ++ "."
(stdRd, stdWr) <- liftIO createPipe
std_rd_hdl <- fdToHandle stdRd ReadMode
std_wr_hdl <- fdToHandle stdWr WriteMode
let cproc = (proc exePath args) { cwd = Just dataDir
, env = Just $ Map.toList envMap
, create_group = True
, std_in = CreatePipe
, std_out = UseHandle std_wr_hdl
, std_err = UseHandle std_wr_hdl
}
(Just stdin_hdl, Nothing, Nothing, ph) <- createProcess cproc
runActionsState <- newMVar Nothing
return $ RunActions
{ runWait = $modifyStrictMVar runActionsState $ \st -> case st of
Just outcome ->
return (Just outcome, Right outcome)
Nothing -> do
bs <- BSS.hGetSome std_rd_hdl blockSize
if BSS.null bs
then do
res <- waitForProcess ph
return (Just res, Right res)
else
return (Nothing, Left bs)
, interrupt = interruptProcessGroupOf ph
, supplyStdin = \bs -> BSS.hPut stdin_hdl bs >> IO.hFlush stdin_hdl
, forceCancel = terminateProcess ph
}
We do n't need to close any handles . At the latest GC closes them .
where
blockSize :: Int
blockSize = 4096
resume :: IdeSession -> IO (RunActions Public.RunResult)
resume ideSession = runCmd ideSession (const Resume)
runCmd :: IdeSession -> (IdeIdleState -> RunCmd) -> IO (RunActions Public.RunResult)
runCmd session mkCmd = modifyIdleState session $ \idleState ->
case (toLazyMaybe (idleState ^. ideComputed), idleState ^. ideGenerateCode) of
(Just comp, True) -> do
let cmd = mkCmd idleState
checkStateOk comp cmd
isBreak <- newEmptyMVar
runActions <- rpcRun (idleState ^. ideGhcServer)
cmd
(translateRunResult isBreak)
TODO : We should register the runActions somewhere so we can do a
return (IdeSessionIdle idleState, runActions)
_ ->
which would otherwise lead to a hard GHC crash ,
fail "Cannot run before the code is generated."
where
checkStateOk :: Computed -> RunCmd -> IO ()
checkStateOk comp RunStmt{..} =
unless (Text.pack runCmdModule `List.elem` computedLoadedModules comp) $
fail $ "Module " ++ show runCmdModule
++ " not successfully loaded, when trying to run code."
checkStateOk _comp Resume =
return ()
translateRunResult :: StrictMVar (Strict Maybe BreakInfo)
-> Maybe Private.RunResult
-> IO Public.RunResult
translateRunResult isBreak (Just Private.RunOk) = do
$putStrictMVar isBreak Maybe.nothing
return $ Public.RunOk
translateRunResult isBreak (Just (Private.RunProgException str)) = do
$putStrictMVar isBreak Maybe.nothing
return $ Public.RunProgException str
translateRunResult isBreak (Just (Private.RunGhcException str)) = do
$putStrictMVar isBreak Maybe.nothing
return $ Public.RunGhcException str
translateRunResult isBreak (Just (Private.RunBreak breakInfo)) = do
$putStrictMVar isBreak (Maybe.just breakInfo)
return $ Public.RunBreak
translateRunResult isBreak Nothing = do
$putStrictMVar isBreak Maybe.nothing
return $ Public.RunForceCancelled
setBreakpoint :: IdeSession
setBreakpoint session mod span value = withIdleState session $ \idleState ->
rpcBreakpoint (idleState ^. ideGhcServer) mod span value
printVar :: IdeSession
^ Should printing bind new vars ? ( @:print@ vs. @:sprint@ )
-> IO Public.VariableEnv
printVar session var bind forceEval = withBreakInfo session $ \idleState _ ->
rpcPrint (idleState ^. ideGhcServer) var bind forceEval
TODO : We should do this translation only when we talk to ghc , and keep
* * newTargets = case targets of
* * < - get ideTargets
* * when ( oldTargets /= newTargets ) $ do
* * set ideTargets newTargets
| Crash the GHC server . For debugging only . If the specified delay is
crashGhcServer :: IdeSession -> Maybe Int -> IO ()
crashGhcServer IdeSession{..} delay = $withStrictMVar ideState $ \state ->
case state of
IdeSessionIdle idleState ->
rpcCrash (idleState ^. ideGhcServer) delay
_ ->
Ex.throwIO $ userError "State not idle"
withBreakInfo :: IdeSession -> (IdeIdleState -> Public.BreakInfo -> IO a) -> IO a
withBreakInfo session act = withIdleState session $ \idleState ->
case toLazyMaybe (idleState ^. ideBreakInfo) of
Just breakInfo -> act idleState breakInfo
Nothing -> Ex.throwIO (userError "Not in breakpoint state")
withIdleState :: IdeSession -> (IdeIdleState -> IO a) -> IO a
withIdleState session act = modifyIdleState session $ \idleState -> do
result <- act idleState
return (IdeSessionIdle idleState, result)
modifyIdleState :: IdeSession -> (IdeIdleState -> IO (IdeSessionState, a)) -> IO a
modifyIdleState IdeSession{..} act = $modifyStrictMVar ideState $ \state -> case state of
IdeSessionIdle idleState -> act idleState
_ -> Ex.throwIO $ userError "State not idle"
failedToRestart :: ExternalException
failedToRestart = ExternalException {
externalStdErr = "Failed to restart server"
, externalException = Nothing
}
forcedRestart :: ExternalException
forcedRestart = ExternalException {
externalStdErr = "Session manually restarted"
, externalException = Nothing
}
serverRestartLoop :: ExternalException
serverRestartLoop = ExternalException {
externalStdErr = "Server restart loop"
, externalException = Nothing
}
|
66c27ab7bc3ee785caf457788adbcee338cac4b333f325019f8ee425ed937e76 | bhauman/figwheel-main | watching.clj | (ns figwheel.main.watching
(:require
[clojure.java.io :as io]
[clojure.string :as string]
[nextjournal.beholder :as beholder]))
(def ^:dynamic *watcher* (atom {:watcher nil :watches {}}))
(defn stop-watchers! [watchers]
(doseq [watcher watchers]
(beholder/stop watcher)))
(defn alter-watches [{:keys [watchers watches]} f]
(stop-watchers! watchers)
(let [watches (f watches)
watchers (doall
(for [watch (vals watches)]
(let [{:keys [paths filter handler]} watch
ctx (atom {})]
(apply beholder/watch
(fn [e]
(let [file (.toFile (:path e))
e (assoc e :file file)]
(when (or (not filter)
(filter ctx e))
(swap! ctx handler e))))
paths))))]
{:watchers watchers
:watches watches}))
(defn add-watch! [watch-key watch]
(swap! *watcher* alter-watches #(assoc % watch-key watch)))
(defn remove-watch! [watch-key]
(swap! *watcher* alter-watches #(dissoc % watch-key)))
(defn stop! []
(stop-watchers! (:watchers @*watcher*)))
(defn reset-watch! []
(stop!)
(reset! *watcher* {}))
(defn running? []
(some-> *watcher* deref :watcher :thread .isAlive))
(defn join []
(some-> *watcher* deref :watcher :thread .join))
(defn throttle [millis f]
(fn [{:keys [collector] :as ctx} e]
(let [collector (or collector (atom {}))
{:keys [collecting? events]} (deref collector)]
(if collecting?
(swap! collector update :events (fnil conj []) e)
(let [events (volatile! nil)]
(swap! collector assoc :collecting? true)
(future
(try
(Thread/sleep millis) ;; is this needed now?
(swap! collector update :events (fn [evts] (vreset! events evts) nil))
(f (cons e @events))
(finally
(swap! collector assoc :collecting? false))))))
(assoc ctx :collector collector))))
(defn file-suffix [file]
(last (string/split (.getName (io/file file)) #"\.")))
(defn real-file? [file]
(and file
(.isFile file)
(not (.isHidden file))
(not (#{\. \#} (first (.getName file))))))
(defn suffix-filter [suffixes]
(fn [_ {:keys [file]}]
(and (real-file? file)
(suffixes (file-suffix file)))))
| null | https://raw.githubusercontent.com/bhauman/figwheel-main/8db5e58c6e951d9db08600f2552d0170a7cc2932/src/figwheel/main/watching.clj | clojure | is this needed now? | (ns figwheel.main.watching
(:require
[clojure.java.io :as io]
[clojure.string :as string]
[nextjournal.beholder :as beholder]))
(def ^:dynamic *watcher* (atom {:watcher nil :watches {}}))
(defn stop-watchers! [watchers]
(doseq [watcher watchers]
(beholder/stop watcher)))
(defn alter-watches [{:keys [watchers watches]} f]
(stop-watchers! watchers)
(let [watches (f watches)
watchers (doall
(for [watch (vals watches)]
(let [{:keys [paths filter handler]} watch
ctx (atom {})]
(apply beholder/watch
(fn [e]
(let [file (.toFile (:path e))
e (assoc e :file file)]
(when (or (not filter)
(filter ctx e))
(swap! ctx handler e))))
paths))))]
{:watchers watchers
:watches watches}))
(defn add-watch! [watch-key watch]
(swap! *watcher* alter-watches #(assoc % watch-key watch)))
(defn remove-watch! [watch-key]
(swap! *watcher* alter-watches #(dissoc % watch-key)))
(defn stop! []
(stop-watchers! (:watchers @*watcher*)))
(defn reset-watch! []
(stop!)
(reset! *watcher* {}))
(defn running? []
(some-> *watcher* deref :watcher :thread .isAlive))
(defn join []
(some-> *watcher* deref :watcher :thread .join))
(defn throttle [millis f]
(fn [{:keys [collector] :as ctx} e]
(let [collector (or collector (atom {}))
{:keys [collecting? events]} (deref collector)]
(if collecting?
(swap! collector update :events (fnil conj []) e)
(let [events (volatile! nil)]
(swap! collector assoc :collecting? true)
(future
(try
(swap! collector update :events (fn [evts] (vreset! events evts) nil))
(f (cons e @events))
(finally
(swap! collector assoc :collecting? false))))))
(assoc ctx :collector collector))))
(defn file-suffix [file]
(last (string/split (.getName (io/file file)) #"\.")))
(defn real-file? [file]
(and file
(.isFile file)
(not (.isHidden file))
(not (#{\. \#} (first (.getName file))))))
(defn suffix-filter [suffixes]
(fn [_ {:keys [file]}]
(and (real-file? file)
(suffixes (file-suffix file)))))
|
119a0dcc79664a3a371507d1722ec66aba65a731ec1dc5d632889f9129aa8fa6 | adamdoupe/find_ear_rails | scope_cfg_tests.ml |
open OUnit
open Cfg_test_helper
open Config
let succ_scope_test (desc,pre,post) =
conf.error_raises_exc <- true;
let d = desc ^ ": " ^ pre in
d >::
(fun () ->
Cfg_refactor.re_init ();
let loader = File_loader.create File_loader.EmptyCfg [] in
let scoper = Cfg_scope.create loader in
let cfg1 = Cfg_scope.resolve_scopes scoper (refactor_string pre) in
let cfg2 = Cfg_scope.resolve_scopes scoper (refactor_string post) in
assert_cfg_equal cfg1 cfg2
)
let diff_scope_test (desc,pre,post) =
conf.error_raises_exc <- true;
let d = desc ^ ": " ^ pre in
d >::
(fun () ->
Cfg_refactor.re_init ();
let loader = File_loader.create File_loader.EmptyCfg [] in
let scoper = Cfg_scope.create loader in
let cfg1 = Cfg_scope.resolve_scopes scoper (refactor_string pre) in
let cfg2 = Cfg_scope.resolve_scopes scoper (refactor_string post) in
assert_cfg_neq cfg1 cfg2
)
exception Should_fail
let fail_scope_test (desc,code) =
conf.error_raises_exc <- true;
let d = desc ^ ": " ^ code in
d >::
(fun () ->
try
let loader = File_loader.create File_loader.EmptyCfg [] in
let scoper = Cfg_scope.create loader in
ignore(Cfg_scope.resolve_scopes scoper (refactor_string code));
raise Should_fail
with Failure _ -> ()
)
let succ_tests = [
("simple uscope",
"A=1;A",
"A=1;::A"
);
("nested access to uscope",
"A=1;class B;A;end",
"A=1;class B;::A;end"
);
("nested access to scope",
"class A; class B; class C;
A::B; end end end",
"class A; class B; class C;
::A::B; end end end"
);
("subtrees with same scope id",
"class A
class B
class C
C
end
end
class D
class C;
C
end
end
end",
"class A
class B
class C
::A::B::C
end
end
class D
class C;
::A::D::C
end
end
end"
);
("inherit constant over lexical scope",
"class Module; X=2;end
class A
class Module; X=1; end
class B < Module; X; end
end
",
"class ::Module; ::Module::X=2;end
class ::A
class Module; ::A::Module::X=1; end
class ::A::B < ::A::Module; ::A::Module::X; end
end
");
("reference superclass constant",
"class A; X=1;end;
class B < A; X; end",
"class A; X=1;end;
class B < A; ::A::X; end"
);
("scoped initializer",
"class A;def m(); X; end;end
A::X = 1",
"class A;def m(); ::A::X; end;end
::A::X = 1"
);
("module include",
"module M; X=1;end
class A; include M; X;end",
"module ::M; ::M::X=1;end
class ::A; include ::M; ::M::X;end"
);
("re-enter scope",
"class A; class B; X=1; end;end
class ::A::B; X;end",
"class ::A; class ::A::B; ::A::B::X=1; end;end
class ::A::B; ::A::B::X;end"
);
("class method lex scope",
"class A; X=1
def A.foo; X; end;end",
"class ::A; ::A::X=1
def ::A.foo; ::A::X; end;end"
);
("singleton method doesn't open lexical scope",
"X=1
class Outer
X=2
class A
class << self
class X
end
def testme1
X
end
end
def self.testme2
X
end
end
end
",
"X=1
class Outer
X=2
class A
class << self
class X
end
def testme1
::Outer::A::X
end
end
def self.testme2
::Outer::X
end
end
end
");
(* just want this to be accepted, I don't think we want to change
the self to a ::A *)
("inherit from self",
"class A; class B < self; end;end",
"class A; class B < self; end;end"
);
("track classes through assignment",
"class A; X=1; end;B=A;B::X",
"class ::A; ::A::X=1; end;::B=::A;::A::X"
);
("equal named parent/child",
"module A; class A; end; end",
"module ::A; class ::A::A; end; end"
);
("equal named parent/child, look in parent",
"module A
class A
end
class B
def B.foo() end
end
end",
"module ::A
class ::A::A
end
class ::A::B
def ::A::B.foo() end
end
end");
( " metaclass looks in reg class " ,
"
class A
X=1
class B
X=2
end
end
X=3
class < < A::B.new
puts X
end
" , "
class A
X=1
class B
X=2
end
end
X=3
class < < A::B.new
puts : : A::B::X
end
"
) ;
("metaclass looks in reg class",
"
class A
X=1
class B
X=2
end
end
X=3
class << A::B.new
puts X
end
","
class A
X=1
class B
X=2
end
end
X=3
class << A::B.new
puts ::A::B::X
end
"
);
*)
]
let diff_tests = [
("don't break lexical scope",
"class A; class B; end; end",
"class A;end;class ::A::B;end"
);
]
let fail_tests = [
("lexical binder",
"class A; X=1;end
class ::A::B; X; end"
)
]
let (@@) = List.rev_append
let suite = "Scope suite" >:::
(List.map succ_scope_test succ_tests) @@
(List.map diff_scope_test diff_tests) @@
(List.map fail_scope_test fail_tests)
| null | https://raw.githubusercontent.com/adamdoupe/find_ear_rails/38f892f9962ad415a583973caf24fbab1a011be1/diamondback-ruby-0.20090726/tests/cfg/scope_cfg_tests.ml | ocaml | just want this to be accepted, I don't think we want to change
the self to a ::A |
open OUnit
open Cfg_test_helper
open Config
let succ_scope_test (desc,pre,post) =
conf.error_raises_exc <- true;
let d = desc ^ ": " ^ pre in
d >::
(fun () ->
Cfg_refactor.re_init ();
let loader = File_loader.create File_loader.EmptyCfg [] in
let scoper = Cfg_scope.create loader in
let cfg1 = Cfg_scope.resolve_scopes scoper (refactor_string pre) in
let cfg2 = Cfg_scope.resolve_scopes scoper (refactor_string post) in
assert_cfg_equal cfg1 cfg2
)
let diff_scope_test (desc,pre,post) =
conf.error_raises_exc <- true;
let d = desc ^ ": " ^ pre in
d >::
(fun () ->
Cfg_refactor.re_init ();
let loader = File_loader.create File_loader.EmptyCfg [] in
let scoper = Cfg_scope.create loader in
let cfg1 = Cfg_scope.resolve_scopes scoper (refactor_string pre) in
let cfg2 = Cfg_scope.resolve_scopes scoper (refactor_string post) in
assert_cfg_neq cfg1 cfg2
)
exception Should_fail
let fail_scope_test (desc,code) =
conf.error_raises_exc <- true;
let d = desc ^ ": " ^ code in
d >::
(fun () ->
try
let loader = File_loader.create File_loader.EmptyCfg [] in
let scoper = Cfg_scope.create loader in
ignore(Cfg_scope.resolve_scopes scoper (refactor_string code));
raise Should_fail
with Failure _ -> ()
)
let succ_tests = [
("simple uscope",
"A=1;A",
"A=1;::A"
);
("nested access to uscope",
"A=1;class B;A;end",
"A=1;class B;::A;end"
);
("nested access to scope",
"class A; class B; class C;
A::B; end end end",
"class A; class B; class C;
::A::B; end end end"
);
("subtrees with same scope id",
"class A
class B
class C
C
end
end
class D
class C;
C
end
end
end",
"class A
class B
class C
::A::B::C
end
end
class D
class C;
::A::D::C
end
end
end"
);
("inherit constant over lexical scope",
"class Module; X=2;end
class A
class Module; X=1; end
class B < Module; X; end
end
",
"class ::Module; ::Module::X=2;end
class ::A
class Module; ::A::Module::X=1; end
class ::A::B < ::A::Module; ::A::Module::X; end
end
");
("reference superclass constant",
"class A; X=1;end;
class B < A; X; end",
"class A; X=1;end;
class B < A; ::A::X; end"
);
("scoped initializer",
"class A;def m(); X; end;end
A::X = 1",
"class A;def m(); ::A::X; end;end
::A::X = 1"
);
("module include",
"module M; X=1;end
class A; include M; X;end",
"module ::M; ::M::X=1;end
class ::A; include ::M; ::M::X;end"
);
("re-enter scope",
"class A; class B; X=1; end;end
class ::A::B; X;end",
"class ::A; class ::A::B; ::A::B::X=1; end;end
class ::A::B; ::A::B::X;end"
);
("class method lex scope",
"class A; X=1
def A.foo; X; end;end",
"class ::A; ::A::X=1
def ::A.foo; ::A::X; end;end"
);
("singleton method doesn't open lexical scope",
"X=1
class Outer
X=2
class A
class << self
class X
end
def testme1
X
end
end
def self.testme2
X
end
end
end
",
"X=1
class Outer
X=2
class A
class << self
class X
end
def testme1
::Outer::A::X
end
end
def self.testme2
::Outer::X
end
end
end
");
("inherit from self",
"class A; class B < self; end;end",
"class A; class B < self; end;end"
);
("track classes through assignment",
"class A; X=1; end;B=A;B::X",
"class ::A; ::A::X=1; end;::B=::A;::A::X"
);
("equal named parent/child",
"module A; class A; end; end",
"module ::A; class ::A::A; end; end"
);
("equal named parent/child, look in parent",
"module A
class A
end
class B
def B.foo() end
end
end",
"module ::A
class ::A::A
end
class ::A::B
def ::A::B.foo() end
end
end");
( " metaclass looks in reg class " ,
"
class A
X=1
class B
X=2
end
end
X=3
class < < A::B.new
puts X
end
" , "
class A
X=1
class B
X=2
end
end
X=3
class < < A::B.new
puts : : A::B::X
end
"
) ;
("metaclass looks in reg class",
"
class A
X=1
class B
X=2
end
end
X=3
class << A::B.new
puts X
end
","
class A
X=1
class B
X=2
end
end
X=3
class << A::B.new
puts ::A::B::X
end
"
);
*)
]
let diff_tests = [
("don't break lexical scope",
"class A; class B; end; end",
"class A;end;class ::A::B;end"
);
]
let fail_tests = [
("lexical binder",
"class A; X=1;end
class ::A::B; X; end"
)
]
let (@@) = List.rev_append
let suite = "Scope suite" >:::
(List.map succ_scope_test succ_tests) @@
(List.map diff_scope_test diff_tests) @@
(List.map fail_scope_test fail_tests)
|
1a8d3fa7b8bfe4f8cb32b9553e784fdc5b68b829ede4282a053c21b4f85c8334 | mzp/coq-ruby | coqmktop.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
$ I d : coqmktop.ml 12874 2010 - 03 - 19 23:15:52Z herbelin $
coqmktop is a script to link Coq , analogous to ocamlmktop .
The command line contains options specific to coqmktop , options for the
Ocaml linker and files to link ( in addition to the default Coq files ) .
The command line contains options specific to coqmktop, options for the
Ocaml linker and files to link (in addition to the default Coq files). *)
open Unix
(* Objects to link *)
1 . Core objects
let ocamlobjs = ["str.cma";"unix.cma";"nums.cma"]
let dynobjs = ["dynlink.cma"]
let camlp4objs = ["gramlib.cma"]
let libobjs = ocamlobjs @ camlp4objs
let spaces = Str.regexp "[ \t\n]+"
let split_list l = Str.split spaces l
let copts = split_list Tolink.copts
let core_objs = split_list Tolink.core_objs
let core_libs = split_list Tolink.core_libs
let ide = split_list Tolink.ide
3 . Toplevel objects
let camlp4topobjs =
if Coq_config.camlp4 = "camlp5" then
["camlp5_top.cma"; "pa_o.cmo"; "pa_extend.cmo"]
else
["camlp4_top.cma"; "pa_o.cmo"; "pa_op.cmo"; "pa_extend.cmo"]
let topobjs = camlp4topobjs
let gramobjs = []
let notopobjs = gramobjs
4 . High - level tactics objects
(* environment *)
let opt = ref false
let full = ref false
let top = ref false
let searchisos = ref false
let coqide = ref false
let echo = ref false
let src_dirs () =
[ []; ["kernel";"byterun"]; [ "config" ]; [ "toplevel" ] ] @
if !coqide then [[ "ide" ]] else []
let includes () =
let coqlib = Envars.coqlib () in
let camlp4lib = Envars.camlp4lib () in
List.fold_right
(fun d l -> "-I" :: ("\"" ^ List.fold_left Filename.concat coqlib d ^ "\"") :: l)
(src_dirs ())
(["-I"; "\"" ^ camlp4lib ^ "\""] @
["-I"; "\"" ^ coqlib ^ "\""] @
(if !coqide then ["-thread"; "-I"; "+lablgtk2"] else []))
Transform bytecode object file names in native object file names
let native_suffix f =
if Filename.check_suffix f ".cmo" then
(Filename.chop_suffix f ".cmo") ^ ".cmx"
else if Filename.check_suffix f ".cma" then
(Filename.chop_suffix f ".cma") ^ ".cmxa"
else
if Filename.check_suffix f ".a" then f
else
failwith ("File "^f^" has not extension .cmo, .cma or .a")
Transforms a file name in the corresponding Caml module name .
let rem_ext_regexpr = Str.regexp "\\(.*\\)\\.\\(cm..?\\|ml\\)"
let module_of_file name =
let s = Str.replace_first rem_ext_regexpr "\\1" (Filename.basename name) in
String.capitalize s
(* Build the list of files to link and the list of modules names *)
let files_to_link userfiles =
let dyn_objs =
if not !opt || Coq_config.has_natdynlink then dynobjs else [] in
let toplevel_objs =
if !top then topobjs else if !opt then notopobjs else [] in
let ide_objs = if !coqide then
"threads.cma"::"lablgtk.cma"::"gtkThread.cmo"::ide
else []
in
let ide_libs = if !coqide then
["threads.cma" ; "lablgtk.cma" ; "gtkThread.cmo" ;
"ide/ide.cma" ]
else []
in
let objs = dyn_objs @ libobjs @ core_objs @ toplevel_objs @ ide_objs
and libs = dyn_objs @ libobjs @ core_libs @ toplevel_objs @ ide_libs in
let objstolink,libstolink =
if !opt then
((List.map native_suffix objs) @ userfiles,
(List.map native_suffix libs) @ userfiles)
else
(objs @ userfiles, libs @ userfiles )
in
let modules = List.map module_of_file objstolink in
(modules, libstolink)
(* Gives the list of all the directories under [dir].
Uses [Unix] (it is hard to do without it). *)
let all_subdirs dir =
let l = ref [dir] in
let add f = l := f :: !l in
let rec traverse dir =
let dirh =
try opendir dir with Unix_error _ -> invalid_arg "all_subdirs"
in
try
while true do
let f = readdir dirh in
if f <> "." && f <> ".." then
let file = Filename.concat dir f in
if (stat file).st_kind = S_DIR then begin
add file;
traverse file
end
done
with End_of_file ->
closedir dirh
in
traverse dir; List.rev !l
(* usage *)
let usage () =
prerr_endline "Usage: coqmktop <options> <ocaml options> files
Flags.are:
-coqlib dir Specify where the Coq object files are
-camlbin dir Specify where the OCaml binaries are
-camlp4bin dir Specify where the CAmp4/5 binaries are
-o exec-file Specify the name of the resulting toplevel
-boot Run in boot mode
-opt Compile in native code
-full Link high level tactics
-top Build Coq on a ocaml toplevel (incompatible with -opt)
-searchisos Build a toplevel for SearchIsos
-ide Build a toplevel for the Coq IDE
-R dir Specify recursively directories for Ocaml\n";
exit 1
(* parsing of the command line *)
let parse_args () =
let rec parse (op,fl) = function
| [] -> List.rev op, List.rev fl
| "-coqlib" :: d :: rem ->
Flags.coqlib_spec := true; Flags.coqlib := d ; parse (op,fl) rem
| "-coqlib" :: _ -> usage ()
| "-camlbin" :: d :: rem ->
Flags.camlbin_spec := true; Flags.camlbin := d ; parse (op,fl) rem
| "-camlbin" :: _ -> usage ()
| "-camlp4bin" :: d :: rem ->
Flags.camlp4bin_spec := true; Flags.camlp4bin := d ; parse (op,fl) rem
| "-camlp4bin" :: _ -> usage ()
| "-boot" :: rem -> Flags.boot := true; parse (op,fl) rem
| "-opt" :: rem -> opt := true ; parse (op,fl) rem
| "-full" :: rem -> full := true ; parse (op,fl) rem
| "-top" :: rem -> top := true ; parse (op,fl) rem
| "-ide" :: rem ->
coqide := true; parse (op,fl) rem
| "-v8" :: rem ->
Printf.eprintf "warning: option -v8 deprecated";
parse (op,fl) rem
| "-echo" :: rem -> echo := true ; parse (op,fl) rem
| ("-cclib"|"-ccopt"|"-I"|"-o"|"-w" as o) :: rem' ->
begin
match rem' with
| a :: rem -> parse (a::o::op,fl) rem
| [] -> usage ()
end
| "-R" :: a :: rem ->
parse ((List.rev(List.flatten (List.map (fun d -> ["-I";d])
(all_subdirs a))))@op,fl) rem
| "-R" :: [] -> usage ()
| ("-noassert"|"-compact"|"-g"|"-p"|"-thread"|"-dtypes" as o) :: rem ->
parse (o::op,fl) rem
| ("-h"|"--help") :: _ -> usage ()
| f :: rem ->
if Filename.check_suffix f ".ml"
or Filename.check_suffix f ".cmx"
or Filename.check_suffix f ".cmo"
or Filename.check_suffix f ".cmxa"
or Filename.check_suffix f ".cma" then
parse (op,f::fl) rem
else begin
prerr_endline ("Don't know what to do with " ^ f);
exit 1
end
in
parse ([Coq_config.osdeplibs],[]) (List.tl (Array.to_list Sys.argv))
let clean file =
let rm f = if Sys.file_exists f then Sys.remove f in
let basename = Filename.chop_suffix file ".ml" in
if not !echo then begin
rm file;
rm (basename ^ ".o");
rm (basename ^ ".cmi");
rm (basename ^ ".cmo");
rm (basename ^ ".cmx")
end
Creates another temporary file for Dynlink if needed
let tmp_dynlink()=
let tmp = Filename.temp_file "coqdynlink" ".ml" in
let _ = Sys.command ("echo \"Dynlink.init();;\" > "^tmp) in
tmp
(* Initializes the kind of loading in the main program *)
let declare_loading_string () =
if not !top then
"Mltop.remove ();;"
else
"let ppf = Format.std_formatter;;
Mltop.set_top
{Mltop.load_obj=Topdirs.dir_load ppf;
Mltop.use_file=Topdirs.dir_use ppf;
Mltop.add_dir=Topdirs.dir_directory;
Mltop.ml_loop=(fun () -> Toploop.loop ppf) };;\n"
(* create a temporary main file to link *)
let create_tmp_main_file modules =
let main_name = Filename.temp_file "coqmain" ".ml" in
let oc = open_out main_name in
try
(* Add the pre-linked modules *)
output_string oc "List.iter Mltop.add_known_module [\"";
output_string oc (String.concat "\";\"" modules);
output_string oc "\"];;\n";
(* Initializes the kind of loading *)
output_string oc (declare_loading_string());
Start the right toplevel loop : Coq or Coq_searchisos
if !searchisos then
output_string oc "Cmd_searchisos_line.start();;\n"
else if !coqide then
output_string oc "Coqide.start();;\n"
else
output_string oc "Coqtop.start();;\n";
close_out oc;
main_name
with e ->
clean main_name; raise e
(* main part *)
let main () =
let (options, userfiles) = parse_args () in
(* which ocaml command to invoke *)
let camlbin = Envars.camlbin () in
let prog =
if !opt then begin
(* native code *)
if !top then failwith "no custom toplevel in native code !";
let ocamloptexec = Filename.concat camlbin "ocamlopt" in
ocamloptexec^" -linkall"
end else
(* bytecode (we shunt ocamlmktop script which fails on win32) *)
let ocamlmktoplib = " toplevellib.cma" in
let ocamlcexec = Filename.concat camlbin "ocamlc" in
let ocamlccustom = Printf.sprintf "%s %s -linkall "
ocamlcexec Coq_config.coqrunbyteflags in
(if !top then ocamlccustom^ocamlmktoplib else ocamlccustom)
in
(* files to link *)
let (modules, tolink) = files_to_link userfiles in
(*file for dynlink *)
let dynlink=
if not (!opt || !top) then
[tmp_dynlink()]
else
[]
in
(* the list of the loaded modules *)
let main_file = create_tmp_main_file modules in
try
let args =
options @ (includes ()) @ copts @ tolink @ dynlink @ [ main_file ] in
(* add topstart.cmo explicitly because we shunted ocamlmktop wrapper *)
let args = if !top then args @ [ "topstart.cmo" ] else args in
(* Now, with the .cma, we MUST use the -linkall option *)
let command = String.concat " " (prog::"-rectypes"::args) in
if !echo then
begin
print_endline command;
print_endline
("(command length is " ^
(string_of_int (String.length command)) ^ " characters)");
flush Pervasives.stdout
end;
let retcode = Sys.command command in
clean main_file;
command gives the exit code in HSB , and signal in LSB ! ! !
if retcode > 255 then retcode lsr 8 else retcode
with e ->
clean main_file; raise e
let retcode =
try Printexc.print main () with _ -> 1
let _ = exit retcode
| null | https://raw.githubusercontent.com/mzp/coq-ruby/99b9f87c4397f705d1210702416176b13f8769c1/scripts/coqmktop.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
Objects to link
environment
Build the list of files to link and the list of modules names
Gives the list of all the directories under [dir].
Uses [Unix] (it is hard to do without it).
usage
parsing of the command line
Initializes the kind of loading in the main program
create a temporary main file to link
Add the pre-linked modules
Initializes the kind of loading
main part
which ocaml command to invoke
native code
bytecode (we shunt ocamlmktop script which fails on win32)
files to link
file for dynlink
the list of the loaded modules
add topstart.cmo explicitly because we shunted ocamlmktop wrapper
Now, with the .cma, we MUST use the -linkall option | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
$ I d : coqmktop.ml 12874 2010 - 03 - 19 23:15:52Z herbelin $
coqmktop is a script to link Coq , analogous to ocamlmktop .
The command line contains options specific to coqmktop , options for the
Ocaml linker and files to link ( in addition to the default Coq files ) .
The command line contains options specific to coqmktop, options for the
Ocaml linker and files to link (in addition to the default Coq files). *)
open Unix
1 . Core objects
let ocamlobjs = ["str.cma";"unix.cma";"nums.cma"]
let dynobjs = ["dynlink.cma"]
let camlp4objs = ["gramlib.cma"]
let libobjs = ocamlobjs @ camlp4objs
let spaces = Str.regexp "[ \t\n]+"
let split_list l = Str.split spaces l
let copts = split_list Tolink.copts
let core_objs = split_list Tolink.core_objs
let core_libs = split_list Tolink.core_libs
let ide = split_list Tolink.ide
3 . Toplevel objects
let camlp4topobjs =
if Coq_config.camlp4 = "camlp5" then
["camlp5_top.cma"; "pa_o.cmo"; "pa_extend.cmo"]
else
["camlp4_top.cma"; "pa_o.cmo"; "pa_op.cmo"; "pa_extend.cmo"]
let topobjs = camlp4topobjs
let gramobjs = []
let notopobjs = gramobjs
4 . High - level tactics objects
let opt = ref false
let full = ref false
let top = ref false
let searchisos = ref false
let coqide = ref false
let echo = ref false
let src_dirs () =
[ []; ["kernel";"byterun"]; [ "config" ]; [ "toplevel" ] ] @
if !coqide then [[ "ide" ]] else []
let includes () =
let coqlib = Envars.coqlib () in
let camlp4lib = Envars.camlp4lib () in
List.fold_right
(fun d l -> "-I" :: ("\"" ^ List.fold_left Filename.concat coqlib d ^ "\"") :: l)
(src_dirs ())
(["-I"; "\"" ^ camlp4lib ^ "\""] @
["-I"; "\"" ^ coqlib ^ "\""] @
(if !coqide then ["-thread"; "-I"; "+lablgtk2"] else []))
Transform bytecode object file names in native object file names
let native_suffix f =
if Filename.check_suffix f ".cmo" then
(Filename.chop_suffix f ".cmo") ^ ".cmx"
else if Filename.check_suffix f ".cma" then
(Filename.chop_suffix f ".cma") ^ ".cmxa"
else
if Filename.check_suffix f ".a" then f
else
failwith ("File "^f^" has not extension .cmo, .cma or .a")
Transforms a file name in the corresponding Caml module name .
let rem_ext_regexpr = Str.regexp "\\(.*\\)\\.\\(cm..?\\|ml\\)"
let module_of_file name =
let s = Str.replace_first rem_ext_regexpr "\\1" (Filename.basename name) in
String.capitalize s
let files_to_link userfiles =
let dyn_objs =
if not !opt || Coq_config.has_natdynlink then dynobjs else [] in
let toplevel_objs =
if !top then topobjs else if !opt then notopobjs else [] in
let ide_objs = if !coqide then
"threads.cma"::"lablgtk.cma"::"gtkThread.cmo"::ide
else []
in
let ide_libs = if !coqide then
["threads.cma" ; "lablgtk.cma" ; "gtkThread.cmo" ;
"ide/ide.cma" ]
else []
in
let objs = dyn_objs @ libobjs @ core_objs @ toplevel_objs @ ide_objs
and libs = dyn_objs @ libobjs @ core_libs @ toplevel_objs @ ide_libs in
let objstolink,libstolink =
if !opt then
((List.map native_suffix objs) @ userfiles,
(List.map native_suffix libs) @ userfiles)
else
(objs @ userfiles, libs @ userfiles )
in
let modules = List.map module_of_file objstolink in
(modules, libstolink)
let all_subdirs dir =
let l = ref [dir] in
let add f = l := f :: !l in
let rec traverse dir =
let dirh =
try opendir dir with Unix_error _ -> invalid_arg "all_subdirs"
in
try
while true do
let f = readdir dirh in
if f <> "." && f <> ".." then
let file = Filename.concat dir f in
if (stat file).st_kind = S_DIR then begin
add file;
traverse file
end
done
with End_of_file ->
closedir dirh
in
traverse dir; List.rev !l
let usage () =
prerr_endline "Usage: coqmktop <options> <ocaml options> files
Flags.are:
-coqlib dir Specify where the Coq object files are
-camlbin dir Specify where the OCaml binaries are
-camlp4bin dir Specify where the CAmp4/5 binaries are
-o exec-file Specify the name of the resulting toplevel
-boot Run in boot mode
-opt Compile in native code
-full Link high level tactics
-top Build Coq on a ocaml toplevel (incompatible with -opt)
-searchisos Build a toplevel for SearchIsos
-ide Build a toplevel for the Coq IDE
-R dir Specify recursively directories for Ocaml\n";
exit 1
(* [parse_args ()] splits the command line into (options, files):
   [options] are flags forwarded to the OCaml compiler (with -R
   directories expanded into -I flags), [files] are the user
   object/source files to link.  Recognized coqmktop flags mutate the
   corresponding global refs.  Calls [usage] on malformed input. *)
let parse_args () =
  let rec parse (op,fl) = function
    | [] -> List.rev op, List.rev fl
    | "-coqlib" :: d :: rem ->
        Flags.coqlib_spec := true; Flags.coqlib := d ; parse (op,fl) rem
    | "-coqlib" :: _ -> usage ()
    | "-camlbin" :: d :: rem ->
        Flags.camlbin_spec := true; Flags.camlbin := d ; parse (op,fl) rem
    | "-camlbin" :: _ -> usage ()
    | "-camlp4bin" :: d :: rem ->
        Flags.camlp4bin_spec := true; Flags.camlp4bin := d ; parse (op,fl) rem
    | "-camlp4bin" :: _ -> usage ()
    | "-boot" :: rem -> Flags.boot := true; parse (op,fl) rem
    | "-opt" :: rem -> opt := true ; parse (op,fl) rem
    | "-full" :: rem -> full := true ; parse (op,fl) rem
    | "-top" :: rem -> top := true ; parse (op,fl) rem
    | "-ide" :: rem ->
        coqide := true; parse (op,fl) rem
    | "-v8" :: rem ->
        Printf.eprintf "warning: option -v8 deprecated";
        parse (op,fl) rem
    | "-echo" :: rem -> echo := true ; parse (op,fl) rem
    (* Options that consume the following argument are forwarded as-is. *)
    | ("-cclib"|"-ccopt"|"-I"|"-o"|"-w" as o) :: rem' ->
        begin
          match rem' with
          | a :: rem -> parse (a::o::op,fl) rem
          | [] -> usage ()
        end
    (* -R dir: recursively add every subdirectory of [dir] as -I. *)
    | "-R" :: a :: rem ->
        parse ((List.rev(List.flatten (List.map (fun d -> ["-I";d])
                                         (all_subdirs a))))@op,fl) rem
    | "-R" :: [] -> usage ()
    | ("-noassert"|"-compact"|"-g"|"-p"|"-thread"|"-dtypes" as o) :: rem ->
        parse (o::op,fl) rem
    | ("-h"|"--help") :: _ -> usage ()
    | f :: rem ->
        (* [or] is a deprecated alias of [||]; use the standard form. *)
        if Filename.check_suffix f ".ml"
           || Filename.check_suffix f ".cmx"
           || Filename.check_suffix f ".cmo"
           || Filename.check_suffix f ".cmxa"
           || Filename.check_suffix f ".cma" then
          parse (op,f::fl) rem
        else begin
          prerr_endline ("Don't know what to do with " ^ f);
          exit 1
        end
  in
  parse ([Coq_config.osdeplibs],[]) (List.tl (Array.to_list Sys.argv))
(* [clean file] deletes the temporary .ml file and every compilation
   artifact derived from it, unless -echo was given (in which case the
   files are kept so the user can inspect or replay the command).
   [file] is expected to end in ".ml". *)
let clean file =
  let base = Filename.chop_suffix file ".ml" in
  let remove_if_present f = if Sys.file_exists f then Sys.remove f in
  if not !echo then
    List.iter remove_if_present
      (file :: List.map (fun ext -> base ^ ext) [".o"; ".cmi"; ".cmo"; ".cmx"])
(* Creates another temporary file for Dynlink if needed *)
(* [tmp_dynlink ()] creates a temporary .ml file that initializes
   Dynlink and returns its path.  The file is written directly rather
   than through [Sys.command "echo ..."]: the shell round-trip was
   unportable and its exit status was silently ignored. *)
let tmp_dynlink () =
  let tmp = Filename.temp_file "coqdynlink" ".ml" in
  let oc = open_out tmp in
  output_string oc "Dynlink.init();;\n";
  close_out oc;
  tmp
(* [declare_loading_string ()] returns the OCaml source fragment,
   spliced into the generated main file, that configures Mltop:
   without -top the toplevel hooks are removed; with -top they are
   wired to the Topdirs/Toploop directives of the interactive
   toplevel.  NOTE(review): the exact bytes of the second literal end
   up in generated code — do not reformat it. *)
let declare_loading_string () =
  if not !top then
    "Mltop.remove ();;"
  else
    "let ppf = Format.std_formatter;;
     Mltop.set_top
       {Mltop.load_obj=Topdirs.dir_load ppf;
        Mltop.use_file=Topdirs.dir_use ppf;
        Mltop.add_dir=Topdirs.dir_directory;
        Mltop.ml_loop=(fun () -> Toploop.loop ppf) };;\n"
(* [create_tmp_main_file modules] generates the temporary main .ml
   file of the custom toplevel: it registers [modules] with Mltop,
   installs the toplevel hooks, and starts the appropriate main loop.
   Returns the file name; the file is cleaned up on error.
   (Restores a comment whose markers were lost, which made the
   function syntactically invalid.) *)
let create_tmp_main_file modules =
  let main_name = Filename.temp_file "coqmain" ".ml" in
  let oc = open_out main_name in
  try
    (* Register every linked module as "known" to Mltop. *)
    output_string oc "List.iter Mltop.add_known_module [\"";
    output_string oc (String.concat "\";\"" modules);
    output_string oc "\"];;\n";
    output_string oc (declare_loading_string());
    (* Start the right toplevel loop: Coq or Coq_searchisos *)
    if !searchisos then
      output_string oc "Cmd_searchisos_line.start();;\n"
    else if !coqide then
      output_string oc "Coqide.start();;\n"
    else
      output_string oc "Coqtop.start();;\n";
    close_out oc;
    main_name
  with e ->
    clean main_name; raise e
(* Builds the custom toplevel: parses the command line, selects the
   linker (ocamlopt with -opt, ocamlc otherwise), generates the
   temporary main file, runs the link command, and returns its exit
   code.  (Restores a comment whose markers were lost, which made the
   function syntactically invalid.) *)
let main () =
  let (options, userfiles) = parse_args () in
  let camlbin = Envars.camlbin () in
  let prog =
    if !opt then begin
      if !top then failwith "no custom toplevel in native code !";
      let ocamloptexec = Filename.concat camlbin "ocamlopt" in
      ocamloptexec^" -linkall"
    end else
      (* In bytecode an -top build additionally links toplevellib. *)
      let ocamlmktoplib = " toplevellib.cma" in
      let ocamlcexec = Filename.concat camlbin "ocamlc" in
      let ocamlccustom = Printf.sprintf "%s %s -linkall "
        ocamlcexec Coq_config.coqrunbyteflags in
      (if !top then ocamlccustom^ocamlmktoplib else ocamlccustom)
  in
  let (modules, tolink) = files_to_link userfiles in
  let dynlink=
    if not (!opt || !top) then
      [tmp_dynlink()]
    else
      []
  in
  let main_file = create_tmp_main_file modules in
  try
    let args =
      options @ (includes ()) @ copts @ tolink @ dynlink @ [ main_file ] in
    let args = if !top then args @ [ "topstart.cmo" ] else args in
    let command = String.concat " " (prog::"-rectypes"::args) in
    if !echo then
      begin
        print_endline command;
        print_endline
          ("(command length is " ^
           (string_of_int (String.length command)) ^ " characters)");
        flush Pervasives.stdout
      end;
    let retcode = Sys.command command in
    clean main_file;
    (* command gives the exit code in HSB, and signal in LSB !!! *)
    if retcode > 255 then retcode lsr 8 else retcode
  with e ->
    clean main_file; raise e
(* Entry point: run [main], printing any uncaught exception via
   [Printexc.print]; any failure maps to exit status 1, otherwise the
   link command's status is propagated to the shell. *)
let retcode =
  try Printexc.print main () with _ -> 1

let _ = exit retcode
|
83e888939a381c2d449adfbcd2ffdf90035861d13f84242423b7ad0f9b1833c3 | walmartlabs/lacinia | documentation_test.clj | Copyright ( c ) 2017 - present Walmart , Inc.
;
Licensed under the Apache License , Version 2.0 ( the " License " )
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
; -2.0
;
; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
(ns com.walmartlabs.lacinia.documentation-test
"Tests that documentation for fields and field args can be inhertited from interfaces."
(:require
[clojure.test :refer [deftest is]]
[com.walmartlabs.test-utils :as utils]))
;; Schema compiled once for the whole namespace; the EDN fixture
;; declares the interfaces/objects exercising description inheritance.
(def ^:private schema
  (utils/compile-schema "doc-inheritance-schema.edn" {}))
(defn ^:private q
  "Executes a GraphQL query string against the compiled test schema."
  [query-string]
  (utils/execute schema query-string))
;; A field declared only on the interface (sierra/bravo on ex1)
;; inherits the interface's description; a field re-declared on the
;; object (ex2) keeps its own description.
(deftest field-description-may-inherit-from-interface
  (is (= {:data
          {:ex1
           {:fields
            [{:description "ex1/alpha"
              :name "alpha"}
             {:description "sierra/bravo"
              :name "bravo"}]}
           :ex2
           {:fields
            [{:description "ex2/alpha"
              :name "alpha"}
             {:description "ex2/bravo"
              :name "bravo"}]}}}
         (q "
{ ex1: __type (name: \"ex1\") {
    fields {
      name
      description
    }
  }
  ex2: __type(name: \"ex2\") {
    fields {
      name
      description
    }
  }
}"))))
;; A field argument's description may likewise be inherited from the
;; interface (tango/charlie/echo) when the object does not re-declare it.
(deftest arg-description-may-inherit-from-interface
  (is (= {:data
          {:ex3
           {:fields
            [{:args
              [{:description "ex3/delta"
                :name "delta"}
               {:description "tango/charlie/echo"
                :name "echo"}]}]}}}
         (q "
{ ex3: __type(name: \"ex3\") {
    fields {
      args {
        name
        description
      }
    }
  }
}"))))
| null | https://raw.githubusercontent.com/walmartlabs/lacinia/88bf46f197bfed645d7767fcfe2bfa39e8b00b27/test/com/walmartlabs/lacinia/documentation_test.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright ( c ) 2017 - present Walmart , Inc.
Licensed under the Apache License , Version 2.0 ( the " License " )
distributed under the License is distributed on an " AS IS " BASIS ,
(ns com.walmartlabs.lacinia.documentation-test
"Tests that documentation for fields and field args can be inhertited from interfaces."
(:require
[clojure.test :refer [deftest is]]
[com.walmartlabs.test-utils :as utils]))
(def ^:private schema
(utils/compile-schema "doc-inheritance-schema.edn" {}))
(defn ^:private q [query]
(utils/execute schema query))
(deftest field-description-may-inherit-from-interface
(is (= {:data
{:ex1
{:fields
[{:description "ex1/alpha"
:name "alpha"}
{:description "sierra/bravo"
:name "bravo"}]}
:ex2
{:fields
[{:description "ex2/alpha"
:name "alpha"}
{:description "ex2/bravo"
:name "bravo"}]}}}
(q "
{ ex1: __type (name: \"ex1\") {
fields {
name
description
}
}
ex2: __type(name: \"ex2\") {
fields {
name
description
}
}
}"))))
(deftest arg-description-may-inherit-from-interface
(is (= {:data
{:ex3
{:fields
[{:args
[{:description "ex3/delta"
:name "delta"}
{:description "tango/charlie/echo"
:name "echo"}]}]}}}
(q "
{ ex3: __type(name: \"ex3\") {
fields {
args {
name
description
}
}
}
}"))))
|
219eee00326375427636005c5ffc63f6db150ce88e8158540101a7bbf8bf1bb8 | alex-gutev/tridash | parser.lisp | ;;;; parser.lisp
;;;;
;;;; Tridash Programming Language.
;;;; Copyright (C) 2018-2021
;;;;
;;;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;;; (at your option) any later version.
;;;;
;;;; This program is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;;; GNU General Public License for more details.
;;;;
;;;; You should have received a copy of the GNU General Public License
;;;; along with this program. If not, see </>.
;;;; Unit tests for parser
(defpackage :tridash/test.parser
(:use :generic-cl
:alexandria
:anaphora
:arrows
:iterate
:optima
:fiveam
:tridash.parser
:tridash.frontend
:tridash/test
:tridash/test.util)
(:shadowing-import-from :generic-cl
:emptyp
:multiply
:accumulate)
(:shadowing-import-from :fiveam :fail)
(:import-from :lol :defmacro!)
(:import-from :tridash.parser
:declaration-parse-error)
(:import-from :tridash.frontend
:+infix-operators+))
(in-package :tridash/test.parser)
(named-readtables:in-readtable :interpol-syntax)
;;; Test Suite Definition
;; All parser tests live in the PARSER suite, nested under FRONTEND;
;; IN-SUITE makes the TEST forms below register in it.
(def-suite parser
  :description "Test parser."
  :in frontend)

(in-suite parser)
;;; Test Utilities
(defmacro! test-parser ((o!string &optional (o!operators '+infix-operators+)) &body declarations)
  "Checks that the node declarations parsed from STRING are equal to
DECLS. OPERATORS is the operator table to pass to the parser."
  `(with-input-from-string (,g!in ,g!string)
     (let ((,g!parser (make-parser ,g!in)))
       ;; Each call of the parser closure yields the next declaration;
       ;; compare each against the corresponding expected form.
       ,@(loop
            for expected in (declaration-symbols declarations)
            collect `(is (decl= ',expected (funcall ,g!parser ,g!operators))))
       ;; After all expected declarations the parser must report EOF (NIL).
       (let ((,g!last (funcall ,g!parser ,g!operators)))
         (is-false ,g!last "Expected EOF. Got: ~a" ,g!last)))))
(defun test-parse-error (string &optional (operators +infix-operators+))
  "Tests that parsing a single declaration from STRING results in a
parse error. OPERATORS is the operator table to pass to the
parser."
  (with-input-from-string (in string)
    (let ((parser (make-parser in)))
      ;; Only the first declaration is requested; DECLARATION-PARSE-ERROR
      ;; must be signalled before any declaration is produced.
      (signals declaration-parse-error (funcall parser operators)
               "Expected a parse error when parsing: ~a"
               string))))
;;; Node Expression Tests
;; let ((*package* (find-package :tridash.symbols)))
(test atom-nodes
"Test expressions consisting of just atom nodes."
(test-parser
(#?"node1; node2\nnode3")
!|node1| !|node2| !|node3|))
(test functor-nodes-multiple-arguments
"Test expressions consisting of functor nodes of multiple arguments."
(test-parser
(#?"fn(arg1, arg2)\narg")
(!|fn| !|arg1| !|arg2|)
!|arg|))
(test functor-nodes-single-argument
"Test expressions consisting of functor nodes of a single argument."
(test-parser
(#?"fn(arg1)\narg")
(!|fn| !|arg1|)
!|arg|))
(test functor-nodes-no-arguments
"Test expressions consisting of functor nodes of no arguments."
(test-parser
(#?"fn()\narg")
(!|fn|)
!|arg|))
;;; Infix Operator Tests
;; Operator table used by the infix/prefix tests.  Each entry maps an
;; operator (tagged :INFIX or :PREFIX) to its precedence and, for
;; infix operators, its associativity.
(defconstant +arithmetic-operators+
  (alist-hash-map
   `(((:infix ,(s "->")) 10 :right)
     ((:infix ,(s "+")) 20 :left)
     ((:infix ,(s "-")) 20 :left)
     ((:infix ,(s "*")) 50 :right)
     ((:infix ,(s "/")) 50 :left)
     ((:prefix ,(s "-")) 70)
     ((:prefix ,(s "^")) 15)
     (:open-paren 200)
     ((:infix ,(s ".")) 500 :left)
     ((:prefix ,(s "$")) 800))))
(test infix-operator-precedence
"Test infix operator expressions with multiple operators of varying precedence."
(test-parser
(#?"a + b * \nc - e / d" +arithmetic-operators+)
(!-
(!+ !\a (!* !\b !\c))
(!/ !\e !\d))))
(test infix-operator-parenthesis
"Test grouping using parenthesis in infix operator expressions."
(test-parser
(#?"(a + b\n) * (c - e / d)" +arithmetic-operators+)
(!*
(!+ !\a !\b)
(!- !\c (!/ !\e !\d)))))
(test infix-operator-left-associativity
"Test infix operator left associativity."
(test-parser
(#?"a - b - \nc - d" +arithmetic-operators+)
(!-
(!- (!- !\a !\b) !\c)
!\d)))
(test infix-operator-right-associativity
"Test infix operator right associativity."
(test-parser
(#?"a -> b -> \n c -> d\n" +arithmetic-operators+)
(!-> !\a (!-> !\b (!-> !\c !\d)))))
(test infix-operator-and-functors
"Test multi-line infix operator expressions with functor nodes."
(test-parser
(#?"mod.fn(arg1, arg2) + fn2(arg) -\n node1 * node2" +arithmetic-operators+)
(!-
(!+
((!\. !|mod| !|fn|) !|arg1| !|arg2|)
(!|fn2| !|arg|))
(!* !|node1| !|node2|))))
(test prefix-operator-high-precedence
"Test expressions prefix operator with higher precedence than infix operator"
(test-parser
(#?"- a * b + - c" +arithmetic-operators+)
(!+
(!* (!- !\a) !\b)
(!- !\c))))
(test prefix-operator-low-precedence
"Test expressions prefix operator with lower precedence than infix operator"
(test-parser
(#?"- a.b * - c.d" +arithmetic-operators+)
(!*
(!- (!. !\a !\b))
(!- (!. !\c !\d)))))
(test prefix-operator-high-infix-precedence
"Test prefix operator expression within infix expression with high precedence"
(test-parser
(#?"^ a + b * ^ c + d" +arithmetic-operators+)
(!^
(!+
(!+ !\a
(!* !\b (!^ !\c)))
!\d))))
(test prefix-operator-line-term
"Test prefix operator before line terminator"
(test-parser
(#?"a + ^\nb" +arithmetic-operators+)
(!+ !\a !^)
!\b))
(test prefix-operator-no-line-term
"Test prefix operator before newline (not declaration terminator)"
(test-parser
(#?"(a + ^\nb) * c" +arithmetic-operators+)
(!*
(!+ !\a (!^ !\b))
!\c)))
(test prefix-operator-parenthesis
"Test prefix operator in parenthesis expressions"
(test-parser
("-(a + b) * c" +arithmetic-operators+)
(!*
(!- (!+ !\a !\b))
!\c)))
(test prefix-operator-atom
"Test prefix operator treated as atom node"
(test-parser
("a + (^) - b" +arithmetic-operators+)
(!- (!+ !\a !^) !\b)))
(test prefix-operator-infix-left-associativity
"Test prefix operator in infix expression with left associativity"
(test-parser
("a - ^ b - c" +arithmetic-operators+)
(!- !\a (!^ (!- !\b !\c)))))
(test prefix-operator-and-functors
"Test prefix operator in expression with functors"
(test-parser
(#?"- mod.fn(arg1, arg2) + - fn2(arg) - \n $ fn3(a1, a2, a3)" +arithmetic-operators+)
(!-
(!+ (!- ((!. !|mod| !|fn|) !|arg1| !|arg2|))
(!- (!|fn2| !|arg|)))
((!$ !|fn3|) !\a1 !\a2 !\a3))))
;;; Node List Test
(test node-lists
"Test node lists."
(test-parser
(#?"fn(a, b) : {a;b\nc\n\nd}"
(alist-hash-map
`(((:infix ,(s ":")) 10 :right)
(:open-paren 200))))
(!\:
(!|fn| !\a !\b)
(!|/prog| !\a !\b !\c !\d))))
;;; Test Literals
(test mixed-literals
"Test expressions with of mixed literal types."
(let ((ops (alist-hash-map
`(((:infix ,(s "+")) 20 :left)
((:infix ,(s "-")) 20 :left)
((:infix ,(s "*")) 50 :right)
((:infix ,(s "/")) 50 :left)
(:open-paren 200)))))
(test-parser
(#?"a + 1 + 2.3 -\"hello\" " ops)
(!- (!+ (!+ !\a 1) 2.3) "hello"))))
(test string-line-feed-tab-escape
"Test strings with line feed and tab character escape sequences."
(test-parser
(" \"Hello\\n\\tWorld\" ")
#?"Hello\n\tWorld"))
(test string-carriage-return-line-feed-escape
"Test strings with carriage return and line feed escape sequences."
(test-parser
(" \"Hello\\r\\nWorld\" ")
#?"Hello\r\nWorld"))
(test string-escape-quotes-backslashes
"Test strings with escaped quotes and backslashes."
(test-parser
(" \"He\\\\she said \\\"Hello World\\\"\" ")
"He\\she said \"Hello World\""))
(test string-unicode-escape
"Test strings with unicode escape sequences"
(test-parser
(" \"x\\u{2265}5\" ")
#?"x\x{2265}5"))
(test string-unicode-escape-unclosed
"Test strings with unclosed unicode escape sequences"
(test-parser
(" \"x \\u{2265 5\" ")
#?"x \x{2265} 5"))
(test string-escape-u-1
"Test strings with escaped `u` character with unicode character code."
(test-parser
(" \"x \\ux1\" ")
"x ux1"))
(test string-escape-u-2
"Test strings with escaped `u` character with unicode character code."
(test-parser
(" \"x \\u10\" ")
"x u10"))
;;; Parse Error Tests
(test infix-operator-errors
"Test parse errors involving malformed infix operator expressions."
(test-parse-error "a + b")
(test-parse-error "a +")
(test-parse-error "a ->")
(test-parse-error #?"a.\n")
(test-parse-error #?"a.;"))
(test functor-errors
"Test parse errors involving malformed functor expressions."
(test-parse-error "fn(a,b")
(test-parse-error "fn(a,b,)")
(test-parse-error "fn(a,b))"))
(test parenthesis-errors
"Test parse errors involving malformed parenthesis expressions."
(test-parse-error "(a -> b")
(test-parse-error "a -> b)")
(test-parse-error "(a -> b))")
(test-parse-error "((a -> b)")
(test-parse-error "(a , b)")
(test-parse-error "{a b}"))
(test node-list-errors
"Test parse errors involving node lists."
(test-parse-error #?"{a -> b; c\nd")
(test-parse-error #?"a -> b}")
(test-parse-error #?"{a -> b; c\nd}}")
(test-parse-error #?"{{a -> b;c\nd}"))
| null | https://raw.githubusercontent.com/alex-gutev/tridash/1d6163f8bda3908e8a2f197245b07511efa68e78/test/parser.lisp | lisp | parser.lisp
Tridash Programming Language.
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
Unit tests for parser
Test Suite Definition
let ((*package* (find-package :tridash.symbols)))
Infix Operator Tests
Node List Test
Parse Error Tests | Copyright ( C ) 2018 - 2021
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(defpackage :tridash/test.parser
(:use :generic-cl
:alexandria
:anaphora
:arrows
:iterate
:optima
:fiveam
:tridash.parser
:tridash.frontend
:tridash/test
:tridash/test.util)
(:shadowing-import-from :generic-cl
:emptyp
:multiply
:accumulate)
(:shadowing-import-from :fiveam :fail)
(:import-from :lol :defmacro!)
(:import-from :tridash.parser
:declaration-parse-error)
(:import-from :tridash.frontend
:+infix-operators+))
(in-package :tridash/test.parser)
(named-readtables:in-readtable :interpol-syntax)
(def-suite parser
:description "Test parser."
:in frontend)
(in-suite parser)
Test Utilities
(defmacro! test-parser ((o!string &optional (o!operators '+infix-operators+)) &body declarations)
"Checks that the node declarations parsed from STRING are equal to
DECLS. OPERATORS is the operator table to pass to the parser."
`(with-input-from-string (,g!in ,g!string)
(let ((,g!parser (make-parser ,g!in)))
,@(loop
for expected in (declaration-symbols declarations)
collect `(is (decl= ',expected (funcall ,g!parser ,g!operators))))
(let ((,g!last (funcall ,g!parser ,g!operators)))
(is-false ,g!last "Expected EOF. Got: ~a" ,g!last)))))
(defun test-parse-error (string &optional (operators +infix-operators+))
"Tests that parsing a single declaration from STRING results in a
parse error. OPERATORS is the operator table to pass to the
parser."
(with-input-from-string (in string)
(let ((parser (make-parser in)))
(signals declaration-parse-error (funcall parser operators)
"Expected a parse error when parsing: ~a"
string))))
Node Expression Tests
(test atom-nodes
"Test expressions consisting of just atom nodes."
(test-parser
(#?"node1; node2\nnode3")
!|node1| !|node2| !|node3|))
(test functor-nodes-multiple-arguments
"Test expressions consisting of functor nodes of multiple arguments."
(test-parser
(#?"fn(arg1, arg2)\narg")
(!|fn| !|arg1| !|arg2|)
!|arg|))
(test functor-nodes-single-argument
"Test expressions consisting of functor nodes of a single argument."
(test-parser
(#?"fn(arg1)\narg")
(!|fn| !|arg1|)
!|arg|))
(test functor-nodes-no-arguments
"Test expressions consisting of functor nodes of no arguments."
(test-parser
(#?"fn()\narg")
(!|fn|)
!|arg|))
(defconstant +arithmetic-operators+
(alist-hash-map
`(((:infix ,(s "->")) 10 :right)
((:infix ,(s "+")) 20 :left)
((:infix ,(s "-")) 20 :left)
((:infix ,(s "*")) 50 :right)
((:infix ,(s "/")) 50 :left)
((:prefix ,(s "-")) 70)
((:prefix ,(s "^")) 15)
(:open-paren 200)
((:infix ,(s ".")) 500 :left)
((:prefix ,(s "$")) 800))))
(test infix-operator-precedence
"Test infix operator expressions with multiple operators of varying precedence."
(test-parser
(#?"a + b * \nc - e / d" +arithmetic-operators+)
(!-
(!+ !\a (!* !\b !\c))
(!/ !\e !\d))))
(test infix-operator-parenthesis
"Test grouping using parenthesis in infix operator expressions."
(test-parser
(#?"(a + b\n) * (c - e / d)" +arithmetic-operators+)
(!*
(!+ !\a !\b)
(!- !\c (!/ !\e !\d)))))
(test infix-operator-left-associativity
"Test infix operator left associativity."
(test-parser
(#?"a - b - \nc - d" +arithmetic-operators+)
(!-
(!- (!- !\a !\b) !\c)
!\d)))
(test infix-operator-right-associativity
"Test infix operator right associativity."
(test-parser
(#?"a -> b -> \n c -> d\n" +arithmetic-operators+)
(!-> !\a (!-> !\b (!-> !\c !\d)))))
(test infix-operator-and-functors
"Test multi-line infix operator expressions with functor nodes."
(test-parser
(#?"mod.fn(arg1, arg2) + fn2(arg) -\n node1 * node2" +arithmetic-operators+)
(!-
(!+
((!\. !|mod| !|fn|) !|arg1| !|arg2|)
(!|fn2| !|arg|))
(!* !|node1| !|node2|))))
(test prefix-operator-high-precedence
"Test expressions prefix operator with higher precedence than infix operator"
(test-parser
(#?"- a * b + - c" +arithmetic-operators+)
(!+
(!* (!- !\a) !\b)
(!- !\c))))
(test prefix-operator-low-precedence
"Test expressions prefix operator with lower precedence than infix operator"
(test-parser
(#?"- a.b * - c.d" +arithmetic-operators+)
(!*
(!- (!. !\a !\b))
(!- (!. !\c !\d)))))
(test prefix-operator-high-infix-precedence
"Test prefix operator expression within infix expression with high precedence"
(test-parser
(#?"^ a + b * ^ c + d" +arithmetic-operators+)
(!^
(!+
(!+ !\a
(!* !\b (!^ !\c)))
!\d))))
(test prefix-operator-line-term
"Test prefix operator before line terminator"
(test-parser
(#?"a + ^\nb" +arithmetic-operators+)
(!+ !\a !^)
!\b))
(test prefix-operator-no-line-term
"Test prefix operator before newline (not declaration terminator)"
(test-parser
(#?"(a + ^\nb) * c" +arithmetic-operators+)
(!*
(!+ !\a (!^ !\b))
!\c)))
(test prefix-operator-parenthesis
"Test prefix operator in parenthesis expressions"
(test-parser
("-(a + b) * c" +arithmetic-operators+)
(!*
(!- (!+ !\a !\b))
!\c)))
(test prefix-operator-atom
"Test prefix operator treated as atom node"
(test-parser
("a + (^) - b" +arithmetic-operators+)
(!- (!+ !\a !^) !\b)))
(test prefix-operator-infix-left-associativity
"Test prefix operator in infix expression with left associativity"
(test-parser
("a - ^ b - c" +arithmetic-operators+)
(!- !\a (!^ (!- !\b !\c)))))
(test prefix-operator-and-functors
"Test prefix operator in expression with functors"
(test-parser
(#?"- mod.fn(arg1, arg2) + - fn2(arg) - \n $ fn3(a1, a2, a3)" +arithmetic-operators+)
(!-
(!+ (!- ((!. !|mod| !|fn|) !|arg1| !|arg2|))
(!- (!|fn2| !|arg|)))
((!$ !|fn3|) !\a1 !\a2 !\a3))))
(test node-lists
"Test node lists."
(test-parser
(#?"fn(a, b) : {a;b\nc\n\nd}"
(alist-hash-map
`(((:infix ,(s ":")) 10 :right)
(:open-paren 200))))
(!\:
(!|fn| !\a !\b)
(!|/prog| !\a !\b !\c !\d))))
Test Literals
(test mixed-literals
"Test expressions with of mixed literal types."
(let ((ops (alist-hash-map
`(((:infix ,(s "+")) 20 :left)
((:infix ,(s "-")) 20 :left)
((:infix ,(s "*")) 50 :right)
((:infix ,(s "/")) 50 :left)
(:open-paren 200)))))
(test-parser
(#?"a + 1 + 2.3 -\"hello\" " ops)
(!- (!+ (!+ !\a 1) 2.3) "hello"))))
(test string-line-feed-tab-escape
"Test strings with line feed and tab character escape sequences."
(test-parser
(" \"Hello\\n\\tWorld\" ")
#?"Hello\n\tWorld"))
(test string-carriage-return-line-feed-escape
"Test strings with carriage return and line feed escape sequences."
(test-parser
(" \"Hello\\r\\nWorld\" ")
#?"Hello\r\nWorld"))
(test string-escape-quotes-backslashes
"Test strings with escaped quotes and backslashes."
(test-parser
(" \"He\\\\she said \\\"Hello World\\\"\" ")
"He\\she said \"Hello World\""))
(test string-unicode-escape
"Test strings with unicode escape sequences"
(test-parser
(" \"x\\u{2265}5\" ")
#?"x\x{2265}5"))
(test string-unicode-escape-unclosed
"Test strings with unclosed unicode escape sequences"
(test-parser
(" \"x \\u{2265 5\" ")
#?"x \x{2265} 5"))
(test string-escape-u-1
"Test strings with escaped `u` character with unicode character code."
(test-parser
(" \"x \\ux1\" ")
"x ux1"))
(test string-escape-u-2
"Test strings with escaped `u` character with unicode character code."
(test-parser
(" \"x \\u10\" ")
"x u10"))
(test infix-operator-errors
"Test parse errors involving malformed infix operator expressions."
(test-parse-error "a + b")
(test-parse-error "a +")
(test-parse-error "a ->")
(test-parse-error #?"a.\n")
(test-parse-error #?"a.;"))
(test functor-errors
"Test parse errors involving malformed functor expressions."
(test-parse-error "fn(a,b")
(test-parse-error "fn(a,b,)")
(test-parse-error "fn(a,b))"))
(test parenthesis-errors
"Test parse errors involving malformed parenthesis expressions."
(test-parse-error "(a -> b")
(test-parse-error "a -> b)")
(test-parse-error "(a -> b))")
(test-parse-error "((a -> b)")
(test-parse-error "(a , b)")
(test-parse-error "{a b}"))
(test node-list-errors
"Test parse errors involving node lists."
(test-parse-error #?"{a -> b; c\nd")
(test-parse-error #?"a -> b}")
(test-parse-error #?"{a -> b; c\nd}}")
(test-parse-error #?"{{a -> b;c\nd}"))
|
bf9f8edf9e9e848100c7b88a887b5cd4d61a3eca986e211d00aff9df82cc4bc5 | barrucadu/yukibot | Mongo.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
-- Module      : Yukibot.Mongo
-- Copyright   : (c) 2016
-- License     : MIT
-- Stability : experimental
-- Portability : OverloadedStrings
module Yukibot.Mongo where
import Control.Exception (bracket)
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T

import Database.MongoDB ((=:))
import qualified Database.MongoDB as M

import Yukibot.Configuration
import Yukibot.Types
-- | Connection information for MongoDB.
data MongoConfig = MongoConfig
  { mongoHost :: M.Host
  -- ^ Hostname and port (default host: \"localhost\")
  , mongoNamespace :: Text
  -- ^ Collection namespace prefix (default: \"yukibot\")
  }
  deriving (Eq, Ord, Show)
-- | Extract MongoDB configuration from the @[mongodb]@ table of the
-- bot configuration, falling back to defaults for missing keys.
mongoConfig :: Table -> MongoConfig
mongoConfig cfg =
  let get key def = fromMaybe def $ getString key =<< getTable "mongodb" cfg
      hostname = get "host" "localhost"
      -- When no "port" key is present, MongoDB's default port is used.
      port = maybe M.defaultPort M.PortNumber $ getInteger "port" =<< getTable "mongodb" cfg
  in MongoConfig
     { mongoHost = M.Host (T.unpack hostname) port
     , mongoNamespace = get "namespace" "yukibot"
     }
-- | Run a query, returning every matching document sorted by the
-- given order ('M.rest' forces the whole cursor into memory).
--
-- This automatically namespaces by backend signature. The current
-- backend signature is added as a \"_backendsig\" field to the
-- selector.
queryMongo :: MongoConfig -> PluginName -> BackendSig -> M.Selector -> M.Order -> IO [M.Document]
queryMongo cfg pn bsig selBy sortBy =
  let selBy' = addBSig bsig selBy
  in doMongo cfg pn $ \c -> M.rest =<< M.find (M.select selBy' c) { M.sort = sortBy }
-- | Insert values ('M.insertMany_' discards the generated ids).
--
-- This automatically namespaces by backend signature. The current
-- backend signature is added as a \"_backendsig\" field to the
-- inserted documents.
insertMongo :: MongoConfig -> PluginName -> BackendSig -> [M.Document] -> IO ()
insertMongo cfg pn bsig ds =
  let ds' = map (addBSig bsig) ds
  in doMongo cfg pn $ \c -> M.insertMany_ c ds'
-- | Upsert a value: replace the first document in the selection if
-- there is one; otherwise insert a new document.
--
-- This automatically namespaces by backend signature. The current
-- backend signature is added as a \"_backendsig\" field to the
-- selector and new document.
upsertMongo :: MongoConfig -> PluginName -> BackendSig -> M.Selector -> M.Document -> IO ()
upsertMongo cfg pn bsig selBy doc =
  let selBy' = addBSig bsig selBy
      doc' = addBSig bsig doc
  in doMongo cfg pn $ \c -> M.upsert (M.Select selBy' c) doc'
-- | Delete the documents matching the selector.
--
-- This automatically namespaces by backend signature. The current
-- backend signature is added as a \"_backendsig\" field to the
-- selector.
deleteMongo :: MongoConfig -> PluginName -> BackendSig -> M.Selector -> IO ()
deleteMongo cfg pn bsig selBy =
  let selBy' = addBSig bsig selBy
  in doMongo cfg pn $ \c -> M.delete (M.select selBy' c)
-- | Run a function over a MongoDB database, using the collection
-- belonging to a plugin.
--
-- This does NOT automatically namespace things by the backend
-- signature! Make sure to appropriately manage the \"_backendsig\"
-- field of any documents if you want this scoping!
doMongo :: MongoConfig -> PluginName -> (M.Collection -> M.Action IO a) -> IO a
doMongo cfg pn cf =
  -- 'bracket' guarantees the connection is closed even when the
  -- action throws; the previous version leaked the pipe on exception.
  bracket (M.connect $ mongoHost cfg) M.close $ \pipe -> do
    let c = collectionFor cfg pn
    M.access pipe M.master c (cf c)
-- | Collection name for a plugin: the plugin name, prefixed by the
-- configured namespace (when non-empty) with an underscore separator.
collectionFor :: MongoConfig -> PluginName -> M.Collection
collectionFor cfg pn =
  let ns = mongoNamespace cfg
      name = getPluginName pn
  in if T.null ns then name else ns <> "_" <> name
-- | Add the \"_backendsig\" field to a 'Document'. This overwrites
-- any prior \"_backendsig\" field.
addBSig :: BackendSig -> M.Document -> M.Document
addBSig (bname, sname, index) doc = newBSig : removeOldBSig doc where
  -- Drop any existing signature field so the new one is authoritative.
  removeOldBSig = filter ((/="_backendsig") . M.label)
  newBSig = "_backendsig" =: [ "bname" =: bname
                             , "sname" =: sname
                             , "index" =: index
                             ]
| null | https://raw.githubusercontent.com/barrucadu/yukibot/7b12153e2cc71b62f86ab3a851a250dce83343b8/yukibot-core/Yukibot/Mongo.hs | haskell | # LANGUAGE OverloadedStrings #
|
Stability : experimental
Portability : OverloadedStrings
| Connection information for MongoDB.
| Extract MongoDB configuration.
| Run a query.
This automatically namespaces by backend signature. The current
backend signature is added as a \"_backendsig\" field to the
selector.
| Insert values.
This automatically namespaces by backend signature. The current
backend signature is added as a \"_backendsig\" field to the
inserted documents.
there is one; otherwise insert a new document.
This automatically namespaces by backend signature. The current
backend signature is added as a \"_backendsig\" field to the
selector and new document.
| Delete values.
This automatically namespaces by backend signature. The current
backend signature is added as a \"_backendsig\" field to the
selector.
| Run a function over a MongoDB database, using the collection
belonging to a plugin.
This does NOT automatically namespace things by the backend
signature! Make sure to appropriately manage the \"_backendsig\"
field of any documents if you want this scoping!
| Get the collection name for the given plugin.
| Add the \"_backendsig\" field to a 'Document'. This overwrites
any prior \"_backendsig\" field. |
Module : . Mongo
Copyright : ( c ) 2016
License : MIT
module Yukibot.Mongo where
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import Database.MongoDB ((=:))
import qualified Database.MongoDB as M
import Yukibot.Configuration
import Yukibot.Types
data MongoConfig = MongoConfig
{ mongoHost :: M.Host
^ Hostname ( default : " )
, mongoNamespace :: Text
^ collection namespace ( default : \"yukibot\ " )
}
deriving (Eq, Ord, Show)
mongoConfig :: Table -> MongoConfig
mongoConfig cfg =
let get key def = fromMaybe def $ getString key =<< getTable "mongodb" cfg
hostname = get "host" "localhost"
port = maybe M.defaultPort M.PortNumber $ getInteger "port" =<< getTable "mongodb" cfg
in MongoConfig
{ mongoHost = M.Host (T.unpack hostname) port
, mongoNamespace = get "namespace" "yukibot"
}
queryMongo :: MongoConfig -> PluginName -> BackendSig -> M.Selector -> M.Order -> IO [M.Document]
queryMongo cfg pn bsig selBy sortBy =
let selBy' = addBSig bsig selBy
in doMongo cfg pn $ \c -> M.rest =<< M.find (M.select selBy' c) { M.sort = sortBy }
insertMongo :: MongoConfig -> PluginName -> BackendSig -> [M.Document] -> IO ()
insertMongo cfg pn bsig ds =
let ds' = map (addBSig bsig) ds
in doMongo cfg pn $ \c -> M.insertMany_ c ds'
| Upsert a value : replace the first document in the selection if
upsertMongo :: MongoConfig -> PluginName -> BackendSig -> M.Selector -> M.Document -> IO ()
upsertMongo cfg pn bsig selBy doc =
let selBy' = addBSig bsig selBy
doc' = addBSig bsig doc
in doMongo cfg pn $ \c -> M.upsert (M.Select selBy' c) doc'
deleteMongo :: MongoConfig -> PluginName -> BackendSig -> M.Selector -> IO ()
deleteMongo cfg pn bsig selBy =
let selBy' = addBSig bsig selBy
in doMongo cfg pn $ \c -> M.delete (M.select selBy' c)
doMongo :: MongoConfig -> PluginName -> (M.Collection -> M.Action IO a) -> IO a
doMongo cfg pn cf = do
pipe <- M.connect $ mongoHost cfg
let c = collectionFor cfg pn
res <- M.access pipe M.master c (cf c)
M.close pipe
pure res
collectionFor :: MongoConfig -> PluginName -> M.Collection
collectionFor cfg pn
| T.null (mongoNamespace cfg) = getPluginName pn
| otherwise = mongoNamespace cfg <> "_" <> getPluginName pn
addBSig :: BackendSig -> M.Document -> M.Document
addBSig (bname, sname, index) doc = newBSig : removeOldBSig doc where
removeOldBSig = filter ((/="_backendsig") . M.label)
newBSig = "_backendsig" =: [ "bname" =: bname
, "sname" =: sname
, "index" =: index
]
|
224707193c0ae2de1742a30eac33f6f9920195e15284d23671f1fb36943129e6 | emanjavacas/cosycat | annotations.cljs | (ns cosycat.backend.handlers.annotations
(:require [re-frame.core :as re-frame]
[schema.core :as s]
[ajax.core :refer [POST GET]]
[cosycat.schemas.annotation-schemas :refer [annotation-schema]]
[cosycat.app-utils
:refer [deep-merge is-last-partition token-id->span span->token-id]]
[cosycat.utils :refer [format get-msg now]]
[cosycat.backend.middleware :refer [standard-middleware]]
[cosycat.backend.handlers.utils :refer [get-query get-corpus-param expand-db-path]]
[taoensso.timbre :as timbre]))
;;; Incoming annotations; query-panel
(defn is-token?
"is current token a token of interest (as per token-id-or-ids?)"
[id token-id-or-ids]
(if (sequential? token-id-or-ids)
(contains? (apply hash-set token-id-or-ids) id)
(= id token-id-or-ids)))
(defn update-hit-anns [hit anns]
(mapv (fn [{token-id :id :as token}]
(if-let [ann (get anns token-id)]
(update token :anns deep-merge ann)
token))
hit))
(defn delete-hit-ann [hit token-id-or-ids key]
(mapv (fn [{id :id anns :anns :as token}]
(if (and (is-token? id token-id-or-ids) (contains? anns key))
(assoc token :anns (dissoc anns key))
token))
hit))
(defn find-hit-id
"find the hid id given `token-ids`"
[token-id-or-ids hit-maps]
(if (coll? token-id-or-ids)
(some (fn [token-id] (find-hit-id token-id hit-maps)) token-id-or-ids)
(some (fn [{:keys [id hit]}]
(when (some #{token-id-or-ids} (map :id hit))
id))
hit-maps)))
(defn update-db [db path-to-results hit-id token-ids update-fn & args]
(let [path-to-hit (fn [hit-id] (into path-to-results [hit-id :hit]))
results-by-id (get-in db path-to-results)]
(if (contains? results-by-id hit-id)
;; found hit by id
(do (timbre/debug "found hit by id") (apply update-in db (path-to-hit hit-id) update-fn args))
(if-let [hit-id (find-hit-id token-ids (vals results-by-id))]
;; found hit for annotation
(do (timbre/debug "found hit") (apply update-in db (path-to-hit hit-id) update-fn args))
;; couldn't find hit for annotation
(do (timbre/debug "couldn't find hit") db)))))
(defmulti add-annotations
"generic reducer function for incoming annotation data"
(fn [db {:keys [payload]}] (type payload)))
(defmethod add-annotations cljs.core/PersistentArrayMap
[db {{:keys [project-name hit-id anns]} :payload db-path :db-path}]
(let [path-to-query (into [:projects project-name] (expand-db-path :query))
path-to-review (into [:projects project-name] (expand-db-path :review))
token-ids (keys anns)]
(-> db
(update-db path-to-query hit-id token-ids update-hit-anns anns)
(update-db path-to-review hit-id token-ids update-hit-anns anns))))
(defmethod add-annotations cljs.core/PersistentVector
[db {ms :payload :as data}]
(reduce (fn [db m] (add-annotations db (assoc data :payload m))) db ms))
(re-frame/register-handler ;; generic handler
:add-annotation
standard-middleware
(fn [db [_ data]] (add-annotations db data)))
(re-frame/register-handler
:remove-annotation
standard-middleware
(fn [db [_ {{:keys [project-name hit-id key span]} :payload db-path :db-path}]]
(let [path-to-query (into [:projects project-name] (expand-db-path :query))
path-to-review (into [:projects project-name] (expand-db-path :review))
token-ids (span->token-id span)]
(-> db
(update-db path-to-query hit-id token-ids delete-hit-ann token-ids key)
(update-db path-to-review hit-id token-ids delete-hit-ann token-ids key)))))
(defn fetch-annotation-handler [& {:keys [is-last db-path]}]
(fn [payload]
(re-frame/dispatch [:add-annotation {:payload payload :db-path db-path}])
(when is-last
(re-frame/dispatch [:stop-throbbing :fetch-annotations]))))
(defn fetch-annotation-error-handler []
(fn [data]
(re-frame/dispatch [:stop-throbbing :fetch-annotations])
(timbre/warn "Couldn't fetch anns" data)))
(re-frame/register-handler ;; general annotation fetcher for query hits
:fetch-annotations
standard-middleware
(fn [db [_ {:keys [page-margins corpus db-path] :or {db-path :query}}]]
(let [project-name (get-in db [:session :active-project])
margins (count page-margins)
partition-size 20]
(re-frame/dispatch [:start-throbbing :fetch-annotations])
(doseq [[i subpage-margins] (map-indexed vector (partition-all partition-size page-margins))
:let [is-last (is-last-partition margins partition-size i)]]
(GET "/annotation/page"
{:params {:page-margins subpage-margins :project-name project-name :corpus corpus}
:handler (fetch-annotation-handler :is-last is-last :db-path db-path)
:error-handler (fetch-annotation-error-handler)})))
db))
(re-frame/register-handler ;; annotation fetcher for issue hits
:fetch-issue-hit-annotations
(fn [db [_ {:keys [start end hit-id doc corpus]} {issue-id :id :as issue}]]
(let [active-project (get-in db [:session :active-project])]
(GET "/annotation/page"
{:params {:page-margins [{:start start :end end :hit-id hit-id :doc doc}]
:project-name active-project
:corpus corpus}
:handler (fn [data]
(when-let [{:keys [anns]} (first data)]
(re-frame/dispatch
[:update-issue-meta issue-id [:hit-map :hit]
(fn [hit] (update-hit-anns hit anns))])))
:error-handler #(timbre/warn "Couldn't fetch annotations" (str %))})
db)))
(re-frame/register-handler ;; annotation fetcher for review pages
:fetch-review-hit-annotations
(fn [db [_ {:keys [start end hit-id doc corpus]}]]
(let [project-name (get-in db [:session :active-project])]
(GET "/annotation/page"
{:params {:page-margins [{:start start :end end :hit-id hit-id :doc doc}]
:project-name project-name
:corpus corpus}
:handler (fn [data]
(when-let [{:keys [hit-id project-name anns]} (first data)]
(re-frame/dispatch
[:add-review-annotation
{:hit-id hit-id :project-name project-name :anns anns}])))
:error-handler #(timbre/warn "Couldn't fetch annotations" (str %))})
db)))
(defn query-review-handler [window]
(fn [{:keys [grouped-data] :as review-summary}]
{:pre [(apply = (mapcat (fn [{:keys [anns]}] (map :corpus anns)) grouped-data))]}
(re-frame/dispatch [:set-review-results review-summary window])))
(defn query-review-error-handler [data]
(timbre/debug data)
(re-frame/dispatch [:notify {:message "Couldn't fetch annotations"}]))
(defn build-query-map
[{{{ann-key :string key-as-regex? :as-regex?} :key
{ann-value :string value-as-regex? :as-regex?} :value} :ann
{hit-id :string hit-id-as-regex? :as-regex?} :hit-id
{:keys [from to]} :timestamp corpus :corpus username :username :as query-map}]
(cond-> {}
(not (empty? ann-key)) (assoc-in [:ann :key :string] ann-key)
key-as-regex? (assoc-in [:ann :key :as-regex?] true)
(not (empty? ann-value)) (assoc-in [:ann :value :string] ann-value)
value-as-regex? (assoc-in [:ann :value :as-regex?] true)
(not (empty? hit-id)) (assoc-in [:hit-id :string] hit-id)
hit-id-as-regex? (assoc-in [:hit-id :as-regex?] true)
(not (empty? corpus)) (assoc :corpus (vec corpus))
(not (empty? username)) (assoc :username (vec username))
from (assoc-in [:timestamp :from] from)
to (assoc-in [:timestamp :to] to)))
(re-frame/register-handler
:query-review
standard-middleware
(fn [db [_ {:keys [page-num] :or {page-num 1}}]]
(let [active-project (get-in db [:session :active-project])
path-to-query-opts [:projects active-project :session :review :query-opts]
{:keys [query-map context size window] :as query-opts} (get-in db path-to-query-opts)]
(re-frame/dispatch [:unset-review-results])
(re-frame/dispatch [:start-throbbing :review-frame])
(GET "annotation/query"
{:params {:query-map (build-query-map query-map)
:context context
:page {:page-num page-num :page-size size}
:project-name active-project}
:handler (query-review-handler window)
:error-handler query-review-error-handler})
db)))
;;; Outgoing annotations
(defn notification-message
[{{{B :B O :O :as scope} :scope span-type :type} :span :as data} message]
(if-not span-type ;; project-level error (e.g. insufficient rights)
{:message message}
(->> (case span-type
"token" (get-msg [:annotation :error :token] scope message)
"IOB" (get-msg [:annotation :error :IOB] B O message))
(assoc {} :message))))
(defmulti dispatch-annotation-handler*
"Variadic handler for successful annotations. Dispatches are based on whether
ann-map is a vector (bulk annotation payload) or a map (single annotation payload)"
(fn [{payload :payload}] (type payload)))
(defmethod dispatch-annotation-handler* cljs.core/PersistentArrayMap
[{{status :status message :message data :data} :payload db-path :db-path}]
(case status
:ok (do (re-frame/dispatch [:add-annotation {:payload data :db-path db-path}])
(re-frame/dispatch [:notify {:message (str "Added 1 annotation")}]))
:error (re-frame/dispatch [:notify (notification-message data message)])))
(defmethod dispatch-annotation-handler* cljs.core/PersistentVector
[{ms :payload db-path :db-path}]
(let [{oks :ok errors :error :as grouped} (group-by :status ms)
message (str "Added " (count oks) " annotations with " (count errors) " errors")]
(when-not (empty? oks)
(do (re-frame/dispatch [:add-annotation {:payload (mapv :data oks) :db-path db-path}])
(re-frame/dispatch [:notify {:message message}])))
(doseq [{data :data message :message} errors]
(re-frame/dispatch [:notify (notification-message data message)]))))
(defn dispatch-annotation-handler [& {:keys [db-path]}]
(fn [data] (dispatch-annotation-handler* {:payload data :db-path db-path})))
(defn error-handler [& args]
(re-frame/dispatch [:notify {:message "Unrecognized internal error"}]))
(re-frame/register-handler
:dispatch-simple-annotation
standard-middleware
(fn [db [_ {:keys [ann-data db-path corpus] :or {db-path :query}}]]
(let [{:keys [ann-map hit-id token-from token-to]} ann-data
span (if token-to (token-id->span token-from token-to) (token-id->span token-from))
ann-map (assoc ann-map :hit-id hit-id :span span :timestamp (now))]
(re-frame/dispatch [:dispatch-annotation ann-map corpus :db-path db-path]))
db))
(defn package-ann-maps [ann-map hit-ids token-ids & [token-to's]]
{:pre [(or (not token-to's) (= (count token-ids) (count token-to's)))]}
(let [timestamp (now)]
(if token-to's
(mapv (fn [hit-id token-from token-to]
(let [span (token-id->span token-from token-to)]
(assoc ann-map :hit-id hit-id :span span :timestamp timestamp)))
hit-ids token-ids token-to's)
(mapv (fn [hit-id token-id]
(let [span (token-id->span token-id)]
(assoc ann-map :hit-id hit-id :span span :timestamp timestamp)))
hit-ids token-ids))))
(re-frame/register-handler
:dispatch-bulk-annotation
standard-middleware
(fn [db [_ {:keys [ann-data db-path corpus] :or {db-path :query}}]]
(let [{:keys [ann-map hit-ids token-ids token-to's]} ann-data
ann-maps (package-ann-maps ann-map hit-ids token-ids token-to's)]
(re-frame/dispatch [:dispatch-annotation ann-maps corpus :db-path db-path]))
db))
(re-frame/register-handler
:dispatch-annotation
standard-middleware
(fn [db [_ ann-map-or-maps corpus & {:keys [db-path] :or {db-path :query}}]]
(let [project-name (get-in db [:session :active-project])]
(try (POST "/annotation/new"
{:params (cond-> {:ann-map ann-map-or-maps
:project-name project-name
:corpus (get-corpus-param db project-name db-path corpus)}
(= db-path :query) (assoc :query (get-query db project-name)))
:handler (dispatch-annotation-handler :db-path db-path)
:error-handler error-handler})
(catch :default e
(re-frame/dispatch
[:notify {:message (format "Couldn't dispatch annotation. Reason: [%s]" (str e))}])))
db)))
(defn update-annotation-handler
[& {:keys [db-path]}]
(fn [{status :status message :message payload :data}]
(condp = status
:ok (re-frame/dispatch [:add-annotation {:payload payload :db-path db-path}])
:error (re-frame/dispatch
[:notify {:message (format "Couldn't update annotation! Reason: [%s]" message)}]))))
(re-frame/register-handler
:update-annotation
(fn [db [_ {update-map :update-map db-path :db-path :or {db-path :query}}]]
(let [project-name (get-in db [:session :active-project])
path-to-results [:projects project-name :session :query :results]
corpus (get-in db (into path-to-results [:results-summary :corpus]))
query (get-in db (into path-to-results [:results-summary :query-str]))
update-map (assoc update-map :timestamp (.now js/Date) :corpus corpus :query query)]
(POST "/annotation/update"
{:params {:update-map update-map :project-name project-name}
:handler (update-annotation-handler :db-path db-path)
:error-handler error-handler})
db)))
(defn remove-annotation-handler [& {:keys [db-path]}]
(fn [{{project-name :project-name hit-id :hit-id span :span key :key :as data} :data
status :status message :message}]
(condp = status
:ok (re-frame/dispatch [:remove-annotation {:payload data :db-path db-path}])
:error (re-frame/dispatch
[:notify {:message (format "Couldn't remove annotation! Reason: [%s]" message)
:meta data}]))))
(re-frame/register-handler
:delete-annotation
(fn [db [_ {{:keys [ann-map hit-id]} :ann-data db-path :db-path :or {db-path :query}}]]
(let [project-name (get-in db [:session :active-project])]
(POST "/annotation/remove"
{:params {:project-name project-name :hit-id hit-id :ann ann-map}
:handler (remove-annotation-handler :db-path db-path)
:error-handler error-handler})
db)))
| null | https://raw.githubusercontent.com/emanjavacas/cosycat/a7186363d3c0bdc7b714af126feb565f98793a6e/src/cljs/cosycat/backend/handlers/annotations.cljs | clojure | Incoming annotations; query-panel
found hit by id
found hit for annotation
couldn't find hit for annotation
generic handler
general annotation fetcher for query hits
annotation fetcher for issue hits
annotation fetcher for review pages
Outgoing annotations
project-level error (e.g. insufficient rights) | (ns cosycat.backend.handlers.annotations
(:require [re-frame.core :as re-frame]
[schema.core :as s]
[ajax.core :refer [POST GET]]
[cosycat.schemas.annotation-schemas :refer [annotation-schema]]
[cosycat.app-utils
:refer [deep-merge is-last-partition token-id->span span->token-id]]
[cosycat.utils :refer [format get-msg now]]
[cosycat.backend.middleware :refer [standard-middleware]]
[cosycat.backend.handlers.utils :refer [get-query get-corpus-param expand-db-path]]
[taoensso.timbre :as timbre]))
(defn is-token?
"is current token a token of interest (as per token-id-or-ids?)"
[id token-id-or-ids]
(if (sequential? token-id-or-ids)
(contains? (apply hash-set token-id-or-ids) id)
(= id token-id-or-ids)))
(defn update-hit-anns [hit anns]
(mapv (fn [{token-id :id :as token}]
(if-let [ann (get anns token-id)]
(update token :anns deep-merge ann)
token))
hit))
(defn delete-hit-ann [hit token-id-or-ids key]
(mapv (fn [{id :id anns :anns :as token}]
(if (and (is-token? id token-id-or-ids) (contains? anns key))
(assoc token :anns (dissoc anns key))
token))
hit))
(defn find-hit-id
"find the hid id given `token-ids`"
[token-id-or-ids hit-maps]
(if (coll? token-id-or-ids)
(some (fn [token-id] (find-hit-id token-id hit-maps)) token-id-or-ids)
(some (fn [{:keys [id hit]}]
(when (some #{token-id-or-ids} (map :id hit))
id))
hit-maps)))
(defn update-db [db path-to-results hit-id token-ids update-fn & args]
(let [path-to-hit (fn [hit-id] (into path-to-results [hit-id :hit]))
results-by-id (get-in db path-to-results)]
(if (contains? results-by-id hit-id)
(do (timbre/debug "found hit by id") (apply update-in db (path-to-hit hit-id) update-fn args))
(if-let [hit-id (find-hit-id token-ids (vals results-by-id))]
(do (timbre/debug "found hit") (apply update-in db (path-to-hit hit-id) update-fn args))
(do (timbre/debug "couldn't find hit") db)))))
(defmulti add-annotations
"generic reducer function for incoming annotation data"
(fn [db {:keys [payload]}] (type payload)))
(defmethod add-annotations cljs.core/PersistentArrayMap
[db {{:keys [project-name hit-id anns]} :payload db-path :db-path}]
(let [path-to-query (into [:projects project-name] (expand-db-path :query))
path-to-review (into [:projects project-name] (expand-db-path :review))
token-ids (keys anns)]
(-> db
(update-db path-to-query hit-id token-ids update-hit-anns anns)
(update-db path-to-review hit-id token-ids update-hit-anns anns))))
(defmethod add-annotations cljs.core/PersistentVector
[db {ms :payload :as data}]
(reduce (fn [db m] (add-annotations db (assoc data :payload m))) db ms))
:add-annotation
standard-middleware
(fn [db [_ data]] (add-annotations db data)))
(re-frame/register-handler
:remove-annotation
standard-middleware
(fn [db [_ {{:keys [project-name hit-id key span]} :payload db-path :db-path}]]
(let [path-to-query (into [:projects project-name] (expand-db-path :query))
path-to-review (into [:projects project-name] (expand-db-path :review))
token-ids (span->token-id span)]
(-> db
(update-db path-to-query hit-id token-ids delete-hit-ann token-ids key)
(update-db path-to-review hit-id token-ids delete-hit-ann token-ids key)))))
(defn fetch-annotation-handler [& {:keys [is-last db-path]}]
(fn [payload]
(re-frame/dispatch [:add-annotation {:payload payload :db-path db-path}])
(when is-last
(re-frame/dispatch [:stop-throbbing :fetch-annotations]))))
(defn fetch-annotation-error-handler []
(fn [data]
(re-frame/dispatch [:stop-throbbing :fetch-annotations])
(timbre/warn "Couldn't fetch anns" data)))
:fetch-annotations
standard-middleware
(fn [db [_ {:keys [page-margins corpus db-path] :or {db-path :query}}]]
(let [project-name (get-in db [:session :active-project])
margins (count page-margins)
partition-size 20]
(re-frame/dispatch [:start-throbbing :fetch-annotations])
(doseq [[i subpage-margins] (map-indexed vector (partition-all partition-size page-margins))
:let [is-last (is-last-partition margins partition-size i)]]
(GET "/annotation/page"
{:params {:page-margins subpage-margins :project-name project-name :corpus corpus}
:handler (fetch-annotation-handler :is-last is-last :db-path db-path)
:error-handler (fetch-annotation-error-handler)})))
db))
:fetch-issue-hit-annotations
(fn [db [_ {:keys [start end hit-id doc corpus]} {issue-id :id :as issue}]]
(let [active-project (get-in db [:session :active-project])]
(GET "/annotation/page"
{:params {:page-margins [{:start start :end end :hit-id hit-id :doc doc}]
:project-name active-project
:corpus corpus}
:handler (fn [data]
(when-let [{:keys [anns]} (first data)]
(re-frame/dispatch
[:update-issue-meta issue-id [:hit-map :hit]
(fn [hit] (update-hit-anns hit anns))])))
:error-handler #(timbre/warn "Couldn't fetch annotations" (str %))})
db)))
:fetch-review-hit-annotations
(fn [db [_ {:keys [start end hit-id doc corpus]}]]
(let [project-name (get-in db [:session :active-project])]
(GET "/annotation/page"
{:params {:page-margins [{:start start :end end :hit-id hit-id :doc doc}]
:project-name project-name
:corpus corpus}
:handler (fn [data]
(when-let [{:keys [hit-id project-name anns]} (first data)]
(re-frame/dispatch
[:add-review-annotation
{:hit-id hit-id :project-name project-name :anns anns}])))
:error-handler #(timbre/warn "Couldn't fetch annotations" (str %))})
db)))
(defn query-review-handler [window]
(fn [{:keys [grouped-data] :as review-summary}]
{:pre [(apply = (mapcat (fn [{:keys [anns]}] (map :corpus anns)) grouped-data))]}
(re-frame/dispatch [:set-review-results review-summary window])))
(defn query-review-error-handler [data]
(timbre/debug data)
(re-frame/dispatch [:notify {:message "Couldn't fetch annotations"}]))
(defn build-query-map
[{{{ann-key :string key-as-regex? :as-regex?} :key
{ann-value :string value-as-regex? :as-regex?} :value} :ann
{hit-id :string hit-id-as-regex? :as-regex?} :hit-id
{:keys [from to]} :timestamp corpus :corpus username :username :as query-map}]
(cond-> {}
(not (empty? ann-key)) (assoc-in [:ann :key :string] ann-key)
key-as-regex? (assoc-in [:ann :key :as-regex?] true)
(not (empty? ann-value)) (assoc-in [:ann :value :string] ann-value)
value-as-regex? (assoc-in [:ann :value :as-regex?] true)
(not (empty? hit-id)) (assoc-in [:hit-id :string] hit-id)
hit-id-as-regex? (assoc-in [:hit-id :as-regex?] true)
(not (empty? corpus)) (assoc :corpus (vec corpus))
(not (empty? username)) (assoc :username (vec username))
from (assoc-in [:timestamp :from] from)
to (assoc-in [:timestamp :to] to)))
(re-frame/register-handler
:query-review
standard-middleware
(fn [db [_ {:keys [page-num] :or {page-num 1}}]]
(let [active-project (get-in db [:session :active-project])
path-to-query-opts [:projects active-project :session :review :query-opts]
{:keys [query-map context size window] :as query-opts} (get-in db path-to-query-opts)]
(re-frame/dispatch [:unset-review-results])
(re-frame/dispatch [:start-throbbing :review-frame])
(GET "annotation/query"
{:params {:query-map (build-query-map query-map)
:context context
:page {:page-num page-num :page-size size}
:project-name active-project}
:handler (query-review-handler window)
:error-handler query-review-error-handler})
db)))
(defn notification-message
[{{{B :B O :O :as scope} :scope span-type :type} :span :as data} message]
{:message message}
(->> (case span-type
"token" (get-msg [:annotation :error :token] scope message)
"IOB" (get-msg [:annotation :error :IOB] B O message))
(assoc {} :message))))
(defmulti dispatch-annotation-handler*
"Variadic handler for successful annotations. Dispatches are based on whether
ann-map is a vector (bulk annotation payload) or a map (single annotation payload)"
(fn [{payload :payload}] (type payload)))
(defmethod dispatch-annotation-handler* cljs.core/PersistentArrayMap
[{{status :status message :message data :data} :payload db-path :db-path}]
(case status
:ok (do (re-frame/dispatch [:add-annotation {:payload data :db-path db-path}])
(re-frame/dispatch [:notify {:message (str "Added 1 annotation")}]))
:error (re-frame/dispatch [:notify (notification-message data message)])))
(defmethod dispatch-annotation-handler* cljs.core/PersistentVector
[{ms :payload db-path :db-path}]
(let [{oks :ok errors :error :as grouped} (group-by :status ms)
message (str "Added " (count oks) " annotations with " (count errors) " errors")]
(when-not (empty? oks)
(do (re-frame/dispatch [:add-annotation {:payload (mapv :data oks) :db-path db-path}])
(re-frame/dispatch [:notify {:message message}])))
(doseq [{data :data message :message} errors]
(re-frame/dispatch [:notify (notification-message data message)]))))
(defn dispatch-annotation-handler [& {:keys [db-path]}]
(fn [data] (dispatch-annotation-handler* {:payload data :db-path db-path})))
(defn error-handler [& args]
(re-frame/dispatch [:notify {:message "Unrecognized internal error"}]))
(re-frame/register-handler
:dispatch-simple-annotation
standard-middleware
(fn [db [_ {:keys [ann-data db-path corpus] :or {db-path :query}}]]
(let [{:keys [ann-map hit-id token-from token-to]} ann-data
span (if token-to (token-id->span token-from token-to) (token-id->span token-from))
ann-map (assoc ann-map :hit-id hit-id :span span :timestamp (now))]
(re-frame/dispatch [:dispatch-annotation ann-map corpus :db-path db-path]))
db))
(defn package-ann-maps [ann-map hit-ids token-ids & [token-to's]]
{:pre [(or (not token-to's) (= (count token-ids) (count token-to's)))]}
(let [timestamp (now)]
(if token-to's
(mapv (fn [hit-id token-from token-to]
(let [span (token-id->span token-from token-to)]
(assoc ann-map :hit-id hit-id :span span :timestamp timestamp)))
hit-ids token-ids token-to's)
(mapv (fn [hit-id token-id]
(let [span (token-id->span token-id)]
(assoc ann-map :hit-id hit-id :span span :timestamp timestamp)))
hit-ids token-ids))))
(re-frame/register-handler
:dispatch-bulk-annotation
standard-middleware
(fn [db [_ {:keys [ann-data db-path corpus] :or {db-path :query}}]]
(let [{:keys [ann-map hit-ids token-ids token-to's]} ann-data
ann-maps (package-ann-maps ann-map hit-ids token-ids token-to's)]
(re-frame/dispatch [:dispatch-annotation ann-maps corpus :db-path db-path]))
db))
(re-frame/register-handler
:dispatch-annotation
standard-middleware
(fn [db [_ ann-map-or-maps corpus & {:keys [db-path] :or {db-path :query}}]]
(let [project-name (get-in db [:session :active-project])]
(try (POST "/annotation/new"
{:params (cond-> {:ann-map ann-map-or-maps
:project-name project-name
:corpus (get-corpus-param db project-name db-path corpus)}
(= db-path :query) (assoc :query (get-query db project-name)))
:handler (dispatch-annotation-handler :db-path db-path)
:error-handler error-handler})
(catch :default e
(re-frame/dispatch
[:notify {:message (format "Couldn't dispatch annotation. Reason: [%s]" (str e))}])))
db)))
(defn update-annotation-handler
[& {:keys [db-path]}]
(fn [{status :status message :message payload :data}]
(condp = status
:ok (re-frame/dispatch [:add-annotation {:payload payload :db-path db-path}])
:error (re-frame/dispatch
[:notify {:message (format "Couldn't update annotation! Reason: [%s]" message)}]))))
(re-frame/register-handler
:update-annotation
(fn [db [_ {update-map :update-map db-path :db-path :or {db-path :query}}]]
(let [project-name (get-in db [:session :active-project])
path-to-results [:projects project-name :session :query :results]
corpus (get-in db (into path-to-results [:results-summary :corpus]))
query (get-in db (into path-to-results [:results-summary :query-str]))
update-map (assoc update-map :timestamp (.now js/Date) :corpus corpus :query query)]
(POST "/annotation/update"
{:params {:update-map update-map :project-name project-name}
:handler (update-annotation-handler :db-path db-path)
:error-handler error-handler})
db)))
(defn remove-annotation-handler [& {:keys [db-path]}]
(fn [{{project-name :project-name hit-id :hit-id span :span key :key :as data} :data
status :status message :message}]
(condp = status
:ok (re-frame/dispatch [:remove-annotation {:payload data :db-path db-path}])
:error (re-frame/dispatch
[:notify {:message (format "Couldn't remove annotation! Reason: [%s]" message)
:meta data}]))))
(re-frame/register-handler
:delete-annotation
(fn [db [_ {{:keys [ann-map hit-id]} :ann-data db-path :db-path :or {db-path :query}}]]
(let [project-name (get-in db [:session :active-project])]
(POST "/annotation/remove"
{:params {:project-name project-name :hit-id hit-id :ann ann-map}
:handler (remove-annotation-handler :db-path db-path)
:error-handler error-handler})
db)))
|
a08dbe2309efd9a5ef1e477c7aee730bfaa1da709ab710da1ddad82e0500d2b9 | bennn/iPoe | reader.rkt | #lang ipoe
#:name cinquain
#:rhyme-scheme ((2 4 6 8 2))
| null | https://raw.githubusercontent.com/bennn/iPoe/4a988f6537fb738b4fe842c404f9d78f658ab76f/ipoe/cinquain/lang/reader.rkt | racket | #lang ipoe
#:name cinquain
#:rhyme-scheme ((2 4 6 8 2))
| |
59ac2d53d3fc29e1e7869e860c5fdaa3316ac49dc25c9ee52cd4c9a960d93ec5 | rlepigre/pml | pos.ml | (** Source code position management. This module can be used to map the
elements of an abstract syntax tree to sequences of characters in a
source file. *)
open Earley_core
* Type of a position corresponding to a continuous range of characters in
a ( utf8 encoded ) source file . For denoting a zero - length position is to
have [ end_col ] equal to [ start_col - 1 ] .
a (utf8 encoded) source file. For denoting a zero-length position is to
have [end_col] equal to [start_col - 1]. *)
type pos =
{ fname : string option (** File name for the position. *)
; start_line : int (** Line number of the starting point. *)
; start_col : int (** Column number (utf8) of the starting point. *)
; end_line : int (** Line number of the ending point. *)
; end_col : int (** Column number (utf8) of the ending point. *) }
(** Convenient short name for an optional position. *)
type popt = pos option
(** Type constructor extending a type (e.g. an element of an abstract syntax
tree) with a source code position. *)
type 'a loc =
{ elt : 'a (** The element that is being localised. *)
; pos : popt (** Position of the element in the source code. *) }
(** Localised string type (widely used). *)
type strloc = string loc
(** [make pos elt] associates the position [pos] to [elt]. *)
let make : popt -> 'a -> 'a loc =
fun pos elt -> { elt ; pos }
(** [in_pos pos elt] associates the position [pos] to [elt]. *)
let in_pos : pos -> 'a -> 'a loc =
fun p elt -> { elt ; pos = Some p }
(** [none elt] wraps [elt] in a localisation structure with no specified
source position. *)
let none : 'a -> 'a loc =
fun elt -> { elt ; pos = None }
let merge : pos -> pos -> pos = fun p1 p2 ->
match compare p1.start_line p2.start_line with
| n when n < 0 -> {p1 with end_line = p2.end_line ; end_col = p2.end_col}
| n when n > 0 -> {p2 with end_line = p1.end_line ; end_col = p1.end_col}
| _ (* n=0 *) -> let start_col = min p1.start_col p2.start_col in
let end_col = max p1.start_col p2.start_col in
{p1 with start_col ; end_col}
let union : popt -> popt -> popt = fun p1 p2 ->
match (p1, p2) with
| (None , None ) -> None
| (Some _ , None ) -> p1
| (None , Some _ ) -> p2
| (Some p1, Some p2) -> Some (merge p1 p2)
* [ locate buf1 pos1 buf2 pos2 ] builds a position structure given two
DeCaP input buffers . This function can be used by DeCaP to generate
the position of elements during parsing .
@see < -savoie.fr/decap/ > DeCap
DeCaP input buffers. This function can be used by DeCaP to generate
the position of elements during parsing.
@see <-savoie.fr/decap/> DeCap *)
let locate buf1 pos1 buf2 pos2 =
let fname =
match Input.filename buf1 with
| "" -> None
| fname -> Some fname
in
let start_line = Input.line_num buf1 in
let end_line = Input.line_num buf2 in
let start_col = Input.utf8_col_num buf1 pos1 in
let end_col = Input.utf8_col_num buf2 pos2 in
assert(start_line <= end_line);
assert(start_line < end_line || start_col <= end_col);
{ fname ; start_line ; start_col ; end_line ; end_col }
(** [pos_to_string pos] transforms the position [pos] into a readable
format. *)
let pos_to_string : pos -> string =
fun p ->
let fname =
match p.fname with
| None -> ""
| Some fname -> Printf.sprintf "file <%s>, " fname
in
if p.start_line <> p.end_line then
Printf.sprintf "%sposition %d:%d to %d:%d"
fname p.start_line p.start_col p.end_line p.end_col
else if p.start_col = p.end_col then
Printf.sprintf "%sline %d, character %d"
fname p.start_line p.start_col
else
Printf.sprintf "%sline %d, characters %d to %d"
fname p.start_line p.start_col p.end_col
* [ print_pos oc pos ] prints the position [ pos ] to the channel [ oc ] .
let print_pos : out_channel -> pos -> unit =
fun ch p -> output_string ch ("at " ^ (pos_to_string p))
* [ print_pos oc pos ] prints the position [ pos ] to the channel [ oc ] .
let print_pos_opt : out_channel -> pos option -> unit =
fun ch p ->
match p with
| None -> output_string ch "at an unknown location"
| Some p -> print_pos ch p
(** [short_pos_to_string pos] is similar to [pos_to_string pos] but uses
a shorter format. *)
let short_pos_to_string : pos -> string =
fun p ->
let fname =
match p.fname with
| None -> ""
| Some fname -> Printf.sprintf "%s, " fname
in
if p.start_line <> p.end_line then
Printf.sprintf "%s%d:%d-%d:%d" fname
p.start_line p.start_col p.end_line p.end_col
else if p.start_col = p.end_col then
Printf.sprintf "%s%d:%d" fname p.start_line p.start_col
else
Printf.sprintf "%s%d:%d-%d" fname p.start_line p.start_col p.end_col
* [ print_short_pos oc pos ] prints the position [ pos ] to the channel [ oc ]
using a shorter format that [ print_pos oc pos ] .
using a shorter format that [print_pos oc pos]. *)
let print_short_pos : out_channel -> pos -> unit = fun ch p ->
  (* Delegate the formatting to [short_pos_to_string]. *)
  output_string ch ("at " ^ short_pos_to_string p)
(** [print_short_pos_opt oc pos] prints the optional position [pos] to the
    channel [oc] using a shorter format, with a default message for [None]. *)
let print_short_pos_opt : out_channel -> pos option -> unit = fun ch p ->
  match p with
  | Some p -> print_short_pos ch p
  | None   -> output_string ch "at an unknown location"
| null | https://raw.githubusercontent.com/rlepigre/pml/cdfdea0eecc6767b16edc6a7bef917bc9dd746ed/src/util/pos.ml | ocaml | * Source code position management. This module can be used to map the
elements of an abstract syntax tree to sequences of characters in a
source file.
* File name for the position.
* Line number of the starting point.
* Column number (utf8) of the starting point.
* Line number of the ending point.
* Column number (utf8) of the ending point.
* Convenient short name for an optional position.
* Type constructor extending a type (e.g. an element of an abstract syntax
tree) with a source code position.
* The element that is being localised.
* Position of the element in the source code.
* Localised string type (widely used).
* [make pos elt] associates the position [pos] to [elt].
* [in_pos pos elt] associates the position [pos] to [elt].
* [none elt] wraps [elt] in a localisation structure with no specified
source position.
n=0
* [pos_to_string pos] transforms the position [pos] into a readable
format.
* [short_pos_to_string pos] is similar to [pos_to_string pos] but uses
a shorter format. |
open Earley_core
* Type of a position corresponding to a continuous range of characters in
a ( utf8 encoded ) source file . For denoting a zero - length position is to
have [ end_col ] equal to [ start_col - 1 ] .
a (utf8 encoded) source file. For denoting a zero-length position is to
have [end_col] equal to [start_col - 1]. *)
type pos =
type popt = pos option
type 'a loc =
type strloc = string loc
let make : popt -> 'a -> 'a loc =
fun pos elt -> { elt ; pos }
let in_pos : pos -> 'a -> 'a loc =
fun p elt -> { elt ; pos = Some p }
let none : 'a -> 'a loc =
fun elt -> { elt ; pos = None }
let merge : pos -> pos -> pos = fun p1 p2 ->
match compare p1.start_line p2.start_line with
| n when n < 0 -> {p1 with end_line = p2.end_line ; end_col = p2.end_col}
| n when n > 0 -> {p2 with end_line = p1.end_line ; end_col = p1.end_col}
let end_col = max p1.start_col p2.start_col in
{p1 with start_col ; end_col}
let union : popt -> popt -> popt = fun p1 p2 ->
match (p1, p2) with
| (None , None ) -> None
| (Some _ , None ) -> p1
| (None , Some _ ) -> p2
| (Some p1, Some p2) -> Some (merge p1 p2)
* [ locate buf1 pos1 buf2 pos2 ] builds a position structure given two
DeCaP input buffers . This function can be used by DeCaP to generate
the position of elements during parsing .
@see < -savoie.fr/decap/ > DeCap
DeCaP input buffers. This function can be used by DeCaP to generate
the position of elements during parsing.
@see <-savoie.fr/decap/> DeCap *)
let locate buf1 pos1 buf2 pos2 =
let fname =
match Input.filename buf1 with
| "" -> None
| fname -> Some fname
in
let start_line = Input.line_num buf1 in
let end_line = Input.line_num buf2 in
let start_col = Input.utf8_col_num buf1 pos1 in
let end_col = Input.utf8_col_num buf2 pos2 in
assert(start_line <= end_line);
assert(start_line < end_line || start_col <= end_col);
{ fname ; start_line ; start_col ; end_line ; end_col }
let pos_to_string : pos -> string =
fun p ->
let fname =
match p.fname with
| None -> ""
| Some fname -> Printf.sprintf "file <%s>, " fname
in
if p.start_line <> p.end_line then
Printf.sprintf "%sposition %d:%d to %d:%d"
fname p.start_line p.start_col p.end_line p.end_col
else if p.start_col = p.end_col then
Printf.sprintf "%sline %d, character %d"
fname p.start_line p.start_col
else
Printf.sprintf "%sline %d, characters %d to %d"
fname p.start_line p.start_col p.end_col
* [ print_pos oc pos ] prints the position [ pos ] to the channel [ oc ] .
let print_pos : out_channel -> pos -> unit =
fun ch p -> output_string ch ("at " ^ (pos_to_string p))
* [ print_pos oc pos ] prints the position [ pos ] to the channel [ oc ] .
let print_pos_opt : out_channel -> pos option -> unit =
fun ch p ->
match p with
| None -> output_string ch "at an unknown location"
| Some p -> print_pos ch p
let short_pos_to_string : pos -> string =
fun p ->
let fname =
match p.fname with
| None -> ""
| Some fname -> Printf.sprintf "%s, " fname
in
if p.start_line <> p.end_line then
Printf.sprintf "%s%d:%d-%d:%d" fname
p.start_line p.start_col p.end_line p.end_col
else if p.start_col = p.end_col then
Printf.sprintf "%s%d:%d" fname p.start_line p.start_col
else
Printf.sprintf "%s%d:%d-%d" fname p.start_line p.start_col p.end_col
* [ print_short_pos oc pos ] prints the position [ pos ] to the channel [ oc ]
using a shorter format that [ print_pos oc pos ] .
using a shorter format that [print_pos oc pos]. *)
let print_short_pos : out_channel -> pos -> unit =
fun ch p -> output_string ch ("at " ^ (short_pos_to_string p))
* [ print_pos oc pos ] prints the position [ pos ] to the channel [ oc ] .
let print_short_pos_opt : out_channel -> pos option -> unit =
fun ch p ->
match p with
| None -> output_string ch "at an unknown location"
| Some p -> print_short_pos ch p
|
e50a0bee311178a1cd2441a3e224933cba00fd6b8c6ae3e2895530c54245d9a5 | ocaml-flambda/flambda-backend | set_of_closures.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, OCamlPro
and ,
(* *)
(* Copyright 2013--2019 OCamlPro SAS *)
Copyright 2014 - -2019 Jane Street Group LLC
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* A set of closures: the function declarations defined together, the
   environment entries ([value_slots]) they close over, and the mode of the
   allocation creating the set. *)
type t =
  { function_decls : Function_declarations.t;
    (* For each value slot, the [Simple] stored there together with its
       kind (and subkind). *)
    value_slots : (Simple.t * Flambda_kind.With_subkind.t) Value_slot.Map.t;
    alloc_mode : Alloc_mode.For_allocations.t
  }
(* Print one value-slot entry as "(simple <U+2237> kind)". *)
let print_value_slot ppf (simple, kind) =
  Format.fprintf ppf "@[(%a @<1>\u{2237} %a)@]" Simple.print simple
    Flambda_kind.With_subkind.print kind
(* Verbose printer naming every component.  This is the [print] captured by
   the [Container_types.Make] application below; it is shadowed later in
   this file by a more compact printer. *)
let [@ocamlformat "disable"] print ppf
      { function_decls;
        value_slots;
        alloc_mode;
      } =
  Format.fprintf ppf "@[<hov 1>(%tset_of_closures%t@ \
      @[<hov 1>(function_decls@ %a)@]@ \
      @[<hov 1>(value_slots@ %a)@]@ \
      @[<hov 1>(alloc_mode@ %a)@]\
      )@]"
    Flambda_colours.prim_constructive
    Flambda_colours.pop
    (Function_declarations.print) function_decls
    (Value_slot.Map.print print_value_slot) value_slots
    Alloc_mode.For_allocations.print alloc_mode
(* Instantiate [Container_types.Make] to obtain the standard derived
   operations for [t] from the [compare] / [equal] / [print] below. *)
include Container_types.Make (struct
  type nonrec t = t

  let print = print

  (* Hashing is not needed for sets of closures at present. *)
  let hash _ = Misc.fatal_error "Not yet implemented"

  (* Lexicographic comparison: function declarations first, then value
     slots (simple before kind), then allocation mode. *)
  let compare
      { function_decls = function_decls1;
        value_slots = value_slots1;
        alloc_mode = alloc_mode1
      }
      { function_decls = function_decls2;
        value_slots = value_slots2;
        alloc_mode = alloc_mode2
      } =
    let c = Function_declarations.compare function_decls1 function_decls2 in
    if c <> 0
    then c
    else
      let compare_value_slot (simple1, kind1) (simple2, kind2) =
        let c = Simple.compare simple1 simple2 in
        if c <> 0 then c else Flambda_kind.With_subkind.compare kind1 kind2
      in
      let c =
        Value_slot.Map.compare compare_value_slot value_slots1 value_slots2
      in
      if c <> 0
      then c
      else Alloc_mode.For_allocations.compare alloc_mode1 alloc_mode2

  (* Equality is derived from [compare]. *)
  let equal t1 t2 = compare t1 t2 = 0
end)
(* A set of closures is empty when it declares no functions and closes over
   no value slots.  The allocation mode is irrelevant here. *)
let is_empty { function_decls; value_slots; alloc_mode = _ } =
  let no_decls = Function_declarations.is_empty function_decls in
  no_decls && Value_slot.Map.is_empty value_slots
(* Build a set of closures from its three components. *)
let create ~value_slots alloc_mode function_decls =
  { alloc_mode; function_decls; value_slots }
(* Projection: the function declarations of the set. *)
let function_decls t = t.function_decls
(* Projection: the captured environment entries. *)
let value_slots t = t.value_slots
(* Projection: the allocation mode of the set. *)
let alloc_mode t = t.alloc_mode
(* [is_closed t] holds when [t] captures no value slots. *)
let is_closed t = Value_slot.Map.is_empty t.value_slots
(* Compact printer (shadows the one above): the "(env ...)" component is
   omitted entirely when there are no value slots. *)
let [@ocamlformat "disable"] print ppf
      { function_decls;
        value_slots;
        alloc_mode;
      } =
  if Value_slot.Map.is_empty value_slots then
    Format.fprintf ppf "@[<hov 1>(%tset_of_closures%t@ %a@ \
        @[<hov 1>%a@]\
        )@]"
      Flambda_colours.prim_constructive
      Flambda_colours.pop
      Alloc_mode.For_allocations.print alloc_mode
      (Function_declarations.print) function_decls
  else
    Format.fprintf ppf "@[<hov 1>(%tset_of_closures%t@ %a@ \
        @[<hov 1>%a@]@ \
        @[<hov 1>(env@ %a)@]\
        )@]"
      Flambda_colours.prim_constructive
      Flambda_colours.pop
      Alloc_mode.For_allocations.print alloc_mode
      Function_declarations.print function_decls
      (Value_slot.Map.print print_value_slot) value_slots
(* Free names of the set: those of the function declarations together with
   those of the simples stored in the value slots.  Each value slot is also
   recorded as occurring in a declaration position, in normal mode. *)
let free_names { function_decls; value_slots; alloc_mode = _ } =
  let slot_free_names =
    Value_slot.Map.fold
      (fun slot (simple, _kind) acc ->
        let this_slot =
          Name_occurrences.add_value_slot_in_declaration
            (Simple.free_names simple) slot Name_mode.normal
        in
        Name_occurrences.union acc this_slot)
      value_slots Name_occurrences.empty
  in
  Name_occurrences.union_list
    [Function_declarations.free_names function_decls; slot_free_names]
(* Apply [renaming] to every component.  Value slots that the renaming marks
   as unused are dropped.  Physical equality of the input is preserved when
   nothing actually changed, so callers can cheaply detect no-ops. *)
let apply_renaming ({ function_decls; value_slots; alloc_mode } as t) renaming =
  let alloc_mode' =
    Alloc_mode.For_allocations.apply_renaming alloc_mode renaming
  in
  let function_decls' =
    Function_declarations.apply_renaming function_decls renaming
  in
  (* Tracks whether any value-slot entry was renamed or removed. *)
  let changed = ref false in
  let value_slots' =
    Value_slot.Map.filter_map
      (fun var (simple, kind) ->
        if Renaming.value_slot_is_used renaming var
        then (
          let simple' = Simple.apply_renaming simple renaming in
          (* [Simple.apply_renaming] returns the argument itself when it is
             untouched; use physical equality to detect that. *)
          if not (simple == simple') then changed := true;
          Some (simple', kind))
        else (
          (* Unused slot: drop the entry. *)
          changed := true;
          None))
      value_slots
  in
  if alloc_mode == alloc_mode'
  && function_decls == function_decls'
  && not !changed
  then t
  else
    { function_decls = function_decls';
      value_slots = value_slots';
      alloc_mode = alloc_mode'
    }
(* Identifiers needed to export the set: those of the function declarations,
   of every simple held in a value slot, and of the allocation mode. *)
let ids_for_export { function_decls; value_slots; alloc_mode } =
  let from_decls = Function_declarations.ids_for_export function_decls in
  let from_decls_and_slots =
    Value_slot.Map.fold
      (fun _value_slot (simple, _kind) ids ->
        Ids_for_export.add_simple ids simple)
      value_slots from_decls
  in
  Ids_for_export.union from_decls_and_slots
    (Alloc_mode.For_allocations.ids_for_export alloc_mode)
(* Keep only the function declarations satisfying [f]; the value slots and
   allocation mode are unchanged. *)
let filter_function_declarations t ~f =
  { t with function_decls = Function_declarations.filter t.function_decls ~f }
| null | https://raw.githubusercontent.com/ocaml-flambda/flambda-backend/61abb3f595c93a7d411726aff04c1d1ce2d54e40/middle_end/flambda2/terms/set_of_closures.ml | ocaml | ************************************************************************
OCaml
Copyright 2013--2019 OCamlPro SAS
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************ | , OCamlPro
and ,
Copyright 2014 - -2019 Jane Street Group LLC
the GNU Lesser General Public License version 2.1 , with the
type t =
{ function_decls : Function_declarations.t;
value_slots : (Simple.t * Flambda_kind.With_subkind.t) Value_slot.Map.t;
alloc_mode : Alloc_mode.For_allocations.t
}
let print_value_slot ppf (simple, kind) =
Format.fprintf ppf "@[(%a @<1>\u{2237} %a)@]" Simple.print simple
Flambda_kind.With_subkind.print kind
let [@ocamlformat "disable"] print ppf
{ function_decls;
value_slots;
alloc_mode;
} =
Format.fprintf ppf "@[<hov 1>(%tset_of_closures%t@ \
@[<hov 1>(function_decls@ %a)@]@ \
@[<hov 1>(value_slots@ %a)@]@ \
@[<hov 1>(alloc_mode@ %a)@]\
)@]"
Flambda_colours.prim_constructive
Flambda_colours.pop
(Function_declarations.print) function_decls
(Value_slot.Map.print print_value_slot) value_slots
Alloc_mode.For_allocations.print alloc_mode
include Container_types.Make (struct
type nonrec t = t
let print = print
let hash _ = Misc.fatal_error "Not yet implemented"
let compare
{ function_decls = function_decls1;
value_slots = value_slots1;
alloc_mode = alloc_mode1
}
{ function_decls = function_decls2;
value_slots = value_slots2;
alloc_mode = alloc_mode2
} =
let c = Function_declarations.compare function_decls1 function_decls2 in
if c <> 0
then c
else
let compare_value_slot (simple1, kind1) (simple2, kind2) =
let c = Simple.compare simple1 simple2 in
if c <> 0 then c else Flambda_kind.With_subkind.compare kind1 kind2
in
let c =
Value_slot.Map.compare compare_value_slot value_slots1 value_slots2
in
if c <> 0
then c
else Alloc_mode.For_allocations.compare alloc_mode1 alloc_mode2
let equal t1 t2 = compare t1 t2 = 0
end)
let is_empty { function_decls; value_slots; alloc_mode = _ } =
Function_declarations.is_empty function_decls
&& Value_slot.Map.is_empty value_slots
let create ~value_slots alloc_mode function_decls =
{ function_decls; value_slots; alloc_mode }
let function_decls t = t.function_decls
let value_slots t = t.value_slots
let alloc_mode t = t.alloc_mode
let is_closed t = Value_slot.Map.is_empty t.value_slots
let [@ocamlformat "disable"] print ppf
{ function_decls;
value_slots;
alloc_mode;
} =
if Value_slot.Map.is_empty value_slots then
Format.fprintf ppf "@[<hov 1>(%tset_of_closures%t@ %a@ \
@[<hov 1>%a@]\
)@]"
Flambda_colours.prim_constructive
Flambda_colours.pop
Alloc_mode.For_allocations.print alloc_mode
(Function_declarations.print) function_decls
else
Format.fprintf ppf "@[<hov 1>(%tset_of_closures%t@ %a@ \
@[<hov 1>%a@]@ \
@[<hov 1>(env@ %a)@]\
)@]"
Flambda_colours.prim_constructive
Flambda_colours.pop
Alloc_mode.For_allocations.print alloc_mode
Function_declarations.print function_decls
(Value_slot.Map.print print_value_slot) value_slots
let free_names { function_decls; value_slots; alloc_mode = _ } =
let free_names_of_value_slots =
Value_slot.Map.fold
(fun value_slot (simple, _kind) free_names ->
Name_occurrences.union free_names
(Name_occurrences.add_value_slot_in_declaration
(Simple.free_names simple) value_slot Name_mode.normal))
value_slots Name_occurrences.empty
in
Name_occurrences.union_list
[Function_declarations.free_names function_decls; free_names_of_value_slots]
let apply_renaming ({ function_decls; value_slots; alloc_mode } as t) renaming =
let alloc_mode' =
Alloc_mode.For_allocations.apply_renaming alloc_mode renaming
in
let function_decls' =
Function_declarations.apply_renaming function_decls renaming
in
let changed = ref false in
let value_slots' =
Value_slot.Map.filter_map
(fun var (simple, kind) ->
if Renaming.value_slot_is_used renaming var
then (
let simple' = Simple.apply_renaming simple renaming in
if not (simple == simple') then changed := true;
Some (simple', kind))
else (
changed := true;
None))
value_slots
in
if alloc_mode == alloc_mode'
&& function_decls == function_decls'
&& not !changed
then t
else
{ function_decls = function_decls';
value_slots = value_slots';
alloc_mode = alloc_mode'
}
let ids_for_export { function_decls; value_slots; alloc_mode } =
let function_decls_ids =
Function_declarations.ids_for_export function_decls
in
Ids_for_export.union
(Value_slot.Map.fold
(fun _value_slot (simple, _kind) ids ->
Ids_for_export.add_simple ids simple)
value_slots function_decls_ids)
(Alloc_mode.For_allocations.ids_for_export alloc_mode)
let filter_function_declarations t ~f =
let function_decls = Function_declarations.filter t.function_decls ~f in
{ t with function_decls }
|
7e408b9b583802daea591918c34d9b14767cf0d750718de295c7e06f7e063a73 | VincentToups/racket-lib | demon-name-generator.rkt | #lang racket
(define bom-names (list "Aaron"
"Aaron"
"Abinadi"
"Abinadom"
"Abish"
"Aha"
"Ahah"
"Akish"
"Alma"
"Alma"
"Amaleki"
"Amaleki"
"Amalickiah"
"Amaron"
"Aminadab"
"Aminadi"
"Amlici"
"Ammah"
"Ammaron"
"Ammon"
"Ammon"
"Ammoron"
"Amnigaddah"
"Amnor"
"Amoron"
"Amos"
"Amos"
"Amulek"
"Amulon"
"Antiomno"
"Antionah"
"Antionum"
"Antipus"
"Archeantus"
"Benjamin"
"Brother of Jared"
"Captain Moroni"
"Cezoram"
"Chemish"
"Christ"
"Cohor"
"Cohor"
"Cohor"
"Com"
"Com"
"Corianton"
"Coriantor"
"Coriantum"
"Coriantum"
"Coriantumr"
"Coriantumr"
"Coriantumr"
"Corihor"
"Corihor"
"Corom"
"Cumenihah"
"Emer"
"Emron"
"Enos"
"Esrom"
"Ethem"
"Ether"
"Ezias"
"Gadianton"
"Gid"
"Giddianhi"
"Giddonah"
"Giddonah"
"Gideon"
"Gidgiddonah"
"Gidgiddoni"
"Gilead"
"Gilgah"
"Gilgal"
"Hagoth"
"Hearthom"
"Helam"
"Helaman"
"Helaman"
"Helaman"
"Helem"
"Helorum"
"Hem"
"Heth"
"Heth"
"Himni"
"Isabel"
"Isaiah"
"Ishmael"
"Ishmael"
"Jacob"
"Jacob"
"Jacob"
"Jacom"
"Jared"
"Jared"
"Jarom"
"Jeneum"
"Jeremiah"
"Jesus Christ"
"Jonas"
"Jonas"
"Joseph"
"Josh"
"Kib"
"Kim"
"Kimnor"
"Benjamin"
"Kish"
"Kishkumen"
"Korihor"
"Kumen"
"Kumenonhi"
"Laban"
"Lachoneus"
"Lachoneus"
"Lamah"
"Laman"
"Laman"
"Laman"
"Laman"
"Lamoni"
"Lehi"
"Lehi"
"Lehi"
"Lehi"
"Lehonti"
"Lemuel"
"Levi"
"Lib"
"Lib"
"Limhah"
"Limher"
"Limhi"
"Luram"
"Mahah"
"Manti"
"Mathoni"
"Mathonihah"
"Morianton"
"Morianton"
"Mormon"
"Mormon"
"Moron"
"Moroni"
"Moroni"
"Moronihah"
"Moronihah"
"Mosiah"
"Mosiah"
"Mulek"
"Muloki"
"Nehor"
"Nephi"
"Nephi"
"Nephi"
"Nephi"
"Nephihah"
"Neum"
"Nimrah son of Akish"
"Noah"
"Noah"
"Omer"
"Omner"
"Omni"
"Orihah"
"Paanchi"
"Pachus"
"Pacumeni"
"Pagag"
"Pahoran"
"Pahoran"
"Riplakish"
"Sam"
"Samuel"
"Sariah"
"Seantum"
"Seezoram"
"Seth"
"Shared"
"Shem"
"Shemnon"
"Sherem"
"Shez"
"Shez"
"Shiblom"
"Shiblom"
"Shiblon"
"Shiz"
"Shule"
"Teancum"
"Teomner"
"Timothy"
"Tubaloth"
"Zarahemla"
"Zedekiah"
"Zeezrom"
"Zelph"
"Zemnarihah"
"Zenephi"
"Zeniff"
"Zenock"
"Zenos"
"Zerahemnah"
"Zeram"
"Zoram"
"Zoram"
"Zoram"))
(define star-trek-race-names (list "Aaamazzarite"
"Acamarian"
"Aenar"
"Akaali"
"Akritirian"
"Aksani"
"Alcyone"
"Aldean"
"Algolian"
"Alkian"
"Allasomorph"
"Alsuran"
"Andorian"
"Android"
"Angosian"
"Ankari"
"Annari"
"Antaran"
"Antarian"
"Antedean"
"Antican"
"Arbazan"
"Arcadian"
"Ardanan"
"Arcturian"
"Argala"
"Argelian"
"Argosian"
"Argrathi"
"Arkarian"
"Arkonian"
"Arrithean"
"Axanar"
"B'Saari"
"B'omar"
"Ba'ku"
"Ba'neth"
"Bajoran"
"Bandi"
"Banean"
"Bara"
"Barkonian"
"Barolian"
"Barzan"
"Benkaran"
"Benthan"
"Benzenite"
"Benzite"
"Berellian"
"Betazoid"
"Betelgeusian"
"Biomimetic"
"Bolian"
"Boraalan"
"Boray"
"Borg"
"Boslic"
"Botha"
"Breen"
"Brekkian"
"Brenari"
"Briori"
"Brunali"
"Bynar"
"Byzallian"
"Byzatium"
"Caatati"
"Cairn"
"Caitian"
"Calamarain"
"Caldonian"
"Camorite"
"Capellan"
"Cardassian"
"Chalnoth"
"Changeling"
"Chokuzan"
"Chrysalian"
"Coridan"
"Corvallen"
"Cravic"
"Crystalline"
"Cytherian"
"Cytoplasmic"
"D'Arsay"
"Dachlyd"
"Daliwakan"
"Debrune"
"Deltan"
"Denebian"
"Denobulan"
"Devidian"
"Devore"
"Dinaali"
"Doosodarian"
"Dopterian"
"Dosi"
"Douwd"
"Dralian"
"Drayan"
"Dream"
"Dreman"
"Druoda"
"Edo"
"Ekosian"
"El-Aurian"
"Elasian"
"Elaysian"
"Ellora"
"Enaran"
"Enolian"
"Entaban"
"Entharan"
"Eska"
"Etanian"
"Evora"
"Excalbian"
"Exocomp"
"Farn"
"Fek'Ihri"
"Fen"
"Ferengi"
"Flaxian"
"Fotiallian"
"Frunalian"
"Galadoran"
"Gallamite"
"Garan"
"Garenor"
"Genton"
"Gorn"
"Grazerite"
"Gree"
"Grizzela"
"Haakonian"
"Halanan"
"Haliian"
"Halkan"
"Haradin"
"Harkonian"
"Hazari"
"Hekaran"
"Hierarchy"
"Hirogen"
"Horta"
"Hologram"
"Human"
"Human"
"Human"
"Hunter"
"Hupyrian"
"Hur'q"
"Husnock"
"Iconian"
"Ikaaran"
"Ilari"
"Ilidarian"
"Illyrian"
"Imhotep"
"Iotian"
"Ithenite"
"Iyaaran"
"J'naii"
"Jarada"
"Jem'Hadar"
"K'normian"
"Kadi"
"Kaelon"
"Kantare"
"Karemma"
"Kartelan"
"Kaylar"
"Kazarite"
"Kazon"
"Kelvan"
"Kesat"
"Kinbori"
"Klaestron"
"Klingon"
"Kmada"
"Kobali"
"Kobliad"
"Kohl"
"Kolaati"
"Kolaran"
"Kolhari"
"Komar"
"Kostolain"
"Kotakian"
"Kovaalan"
"Kradin"
"Kraylor"
"Kreetassan"
"Krenim"
"Kressari"
"Kriosian"
"Ktarian"
"Kyrian"
"Kzin"
"Ledosian"
"Lethean"
"Leyron"
"Ligonian"
"Lissepian"
"Lokirrim"
"Lorillian"
"Lothra"
"Lurian"
"Lycocians"
"Lyridian"
"Lysian"
"Lyssarrian"
"Lytasian"
"M-"
"M'klexa"
"Makull's"
"Maldorian"
"Malkoth"
"Malon"
"Malurian"
"Mari"
"Mawasi"
"Mazarite"
"Medusan"
"Megarite"
"Mekhar"
"Melkot"
"Menk"
"Menthar"
"Metrons"
"Microbrain"
"Mikhal"
"Mikulak"
"Minosian"
"Mintakan"
"Miradorn"
"Mislenite"
"Mizarian"
"Mokra"
"Monean"
"Morphinian"
"Motali"
"Mulzirak"
"Mylean"
"N'Kree"
"Nacene"
"Napean"
"Nasari"
"Nassordin"
"Nausicaan"
"Nechani"
"Nezu"
"Night"
"Nihydron"
"Norcadian"
"Numiri"
"Nuu'bari"
"Nuvian"
"Nygean"
"Nyrian"
"Ocampa"
"Octran"
"Old"
"Organian"
"Orion"
"Ornaran"
"Osaarian"
"Ovion"
"Pakled"
"Pah"
"Paraagan"
"Parada"
"Parein"
"Paxans"
"Peliar"
"Peljenite"
"Pendari"
"Petarian"
"Ponea"
"Pralor"
"Prang"
"Promellian"
"Prophet"
"Proxcinian"
"Pygorian"
"Q"
"Qomar"
"Quarren"
"Rakhari"
"Rakosan"
"Ram"
"Ramuran"
"Reegrunion"
"Reman"
"Retellian"
"Rhaandarite"
"Rigelian"
"Rilnar"
"Risian"
"Romulan"
"Rutian"
"Sakari"
"Saltah'na"
"Satarran"
"Saurian"
"Scathosian"
"Selay"
"Serosian"
"Sheliak"
"Shivolian"
"Sikarian"
"Silver"
"Skagaran"
"Skrreea"
"Sky"
"Son'a"
"Species"
"Sphere"
"Srivani"
"Stakoron"
"Susanite"
"Suliban"
"Swarm"
"T-Rogoran"
"T'Lani"
"Tak"
"Takaran"
"Takarian"
"Takrit"
"Talarian"
"Talavian"
"Talaxian"
"Talosian"
"Tamarian"
"Tandaran"
"Tanugan"
"Tarahongian"
"Tarellian"
"Taresian"
"Tarkalean"
"Tarkan"
"Tarlac"
"Tavnian"
"Tellarite"
"Telsian"
"Teplan"
"Terellian"
"Terkellian"
"Terrelian"
"Terrellian"
"Tessic's"
"Thasian"
"Tholian"
"Tiburonian"
"Torothan"
"Tosk"
"Trabe"
"Triannon"
"Tribble"
"Trill"
"Troyian"
"Turei"
"Tygarian"
"Tzenkethi"
"U'tani"
"Ubean"
"Ullian"
"Uxali"
"V'radian"
"Vaadwaur"
"Valakian"
"Valerian"
"Valtese"
"Varro"
"Vaskan"
"Ventaxian"
"Ventu"
"Verathan"
"Verillian"
"Vhnori"
"Vian"
"Vidiian"
"Vissian"
"Vodwar"
"Vojean"
"Vok'sha"
"Vorgon"
"Vori"
"Vorta"
"Vostigye"
"Voth"
"Vulcan"
"Wadi"
"Wogneer"
"Wyngari"
"Wysanti"
"Xarantine"
"Xepolite"
"Xindi"
"Xyrillian"
"Yaderan"
"Yallitian"
"Yalosian"
"Yash-El"
"Yattho"
"Yridian"
"Zahl"
"Zakdorn"
"Zaldan"
"Zalkonian"
"Zaranite"
"Zetaran"
"Zeon"
"Zhuora"))
(define countries (list "Afghanistan"
"Albania"
"Algeria"
"Andorra"
"Angola"
"Antigua & Deps"
"Argentina"
"Armenia"
"Australia"
"Austria"
"Azerbaijan"
"Bahamas"
"Bahrain"
"Bangladesh"
"Barbados"
"Belarus"
"Belgium"
"Belize"
"Benin"
"Bhutan"
"Bolivia"
"Bosnia Herzegovina"
"Botswana"
"Brazil"
"Brunei"
"Bulgaria"
"Burkina"
"Burundi"
"Cambodia"
"Cameroon"
"Canada"
"Cape Verde"
"Central African Republic"
"Chad"
"Chile"
"China"
"Colombia"
"Comoros"
"Congo"
"Congo"
"Costa Rica"
"Croatia"
"Cuba"
"Cyprus"
"Czech Republic"
"Denmark"
"Djibouti"
"Dominica"
"Dominican Republic"
"East Timor"
"Ecuador"
"Egypt"
"El Salvador"
"Equatorial Guinea"
"Eritrea"
"Estonia"
"Ethiopia"
"Fiji"
"Finland"
"France"
"Gabon"
"Gambia"
"Georgia"
"Germany"
"Ghana"
"Greece"
"Grenada"
"Guatemala"
"Guinea"
"Guinea-Bissau"
"Guyana"
"Haiti"
"Honduras"
"Hungary"
"Iceland"
"India"
"Indonesia"
"Iran"
"Iraq"
"Ireland"
"Israel"
"Italy"
"Ivory Coast"
"Jamaica"
"Japan"
"Jordan"
"Kazakhstan"
"Kenya"
"Kiribati"
"Korea North"
"Korea South"
"Kosovo"
"Kuwait"
"Kyrgyzstan"
"Laos"
"Latvia"
"Lebanon"
"Lesotho"
"Liberia"
"Libya"
"Liechtenstein"
"Lithuania"
"Luxembourg"
"Macedonia"
"Madagascar"
"Malawi"
"Malaysia"
"Maldives"
"Mali"
"Malta"
"Marshall Islands"
"Mauritania"
"Mauritius"
"Mexico"
"Micronesia"
"Moldova"
"Monaco"
"Mongolia"
"Montenegro"
"Morocco"
"Mozambique"
"Myanmar"
"Namibia"
"Nauru"
"Nepal"
"Netherlands"
"New Zealand"
"Nicaragua"
"Niger"
"Nigeria"
"Norway"
"Oman"
"Pakistan"
"Palau"
"Panama"
"Papua New Guinea"
"Paraguay"
"Peru"
"Philippines"
"Poland"
"Portugal"
"Qatar"
"Romania"
"Russian Federation"
"Rwanda"
"St Kitts & Nevis"
"St Lucia"
"Saint Vincent & the Grenadines"
"Samoa"
"San Marino"
"Sao Tome & Principe"
"Saudi Arabia"
"Senegal"
"Serbia"
"Seychelles"
"Sierra Leone"
"Singapore"
"Slovakia"
"Slovenia"
"Solomon Islands"
"Somalia"
"South Africa"
"South Sudan"
"Spain"
"Sri Lanka"
"Sudan"
"Suriname"
"Swaziland"
"Sweden"
"Switzerland"
"Syria"
"Taiwan"
"Tajikistan"
"Tanzania"
"Thailand"
"Togo"
"Tonga"
"Trinidad & Tobago"
"Tunisia"
"Turkey"
"Turkmenistan"
"Tuvalu"
"Uganda"
"Ukraine"
"United Arab Emirates"
"United Kingdom"
"United States"
"Uruguay"
"Uzbekistan"
"Vanuatu"
"Vatican City"
"Venezuela"
"Vietnam"
"Yemen"
"Zambia"
"Zimbabwe"))
(define demon-names
(list "Abraxas"
"Abbadon"
"Agrith-Naar"
"Aku"
"Alastair"
"Alastor"
"Algaliarept"
"Alichino"
"Andariel"
"Angel"
"Anyanka"
"Anzu"
"Archimonde"
"Artery"
"Asmodeus"
"Asura"
"Azal"
"Azazeal"
"Azazel"
"Azazel"
"Azmodan"
"Azura"
"Amaimon"
"Baal"
"Babau"
"Bacarra"
"Bal'lak"
"Balor"
"Balrog"
"Balthazar"
"Baphomet"
"Barakiel"
"Barbariccia"
"Barbas"
"Bartimaeus"
"Bat'Zul"
"Beastie"
"Be'lakor"
"Bebilith"
"Beelzebub"
"Beleth"
"Belfagor"
"Belial"
"Belphegor"
"Belthazor"
"Berry"
"Betelguese"
"Blackheart"
"Cacodemon"
"Cadaver"
"Cagnazzo"
"Calcabrina"
"Calcifer"
"Castor"
"Cordelia"
"Chernabog"
"Cherry"
"Ciriatto"
"Claude"
"Crawly"
"Crowley"
"Cyberdemon"
"Cryto"
"D'Hoffryn"
"Dabura"
"Draghinazzo"
"Dante"
"Darkseid"
"Decarbia"
"Delrith"
"Demonita"
"Devi"
"Diablo"
"Doviculus"
"Doyle"
"Dretch"
"Dumain"
"Duriel"
"Errtu"
"Etna"
"Etrigan"
"Faquarl"
"Farfarello"
"Femur"
"Firebrand"
"Randall"
"Furfur"
"Gaap"
"Gary"
"Glabrezu"
"Gregor"
"Gothmog"
"The"
"Halfrek"
"Har'lakk"
"Hastur"
"Hellboy"
"Hell"
"Hezrou"
"Hiei"
"Him"
"Hnikarr"
"Hot"
"Hex"
"Infernal"
"Inferno"
"Jabor"
"Jadis"
"Janemba"
"Japhrimel"
"Jennifer"
"Juiblex"
"K'ril"
"Kal'Ger"
"DCI"
"Khorne"
"Kil'jaeden"
"Kneesocks"
"Koakuma"
"Korrok"
"Kronos"
"Freddy"
"Laharl"
"Lamia"
"Leviathan"
"Libicocco"
"Ligur"
"Lilith"
"Little"
"Longhorn"
"Lorne"
"Loki"
"Lucifer"
"Mal'Ganis"
"Malacoda"
"Maledict"
"Malfegor"
"Malice"
"Mammon"
"Mancubus"
"Mannoroth"
"Marilith"
"Masselin"
"Meg"
"Mehrunes"
"Melkor"
"Mephisto"
"Mephisto"
"Mephistopheles"
"Mephisto"
"N'zall"
"Nadia"
"Nalfeshnee"
"Nanatoo"
"Nero"
"Neuro"
"Newt"
"Nouda"
"Nurgle"
"Oyashiro"
"Rin"
"Pazuzu"
"Pennywise"
"Psaro"
"Quasit"
"Queezle"
"Qwan"
"Qweffor"
"Rakdos"
"Ramuthra"
"Red"
"Retriever"
"Randall"
"Ronove"
"Rosier"
"Rubicante"
"Ruby"
"Satan"
"Satan"
"Sauron"
"Scanty"
"Scarlet"
"Scarmiglione"
"Scumspawn"
"Sebastian"
"Shax"
"Silitha"
"Slaanesh"
"Sparda"
"Spawn"
"Spike"
"Spine"
"Straga"
"Tempus"
"Thammaron"
"Tiamat"
"Toby"
"To'Kash"
"Trigon"
"Turok-Han"
"Tzeentch"
"Ungoliant"
"Vein"
"Vergil"
"Violator"
"Vrock"
"Vulgrim"
"Vyers"
"Ware"
"Wormwood"
"Yaksha"
"Yk'Lagor"
"Zankou"
"Zepar"
"Overlord"
"Zuul"))
;; Return the length of the longest string in g, or 0 when g is empty.
;; Note: Racket's foldl passes the list element FIRST and the accumulator
;; SECOND.  The original lambda had its parameters in the opposite order,
;; so [max] received a string and [string-length] a number, crashing on
;; every non-empty list.  The parameter order is fixed here.
(define (max-length-over g)
  (foldl (lambda (s n)
           (max n (string-length s)))
         0
         g))
;; True iff s is the empty string.
(define (empty-string? s)
  (zero? (string-length s)))
;; Return the non-empty strings of the input list, preserving their order.
(define (those-non-empty strings)
  (filter (lambda (s) (not (empty-string? s))) strings))
;; First character of s, as a one-character string (errors on "").
(define (string-head s)
  (substring s 0 1))
;; Everything after the first character of s (errors on "").
(define (string-tail s)
  (substring s 1 (string-length s)))
;; Last character of s as a one-character string, or "" when s is empty.
(define (last-character s)
  (if (empty-string? s) ""
      (substring s (- (string-length s) 1) (string-length s))))
;; Functionally update the binding of key in dict d by applying fun to its
;; current value.  With no extra argument the key must already be bound;
;; with one extra argument that value is used when the key is absent.
;; (Any other number of extra arguments is a match error, as before.)
(define (dict-update d key fun . args)
  (match args
    ((list)
     (dict-set d key (fun (dict-ref d key))))
    ((list or-value)
     (dict-set d key (fun (dict-ref d key (lambda () or-value)))))))
;; Increment the count stored under key k in dict d, treating a missing
;; key as 0.
(define (at-plus d k)
  (dict-update d k add1 0))
;; The symbol whose name is the single character c.
(define (char->symbol c)
  (string->symbol (string c)))
;; Coerce o to a symbol: symbols pass through, characters are converted.
;; Any other input falls off the cond and yields void (kept deliberately).
(define (char->symbol* o)
  (cond ((symbol? o) o)
        ((char? o) (char->symbol o))))
(struct triple (index from to) #:transparent)
;; Decompose s into the list of position-indexed transitions between
;; consecutive characters, padded with the sentinel symbols 'start and
;; 'end (e.g. "ab" yields start->a, a->b, b->end).
(define (string->triples s)
  (let loop
      ((i 0)
       (l (append '(start)
                  (string->list s)
                  '(end)))
       (triples '()))
    (match l
      ;; Only the trailing sentinel remains: restore chronological order.
      ((list 'end) (reverse triples))
      ;; Record the transition between the first two elements, advance one.
      ((cons from
             (and rest
                  (cons to _)))
       (loop
        (+ i 1)
        rest
        (cons (triple i
                      (char->symbol* from)
                      (char->symbol* to))
              triples))))))
;; Count, over the whole corpus of names, how many times each
;; (index, from, to) transition occurs; returns an immutable hash from
;; triple to count.
(define (populate-transition-table names)
  (foldl
   (lambda (name table)
     (foldl (lambda (trip tbl) (at-plus tbl trip))
            table
            (string->triples name)))
   (make-immutable-hash '())
   names))
;; (string->triples "test")
;; (populate-transition-table demon-names)
;; Predicate builder: ((=/c n) m) is true iff m is numerically equal to n.
(define ((=/c n) m)
  (= n m))
;; Predicate builder: ((eq?/c a) b) is true iff b is eq? to a.
(define ((eq?/c a) b)
  (eq? a b))
;; Keep only the entries of table whose triple key has the given index and
;; whose from field is eq? to from; returns a fresh immutable hash.
(define (restrict-table table index from)
  (foldl
   (lambda (trip new-table)
     (if (and (= index (triple-index trip))
              (eq? from (triple-from trip)))
         (dict-set new-table trip (dict-ref table trip))
         new-table))
   (make-immutable-hash '())
   (dict-keys table)))
;; Draw the symbol following prev-character at position index, with
;; probability proportional to the observed counts in table.  An optional
;; pseudo-random generator may be supplied; otherwise the current one is
;; used.
(define (next-character table prev-character index . args)
  (match args
    ((list) (next-character table prev-character
                            index
                            (current-pseudo-random-generator)))
    ((list generator)
     (let* ((sub-table
             (restrict-table table index prev-character))
            (total-elements (foldl + 0 (dict-values sub-table)))
            ;; Uniform draw in [0, total).  NOTE(review): (random 0) is a
            ;; contract error, so this assumes the sub-table is non-empty,
            ;; i.e. the (index, prev-character) pair was seen in training.
            (draw (random total-elements generator)))
       ;; Walk the candidates, subtracting each count from the draw; stop
       ;; when the draw is exhausted or at the last entry (so a symbol is
       ;; always returned).
       (let loop
           ((draw draw)
            (key/val (dict->list sub-table)))
         (match key/val
           ((cons (cons
                   (triple _ from to)
                   count) rest)
            (if (or (empty? rest)
                    (<= draw 0))
                to
                (loop (- draw count)
                      rest)))))))))
;; Concatenate the printed names of the symbols in sl, in list order.
(define (symbol-list->string sl)
  (apply string-append (map symbol->string sl)))
;; Generate one name by walking the Markov chain in table from the 'start
;; sentinel until 'end is drawn, then concatenating the drawn symbols.
;; An optional pseudo-random generator may be supplied.
(define (generate-demon-name table . args)
  (match args
    ((list) (generate-demon-name table (current-pseudo-random-generator)))
    ((list gen)
     (let loop ((ix 0)
                ;; name-list holds the drawn symbols in reverse order,
                ;; seeded with the 'start sentinel.
                (name-list '(start)))
       (let ((next (next-character table (car name-list) ix gen)))
         (if (eq? next 'end)
             ;; Drop the leading 'start sentinel and restore order.
             (symbol-list->string (cdr (reverse name-list)))
             (loop
              (+ ix 1)
              (cons next name-list))))))))
(define standard-table (populate-transition-table demon-names))
;; Generate a list of n names from table.  Optional arguments: a
;; pseudo-random generator and an accumulator list; the accumulator is an
;; internal detail of the recursion and is normally omitted by callers.
(define (generate-demon-names table n . args)
  (match args
    [(list)
     (generate-demon-names table n (current-pseudo-random-generator) '())]
    [(list gen)
     (generate-demon-names table n gen '())]
    [(list gen acc)
     (match n
       [0 acc]
       [n (generate-demon-names table (- n 1) gen
                                (cons (generate-demon-name table gen) acc))])]))
(provide generate-demon-names
generate-demon-name
standard-table
populate-transition-table)
| null | https://raw.githubusercontent.com/VincentToups/racket-lib/d8aed0959fd148615b000ceecd7b8a6128cfcfa8/ensorcelled/demon-name-generator.rkt | racket | (string->triples "test")
(populate-transition-table demon-names) | #lang racket
(define bom-names (list "Aaron"
"Aaron"
"Abinadi"
"Abinadom"
"Abish"
"Aha"
"Ahah"
"Akish"
"Alma"
"Alma"
"Amaleki"
"Amaleki"
"Amalickiah"
"Amaron"
"Aminadab"
"Aminadi"
"Amlici"
"Ammah"
"Ammaron"
"Ammon"
"Ammon"
"Ammoron"
"Amnigaddah"
"Amnor"
"Amoron"
"Amos"
"Amos"
"Amulek"
"Amulon"
"Antiomno"
"Antionah"
"Antionum"
"Antipus"
"Archeantus"
"Benjamin"
"Brother of Jared"
"Captain Moroni"
"Cezoram"
"Chemish"
"Christ"
"Cohor"
"Cohor"
"Cohor"
"Com"
"Com"
"Corianton"
"Coriantor"
"Coriantum"
"Coriantum"
"Coriantumr"
"Coriantumr"
"Coriantumr"
"Corihor"
"Corihor"
"Corom"
"Cumenihah"
"Emer"
"Emron"
"Enos"
"Esrom"
"Ethem"
"Ether"
"Ezias"
"Gadianton"
"Gid"
"Giddianhi"
"Giddonah"
"Giddonah"
"Gideon"
"Gidgiddonah"
"Gidgiddoni"
"Gilead"
"Gilgah"
"Gilgal"
"Hagoth"
"Hearthom"
"Helam"
"Helaman"
"Helaman"
"Helaman"
"Helem"
"Helorum"
"Hem"
"Heth"
"Heth"
"Himni"
"Isabel"
"Isaiah"
"Ishmael"
"Ishmael"
"Jacob"
"Jacob"
"Jacob"
"Jacom"
"Jared"
"Jared"
"Jarom"
"Jeneum"
"Jeremiah"
"Jesus Christ"
"Jonas"
"Jonas"
"Joseph"
"Josh"
"Kib"
"Kim"
"Kimnor"
"Benjamin"
"Kish"
"Kishkumen"
"Korihor"
"Kumen"
"Kumenonhi"
"Laban"
"Lachoneus"
"Lachoneus"
"Lamah"
"Laman"
"Laman"
"Laman"
"Laman"
"Lamoni"
"Lehi"
"Lehi"
"Lehi"
"Lehi"
"Lehonti"
"Lemuel"
"Levi"
"Lib"
"Lib"
"Limhah"
"Limher"
"Limhi"
"Luram"
"Mahah"
"Manti"
"Mathoni"
"Mathonihah"
"Morianton"
"Morianton"
"Mormon"
"Mormon"
"Moron"
"Moroni"
"Moroni"
"Moronihah"
"Moronihah"
"Mosiah"
"Mosiah"
"Mulek"
"Muloki"
"Nehor"
"Nephi"
"Nephi"
"Nephi"
"Nephi"
"Nephihah"
"Neum"
"Nimrah son of Akish"
"Noah"
"Noah"
"Omer"
"Omner"
"Omni"
"Orihah"
"Paanchi"
"Pachus"
"Pacumeni"
"Pagag"
"Pahoran"
"Pahoran"
"Riplakish"
"Sam"
"Samuel"
"Sariah"
"Seantum"
"Seezoram"
"Seth"
"Shared"
"Shem"
"Shemnon"
"Sherem"
"Shez"
"Shez"
"Shiblom"
"Shiblom"
"Shiblon"
"Shiz"
"Shule"
"Teancum"
"Teomner"
"Timothy"
"Tubaloth"
"Zarahemla"
"Zedekiah"
"Zeezrom"
"Zelph"
"Zemnarihah"
"Zenephi"
"Zeniff"
"Zenock"
"Zenos"
"Zerahemnah"
"Zeram"
"Zoram"
"Zoram"
"Zoram"))
(define star-trek-race-names (list "Aaamazzarite"
"Acamarian"
"Aenar"
"Akaali"
"Akritirian"
"Aksani"
"Alcyone"
"Aldean"
"Algolian"
"Alkian"
"Allasomorph"
"Alsuran"
"Andorian"
"Android"
"Angosian"
"Ankari"
"Annari"
"Antaran"
"Antarian"
"Antedean"
"Antican"
"Arbazan"
"Arcadian"
"Ardanan"
"Arcturian"
"Argala"
"Argelian"
"Argosian"
"Argrathi"
"Arkarian"
"Arkonian"
"Arrithean"
"Axanar"
"B'Saari"
"B'omar"
"Ba'ku"
"Ba'neth"
"Bajoran"
"Bandi"
"Banean"
"Bara"
"Barkonian"
"Barolian"
"Barzan"
"Benkaran"
"Benthan"
"Benzenite"
"Benzite"
"Berellian"
"Betazoid"
"Betelgeusian"
"Biomimetic"
"Bolian"
"Boraalan"
"Boray"
"Borg"
"Boslic"
"Botha"
"Breen"
"Brekkian"
"Brenari"
"Briori"
"Brunali"
"Bynar"
"Byzallian"
"Byzatium"
"Caatati"
"Cairn"
"Caitian"
"Calamarain"
"Caldonian"
"Camorite"
"Capellan"
"Cardassian"
"Chalnoth"
"Changeling"
"Chokuzan"
"Chrysalian"
"Coridan"
"Corvallen"
"Cravic"
"Crystalline"
"Cytherian"
"Cytoplasmic"
"D'Arsay"
"Dachlyd"
"Daliwakan"
"Debrune"
"Deltan"
"Denebian"
"Denobulan"
"Devidian"
"Devore"
"Dinaali"
"Doosodarian"
"Dopterian"
"Dosi"
"Douwd"
"Dralian"
"Drayan"
"Dream"
"Dreman"
"Druoda"
"Edo"
"Ekosian"
"El-Aurian"
"Elasian"
"Elaysian"
"Ellora"
"Enaran"
"Enolian"
"Entaban"
"Entharan"
"Eska"
"Etanian"
"Evora"
"Excalbian"
"Exocomp"
"Farn"
"Fek'Ihri"
"Fen"
"Ferengi"
"Flaxian"
"Fotiallian"
"Frunalian"
"Galadoran"
"Gallamite"
"Garan"
"Garenor"
"Genton"
"Gorn"
"Grazerite"
"Gree"
"Grizzela"
"Haakonian"
"Halanan"
"Haliian"
"Halkan"
"Haradin"
"Harkonian"
"Hazari"
"Hekaran"
"Hierarchy"
"Hirogen"
"Horta"
"Hologram"
"Human"
"Human"
"Human"
"Hunter"
"Hupyrian"
"Hur'q"
"Husnock"
"Iconian"
"Ikaaran"
"Ilari"
"Ilidarian"
"Illyrian"
"Imhotep"
"Iotian"
"Ithenite"
"Iyaaran"
"J'naii"
"Jarada"
"Jem'Hadar"
"K'normian"
"Kadi"
"Kaelon"
"Kantare"
"Karemma"
"Kartelan"
"Kaylar"
"Kazarite"
"Kazon"
"Kelvan"
"Kesat"
"Kinbori"
"Klaestron"
"Klingon"
"Kmada"
"Kobali"
"Kobliad"
"Kohl"
"Kolaati"
"Kolaran"
"Kolhari"
"Komar"
"Kostolain"
"Kotakian"
"Kovaalan"
"Kradin"
"Kraylor"
"Kreetassan"
"Krenim"
"Kressari"
"Kriosian"
"Ktarian"
"Kyrian"
"Kzin"
"Ledosian"
"Lethean"
"Leyron"
"Ligonian"
"Lissepian"
"Lokirrim"
"Lorillian"
"Lothra"
"Lurian"
"Lycocians"
"Lyridian"
"Lysian"
"Lyssarrian"
"Lytasian"
"M-"
"M'klexa"
"Makull's"
"Maldorian"
"Malkoth"
"Malon"
"Malurian"
"Mari"
"Mawasi"
"Mazarite"
"Medusan"
"Megarite"
"Mekhar"
"Melkot"
"Menk"
"Menthar"
"Metrons"
"Microbrain"
"Mikhal"
"Mikulak"
"Minosian"
"Mintakan"
"Miradorn"
"Mislenite"
"Mizarian"
"Mokra"
"Monean"
"Morphinian"
"Motali"
"Mulzirak"
"Mylean"
"N'Kree"
"Nacene"
"Napean"
"Nasari"
"Nassordin"
"Nausicaan"
"Nechani"
"Nezu"
"Night"
"Nihydron"
"Norcadian"
"Numiri"
"Nuu'bari"
"Nuvian"
"Nygean"
"Nyrian"
"Ocampa"
"Octran"
"Old"
"Organian"
"Orion"
"Ornaran"
"Osaarian"
"Ovion"
"Pakled"
"Pah"
"Paraagan"
"Parada"
"Parein"
"Paxans"
"Peliar"
"Peljenite"
"Pendari"
"Petarian"
"Ponea"
"Pralor"
"Prang"
"Promellian"
"Prophet"
"Proxcinian"
"Pygorian"
"Q"
"Qomar"
"Quarren"
"Rakhari"
"Rakosan"
"Ram"
"Ramuran"
"Reegrunion"
"Reman"
"Retellian"
"Rhaandarite"
"Rigelian"
"Rilnar"
"Risian"
"Romulan"
"Rutian"
"Sakari"
"Saltah'na"
"Satarran"
"Saurian"
"Scathosian"
"Selay"
"Serosian"
"Sheliak"
"Shivolian"
"Sikarian"
"Silver"
"Skagaran"
"Skrreea"
"Sky"
"Son'a"
"Species"
"Sphere"
"Srivani"
"Stakoron"
"Susanite"
"Suliban"
"Swarm"
"T-Rogoran"
"T'Lani"
"Tak"
"Takaran"
"Takarian"
"Takrit"
"Talarian"
"Talavian"
"Talaxian"
"Talosian"
"Tamarian"
"Tandaran"
"Tanugan"
"Tarahongian"
"Tarellian"
"Taresian"
"Tarkalean"
"Tarkan"
"Tarlac"
"Tavnian"
"Tellarite"
"Telsian"
"Teplan"
"Terellian"
"Terkellian"
"Terrelian"
"Terrellian"
"Tessic's"
"Thasian"
"Tholian"
"Tiburonian"
"Torothan"
"Tosk"
"Trabe"
"Triannon"
"Tribble"
"Trill"
"Troyian"
"Turei"
"Tygarian"
"Tzenkethi"
"U'tani"
"Ubean"
"Ullian"
"Uxali"
"V'radian"
"Vaadwaur"
"Valakian"
"Valerian"
"Valtese"
"Varro"
"Vaskan"
"Ventaxian"
"Ventu"
"Verathan"
"Verillian"
"Vhnori"
"Vian"
"Vidiian"
"Vissian"
"Vodwar"
"Vojean"
"Vok'sha"
"Vorgon"
"Vori"
"Vorta"
"Vostigye"
"Voth"
"Vulcan"
"Wadi"
"Wogneer"
"Wyngari"
"Wysanti"
"Xarantine"
"Xepolite"
"Xindi"
"Xyrillian"
"Yaderan"
"Yallitian"
"Yalosian"
"Yash-El"
"Yattho"
"Yridian"
"Zahl"
"Zakdorn"
"Zaldan"
"Zalkonian"
"Zaranite"
"Zetaran"
"Zeon"
"Zhuora"))
(define countries (list "Afghanistan"
"Albania"
"Algeria"
"Andorra"
"Angola"
"Antigua & Deps"
"Argentina"
"Armenia"
"Australia"
"Austria"
"Azerbaijan"
"Bahamas"
"Bahrain"
"Bangladesh"
"Barbados"
"Belarus"
"Belgium"
"Belize"
"Benin"
"Bhutan"
"Bolivia"
"Bosnia Herzegovina"
"Botswana"
"Brazil"
"Brunei"
"Bulgaria"
"Burkina"
"Burundi"
"Cambodia"
"Cameroon"
"Canada"
"Cape Verde"
"Central African Republic"
"Chad"
"Chile"
"China"
"Colombia"
"Comoros"
"Congo"
"Congo"
"Costa Rica"
"Croatia"
"Cuba"
"Cyprus"
"Czech Republic"
"Denmark"
"Djibouti"
"Dominica"
"Dominican Republic"
"East Timor"
"Ecuador"
"Egypt"
"El Salvador"
"Equatorial Guinea"
"Eritrea"
"Estonia"
"Ethiopia"
"Fiji"
"Finland"
"France"
"Gabon"
"Gambia"
"Georgia"
"Germany"
"Ghana"
"Greece"
"Grenada"
"Guatemala"
"Guinea"
"Guinea-Bissau"
"Guyana"
"Haiti"
"Honduras"
"Hungary"
"Iceland"
"India"
"Indonesia"
"Iran"
"Iraq"
"Ireland"
"Israel"
"Italy"
"Ivory Coast"
"Jamaica"
"Japan"
"Jordan"
"Kazakhstan"
"Kenya"
"Kiribati"
"Korea North"
"Korea South"
"Kosovo"
"Kuwait"
"Kyrgyzstan"
"Laos"
"Latvia"
"Lebanon"
"Lesotho"
"Liberia"
"Libya"
"Liechtenstein"
"Lithuania"
"Luxembourg"
"Macedonia"
"Madagascar"
"Malawi"
"Malaysia"
"Maldives"
"Mali"
"Malta"
"Marshall Islands"
"Mauritania"
"Mauritius"
"Mexico"
"Micronesia"
"Moldova"
"Monaco"
"Mongolia"
"Montenegro"
"Morocco"
"Mozambique"
"Myanmar"
"Namibia"
"Nauru"
"Nepal"
"Netherlands"
"New Zealand"
"Nicaragua"
"Niger"
"Nigeria"
"Norway"
"Oman"
"Pakistan"
"Palau"
"Panama"
"Papua New Guinea"
"Paraguay"
"Peru"
"Philippines"
"Poland"
"Portugal"
"Qatar"
"Romania"
"Russian Federation"
"Rwanda"
"St Kitts & Nevis"
"St Lucia"
"Saint Vincent & the Grenadines"
"Samoa"
"San Marino"
"Sao Tome & Principe"
"Saudi Arabia"
"Senegal"
"Serbia"
"Seychelles"
"Sierra Leone"
"Singapore"
"Slovakia"
"Slovenia"
"Solomon Islands"
"Somalia"
"South Africa"
"South Sudan"
"Spain"
"Sri Lanka"
"Sudan"
"Suriname"
"Swaziland"
"Sweden"
"Switzerland"
"Syria"
"Taiwan"
"Tajikistan"
"Tanzania"
"Thailand"
"Togo"
"Tonga"
"Trinidad & Tobago"
"Tunisia"
"Turkey"
"Turkmenistan"
"Tuvalu"
"Uganda"
"Ukraine"
"United Arab Emirates"
"United Kingdom"
"United States"
"Uruguay"
"Uzbekistan"
"Vanuatu"
"Vatican City"
"Venezuela"
"Vietnam"
"Yemen"
"Zambia"
"Zimbabwe"))
(define demon-names
(list "Abraxas"
"Abbadon"
"Agrith-Naar"
"Aku"
"Alastair"
"Alastor"
"Algaliarept"
"Alichino"
"Andariel"
"Angel"
"Anyanka"
"Anzu"
"Archimonde"
"Artery"
"Asmodeus"
"Asura"
"Azal"
"Azazeal"
"Azazel"
"Azazel"
"Azmodan"
"Azura"
"Amaimon"
"Baal"
"Babau"
"Bacarra"
"Bal'lak"
"Balor"
"Balrog"
"Balthazar"
"Baphomet"
"Barakiel"
"Barbariccia"
"Barbas"
"Bartimaeus"
"Bat'Zul"
"Beastie"
"Be'lakor"
"Bebilith"
"Beelzebub"
"Beleth"
"Belfagor"
"Belial"
"Belphegor"
"Belthazor"
"Berry"
"Betelguese"
"Blackheart"
"Cacodemon"
"Cadaver"
"Cagnazzo"
"Calcabrina"
"Calcifer"
"Castor"
"Cordelia"
"Chernabog"
"Cherry"
"Ciriatto"
"Claude"
"Crawly"
"Crowley"
"Cyberdemon"
"Cryto"
"D'Hoffryn"
"Dabura"
"Draghinazzo"
"Dante"
"Darkseid"
"Decarbia"
"Delrith"
"Demonita"
"Devi"
"Diablo"
"Doviculus"
"Doyle"
"Dretch"
"Dumain"
"Duriel"
"Errtu"
"Etna"
"Etrigan"
"Faquarl"
"Farfarello"
"Femur"
"Firebrand"
"Randall"
"Furfur"
"Gaap"
"Gary"
"Glabrezu"
"Gregor"
"Gothmog"
"The"
"Halfrek"
"Har'lakk"
"Hastur"
"Hellboy"
"Hell"
"Hezrou"
"Hiei"
"Him"
"Hnikarr"
"Hot"
"Hex"
"Infernal"
"Inferno"
"Jabor"
"Jadis"
"Janemba"
"Japhrimel"
"Jennifer"
"Juiblex"
"K'ril"
"Kal'Ger"
"DCI"
"Khorne"
"Kil'jaeden"
"Kneesocks"
"Koakuma"
"Korrok"
"Kronos"
"Freddy"
"Laharl"
"Lamia"
"Leviathan"
"Libicocco"
"Ligur"
"Lilith"
"Little"
"Longhorn"
"Lorne"
"Loki"
"Lucifer"
"Mal'Ganis"
"Malacoda"
"Maledict"
"Malfegor"
"Malice"
"Mammon"
"Mancubus"
"Mannoroth"
"Marilith"
"Masselin"
"Meg"
"Mehrunes"
"Melkor"
"Mephisto"
"Mephisto"
"Mephistopheles"
"Mephisto"
"N'zall"
"Nadia"
"Nalfeshnee"
"Nanatoo"
"Nero"
"Neuro"
"Newt"
"Nouda"
"Nurgle"
"Oyashiro"
"Rin"
"Pazuzu"
"Pennywise"
"Psaro"
"Quasit"
"Queezle"
"Qwan"
"Qweffor"
"Rakdos"
"Ramuthra"
"Red"
"Retriever"
"Randall"
"Ronove"
"Rosier"
"Rubicante"
"Ruby"
"Satan"
"Satan"
"Sauron"
"Scanty"
"Scarlet"
"Scarmiglione"
"Scumspawn"
"Sebastian"
"Shax"
"Silitha"
"Slaanesh"
"Sparda"
"Spawn"
"Spike"
"Spine"
"Straga"
"Tempus"
"Thammaron"
"Tiamat"
"Toby"
"To'Kash"
"Trigon"
"Turok-Han"
"Tzeentch"
"Ungoliant"
"Vein"
"Vergil"
"Violator"
"Vrock"
"Vulgrim"
"Vyers"
"Ware"
"Wormwood"
"Yaksha"
"Yk'Lagor"
"Zankou"
"Zepar"
"Overlord"
"Zuul"))
(define (max-length-over g)
(foldl (lambda (n s)
(max n (string-length s)))
0
g))
(define (empty-string? s)
(= 0 (string-length s)))
(define (those-non-empty strings)
(let loop ((acc '())
(strings strings))
(match strings
((list) (reverse acc))
((cons (? empty-string?) rest)
(loop acc rest))
((cons s rest)
(loop (cons s acc)
rest)))))
(define (string-head s)
(substring s 0 1))
(define (string-tail s)
(substring s 1 (string-length s)))
(define (last-character s)
(if (empty-string? s) ""
(substring s (- (string-length s) 1) (string-length s))))
(define (dict-update d key fun . args)
(match args
((list)
(let ((val (dict-ref d key)))
(dict-set d key (fun val))))
((list or-value)
(let ((val (dict-ref d key (lambda () or-value))))
(dict-set d key (fun val))))))
(define (at-plus d k)
(dict-update d k (lambda (x) (+ x 1)) 0))
(define (char->symbol c)
(string->symbol (list->string (list c))))
(define (char->symbol* o)
(cond ((symbol? o) o)
((char? o) (char->symbol o))))
(struct triple (index from to) #:transparent)
(define (string->triples s)
(let loop
((i 0)
(l (append '(start)
(string->list s)
'(end)))
(triples '()))
(match l
((list 'end) (reverse triples))
((cons from
(and rest
(cons to _)))
(loop
(+ i 1)
rest
(cons (triple i
(char->symbol* from)
(char->symbol* to))
triples))))))
(define (populate-transition-table names)
(let loop
((names names)
(table (make-immutable-hash '())))
(match names
((list) table)
((cons name names)
(loop names
(foldl
(lambda (triple table)
(at-plus table triple))
table
(string->triples name)))))))
(define (=/c n)
(lambda (m)
(= n m)))
(define (eq?/c a)
(lambda (b)
(eq? a b)))
(define (restrict-table table index from)
(foldl
(lambda (trip new-table)
(match trip
((triple
(? (=/c index))
(? (eq?/c from))
_)
(dict-set new-table trip (dict-ref table trip)))
(_ new-table)))
(make-immutable-hash '())
(dict-keys table)))
(define (next-character table prev-character index . args)
(match args
((list) (next-character table prev-character
index
(current-pseudo-random-generator)))
((list generator)
(let* ((sub-table
(restrict-table table index prev-character))
(total-elements (foldl + 0 (dict-values sub-table)))
(draw (random total-elements generator)))
(let loop
((draw draw)
(key/val (dict->list sub-table)))
(match key/val
((cons (cons
(triple _ from to)
count) rest)
(if (or (empty? rest)
(<= draw 0))
to
(loop (- draw count)
rest)))))))))
(define (symbol-list->string sl)
(foldl (lambda (symbol string)
(string-append
string (symbol->string symbol)))
""
sl))
(define (generate-demon-name table . args)
(match args
((list) (generate-demon-name table (current-pseudo-random-generator)))
((list gen)
(let loop ((ix 0)
(name-list '(start)))
(let ((next (next-character table (car name-list) ix gen)))
(if (eq? next 'end)
(symbol-list->string (cdr (reverse name-list)))
(loop
(+ ix 1)
(cons next name-list))))))))
(define standard-table (populate-transition-table demon-names))
(define (generate-demon-names table n . args)
(match args
[(list)
(generate-demon-names table n (current-pseudo-random-generator) '())]
[(list gen)
(generate-demon-names table n gen '())]
[(list gen acc)
(match n
[0 acc]
[n (generate-demon-names table (- n 1) gen
(cons (generate-demon-name table gen) acc))])]))
(provide generate-demon-names
generate-demon-name
standard-table
populate-transition-table)
|
280970954d7a03b7cea88e74d1cd666200cdb87bc084a75327d4deeddb3e93f3 | day8/re-frame-10x | server.cljs | (ns ^{:mranderson/inlined true} day8.re-frame-10x.inlined-deps.reagent.v1v0v0.reagent.dom.server
(:require ["react-dom/server" :as dom-server]
[day8.re-frame-10x.inlined-deps.reagent.v1v0v0.reagent.impl.util :as util]
[day8.re-frame-10x.inlined-deps.reagent.v1v0v0.reagent.impl.template :as tmpl]
[day8.re-frame-10x.inlined-deps.reagent.v1v0v0.reagent.impl.protocols :as p]
[day8.re-frame-10x.inlined-deps.reagent.v1v0v0.reagent.ratom :as ratom]))
(defn render-to-string
"Turns a component into an HTML string."
([component]
(render-to-string component tmpl/default-compiler))
([component compiler]
(ratom/flush!)
(binding [util/*non-reactive* true]
(dom-server/renderToString (p/as-element compiler component)))))
(defn render-to-static-markup
"Turns a component into an HTML string, without data-react-id attributes, etc."
([component]
(render-to-static-markup component tmpl/default-compiler))
([component compiler]
(ratom/flush!)
(binding [util/*non-reactive* true]
(dom-server/renderToStaticMarkup (p/as-element compiler component)))))
| null | https://raw.githubusercontent.com/day8/re-frame-10x/d8dcb17e217449aba2cf64b9f843b0e9f86cfcb6/gen-src/day8/re_frame_10x/inlined_deps/reagent/v1v0v0/reagent/dom/server.cljs | clojure | (ns ^{:mranderson/inlined true} day8.re-frame-10x.inlined-deps.reagent.v1v0v0.reagent.dom.server
(:require ["react-dom/server" :as dom-server]
[day8.re-frame-10x.inlined-deps.reagent.v1v0v0.reagent.impl.util :as util]
[day8.re-frame-10x.inlined-deps.reagent.v1v0v0.reagent.impl.template :as tmpl]
[day8.re-frame-10x.inlined-deps.reagent.v1v0v0.reagent.impl.protocols :as p]
[day8.re-frame-10x.inlined-deps.reagent.v1v0v0.reagent.ratom :as ratom]))
(defn render-to-string
"Turns a component into an HTML string."
([component]
(render-to-string component tmpl/default-compiler))
([component compiler]
(ratom/flush!)
(binding [util/*non-reactive* true]
(dom-server/renderToString (p/as-element compiler component)))))
(defn render-to-static-markup
"Turns a component into an HTML string, without data-react-id attributes, etc."
([component]
(render-to-static-markup component tmpl/default-compiler))
([component compiler]
(ratom/flush!)
(binding [util/*non-reactive* true]
(dom-server/renderToStaticMarkup (p/as-element compiler component)))))
| |
771f17db9053d361c9a30f8104c7e5273a85331d0649698659548a3069ecb642 | pflanze/chj-schemelib | weak-srfi-1.scm | Copyright 2010 , 2011 by < >
;;; This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
;;; (at your option) any later version.
(require (test)
(lazy)
(cj-typed))
;;;
;;;; variants of srfi-1 functions that don't complain if the list ends
;;;; too early
;;;
(define-typed (weak-take lis #(integer? k))
(let recur ((lis lis)
(k k))
(FV (lis)
(if (or (zero? k) (null? lis))
'()
(cons (car lis)
(recur (cdr lis) (- k 1)))))))
(define-typed (weak-drop lis #(integer? k))
(let iter ((lis lis)
(k k))
(if (zero? k)
lis
(FV (lis)
(if (pair? lis)
(iter (cdr lis)
(- k 1))
'())))))
(define-typed (weak-split-at x #(natural0? k))
(let recur ((lis x) (k k))
(if (zero? k)
(values '() lis)
(FV (lis)
(if (null? lis)
(values '() '())
(receive (prefix suffix) (recur (cdr lis) (- k 1))
(values (cons (car lis) prefix) suffix)))))))
(TEST
> (weak-take (stream-iota 2) 0)
()
> (weak-take (stream-iota 2) 1)
(0)
> (weak-take (stream-iota 2) 2)
(0 1)
> (weak-take (stream-iota 2) 3)
(0 1)
> (weak-take '() 0)
()
> (weak-take '() 1)
()
> (weak-drop (iota 5) 4)
(4)
> (promise? (weak-drop (stream-iota 5) 4))
#t
> (weak-drop (iota 5) 5)
()
> (weak-drop (iota 5) 6)
()
> (weak-drop '() 0)
()
> (weak-drop '() 1)
()
> (values->vector (weak-split-at '(a b c d e) 6))
#((a b c d e) ())
> (values->vector (weak-split-at '(a b c d e) 5))
#((a b c d e) ())
> (values->vector (weak-split-at '(a b c d e) 4))
#((a b c d) (e))
> (values->vector (weak-split-at '() 4))
#(() ())
> (values->vector (weak-split-at '() 0))
#(() ())
)
| null | https://raw.githubusercontent.com/pflanze/chj-schemelib/59ff8476e39f207c2f1d807cfc9670581c8cedd3/weak-srfi-1.scm | scheme | This file is free software; you can redistribute it and/or modify
(at your option) any later version.
variants of srfi-1 functions that don't complain if the list ends
too early
| Copyright 2010 , 2011 by < >
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
(require (test)
(lazy)
(cj-typed))
(define-typed (weak-take lis #(integer? k))
(let recur ((lis lis)
(k k))
(FV (lis)
(if (or (zero? k) (null? lis))
'()
(cons (car lis)
(recur (cdr lis) (- k 1)))))))
(define-typed (weak-drop lis #(integer? k))
(let iter ((lis lis)
(k k))
(if (zero? k)
lis
(FV (lis)
(if (pair? lis)
(iter (cdr lis)
(- k 1))
'())))))
(define-typed (weak-split-at x #(natural0? k))
(let recur ((lis x) (k k))
(if (zero? k)
(values '() lis)
(FV (lis)
(if (null? lis)
(values '() '())
(receive (prefix suffix) (recur (cdr lis) (- k 1))
(values (cons (car lis) prefix) suffix)))))))
(TEST
> (weak-take (stream-iota 2) 0)
()
> (weak-take (stream-iota 2) 1)
(0)
> (weak-take (stream-iota 2) 2)
(0 1)
> (weak-take (stream-iota 2) 3)
(0 1)
> (weak-take '() 0)
()
> (weak-take '() 1)
()
> (weak-drop (iota 5) 4)
(4)
> (promise? (weak-drop (stream-iota 5) 4))
#t
> (weak-drop (iota 5) 5)
()
> (weak-drop (iota 5) 6)
()
> (weak-drop '() 0)
()
> (weak-drop '() 1)
()
> (values->vector (weak-split-at '(a b c d e) 6))
#((a b c d e) ())
> (values->vector (weak-split-at '(a b c d e) 5))
#((a b c d e) ())
> (values->vector (weak-split-at '(a b c d e) 4))
#((a b c d) (e))
> (values->vector (weak-split-at '() 4))
#(() ())
> (values->vector (weak-split-at '() 0))
#(() ())
)
|
44cd742fc14e0cfa008b7201492c5138cde1a85bf90209bb9757d32de4642a87 | haskell-effectful/effectful | ThTests.hs | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE CPP #
# LANGUAGE PolyKinds #
# LANGUAGE TemplateHaskell #
module Main where
import Data.Kind (Type)
import GHC.TypeLits
import Effectful
import Effectful.TH
main :: IO ()
main = pure () -- only compilation tests
data SimpleADT (m :: Type -> Type) (a :: Type)
= SimpleADTC1 Int
| SimpleADTC2 String
makeEffect ''SimpleADT
data ADTSyntax1 (m :: Type -> Type) (a :: Type)
= a ~ Int => ADTSyntax1C String
makeEffect ''ADTSyntax1
data ADTSyntax2 (m :: Type -> Type) (a :: Type)
= a ~ Int => ADTSyntax2C1 Int
| a ~ String => ADTSyntax2C2 String
makeEffect ''ADTSyntax2
data ADTSyntax3 (m :: Type -> Type) (a :: Type)
= Show a => ADTSyntax3C a
makeEffect ''ADTSyntax3
data GADTSyntax :: Effect where
GADTSyntaxC1 :: Int -> GADTSyntax m Int
GADTSyntaxC2 :: String -> GADTSyntax m String
GADTSyntaxC3 :: IOE :> es => Bool -> GADTSyntax (Eff es) a
makeEffect ''GADTSyntax
data Fields (m :: Type -> Type) (a :: Type)
= FieldsC { fieldsCF1 :: Int, fieldsCF2 :: String }
makeEffect ''Fields
newtype Newtype1 (m :: Type -> Type) (a :: Type)
= Newtype1C Int
makeEffect ''Newtype1
newtype Newtype2 :: Effect where
Newtype2C :: String -> Newtype2 m a
makeEffect ''Newtype2
data Instance = ADTI | GADTI | NTI | MMI
data family Family (s :: Instance) (m :: Type -> Type) a
data instance Family 'ADTI _ _ = ADTIC1 Int | ADTIC2 String
makeEffect 'ADTIC1
data instance Family 'GADTI _ _ where
GADTIC1 :: Int -> Family 'GADTI m Int
GADTIC2 :: String -> Family 'GADTI m String
makeEffect 'GADTIC1
newtype instance Family 'NTI _ _ = NTIC Int
makeEffect 'NTIC
data instance Family 'MMI m (_ m) where
MMIC1 :: f m -> Family 'MMI m (f m)
MMIC2 :: (forall x. m x -> m (f m)) -> Family 'MMI m (f m)
makeEffect 'MMIC1
data Complex :: Effect where
Mono :: Int -> Complex m Bool
Poly :: a -> Complex m a
PolyIn :: a -> Complex m Bool
PolyOut :: Int -> Complex m a
Lots :: a -> b -> c -> d -> e -> f -> Complex m ()
Nested :: Maybe b -> Complex m (Maybe a)
MultiNested :: (Maybe a, [b]) -> Complex m (Maybe a, [b])
Existential :: (forall e. e -> Maybe e) -> Complex m a
LotsNested :: Maybe a -> [b] -> (c, c) -> Complex m (a, b, c)
Dict :: Ord a => a -> Complex m a
MultiDict :: (Eq a, Ord b, Enum a, Num c)
=> a -> b -> c -> Complex m ()
IndexedMono :: f 0 -> Complex m Int
IndexedPoly :: forall f (n :: Nat) m . f n -> Complex m (f (n + 1))
IndexedPolyDict :: KnownNat n => f n -> Complex m Int
makeEffect ''Complex
data HOEff :: Effect where
EffArgMono :: m () -> HOEff m ()
EffArgPoly :: m a -> HOEff m a
EffArgComb :: m a -> (m a -> m b) -> HOEff m b
EffRank2 :: (forall x. m x -> m (Maybe x)) -> HOEff m a
makeEffect ''HOEff
data ComplexEffArgs b c :: Effect where
EffMono :: Int -> ComplexEffArgs Int String m Bool
EffPoly1 :: a -> ComplexEffArgs a b m a
EffPoly2 :: a -> ComplexEffArgs a (Maybe a) m Bool
EffPolyFree :: String -> ComplexEffArgs a b m Int
EffSame1 :: ComplexEffArgs a a m a
EffSame2 :: ComplexEffArgs b b m a
EffHO :: m b -> ComplexEffArgs b Int m String
makeEffect ''ComplexEffArgs
data HKEffArgs f g :: Effect where
HKRank2 :: (forall x . f x -> g x) -> HKEffArgs f g m a
makeEffect ''HKEffArgs
data ByCon :: Effect where
ByConA :: Int -> ByCon m String
ByConB :: Int -> ByCon m String
makeEffect 'ByConA
data ByField :: Effect where
ByFieldA :: { byFieldAf :: Int } -> ByField m Int
ByFieldB :: { byFieldBf :: Int } -> ByField m Int
makeEffect 'byFieldAf
type family F ty
data AmbEff :: Effect where
AmbEff :: Int -> AmbEff m (F ty)
This only works in GHC > = 9 , otherwise the ' ty ' variable is ambiguous .
#if __GLASGOW_HASKELL__ >= 900
makeEffect 'AmbEff
#endif
| null | https://raw.githubusercontent.com/haskell-effectful/effectful/b77e9c3d65dbd4df4ff02fdd326c0b843dbf684e/effectful-th/tests/ThTests.hs | haskell | only compilation tests | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE CPP #
# LANGUAGE PolyKinds #
# LANGUAGE TemplateHaskell #
module Main where
import Data.Kind (Type)
import GHC.TypeLits
import Effectful
import Effectful.TH
main :: IO ()
data SimpleADT (m :: Type -> Type) (a :: Type)
= SimpleADTC1 Int
| SimpleADTC2 String
makeEffect ''SimpleADT
data ADTSyntax1 (m :: Type -> Type) (a :: Type)
= a ~ Int => ADTSyntax1C String
makeEffect ''ADTSyntax1
data ADTSyntax2 (m :: Type -> Type) (a :: Type)
= a ~ Int => ADTSyntax2C1 Int
| a ~ String => ADTSyntax2C2 String
makeEffect ''ADTSyntax2
data ADTSyntax3 (m :: Type -> Type) (a :: Type)
= Show a => ADTSyntax3C a
makeEffect ''ADTSyntax3
data GADTSyntax :: Effect where
GADTSyntaxC1 :: Int -> GADTSyntax m Int
GADTSyntaxC2 :: String -> GADTSyntax m String
GADTSyntaxC3 :: IOE :> es => Bool -> GADTSyntax (Eff es) a
makeEffect ''GADTSyntax
data Fields (m :: Type -> Type) (a :: Type)
= FieldsC { fieldsCF1 :: Int, fieldsCF2 :: String }
makeEffect ''Fields
newtype Newtype1 (m :: Type -> Type) (a :: Type)
= Newtype1C Int
makeEffect ''Newtype1
newtype Newtype2 :: Effect where
Newtype2C :: String -> Newtype2 m a
makeEffect ''Newtype2
data Instance = ADTI | GADTI | NTI | MMI
data family Family (s :: Instance) (m :: Type -> Type) a
data instance Family 'ADTI _ _ = ADTIC1 Int | ADTIC2 String
makeEffect 'ADTIC1
data instance Family 'GADTI _ _ where
GADTIC1 :: Int -> Family 'GADTI m Int
GADTIC2 :: String -> Family 'GADTI m String
makeEffect 'GADTIC1
newtype instance Family 'NTI _ _ = NTIC Int
makeEffect 'NTIC
data instance Family 'MMI m (_ m) where
MMIC1 :: f m -> Family 'MMI m (f m)
MMIC2 :: (forall x. m x -> m (f m)) -> Family 'MMI m (f m)
makeEffect 'MMIC1
data Complex :: Effect where
Mono :: Int -> Complex m Bool
Poly :: a -> Complex m a
PolyIn :: a -> Complex m Bool
PolyOut :: Int -> Complex m a
Lots :: a -> b -> c -> d -> e -> f -> Complex m ()
Nested :: Maybe b -> Complex m (Maybe a)
MultiNested :: (Maybe a, [b]) -> Complex m (Maybe a, [b])
Existential :: (forall e. e -> Maybe e) -> Complex m a
LotsNested :: Maybe a -> [b] -> (c, c) -> Complex m (a, b, c)
Dict :: Ord a => a -> Complex m a
MultiDict :: (Eq a, Ord b, Enum a, Num c)
=> a -> b -> c -> Complex m ()
IndexedMono :: f 0 -> Complex m Int
IndexedPoly :: forall f (n :: Nat) m . f n -> Complex m (f (n + 1))
IndexedPolyDict :: KnownNat n => f n -> Complex m Int
makeEffect ''Complex
data HOEff :: Effect where
EffArgMono :: m () -> HOEff m ()
EffArgPoly :: m a -> HOEff m a
EffArgComb :: m a -> (m a -> m b) -> HOEff m b
EffRank2 :: (forall x. m x -> m (Maybe x)) -> HOEff m a
makeEffect ''HOEff
data ComplexEffArgs b c :: Effect where
EffMono :: Int -> ComplexEffArgs Int String m Bool
EffPoly1 :: a -> ComplexEffArgs a b m a
EffPoly2 :: a -> ComplexEffArgs a (Maybe a) m Bool
EffPolyFree :: String -> ComplexEffArgs a b m Int
EffSame1 :: ComplexEffArgs a a m a
EffSame2 :: ComplexEffArgs b b m a
EffHO :: m b -> ComplexEffArgs b Int m String
makeEffect ''ComplexEffArgs
data HKEffArgs f g :: Effect where
HKRank2 :: (forall x . f x -> g x) -> HKEffArgs f g m a
makeEffect ''HKEffArgs
data ByCon :: Effect where
ByConA :: Int -> ByCon m String
ByConB :: Int -> ByCon m String
makeEffect 'ByConA
data ByField :: Effect where
ByFieldA :: { byFieldAf :: Int } -> ByField m Int
ByFieldB :: { byFieldBf :: Int } -> ByField m Int
makeEffect 'byFieldAf
type family F ty
data AmbEff :: Effect where
AmbEff :: Int -> AmbEff m (F ty)
This only works in GHC > = 9 , otherwise the ' ty ' variable is ambiguous .
#if __GLASGOW_HASKELL__ >= 900
makeEffect 'AmbEff
#endif
|
46ed8a7d69ee68439447a743a31afd7dad5a58ad16b34d871a23e4c2e4bcfc99 | tezos/tezos-mirror | script_interpreter_defs.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2021 - 2022 Nomadic Labs < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
(*
This module provides auxiliary definitions used in the interpreter.
These are internal private definitions. Do not rely on them outside
the interpreter.
*)
open Alpha_context
open Script
open Script_typed_ir
open Script_ir_translator
open Local_gas_counter
type error += Rollup_invalid_transaction_amount | Rollup_invalid_entrypoint
let () =
register_error_kind
`Permanent
~id:"operation.rollup_invalid_transaction_amount"
~title:"Transaction amount to a rollup must be zero"
~description:
"Because rollups are outside of the delegation mechanism of Tezos, they \
cannot own Tez, and therefore transactions targeting a rollup must have \
its amount field set to zero."
~pp:(fun ppf () ->
Format.pp_print_string ppf "Transaction amount to a rollup must be zero.")
Data_encoding.unit
(function Rollup_invalid_transaction_amount -> Some () | _ -> None)
(fun () -> Rollup_invalid_transaction_amount) ;
register_error_kind
`Permanent
~id:"operation.rollup_invalid_entrypoint"
~title:"Only the default entrypoint is allowed for rollups"
~description:"Rollups only support transactions to the default entrypoint."
~pp:(fun ppf () ->
Format.pp_print_string
ppf
"Rollups only support transactions to the default entrypoint.")
Data_encoding.unit
(function Rollup_invalid_entrypoint -> Some () | _ -> None)
(fun () -> Rollup_invalid_entrypoint)
Computing the cost of instructions
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
The function [ cost_of_instr ] provides a cost model for
instructions . It is used by the interpreter to track the
consumption of gas . This consumption may depend on the values
on the stack .
Computing the cost of Michelson instructions
============================================
The function [cost_of_instr] provides a cost model for Michelson
instructions. It is used by the interpreter to track the
consumption of gas. This consumption may depend on the values
on the stack.
*)
module Interp_costs = Michelson_v1_gas.Cost_of.Interpreter
let cost_of_instr : type a s r f. (a, s, r, f) kinstr -> a -> s -> Gas.cost =
fun i accu stack ->
match i with
| IList_map _ ->
let list = accu in
Interp_costs.list_map list
| IList_iter _ ->
let list = accu in
Interp_costs.list_iter list
| ISet_iter _ ->
let set = accu in
Interp_costs.set_iter set
| ISet_mem _ ->
let v = accu and set, _ = stack in
Interp_costs.set_mem v set
| ISet_update _ ->
let v = accu and _, (set, _) = stack in
Interp_costs.set_update v set
| IMap_map _ ->
let map = accu in
Interp_costs.map_map map
| IMap_iter _ ->
let map = accu in
Interp_costs.map_iter map
| IMap_mem _ ->
let v = accu and map, _ = stack in
Interp_costs.map_mem v map
| IMap_get _ ->
let v = accu and map, _ = stack in
Interp_costs.map_get v map
| IMap_update _ ->
let k = accu and _, (map, _) = stack in
Interp_costs.map_update k map
| IMap_get_and_update _ ->
let k = accu and _, (map, _) = stack in
Interp_costs.map_get_and_update k map
| IBig_map_mem _ ->
let Big_map map, _ = stack in
Interp_costs.big_map_mem map.diff
| IBig_map_get _ ->
let Big_map map, _ = stack in
Interp_costs.big_map_get map.diff
| IBig_map_update _ ->
let _, (Big_map map, _) = stack in
Interp_costs.big_map_update map.diff
| IBig_map_get_and_update _ ->
let _, (Big_map map, _) = stack in
Interp_costs.big_map_get_and_update map.diff
| IAdd_seconds_to_timestamp _ ->
let n = accu and t, _ = stack in
Interp_costs.add_seconds_timestamp n t
| IAdd_timestamp_to_seconds _ ->
let t = accu and n, _ = stack in
Interp_costs.add_timestamp_seconds t n
| ISub_timestamp_seconds _ ->
let t = accu and n, _ = stack in
Interp_costs.sub_timestamp_seconds t n
| IDiff_timestamps _ ->
let t1 = accu and t2, _ = stack in
Interp_costs.diff_timestamps t1 t2
| IConcat_string_pair _ ->
let x = accu and y, _ = stack in
Interp_costs.concat_string_pair x y
| IConcat_string _ ->
let ss = accu in
Interp_costs.concat_string_precheck ss
| ISlice_string _ ->
let (_offset : Script_int.n Script_int.num) = accu in
let _length, (s, _) = stack in
Interp_costs.slice_string s
| IConcat_bytes_pair _ ->
let x = accu and y, _ = stack in
Interp_costs.concat_bytes_pair x y
| IConcat_bytes _ ->
let ss = accu in
Interp_costs.concat_string_precheck ss
| ISlice_bytes _ ->
let _, (s, _) = stack in
Interp_costs.slice_bytes s
| IBytes_nat _ ->
let n = accu in
Interp_costs.bytes_nat n
| INat_bytes _ ->
let b = accu in
Interp_costs.nat_bytes b
| IBytes_int _ ->
let n = accu in
Interp_costs.bytes_int n
| IInt_bytes _ ->
let b = accu in
Interp_costs.int_bytes b
| IMul_teznat _ -> Interp_costs.mul_teznat
| IMul_nattez _ -> Interp_costs.mul_nattez
| IAbs_int _ ->
let x = accu in
Interp_costs.abs_int x
| INeg _ ->
let x = accu in
Interp_costs.neg x
| IAdd_int _ ->
let x = accu and y, _ = stack in
Interp_costs.add_int x y
| IAdd_nat _ ->
let x = accu and y, _ = stack in
Interp_costs.add_nat x y
| ISub_int _ ->
let x = accu and y, _ = stack in
Interp_costs.sub_int x y
| IMul_int _ ->
let x = accu and y, _ = stack in
Interp_costs.mul_int x y
| IMul_nat _ ->
let x = accu and y, _ = stack in
Interp_costs.mul_nat x y
| IEdiv_teznat _ ->
let x = accu and y, _ = stack in
Interp_costs.ediv_teznat x y
| IEdiv_int _ ->
let x = accu and y, _ = stack in
Interp_costs.ediv_int x y
| IEdiv_nat _ ->
let x = accu and y, _ = stack in
Interp_costs.ediv_nat x y
| ILsl_nat _ ->
let x = accu in
Interp_costs.lsl_nat x
| ILsl_bytes _ ->
let x = accu in
let y, _ = stack in
Interp_costs.lsl_bytes x y
| ILsr_nat _ ->
let x = accu in
Interp_costs.lsr_nat x
| ILsr_bytes _ ->
let x = accu in
let y, _ = stack in
Interp_costs.lsr_bytes x y
| IOr_nat _ ->
let x = accu and y, _ = stack in
Interp_costs.or_nat x y
| IOr_bytes _ ->
let x = accu and y, _ = stack in
Interp_costs.or_bytes x y
| IAnd_nat _ ->
let x = accu and y, _ = stack in
Interp_costs.and_nat x y
| IAnd_int_nat _ ->
let x = accu and y, _ = stack in
Interp_costs.and_int_nat x y
| IAnd_bytes _ ->
let x = accu and y, _ = stack in
Interp_costs.and_bytes x y
| IXor_nat _ ->
let x = accu and y, _ = stack in
Interp_costs.xor_nat x y
| IXor_bytes _ ->
let x = accu and y, _ = stack in
Interp_costs.xor_bytes x y
| INot_int _ ->
let x = accu in
Interp_costs.not_int x
| INot_bytes _ ->
let x = accu in
Interp_costs.not_bytes x
| ICompare (_, ty, _) ->
let a = accu and b, _ = stack in
Interp_costs.compare ty a b
| ICheck_signature _ ->
let key = accu and _, (message, _) = stack in
Interp_costs.check_signature key message
| IHash_key _ ->
let pk = accu in
Interp_costs.hash_key pk
| IBlake2b _ ->
let bytes = accu in
Interp_costs.blake2b bytes
| ISha256 _ ->
let bytes = accu in
Interp_costs.sha256 bytes
| ISha512 _ ->
let bytes = accu in
Interp_costs.sha512 bytes
| IKeccak _ ->
let bytes = accu in
Interp_costs.keccak bytes
| ISha3 _ ->
let bytes = accu in
Interp_costs.sha3 bytes
| IPairing_check_bls12_381 _ ->
let pairs = accu in
Interp_costs.pairing_check_bls12_381 pairs
| ISapling_verify_update _ ->
let tx = accu in
let inputs = Gas_input_size.sapling_transaction_inputs tx in
let outputs = Gas_input_size.sapling_transaction_outputs tx in
let bound_data = Gas_input_size.sapling_transaction_bound_data tx in
Interp_costs.sapling_verify_update ~inputs ~outputs ~bound_data
| ISapling_verify_update_deprecated _ ->
let tx = accu in
let inputs = List.length tx.inputs in
let outputs = List.length tx.outputs in
Interp_costs.sapling_verify_update_deprecated ~inputs ~outputs
| ISplit_ticket _ ->
let (amount_a, amount_b), _ = stack in
Interp_costs.split_ticket amount_a amount_b
| IJoin_tickets (_, ty, _) ->
let ticket_a, ticket_b = accu in
Interp_costs.join_tickets ty ticket_a ticket_b
| IHalt _ -> Interp_costs.halt
| IDrop _ -> Interp_costs.drop
| IDup _ -> Interp_costs.dup
| ISwap _ -> Interp_costs.swap
| IConst _ -> Interp_costs.const
| ICons_some _ -> Interp_costs.cons_some
| ICons_none _ -> Interp_costs.cons_none
| IIf_none _ -> Interp_costs.if_none
| IOpt_map _ -> Interp_costs.opt_map
| ICons_pair _ -> Interp_costs.cons_pair
| IUnpair _ -> Interp_costs.unpair
| ICar _ -> Interp_costs.car
| ICdr _ -> Interp_costs.cdr
| ICons_left _ -> Interp_costs.cons_left
| ICons_right _ -> Interp_costs.cons_right
| IIf_left _ -> Interp_costs.if_left
| ICons_list _ -> Interp_costs.cons_list
| INil _ -> Interp_costs.nil
| IIf_cons _ -> Interp_costs.if_cons
| IList_size _ -> Interp_costs.list_size
| IEmpty_set _ -> Interp_costs.empty_set
| ISet_size _ -> Interp_costs.set_size
| IEmpty_map _ -> Interp_costs.empty_map
| IMap_size _ -> Interp_costs.map_size
| IEmpty_big_map _ -> Interp_costs.empty_big_map
| IString_size _ -> Interp_costs.string_size
| IBytes_size _ -> Interp_costs.bytes_size
| IAdd_tez _ -> Interp_costs.add_tez
| ISub_tez _ -> Interp_costs.sub_tez
| ISub_tez_legacy _ -> Interp_costs.sub_tez_legacy
| IOr _ -> Interp_costs.bool_or
| IAnd _ -> Interp_costs.bool_and
| IXor _ -> Interp_costs.bool_xor
| INot _ -> Interp_costs.bool_not
| IIs_nat _ -> Interp_costs.is_nat
| IInt_nat _ -> Interp_costs.int_nat
| IInt_bls12_381_fr _ -> Interp_costs.int_bls12_381_fr
| IEdiv_tez _ -> Interp_costs.ediv_tez
| IIf _ -> Interp_costs.if_
| ILoop _ -> Interp_costs.loop
| ILoop_left _ -> Interp_costs.loop_left
| IDip _ -> Interp_costs.dip
| IExec _ -> Interp_costs.exec
| IApply _ -> (
let l, _ = stack in
match l with
| Lam _ -> Interp_costs.apply ~rec_flag:false
| LamRec _ -> Interp_costs.apply ~rec_flag:true)
| ILambda _ -> Interp_costs.lambda
| IFailwith _ -> Gas.free
| IEq _ -> Interp_costs.eq
| INeq _ -> Interp_costs.neq
| ILt _ -> Interp_costs.lt
| ILe _ -> Interp_costs.le
| IGt _ -> Interp_costs.gt
| IGe _ -> Interp_costs.ge
| IPack _ -> Gas.free
| IUnpack _ ->
let b = accu in
Interp_costs.unpack b
| IAddress _ -> Interp_costs.address
| IContract _ -> Interp_costs.contract
| ITransfer_tokens _ -> Interp_costs.transfer_tokens
| IView _ -> Interp_costs.view
| IImplicit_account _ -> Interp_costs.implicit_account
| ISet_delegate _ -> Interp_costs.set_delegate
| IBalance _ -> Interp_costs.balance
| ILevel _ -> Interp_costs.level
| INow _ -> Interp_costs.now
| IMin_block_time _ -> Interp_costs.min_block_time
| ISapling_empty_state _ -> Interp_costs.sapling_empty_state
| ISource _ -> Interp_costs.source
| ISender _ -> Interp_costs.sender
| ISelf _ -> Interp_costs.self
| ISelf_address _ -> Interp_costs.self_address
| IAmount _ -> Interp_costs.amount
| IDig (_, n, _, _) -> Interp_costs.dign n
| IDug (_, n, _, _) -> Interp_costs.dugn n
| IDipn (_, n, _, _, _) -> Interp_costs.dipn n
| IDropn (_, n, _, _) -> Interp_costs.dropn n
| IChainId _ -> Interp_costs.chain_id
| ICreate_contract _ -> Interp_costs.create_contract
| INever _ -> ( match accu with _ -> .)
| IVoting_power _ -> Interp_costs.voting_power
| ITotal_voting_power _ -> Interp_costs.total_voting_power
| IAdd_bls12_381_g1 _ -> Interp_costs.add_bls12_381_g1
| IAdd_bls12_381_g2 _ -> Interp_costs.add_bls12_381_g2
| IAdd_bls12_381_fr _ -> Interp_costs.add_bls12_381_fr
| IMul_bls12_381_g1 _ -> Interp_costs.mul_bls12_381_g1
| IMul_bls12_381_g2 _ -> Interp_costs.mul_bls12_381_g2
| IMul_bls12_381_fr _ -> Interp_costs.mul_bls12_381_fr
| INeg_bls12_381_g1 _ -> Interp_costs.neg_bls12_381_g1
| INeg_bls12_381_g2 _ -> Interp_costs.neg_bls12_381_g2
| INeg_bls12_381_fr _ -> Interp_costs.neg_bls12_381_fr
| IMul_bls12_381_fr_z _ ->
let z = accu in
Interp_costs.mul_bls12_381_fr_z z
| IMul_bls12_381_z_fr _ ->
let z, _ = stack in
Interp_costs.mul_bls12_381_z_fr z
| IDup_n (_, n, _, _) -> Interp_costs.dupn n
| IComb (_, n, _, _) -> Interp_costs.comb n
| IUncomb (_, n, _, _) -> Interp_costs.uncomb n
| IComb_get (_, n, _, _) -> Interp_costs.comb_get n
| IComb_set (_, n, _, _) -> Interp_costs.comb_set n
| ITicket _ | ITicket_deprecated _ -> Interp_costs.ticket
| IRead_ticket _ -> Interp_costs.read_ticket
| IOpen_chest _ ->
let (_chest_key : Script_timelock.chest_key) = accu
and chest, (time, _) = stack in
Interp_costs.open_chest ~chest ~time:(Script_int.to_zint time)
| IEmit _ -> Interp_costs.emit
| ILog _ -> Gas.free
[@@ocaml.inline always]
(* [cost_of_control ks] is the gas cost charged before resuming the
   continuation [ks]. Most costs are constants; a few depend on data
   captured in the continuation (remaining list, accumulated map). *)
let cost_of_control : type a s r f. (a, s, r, f) continuation -> Gas.cost =
 fun ks ->
  match ks with
  (* Logging instrumentation is not accounted for in gas. *)
  | KLog _ -> Gas.free
  | KNil -> Interp_costs.Control.nil
  | KCons (_, _) -> Interp_costs.Control.cons
  | KReturn _ -> Interp_costs.Control.return
  | KMap_head (_, _) -> Interp_costs.Control.map_head
  | KUndip (_, _, _) -> Interp_costs.Control.undip
  | KLoop_in (_, _) -> Interp_costs.Control.loop_in
  | KLoop_in_left (_, _) -> Interp_costs.Control.loop_in_left
  | KIter (_, _, _, _) -> Interp_costs.Control.iter
  (* Cost depends on the remaining elements [xs] and the length
     accumulator [len]. *)
  | KList_enter_body (_, xs, _, _, len, _) ->
      Interp_costs.Control.list_enter_body xs len
  | KList_exit_body (_, _, _, _, _, _) -> Interp_costs.Control.list_exit_body
  | KMap_enter_body (_, _, _, _, _) -> Interp_costs.Control.map_enter_body
  (* Cost depends on the [key] inserted into the accumulated [map]. *)
  | KMap_exit_body (_, _, map, key, _, _) ->
      Interp_costs.Control.map_exit_body key map
  | KView_exit (_, _) -> Interp_costs.Control.view_exit
(*

   [step] calls [consume_instr] at the beginning of each execution step.

   [Local_gas_counter.consume] is used in the implementation of
   [IConcat_string] and [IConcat_bytes] because in that special cases, the
   cost is expressed with respect to a non-constant-time computation on the
   inputs.

*)
(* Charge the gas cost of instruction [k] — which may inspect the top of
   the stack ([accu]) and its tail ([stack]) — against the local counter.
   Returns [None] when the counter is exhausted (cf. [consume_opt]). *)
let consume_instr local_gas_counter k accu stack =
  consume_opt local_gas_counter (cost_of_instr k accu stack)
[@@ocaml.inline always]
(* Charge the gas cost of resuming continuation [ks] against the local
   counter; [None] signals gas exhaustion (cf. [consume_opt]). *)
let consume_control local_gas_counter ks =
  consume_opt local_gas_counter (cost_of_control ks)
[@@ocaml.inline always]
(* Retrieve the trace gathered by the optional [logger]; when no logger is
   attached the log is simply absent. *)
let get_log logger_opt =
  match logger_opt with
  | Some logger -> logger.get_log ()
  | None -> Lwt.return (Ok None)
[@@ocaml.inline always]
(*
Auxiliary functions used by the interpretation loop
===================================================
*)
(* The following function pops n elements from the stack
   and push their reintroduction in the continuations stack. *)
let rec kundip :
    type a s e z c u d w b t.
    (a, s, e, z, c, u, d, w) stack_prefix_preservation_witness ->
    c ->
    u ->
    (d, w, b, t) kinstr ->
    a * s * (e, z, b, t) kinstr =
 fun w accu stack k ->
  match w with
  | KPrefix (loc, ty, w) ->
      (* Prepend an [IConst] to [k] so that [accu] is pushed back on the
         stack when execution resumes, then recurse on the tail. *)
      let k = IConst (loc, ty, accu, k) in
      let accu, stack = stack in
      kundip w accu stack k
  | KRest -> (accu, stack, k)
(* [apply ctxt gas ty v lam] specializes [lam] by fixing its first
   formal argument to [v]. The type of [v] is represented by [ty]. *)
(* Implements the APPLY instruction: partially applies [lam] to the
   captured value [capture] of type [capture_ty], producing a new lambda
   (and updated context and gas counter). *)
let apply ctxt gas capture_ty capture lam =
  let loc = Micheline.dummy_location in
  let ctxt = update_context gas ctxt in
  Script_ir_unparser.unparse_ty ~loc ctxt capture_ty >>?= fun (ty_expr, ctxt) ->
  unparse_data ctxt Optimized capture_ty capture >>=? fun (const_expr, ctxt) ->
  (* [make_expr expr] prefixes [expr] with PUSH of the captured constant
     followed by PAIR, pairing the constant with the incoming argument. *)
  let make_expr expr =
    Micheline.(
      Seq
        ( loc,
          Prim (loc, I_PUSH, [ty_expr; Micheline.root const_expr], [])
          :: Prim (loc, I_PAIR, [], [])
          :: expr ))
  in
  let lam' =
    match lam with
    | LamRec (descr, expr) -> (
        let (Item_t (full_arg_ty, Item_t (Lambda_t (_, _, _), Bot_t))) =
          descr.kbef
        in
        let (Item_t (ret_ty, Bot_t)) = descr.kaft in
        Script_ir_unparser.unparse_ty ~loc ctxt full_arg_ty
        >>?= fun (arg_ty_expr, ctxt) ->
        Script_ir_unparser.unparse_ty ~loc ctxt ret_ty
        >>?= fun (ret_ty_expr, ctxt) ->
        match full_arg_ty with
        | Pair_t (capture_ty, arg_ty, _, _) ->
            let arg_stack_ty = Item_t (arg_ty, Bot_t) in
            (* To avoid duplicating the recursive lambda [lam], we
               return a regular lambda that builds the tuple of
               parameters and applies it to `lam`. Since `lam` is
               recursive it will push itself on top of the stack at
               execution time. *)
            let full_descr =
              {
                kloc = descr.kloc;
                kbef = arg_stack_ty;
                kaft = descr.kaft;
                kinstr =
                  IConst
                    ( descr.kloc,
                      capture_ty,
                      capture,
                      ICons_pair
                        ( descr.kloc,
                          ILambda
                            ( descr.kloc,
                              lam,
                              ISwap
                                ( descr.kloc,
                                  IExec
                                    ( descr.kloc,
                                      Some descr.kaft,
                                      IHalt descr.kloc ) ) ) ) );
              }
            in
            (* Mirror the same wrapping at the Micheline level. *)
            let full_expr =
              make_expr
                Micheline.
                  [
                    Prim
                      (loc, I_LAMBDA_REC, [arg_ty_expr; ret_ty_expr; expr], []);
                    Prim (loc, I_SWAP, [], []);
                    Prim (loc, I_EXEC, [], []);
                  ]
            in
            return (Lam (full_descr, full_expr), ctxt))
    | Lam (descr, expr) -> (
        let (Item_t (full_arg_ty, Bot_t)) = descr.kbef in
        match full_arg_ty with
        | Pair_t (capture_ty, arg_ty, _, _) ->
            let arg_stack_ty = Item_t (arg_ty, Bot_t) in
            (* Non-recursive case: simply push the constant and pair it
               with the argument before running the original body. *)
            let full_descr =
              {
                kloc = descr.kloc;
                kbef = arg_stack_ty;
                kaft = descr.kaft;
                kinstr =
                  IConst
                    ( descr.kloc,
                      capture_ty,
                      capture,
                      ICons_pair (descr.kloc, descr.kinstr) );
              }
            in
            let full_expr = make_expr [expr] in
            return (Lam (full_descr, full_expr), ctxt))
  in
  lam' >>=? fun (lam', ctxt) ->
  let gas, ctxt = local_gas_counter_and_outdated_context ctxt in
  return (lam', ctxt, gas)
(* [make_transaction_to_tx_rollup ctxt ~destination ~amount ~parameters_ty
   ~parameters] builds the internal operation representing a deposit into a
   transaction rollup. The block was syntactically broken in this copy: the
   opening delimiter of the explanatory comment below had been stripped;
   it is restored here. *)
let make_transaction_to_tx_rollup (type t) ctxt ~destination ~amount
    ~(parameters_ty : ((t ticket, tx_rollup_l2_address) pair, _) ty) ~parameters
    =
  (* The entrypoints of a transaction rollup are polymorphic wrt. the
     tickets it can process. However, two Michelson values can have
     the same Micheline representation, but different types. What
     this means is that when we start the execution of a transaction
     rollup, the type of its argument is lost if we just give it the
     values provided by the Michelson script.

     To address this issue, we instrument a transfer to a transaction
     rollup to inject the exact type of the entrypoint as used by
     the smart contract. This allows the transaction rollup to extract
     the type of the ticket. *)
  (* Rollups cannot own tez, so the transferred amount must be zero. *)
  error_unless Tez.(amount = zero) Rollup_invalid_transaction_amount
  >>?= fun () ->
  let (Pair_t (Ticket_t (tp, _), _, _, _)) = parameters_ty in
  unparse_data ctxt Optimized parameters_ty parameters
  >>=? fun (unparsed_parameters, ctxt) ->
  Lwt.return
    ( Script_ir_unparser.unparse_ty ~loc:Micheline.dummy_location ctxt tp
    >>? fun (ty, ctxt) ->
      (* Inject the unparsed ticket type alongside the parameters. *)
      let unparsed_parameters =
        Micheline.Seq
          (Micheline.dummy_location, [Micheline.root unparsed_parameters; ty])
      in
      Gas.consume ctxt (Script.strip_locations_cost unparsed_parameters)
      >|? fun ctxt ->
      let unparsed_parameters = Micheline.strip_locations unparsed_parameters in
      ( Transaction_to_tx_rollup
          {destination; parameters_ty; parameters; unparsed_parameters},
        ctxt ) )
(* [make_transaction_to_sc_rollup ctxt ~destination ~amount ~entrypoint
   ~parameters_ty ~parameters] builds the internal operation for a transfer
   to a smart rollup. The opening delimiter of the TODO comment below had
   been stripped in this copy and is restored here. *)
let make_transaction_to_sc_rollup ctxt ~destination ~amount ~entrypoint
    ~parameters_ty ~parameters =
  (* Rollups cannot own tez, so the transferred amount must be zero. *)
  error_unless Tez.(amount = zero) Rollup_invalid_transaction_amount
  >>?= fun () ->
  (* TODO: https://gitlab.com/tezos/tezos/-/issues/4023
     We currently don't support entrypoints as the entrypoint information
     for L1 to L2 messages is not propagated to the rollup. *)
  error_unless (Entrypoint.is_default entrypoint) Rollup_invalid_entrypoint
  >>?= fun () ->
  unparse_data ctxt Optimized parameters_ty parameters
  >|=? fun (unparsed_parameters, ctxt) ->
  ( Transaction_to_sc_rollup
      {destination; entrypoint; parameters_ty; parameters; unparsed_parameters},
    ctxt )
(** [emit_event] generates an internal operation that will effect an event emission
    if the contract code returns this successfully. *)
let emit_event (type t tc) (ctxt, sc) gas ~(event_type : (t, tc) ty)
    ~unparsed_ty ~tag ~(event_data : t) =
  let ctxt = update_context gas ctxt in
  (* No need to take care of lazy storage as only packable types are allowed *)
  let lazy_storage_diff = None in
  unparse_data ctxt Optimized event_type event_data
  >>=? fun (unparsed_data, ctxt) ->
  fresh_internal_nonce ctxt >>?= fun (ctxt, nonce) ->
  let operation = Event {ty = unparsed_ty; tag; unparsed_data} in
  (* The currently executing contract is the source of the operation. *)
  let iop =
    {
      source = Destination.Contract (Contract.Originated sc.self);
      operation;
      nonce;
    }
  in
  let res = {piop = Internal_operation iop; lazy_storage_diff} in
  let gas, ctxt = local_gas_counter_and_outdated_context ctxt in
  return (res, ctxt, gas)
(* Build the internal operation for a transfer to a ZK rollup. As for the
   other rollup kinds, the transferred amount must be zero. *)
let make_transaction_to_zk_rollup (type t) ctxt ~destination ~amount
    ~(parameters_ty : ((t ticket, bytes) pair, _) ty) ~parameters =
  error_unless Tez.(amount = zero) Rollup_invalid_transaction_amount
  >>?= fun () ->
  unparse_data ctxt Optimized parameters_ty parameters
  >>=? fun (unparsed_parameters, ctxt) ->
  let operation =
    Transaction_to_zk_rollup
      {destination; parameters_ty; parameters; unparsed_parameters}
  in
  return (operation, ctxt)
(* [transfer (ctxt, sc) gas tez parameters_ty parameters destination entrypoint]
   creates an operation that transfers an amount of [tez] to a destination and
   an entrypoint instantiated with argument [parameters] of type
   [parameters_ty]. *)
(* Builds the internal operation for TRANSFER_TOKENS, dispatching on the
   kind of typed destination. Returns the packed operation together with
   the updated context and local gas counter. *)
let transfer (type t) (ctxt, sc) gas amount location
    (typed_contract : t typed_contract) (parameters : t) =
  let ctxt = update_context gas ctxt in
  (match typed_contract with
  | Typed_implicit destination ->
      (* Implicit accounts take a unit parameter (matched here). *)
      let () = parameters in
      return (Transaction_to_implicit {destination; amount}, None, ctxt)
  | Typed_implicit_with_ticket {destination; ticket_ty} ->
      unparse_data ctxt Optimized ticket_ty parameters
      >>=? fun (unparsed_ticket, ctxt) ->
      return
        ( Transaction_to_implicit_with_ticket
            {
              destination;
              amount;
              ticket_ty;
              ticket = parameters;
              unparsed_ticket = Script.lazy_expr unparsed_ticket;
            },
          None,
          ctxt )
  | Typed_originated
      {arg_ty = parameters_ty; contract_hash = destination; entrypoint} ->
      (* Collect lazy storage (big maps) referenced by the parameters and
         compute the diff to transmit alongside the transaction. *)
      collect_lazy_storage ctxt parameters_ty parameters
      >>?= fun (to_duplicate, ctxt) ->
      let to_update = no_lazy_storage_id in
      extract_lazy_storage_diff
        ctxt
        Optimized
        parameters_ty
        parameters
        ~to_duplicate
        ~to_update
        ~temporary:true
      >>=? fun (parameters, lazy_storage_diff, ctxt) ->
      unparse_data ctxt Optimized parameters_ty parameters
      >|=? fun (unparsed_parameters, ctxt) ->
      ( Transaction_to_smart_contract
          {
            destination;
            amount;
            entrypoint;
            location;
            parameters_ty;
            parameters;
            unparsed_parameters;
          },
        lazy_storage_diff,
        ctxt )
  | Typed_tx_rollup {arg_ty = parameters_ty; tx_rollup = destination} ->
      make_transaction_to_tx_rollup
        ctxt
        ~destination
        ~amount
        ~parameters_ty
        ~parameters
      >|=? fun (operation, ctxt) -> (operation, None, ctxt)
  | Typed_sc_rollup
      {arg_ty = parameters_ty; sc_rollup = destination; entrypoint} ->
      make_transaction_to_sc_rollup
        ctxt
        ~destination
        ~amount
        ~entrypoint
        ~parameters_ty
        ~parameters
      >|=? fun (operation, ctxt) -> (operation, None, ctxt)
  | Typed_zk_rollup {arg_ty = parameters_ty; zk_rollup = destination} ->
      make_transaction_to_zk_rollup
        ctxt
        ~destination
        ~amount
        ~parameters_ty
        ~parameters
      >|=? fun (operation, ctxt) -> (operation, None, ctxt))
  >>=? fun (operation, lazy_storage_diff, ctxt) ->
  fresh_internal_nonce ctxt >>?= fun (ctxt, nonce) ->
  (* The currently executing contract is the source of the operation. *)
  let iop =
    {
      source = Destination.Contract (Contract.Originated sc.self);
      operation;
      nonce;
    }
  in
  let res = {piop = Internal_operation iop; lazy_storage_diff} in
  let gas, ctxt = local_gas_counter_and_outdated_context ctxt in
  return (res, ctxt, gas)
(** [create_contract (ctxt, sc) gas storage_ty code delegate credit init]
    creates an origination operation for a contract represented by [code], some
    initial [credit] (withdrawn from the contract being executed), and an
    initial storage [init] of type [storage_ty]. *)
let create_contract (ctxt, sc) gas storage_type code delegate credit init =
  let ctxt = update_context gas ctxt in
  (* Collect lazy storage (big maps) referenced by the initial storage and
     compute the diff shipped with the origination. *)
  collect_lazy_storage ctxt storage_type init >>?= fun (to_duplicate, ctxt) ->
  let to_update = no_lazy_storage_id in
  extract_lazy_storage_diff
    ctxt
    Optimized
    storage_type
    init
    ~to_duplicate
    ~to_update
    ~temporary:true
  >>=? fun (init, lazy_storage_diff, ctxt) ->
  unparse_data ctxt Optimized storage_type init
  >>=? fun (unparsed_storage, ctxt) ->
  (* Pre-compute the address of the contract being originated. *)
  Contract.fresh_contract_from_current_nonce ctxt
  >>?= fun (ctxt, preorigination) ->
  let operation =
    Origination
      {
        credit;
        delegate;
        code;
        unparsed_storage;
        preorigination;
        storage_type;
        storage = init;
      }
  in
  fresh_internal_nonce ctxt >>?= fun (ctxt, nonce) ->
  let source = Destination.Contract (Contract.Originated sc.self) in
  let piop = Internal_operation {source; operation; nonce} in
  let res = {piop; lazy_storage_diff} in
  let gas, ctxt = local_gas_counter_and_outdated_context ctxt in
  return (res, preorigination, ctxt, gas)
(* [unpack ctxt ~ty ~bytes] deserialize [bytes] into a value of type [ty]. *)
(* Charges deserialization gas up front; on any decoding or parsing failure
   it charges [unpack_failed] gas and returns [None] rather than failing. *)
let unpack ctxt ~ty ~bytes =
  Gas.consume
    ctxt
    (Script.deserialization_cost_estimated_from_bytes (Bytes.length bytes))
  >>?= fun ctxt ->
  (* Packed values carry the 0x05 prefix byte, checked here. *)
  if
    Compare.Int.(Bytes.length bytes >= 1)
    && Compare.Int.(TzEndian.get_uint8 bytes 0 = 0x05)
  then
    let str = Bytes.sub_string bytes 1 (Bytes.length bytes - 1) in
    match Data_encoding.Binary.of_string_opt Script.expr_encoding str with
    | None ->
        Lwt.return
          ( Gas.consume ctxt (Interp_costs.unpack_failed str) >|? fun ctxt ->
            (None, ctxt) )
    | Some expr -> (
        parse_data
          ctxt
          ~elab_conf:Script_ir_translator_config.(make ~legacy:false ())
          ~allow_forged:false
          ty
          (Micheline.root expr)
        >|= function
        | Ok (value, ctxt) -> ok (Some value, ctxt)
        | Error _ignored ->
            (* Swallow the parse error but still charge for the failure. *)
            Gas.consume ctxt (Interp_costs.unpack_failed str) >|? fun ctxt ->
            (None, ctxt))
  else return (None, ctxt)
(* [interp_stack_prefix_preserving_operation f w accu stack] applies
   a well-typed operation [f] under some prefix of the A-stack
   exploiting [w] to justify that the shape of the stack is
   preserved. *)
let rec interp_stack_prefix_preserving_operation :
    type a s b t c u d w result.
    (a -> s -> (b * t) * result) ->
    (a, s, b, t, c, u, d, w) stack_prefix_preservation_witness ->
    c ->
    u ->
    (d * w) * result =
 fun f n accu stk ->
  match (n, stk) with
  | KPrefix (_, _, n), rest ->
      (* Recurse below the top element, then re-attach [accu] on top of
         the rewritten stack. *)
      interp_stack_prefix_preserving_operation f n (fst rest) (snd rest)
      |> fun ((v, rest'), result) -> ((accu, (v, rest')), result)
  | KRest, v -> f accu v
(*

   Some auxiliary functions have complex types and must be annotated
   because of GADTs and polymorphic recursion.

   To improve readibility, we introduce their types as abbreviations:

*)
(* A function of this type either introduces type-preserving
   instrumentation of a continuation for the purposes of logging
   or returns given continuation unchanged. *)
type ('a, 'b, 'c, 'd) cont_instrumentation =
  ('a, 'b, 'c, 'd) continuation -> ('a, 'b, 'c, 'd) continuation

(* Identity instrumentation: leaves the continuation unchanged. *)
let id x = x
(* Handler exiting the body of a MAP over maps; cf. [KMap_exit_body]. *)
type ('a, 'b, 'c, 'e, 'f, 'm, 'n, 'o) kmap_exit_type =
  ('a, 'b, 'e, 'f) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('m * 'n, 'a * 'b, 'o, 'a * 'b) kinstr ->
  ('m * 'n) list ->
  (('m, 'o) map, 'c) ty option ->
  ('m, 'o) map ->
  'm ->
  (('m, 'o) map, 'a * 'b, 'e, 'f) continuation ->
  'o ->
  'a * 'b ->
  ('e * 'f * outdated_context * local_gas_counter) tzresult Lwt.t

(* Handler entering a MAP over maps; cf. [KMap_enter_body]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'j, 'k) kmap_enter_type =
  ('a, 'b * 'c, 'd, 'e) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('j * 'k, 'b * 'c, 'a, 'b * 'c) kinstr ->
  ('j * 'k) list ->
  (('j, 'a) map, 'f) ty option ->
  ('j, 'a) map ->
  (('j, 'a) map, 'b * 'c, 'd, 'e) continuation ->
  'b ->
  'c ->
  ('d * 'e * outdated_context * local_gas_counter) tzresult Lwt.t

(* Handler exiting the body of a MAP over lists; cf. [KList_exit_body]. *)
type ('a, 'b, 'c, 'd, 'e, 'i, 'j) klist_exit_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('i, 'a * 'b, 'j, 'a * 'b) kinstr ->
  'i list ->
  'j Script_list.t ->
  ('j Script_list.t, 'e) ty option ->
  int ->
  ('j Script_list.t, 'a * 'b, 'c, 'd) continuation ->
  'j ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Handler entering a MAP over lists; cf. [KList_enter_body]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'j) klist_enter_type =
  ('b, 'a * 'c, 'd, 'e) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('j, 'a * 'c, 'b, 'a * 'c) kinstr ->
  'j list ->
  'b Script_list.t ->
  ('b Script_list.t, 'f) ty option ->
  int ->
  ('b Script_list.t, 'a * 'c, 'd, 'e) continuation ->
  'a ->
  'c ->
  ('d * 'e * outdated_context * local_gas_counter) tzresult Lwt.t

(* Handler for LOOP_LEFT iterations; cf. [KLoop_in_left]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g) kloop_in_left_type =
  outdated_context * step_constants ->
  local_gas_counter ->
  ('c, 'd, 'e, 'f) continuation ->
  ('a, 'g, 'c, 'd) kinstr ->
  ('b, 'g, 'e, 'f) continuation ->
  ('a, 'b) or_ ->
  'g ->
  ('e * 'f * outdated_context * local_gas_counter) tzresult Lwt.t

(* Handler for LOOP iterations; cf. [KLoop_in]. *)
type ('a, 'b, 'c, 'r, 'f, 's) kloop_in_type =
  outdated_context * step_constants ->
  local_gas_counter ->
  ('b, 'c, 'r, 'f) continuation ->
  ('a, 's, 'b, 'c) kinstr ->
  ('a, 's, 'r, 'f) continuation ->
  bool ->
  'a * 's ->
  ('r * 'f * outdated_context * local_gas_counter) tzresult Lwt.t

(* Handler for ITER; cf. [KIter]. *)
type ('a, 'b, 's, 'r, 'f, 'c) kiter_type =
  ('a, 's, 'r, 'f) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('b, 'a * 's, 'a, 's) kinstr ->
  ('b, 'c) ty option ->
  'b list ->
  ('a, 's, 'r, 'f) continuation ->
  'a ->
  's ->
  ('r * 'f * outdated_context * local_gas_counter) tzresult Lwt.t
(* Step implementing [IList_map]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, 'i) ilist_map_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('e, 'a * 'b, 'f, 'a * 'b) kinstr ->
  ('f Script_list.t, 'a * 'b, 'g, 'h) kinstr ->
  ('g, 'h, 'c, 'd) continuation ->
  ('f Script_list.t, 'i) ty option ->
  'e Script_list.t ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Step implementing [IList_iter]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'cmp) ilist_iter_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('e, 'a * 'b, 'a, 'b) kinstr ->
  ('e, 'cmp) ty option ->
  ('a, 'b, 'f, 'g) kinstr ->
  ('f, 'g, 'c, 'd) continuation ->
  'e Script_list.t ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Step implementing [ISet_iter]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g) iset_iter_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('e, 'a * 'b, 'a, 'b) kinstr ->
  'e comparable_ty option ->
  ('a, 'b, 'f, 'g) kinstr ->
  ('f, 'g, 'c, 'd) continuation ->
  'e set ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Step implementing [IMap_map]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, 'i, 'j) imap_map_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('e * 'f, 'a * 'b, 'g, 'a * 'b) kinstr ->
  (('e, 'g) map, 'a * 'b, 'h, 'i) kinstr ->
  ('h, 'i, 'c, 'd) continuation ->
  (('e, 'g) map, 'j) ty option ->
  ('e, 'f) map ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Step implementing [IMap_iter]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, 'cmp) imap_iter_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('e * 'f, 'a * 'b, 'a, 'b) kinstr ->
  ('e * 'f, 'cmp) ty option ->
  ('a, 'b, 'g, 'h) kinstr ->
  ('g, 'h, 'c, 'd) continuation ->
  ('e, 'f) map ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t
(* Step implementing [IMul_teznat]. *)
type ('a, 'b, 'c, 'd, 'e, 'f) imul_teznat_type =
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  Script.location ->
  (Tez.t, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  Tez.t ->
  Script_int.n Script_int.num * 'b ->
  ('e * 'f * outdated_context * local_gas_counter, error trace) result Lwt.t

(* Step implementing [IMul_nattez]. *)
type ('a, 'b, 'c, 'd, 'e, 'f) imul_nattez_type =
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  Script.location ->
  (Tez.t, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  Script_int.n Script_int.num ->
  Tez.t * 'b ->
  ('e * 'f * outdated_context * local_gas_counter, error trace) result Lwt.t

(* Step implementing [ILsl_nat]. *)
type ('a, 'b, 'c, 'd, 'e, 'f) ilsl_nat_type =
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  Script.location ->
  (Script_int.n Script_int.num, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  Script_int.n Script_int.num ->
  Script_int.n Script_int.num * 'b ->
  ('e * 'f * outdated_context * local_gas_counter, error trace) result Lwt.t

(* Step implementing [ILsr_nat]. *)
type ('a, 'b, 'c, 'd, 'e, 'f) ilsr_nat_type =
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  Script.location ->
  (Script_int.n Script_int.num, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  Script_int.n Script_int.num ->
  Script_int.n Script_int.num * 'b ->
  ('e * 'f * outdated_context * local_gas_counter, error trace) result Lwt.t

(* Step implementing [ILsl_bytes]. *)
type ('a, 'b, 'c, 'd, 'e, 'f) ilsl_bytes_type =
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  Script.location ->
  (bytes, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  bytes ->
  Script_int.n Script_int.num * 'b ->
  ('e * 'f * outdated_context * local_gas_counter, error trace) result Lwt.t

(* Step implementing [IFailwith]. Wrapped in a record so the field can stay
   polymorphic in ['a], ['ac] and ['b]; [@@unboxed] removes the runtime
   indirection of the record. *)
type ifailwith_type = {
  ifailwith :
    'a 'ac 'b.
    logger option ->
    outdated_context * step_constants ->
    local_gas_counter ->
    Script.location ->
    ('a, 'ac) ty ->
    'a ->
    ('b, error trace) result Lwt.t;
}
[@@unboxed]
(* Step implementing [IExec]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g) iexec_type =
  ('a, end_of_stack, 'e, 'f) cont_instrumentation ->
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('a, 'b) stack_ty option ->
  ('a, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  'g ->
  ('g, 'a) lambda * 'b ->
  ('e * 'f * outdated_context * local_gas_counter) tzresult Lwt.t

(* Step implementing [IView]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'i, 'o) iview_type =
  ('o, end_of_stack, 'e, 'f) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('i, 'o) view_signature ->
  ('a, 'b) stack_ty option ->
  ('o option, 'a * 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  'i ->
  address * ('a * 'b) ->
  ('e * 'f * outdated_context * local_gas_counter) tzresult Lwt.t
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/32c9ef7c4ad3c6ec14e16ab3d75e353266925fb1/src/proto_alpha/lib_protocol/script_interpreter_defs.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
This module provides auxiliary definitions used in the interpreter.
These are internal private definitions. Do not rely on them outside
the interpreter.
Auxiliary functions used by the interpretation loop
===================================================
The following function pops n elements from the stack
and push their reintroduction in the continuations stack.
To avoid duplicating the recursive lambda [lam], we
return a regular lambda that builds the tuple of
parameters and applies it to `lam`. Since `lam` is
recursive it will push itself on top of the stack at
execution time.
* [emit_event] generates an internal operation that will effect an event emission
if the contract code returns this successfully.
No need to take care of lazy storage as only packable types are allowed
[interp_stack_prefix_preserving_operation f w accu stack] applies
a well-typed operation [f] under some prefix of the A-stack
exploiting [w] to justify that the shape of the stack is
preserved.
A function of this type either introduces type-preserving
instrumentation of a continuation for the purposes of logging
or returns given continuation unchanged. | Copyright ( c ) 2021 - 2022 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Alpha_context
open Script
open Script_typed_ir
open Script_ir_translator
open Local_gas_counter
type error += Rollup_invalid_transaction_amount | Rollup_invalid_entrypoint
let () =
register_error_kind
`Permanent
~id:"operation.rollup_invalid_transaction_amount"
~title:"Transaction amount to a rollup must be zero"
~description:
"Because rollups are outside of the delegation mechanism of Tezos, they \
cannot own Tez, and therefore transactions targeting a rollup must have \
its amount field set to zero."
~pp:(fun ppf () ->
Format.pp_print_string ppf "Transaction amount to a rollup must be zero.")
Data_encoding.unit
(function Rollup_invalid_transaction_amount -> Some () | _ -> None)
(fun () -> Rollup_invalid_transaction_amount) ;
register_error_kind
`Permanent
~id:"operation.rollup_invalid_entrypoint"
~title:"Only the default entrypoint is allowed for rollups"
~description:"Rollups only support transactions to the default entrypoint."
~pp:(fun ppf () ->
Format.pp_print_string
ppf
"Rollups only support transactions to the default entrypoint.")
Data_encoding.unit
(function Rollup_invalid_entrypoint -> Some () | _ -> None)
(fun () -> Rollup_invalid_entrypoint)
Computing the cost of instructions
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
The function [ cost_of_instr ] provides a cost model for
instructions . It is used by the interpreter to track the
consumption of gas . This consumption may depend on the values
on the stack .
Computing the cost of Michelson instructions
============================================
The function [cost_of_instr] provides a cost model for Michelson
instructions. It is used by the interpreter to track the
consumption of gas. This consumption may depend on the values
on the stack.
*)
module Interp_costs = Michelson_v1_gas.Cost_of.Interpreter

(* [cost_of_instr i accu stack] is the gas cost charged before executing
   instruction [i], where [accu] is the top of the stack and [stack] the
   remaining cells.  Most instructions have a constant cost; the arms
   below that destructure [accu] or [stack] have a cost that depends on
   the size of their operands. *)
let cost_of_instr : type a s r f. (a, s, r, f) kinstr -> a -> s -> Gas.cost =
 fun i accu stack ->
  match i with
  (* Collection traversals: cost depends on the container. *)
  | IList_map _ ->
      let list = accu in
      Interp_costs.list_map list
  | IList_iter _ ->
      let list = accu in
      Interp_costs.list_iter list
  | ISet_iter _ ->
      let set = accu in
      Interp_costs.set_iter set
  | ISet_mem _ ->
      let v = accu and set, _ = stack in
      Interp_costs.set_mem v set
  | ISet_update _ ->
      let v = accu and _, (set, _) = stack in
      Interp_costs.set_update v set
  | IMap_map _ ->
      let map = accu in
      Interp_costs.map_map map
  | IMap_iter _ ->
      let map = accu in
      Interp_costs.map_iter map
  | IMap_mem _ ->
      let v = accu and map, _ = stack in
      Interp_costs.map_mem v map
  | IMap_get _ ->
      let v = accu and map, _ = stack in
      Interp_costs.map_get v map
  | IMap_update _ ->
      let k = accu and _, (map, _) = stack in
      Interp_costs.map_update k map
  | IMap_get_and_update _ ->
      let k = accu and _, (map, _) = stack in
      Interp_costs.map_get_and_update k map
  (* Big-map operations: only the in-memory [diff] is inspected. *)
  | IBig_map_mem _ ->
      let Big_map map, _ = stack in
      Interp_costs.big_map_mem map.diff
  | IBig_map_get _ ->
      let Big_map map, _ = stack in
      Interp_costs.big_map_get map.diff
  | IBig_map_update _ ->
      let _, (Big_map map, _) = stack in
      Interp_costs.big_map_update map.diff
  | IBig_map_get_and_update _ ->
      let _, (Big_map map, _) = stack in
      Interp_costs.big_map_get_and_update map.diff
  (* Timestamp arithmetic. *)
  | IAdd_seconds_to_timestamp _ ->
      let n = accu and t, _ = stack in
      Interp_costs.add_seconds_timestamp n t
  | IAdd_timestamp_to_seconds _ ->
      let t = accu and n, _ = stack in
      Interp_costs.add_timestamp_seconds t n
  | ISub_timestamp_seconds _ ->
      let t = accu and n, _ = stack in
      Interp_costs.sub_timestamp_seconds t n
  | IDiff_timestamps _ ->
      let t1 = accu and t2, _ = stack in
      Interp_costs.diff_timestamps t1 t2
  (* String/bytes operations: cost depends on operand lengths. *)
  | IConcat_string_pair _ ->
      let x = accu and y, _ = stack in
      Interp_costs.concat_string_pair x y
  | IConcat_string _ ->
      let ss = accu in
      Interp_costs.concat_string_precheck ss
  | ISlice_string _ ->
      let (_offset : Script_int.n Script_int.num) = accu in
      let _length, (s, _) = stack in
      Interp_costs.slice_string s
  | IConcat_bytes_pair _ ->
      let x = accu and y, _ = stack in
      Interp_costs.concat_bytes_pair x y
  | IConcat_bytes _ ->
      let ss = accu in
      Interp_costs.concat_string_precheck ss
  | ISlice_bytes _ ->
      let _, (s, _) = stack in
      Interp_costs.slice_bytes s
  | IBytes_nat _ ->
      let n = accu in
      Interp_costs.bytes_nat n
  | INat_bytes _ ->
      let b = accu in
      Interp_costs.nat_bytes b
  | IBytes_int _ ->
      let n = accu in
      Interp_costs.bytes_int n
  | IInt_bytes _ ->
      let b = accu in
      Interp_costs.int_bytes b
  (* Arbitrary-precision arithmetic: cost depends on operand sizes. *)
  | IMul_teznat _ -> Interp_costs.mul_teznat
  | IMul_nattez _ -> Interp_costs.mul_nattez
  | IAbs_int _ ->
      let x = accu in
      Interp_costs.abs_int x
  | INeg _ ->
      let x = accu in
      Interp_costs.neg x
  | IAdd_int _ ->
      let x = accu and y, _ = stack in
      Interp_costs.add_int x y
  | IAdd_nat _ ->
      let x = accu and y, _ = stack in
      Interp_costs.add_nat x y
  | ISub_int _ ->
      let x = accu and y, _ = stack in
      Interp_costs.sub_int x y
  | IMul_int _ ->
      let x = accu and y, _ = stack in
      Interp_costs.mul_int x y
  | IMul_nat _ ->
      let x = accu and y, _ = stack in
      Interp_costs.mul_nat x y
  | IEdiv_teznat _ ->
      let x = accu and y, _ = stack in
      Interp_costs.ediv_teznat x y
  | IEdiv_int _ ->
      let x = accu and y, _ = stack in
      Interp_costs.ediv_int x y
  | IEdiv_nat _ ->
      let x = accu and y, _ = stack in
      Interp_costs.ediv_nat x y
  | ILsl_nat _ ->
      let x = accu in
      Interp_costs.lsl_nat x
  | ILsl_bytes _ ->
      let x = accu in
      let y, _ = stack in
      Interp_costs.lsl_bytes x y
  | ILsr_nat _ ->
      let x = accu in
      Interp_costs.lsr_nat x
  | ILsr_bytes _ ->
      let x = accu in
      let y, _ = stack in
      Interp_costs.lsr_bytes x y
  | IOr_nat _ ->
      let x = accu and y, _ = stack in
      Interp_costs.or_nat x y
  | IOr_bytes _ ->
      let x = accu and y, _ = stack in
      Interp_costs.or_bytes x y
  | IAnd_nat _ ->
      let x = accu and y, _ = stack in
      Interp_costs.and_nat x y
  | IAnd_int_nat _ ->
      let x = accu and y, _ = stack in
      Interp_costs.and_int_nat x y
  | IAnd_bytes _ ->
      let x = accu and y, _ = stack in
      Interp_costs.and_bytes x y
  | IXor_nat _ ->
      let x = accu and y, _ = stack in
      Interp_costs.xor_nat x y
  | IXor_bytes _ ->
      let x = accu and y, _ = stack in
      Interp_costs.xor_bytes x y
  | INot_int _ ->
      let x = accu in
      Interp_costs.not_int x
  | INot_bytes _ ->
      let x = accu in
      Interp_costs.not_bytes x
  (* Comparison and cryptography: cost depends on type/operand size. *)
  | ICompare (_, ty, _) ->
      let a = accu and b, _ = stack in
      Interp_costs.compare ty a b
  | ICheck_signature _ ->
      let key = accu and _, (message, _) = stack in
      Interp_costs.check_signature key message
  | IHash_key _ ->
      let pk = accu in
      Interp_costs.hash_key pk
  | IBlake2b _ ->
      let bytes = accu in
      Interp_costs.blake2b bytes
  | ISha256 _ ->
      let bytes = accu in
      Interp_costs.sha256 bytes
  | ISha512 _ ->
      let bytes = accu in
      Interp_costs.sha512 bytes
  | IKeccak _ ->
      let bytes = accu in
      Interp_costs.keccak bytes
  | ISha3 _ ->
      let bytes = accu in
      Interp_costs.sha3 bytes
  | IPairing_check_bls12_381 _ ->
      let pairs = accu in
      Interp_costs.pairing_check_bls12_381 pairs
  (* Sapling: cost scales with the transaction's inputs/outputs. *)
  | ISapling_verify_update _ ->
      let tx = accu in
      let inputs = Gas_input_size.sapling_transaction_inputs tx in
      let outputs = Gas_input_size.sapling_transaction_outputs tx in
      let bound_data = Gas_input_size.sapling_transaction_bound_data tx in
      Interp_costs.sapling_verify_update ~inputs ~outputs ~bound_data
  | ISapling_verify_update_deprecated _ ->
      let tx = accu in
      let inputs = List.length tx.inputs in
      let outputs = List.length tx.outputs in
      Interp_costs.sapling_verify_update_deprecated ~inputs ~outputs
  | ISplit_ticket _ ->
      let (amount_a, amount_b), _ = stack in
      Interp_costs.split_ticket amount_a amount_b
  | IJoin_tickets (_, ty, _) ->
      let ticket_a, ticket_b = accu in
      Interp_costs.join_tickets ty ticket_a ticket_b
  (* Constant-cost stack and control instructions. *)
  | IHalt _ -> Interp_costs.halt
  | IDrop _ -> Interp_costs.drop
  | IDup _ -> Interp_costs.dup
  | ISwap _ -> Interp_costs.swap
  | IConst _ -> Interp_costs.const
  | ICons_some _ -> Interp_costs.cons_some
  | ICons_none _ -> Interp_costs.cons_none
  | IIf_none _ -> Interp_costs.if_none
  | IOpt_map _ -> Interp_costs.opt_map
  | ICons_pair _ -> Interp_costs.cons_pair
  | IUnpair _ -> Interp_costs.unpair
  | ICar _ -> Interp_costs.car
  | ICdr _ -> Interp_costs.cdr
  | ICons_left _ -> Interp_costs.cons_left
  | ICons_right _ -> Interp_costs.cons_right
  | IIf_left _ -> Interp_costs.if_left
  | ICons_list _ -> Interp_costs.cons_list
  | INil _ -> Interp_costs.nil
  | IIf_cons _ -> Interp_costs.if_cons
  | IList_size _ -> Interp_costs.list_size
  | IEmpty_set _ -> Interp_costs.empty_set
  | ISet_size _ -> Interp_costs.set_size
  | IEmpty_map _ -> Interp_costs.empty_map
  | IMap_size _ -> Interp_costs.map_size
  | IEmpty_big_map _ -> Interp_costs.empty_big_map
  | IString_size _ -> Interp_costs.string_size
  | IBytes_size _ -> Interp_costs.bytes_size
  | IAdd_tez _ -> Interp_costs.add_tez
  | ISub_tez _ -> Interp_costs.sub_tez
  | ISub_tez_legacy _ -> Interp_costs.sub_tez_legacy
  | IOr _ -> Interp_costs.bool_or
  | IAnd _ -> Interp_costs.bool_and
  | IXor _ -> Interp_costs.bool_xor
  | INot _ -> Interp_costs.bool_not
  | IIs_nat _ -> Interp_costs.is_nat
  | IInt_nat _ -> Interp_costs.int_nat
  | IInt_bls12_381_fr _ -> Interp_costs.int_bls12_381_fr
  | IEdiv_tez _ -> Interp_costs.ediv_tez
  | IIf _ -> Interp_costs.if_
  | ILoop _ -> Interp_costs.loop
  | ILoop_left _ -> Interp_costs.loop_left
  | IDip _ -> Interp_costs.dip
  | IExec _ -> Interp_costs.exec
  | IApply _ -> (
      (* The cost of APPLY differs for recursive lambdas. *)
      let l, _ = stack in
      match l with
      | Lam _ -> Interp_costs.apply ~rec_flag:false
      | LamRec _ -> Interp_costs.apply ~rec_flag:true)
  | ILambda _ -> Interp_costs.lambda
  (* FAILWITH terminates execution, hence no up-front charge here. *)
  | IFailwith _ -> Gas.free
  | IEq _ -> Interp_costs.eq
  | INeq _ -> Interp_costs.neq
  | ILt _ -> Interp_costs.lt
  | ILe _ -> Interp_costs.le
  | IGt _ -> Interp_costs.gt
  | IGe _ -> Interp_costs.ge
  (* PACK's cost is accounted for elsewhere — presumably during
     serialization itself; confirm against the interpreter. *)
  | IPack _ -> Gas.free
  | IUnpack _ ->
      let b = accu in
      Interp_costs.unpack b
  | IAddress _ -> Interp_costs.address
  | IContract _ -> Interp_costs.contract
  | ITransfer_tokens _ -> Interp_costs.transfer_tokens
  | IView _ -> Interp_costs.view
  | IImplicit_account _ -> Interp_costs.implicit_account
  | ISet_delegate _ -> Interp_costs.set_delegate
  | IBalance _ -> Interp_costs.balance
  | ILevel _ -> Interp_costs.level
  | INow _ -> Interp_costs.now
  | IMin_block_time _ -> Interp_costs.min_block_time
  | ISapling_empty_state _ -> Interp_costs.sapling_empty_state
  | ISource _ -> Interp_costs.source
  | ISender _ -> Interp_costs.sender
  | ISelf _ -> Interp_costs.self
  | ISelf_address _ -> Interp_costs.self_address
  | IAmount _ -> Interp_costs.amount
  (* Depth-indexed stack instructions: cost depends on the depth [n]. *)
  | IDig (_, n, _, _) -> Interp_costs.dign n
  | IDug (_, n, _, _) -> Interp_costs.dugn n
  | IDipn (_, n, _, _, _) -> Interp_costs.dipn n
  | IDropn (_, n, _, _) -> Interp_costs.dropn n
  | IChainId _ -> Interp_costs.chain_id
  | ICreate_contract _ -> Interp_costs.create_contract
  (* [never] is uninhabited: the refutation case makes this arm
     statically unreachable. *)
  | INever _ -> ( match accu with _ -> .)
  | IVoting_power _ -> Interp_costs.voting_power
  | ITotal_voting_power _ -> Interp_costs.total_voting_power
  (* BLS12-381 group operations (constant cost). *)
  | IAdd_bls12_381_g1 _ -> Interp_costs.add_bls12_381_g1
  | IAdd_bls12_381_g2 _ -> Interp_costs.add_bls12_381_g2
  | IAdd_bls12_381_fr _ -> Interp_costs.add_bls12_381_fr
  | IMul_bls12_381_g1 _ -> Interp_costs.mul_bls12_381_g1
  | IMul_bls12_381_g2 _ -> Interp_costs.mul_bls12_381_g2
  | IMul_bls12_381_fr _ -> Interp_costs.mul_bls12_381_fr
  | INeg_bls12_381_g1 _ -> Interp_costs.neg_bls12_381_g1
  | INeg_bls12_381_g2 _ -> Interp_costs.neg_bls12_381_g2
  | INeg_bls12_381_fr _ -> Interp_costs.neg_bls12_381_fr
  | IMul_bls12_381_fr_z _ ->
      let z = accu in
      Interp_costs.mul_bls12_381_fr_z z
  | IMul_bls12_381_z_fr _ ->
      let z, _ = stack in
      Interp_costs.mul_bls12_381_z_fr z
  | IDup_n (_, n, _, _) -> Interp_costs.dupn n
  | IComb (_, n, _, _) -> Interp_costs.comb n
  | IUncomb (_, n, _, _) -> Interp_costs.uncomb n
  | IComb_get (_, n, _, _) -> Interp_costs.comb_get n
  | IComb_set (_, n, _, _) -> Interp_costs.comb_set n
  | ITicket _ | ITicket_deprecated _ -> Interp_costs.ticket
  | IRead_ticket _ -> Interp_costs.read_ticket
  | IOpen_chest _ ->
      let (_chest_key : Script_timelock.chest_key) = accu
      and chest, (time, _) = stack in
      Interp_costs.open_chest ~chest ~time:(Script_int.to_zint time)
  | IEmit _ -> Interp_costs.emit
  (* Logging instrumentation is free of gas. *)
  | ILog _ -> Gas.free
[@@ocaml.inline always]
(* [cost_of_control ks] is the gas cost charged for resuming the
   continuation [ks].  All costs are constant except those of the
   list/map body continuations, which inspect the pending work. *)
let cost_of_control : type a s r f. (a, s, r, f) continuation -> Gas.cost =
  function
  (* The logging continuation is free of gas. *)
  | KLog _ -> Gas.free
  | KNil -> Interp_costs.Control.nil
  | KCons _ -> Interp_costs.Control.cons
  | KReturn _ -> Interp_costs.Control.return
  | KMap_head _ -> Interp_costs.Control.map_head
  | KUndip _ -> Interp_costs.Control.undip
  | KLoop_in _ -> Interp_costs.Control.loop_in
  | KLoop_in_left _ -> Interp_costs.Control.loop_in_left
  | KIter _ -> Interp_costs.Control.iter
  (* Cost depends on the elements left to traverse and on the length
     accumulated so far. *)
  | KList_enter_body (_, remaining, _, _, len, _) ->
      Interp_costs.Control.list_enter_body remaining len
  | KList_exit_body _ -> Interp_costs.Control.list_exit_body
  | KMap_enter_body _ -> Interp_costs.Control.map_enter_body
  (* Cost depends on the key being inserted and the map built so far. *)
  | KMap_exit_body (_, _, m, k, _, _) -> Interp_costs.Control.map_exit_body k m
  | KView_exit _ -> Interp_costs.Control.view_exit
(*

   [step] calls [consume_instr] at the beginning of each execution step.

   [Local_gas_counter.consume] is used in the implementation of
   [IConcat_string] and [IConcat_bytes] because in those special cases the
   cost is expressed with respect to a non-constant-time computation on the
   inputs.

*)
(* Charge, on the local gas counter, the cost of the instruction [k]
   that is about to execute on [accu]/[stack]; delegates the actual
   accounting to [consume_opt]. *)
let consume_instr local_gas_counter k accu stack =
  consume_opt local_gas_counter (cost_of_instr k accu stack)
[@@ocaml.inline always]
(* Charge, on the local gas counter, the cost of resuming the
   continuation [ks]; delegates the accounting to [consume_opt]. *)
let consume_control local_gas_counter ks =
  consume_opt local_gas_counter (cost_of_control ks)
[@@ocaml.inline always]
(* Retrieve the execution trace from an optional [logger]; without a
   logger there is nothing to report, hence [Ok None]. *)
let get_log logger_opt =
  match logger_opt with
  | Some logger -> logger.get_log ()
  | None -> Lwt.return (Ok None)
[@@ocaml.inline always]
(* [kundip w accu stack k] walks the stack-prefix witness [w], and for
   each protected cell prefixes the continuation [k] with an [IConst]
   that will re-push that cell's value; it returns the new stack top,
   the remaining stack, and the extended instruction sequence. *)
let rec kundip :
    type a s e z c u d w b t.
    (a, s, e, z, c, u, d, w) stack_prefix_preservation_witness ->
    c ->
    u ->
    (d, w, b, t) kinstr ->
    a * s * (e, z, b, t) kinstr =
 fun w accu stack k ->
  match w with
  | KPrefix (loc, ty, w) ->
      (* Capture the current top cell as a constant in front of [k],
         then recurse on the rest of the protected prefix. *)
      let k = IConst (loc, ty, accu, k) in
      let accu, stack = stack in
      kundip w accu stack k
  | KRest -> (accu, stack, k)
(* [apply ctxt gas ty v lam] specializes [lam] by fixing its first
   formal argument to [v]. The type of [v] is represented by [ty]. *)
(* [apply ctxt gas capture_ty capture lam] partially applies the lambda
   [lam] to the value [capture] (of type described by [capture_ty]).
   Both the typed representation and the Micheline source of the lambda
   are rebuilt so the resulting closure can be unparsed faithfully. *)
let apply ctxt gas capture_ty capture lam =
  let loc = Micheline.dummy_location in
  let ctxt = update_context gas ctxt in
  Script_ir_unparser.unparse_ty ~loc ctxt capture_ty >>?= fun (ty_expr, ctxt) ->
  unparse_data ctxt Optimized capture_ty capture >>=? fun (const_expr, ctxt) ->
  (* Prefix [expr] with code that pushes the captured value and pairs
     it with the remaining argument. *)
  let make_expr expr =
    Micheline.(
      Seq
        ( loc,
          Prim (loc, I_PUSH, [ty_expr; Micheline.root const_expr], [])
          :: Prim (loc, I_PAIR, [], [])
          :: expr ))
  in
  let lam' =
    match lam with
    | LamRec (descr, expr) -> (
        (* Recursive lambdas take the pair (argument, self) on the
           stack; re-expose them as a one-argument lambda that pushes
           the capture, pairs, then executes the original. *)
        let (Item_t (full_arg_ty, Item_t (Lambda_t (_, _, _), Bot_t))) =
          descr.kbef
        in
        let (Item_t (ret_ty, Bot_t)) = descr.kaft in
        Script_ir_unparser.unparse_ty ~loc ctxt full_arg_ty
        >>?= fun (arg_ty_expr, ctxt) ->
        Script_ir_unparser.unparse_ty ~loc ctxt ret_ty
        >>?= fun (ret_ty_expr, ctxt) ->
        match full_arg_ty with
        | Pair_t (capture_ty, arg_ty, _, _) ->
            let arg_stack_ty = Item_t (arg_ty, Bot_t) in
            let full_descr =
              {
                kloc = descr.kloc;
                kbef = arg_stack_ty;
                kaft = descr.kaft;
                kinstr =
                  IConst
                    ( descr.kloc,
                      capture_ty,
                      capture,
                      ICons_pair
                        ( descr.kloc,
                          ILambda
                            ( descr.kloc,
                              lam,
                              ISwap
                                ( descr.kloc,
                                  IExec
                                    ( descr.kloc,
                                      Some descr.kaft,
                                      IHalt descr.kloc ) ) ) ) );
              }
            in
            let full_expr =
              make_expr
                Micheline.
                  [
                    Prim
                      (loc, I_LAMBDA_REC, [arg_ty_expr; ret_ty_expr; expr], []);
                    Prim (loc, I_SWAP, [], []);
                    Prim (loc, I_EXEC, [], []);
                  ]
            in
            return (Lam (full_descr, full_expr), ctxt))
    | Lam (descr, expr) -> (
        (* Plain lambda: just prepend PUSH capture; PAIR to its body. *)
        let (Item_t (full_arg_ty, Bot_t)) = descr.kbef in
        match full_arg_ty with
        | Pair_t (capture_ty, arg_ty, _, _) ->
            let arg_stack_ty = Item_t (arg_ty, Bot_t) in
            let full_descr =
              {
                kloc = descr.kloc;
                kbef = arg_stack_ty;
                kaft = descr.kaft;
                kinstr =
                  IConst
                    ( descr.kloc,
                      capture_ty,
                      capture,
                      ICons_pair (descr.kloc, descr.kinstr) );
              }
            in
            let full_expr = make_expr [expr] in
            return (Lam (full_descr, full_expr), ctxt))
  in
  lam' >>=? fun (lam', ctxt) ->
  let gas, ctxt = local_gas_counter_and_outdated_context ctxt in
  return (lam', ctxt, gas)
(* [make_transaction_to_tx_rollup] builds a [Transaction_to_tx_rollup]
   internal operation.  [amount] must be zero because rollups cannot own
   Tez. *)
let make_transaction_to_tx_rollup (type t) ctxt ~destination ~amount
    ~(parameters_ty : ((t ticket, tx_rollup_l2_address) pair, _) ty) ~parameters
    =
  (* NOTE: this explanatory comment had lost its delimiters (leaving
     bare prose in the middle of the function); it is restored below.

     The entrypoints of a transaction rollup are polymorphic wrt. the
     tickets it can process. However, two Michelson values can have
     the same Micheline representation, but different types. What
     this means is that when we start the execution of a transaction
     rollup, the type of its argument is lost if we just give it the
     values provided by the Michelson script.

     To address this issue, we instrument a transfer to a transaction
     rollup to inject the exact type of the entrypoint as used by
     the smart contract. This allows the transaction rollup to extract
     the type of the ticket. *)
  error_unless Tez.(amount = zero) Rollup_invalid_transaction_amount
  >>?= fun () ->
  let (Pair_t (Ticket_t (tp, _), _, _, _)) = parameters_ty in
  unparse_data ctxt Optimized parameters_ty parameters
  >>=? fun (unparsed_parameters, ctxt) ->
  Lwt.return
    ( Script_ir_unparser.unparse_ty ~loc:Micheline.dummy_location ctxt tp
    >>? fun (ty, ctxt) ->
      (* Bundle the unparsed argument with the ticket's content type. *)
      let unparsed_parameters =
        Micheline.Seq
          (Micheline.dummy_location, [Micheline.root unparsed_parameters; ty])
      in
      (* Charge for the location stripping performed just below. *)
      Gas.consume ctxt (Script.strip_locations_cost unparsed_parameters)
      >|? fun ctxt ->
      let unparsed_parameters = Micheline.strip_locations unparsed_parameters in
      ( Transaction_to_tx_rollup
          {destination; parameters_ty; parameters; unparsed_parameters},
        ctxt ) )
(* [make_transaction_to_sc_rollup] builds a [Transaction_to_sc_rollup]
   internal operation.  [amount] must be zero and only the default
   entrypoint is accepted. *)
let make_transaction_to_sc_rollup ctxt ~destination ~amount ~entrypoint
    ~parameters_ty ~parameters =
  error_unless Tez.(amount = zero) Rollup_invalid_transaction_amount
  >>?= fun () ->
  (* TODO: /-/issues/4023
     We currently don't support entrypoints as the entrypoint information
     for L1 to L2 messages is not propagated to the rollup. *)
  error_unless (Entrypoint.is_default entrypoint) Rollup_invalid_entrypoint
  >>?= fun () ->
  unparse_data ctxt Optimized parameters_ty parameters
  >|=? fun (unparsed_parameters, ctxt) ->
  ( Transaction_to_sc_rollup
      {destination; entrypoint; parameters_ty; parameters; unparsed_parameters},
    ctxt )
(* [emit_event (ctxt, sc) gas ~event_type ~unparsed_ty ~tag ~event_data]
   builds an [Event] internal operation carrying [event_data] (of type
   [event_type]) unparsed in [Optimized] mode, emitted by the current
   contract [sc.self] with a fresh internal nonce.  Events carry no
   lazy-storage diff. *)
let emit_event (type t tc) (ctxt, sc) gas ~(event_type : (t, tc) ty)
    ~unparsed_ty ~tag ~(event_data : t) =
  let ctxt = update_context gas ctxt in
  let lazy_storage_diff = None in
  unparse_data ctxt Optimized event_type event_data
  >>=? fun (unparsed_data, ctxt) ->
  fresh_internal_nonce ctxt >>?= fun (ctxt, nonce) ->
  let operation = Event {ty = unparsed_ty; tag; unparsed_data} in
  let iop =
    {
      source = Destination.Contract (Contract.Originated sc.self);
      operation;
      nonce;
    }
  in
  let res = {piop = Internal_operation iop; lazy_storage_diff} in
  let gas, ctxt = local_gas_counter_and_outdated_context ctxt in
  return (res, ctxt, gas)
(* [make_transaction_to_zk_rollup] builds a [Transaction_to_zk_rollup]
   internal operation; rollups cannot hold Tez, so [amount] is required
   to be zero. *)
let make_transaction_to_zk_rollup (type t) ctxt ~destination ~amount
    ~(parameters_ty : ((t ticket, bytes) pair, _) ty) ~parameters =
  error_unless Tez.(amount = zero) Rollup_invalid_transaction_amount
  >>?= fun () ->
  unparse_data ctxt Optimized parameters_ty parameters
  >>=? fun (unparsed, ctxt) ->
  return
    ( Transaction_to_zk_rollup
        {destination; parameters_ty; parameters; unparsed_parameters = unparsed},
      ctxt )
(* [transfer (ctxt, sc) gas tez parameters_ty parameters destination entrypoint]
   creates an operation that transfers an amount of [tez] to a destination and
   an entrypoint instantiated with argument [parameters] of type
   [parameters_ty]. *)
(* [transfer (ctxt, sc) gas amount location typed_contract parameters]
   builds the internal transaction operation matching the shape of
   [typed_contract] (implicit account, smart contract, or one of the
   rollup kinds), then wraps it with the emitting source [sc.self] and a
   fresh internal nonce. *)
let transfer (type t) (ctxt, sc) gas amount location
    (typed_contract : t typed_contract) (parameters : t) =
  let ctxt = update_context gas ctxt in
  (match typed_contract with
  | Typed_implicit destination ->
      (* An implicit account takes unit; [parameters] is consumed here. *)
      let () = parameters in
      return (Transaction_to_implicit {destination; amount}, None, ctxt)
  | Typed_implicit_with_ticket {destination; ticket_ty} ->
      unparse_data ctxt Optimized ticket_ty parameters
      >>=? fun (unparsed_ticket, ctxt) ->
      return
        ( Transaction_to_implicit_with_ticket
            {
              destination;
              amount;
              ticket_ty;
              ticket = parameters;
              unparsed_ticket = Script.lazy_expr unparsed_ticket;
            },
          None,
          ctxt )
  | Typed_originated
      {arg_ty = parameters_ty; contract_hash = destination; entrypoint} ->
      (* Extract the lazy-storage (big map) diff carried by the argument
         before unparsing it. *)
      collect_lazy_storage ctxt parameters_ty parameters
      >>?= fun (to_duplicate, ctxt) ->
      let to_update = no_lazy_storage_id in
      extract_lazy_storage_diff
        ctxt
        Optimized
        parameters_ty
        parameters
        ~to_duplicate
        ~to_update
        ~temporary:true
      >>=? fun (parameters, lazy_storage_diff, ctxt) ->
      unparse_data ctxt Optimized parameters_ty parameters
      >|=? fun (unparsed_parameters, ctxt) ->
      ( Transaction_to_smart_contract
          {
            destination;
            amount;
            entrypoint;
            location;
            parameters_ty;
            parameters;
            unparsed_parameters;
          },
        lazy_storage_diff,
        ctxt )
  | Typed_tx_rollup {arg_ty = parameters_ty; tx_rollup = destination} ->
      make_transaction_to_tx_rollup
        ctxt
        ~destination
        ~amount
        ~parameters_ty
        ~parameters
      >|=? fun (operation, ctxt) -> (operation, None, ctxt)
  | Typed_sc_rollup
      {arg_ty = parameters_ty; sc_rollup = destination; entrypoint} ->
      make_transaction_to_sc_rollup
        ctxt
        ~destination
        ~amount
        ~entrypoint
        ~parameters_ty
        ~parameters
      >|=? fun (operation, ctxt) -> (operation, None, ctxt)
  | Typed_zk_rollup {arg_ty = parameters_ty; zk_rollup = destination} ->
      make_transaction_to_zk_rollup
        ctxt
        ~destination
        ~amount
        ~parameters_ty
        ~parameters
      >|=? fun (operation, ctxt) -> (operation, None, ctxt))
  >>=? fun (operation, lazy_storage_diff, ctxt) ->
  fresh_internal_nonce ctxt >>?= fun (ctxt, nonce) ->
  let iop =
    {
      source = Destination.Contract (Contract.Originated sc.self);
      operation;
      nonce;
    }
  in
  let res = {piop = Internal_operation iop; lazy_storage_diff} in
  let gas, ctxt = local_gas_counter_and_outdated_context ctxt in
  return (res, ctxt, gas)
(** [create_contract (ctxt, sc) gas storage_ty code delegate credit init]
    creates an origination operation for a contract represented by [code], some
    initial [credit] (withdrawn from the contract being executed), and an
    initial storage [init] of type [storage_ty]. *)
(* Build an [Origination] internal operation: extract the lazy-storage
   diff from the initial storage, unparse it, reserve a fresh contract
   address ([preorigination]) and a fresh internal nonce, and return the
   packed operation together with the refreshed gas counter. *)
let create_contract (ctxt, sc) gas storage_type code delegate credit init =
  let ctxt = update_context gas ctxt in
  collect_lazy_storage ctxt storage_type init >>?= fun (to_duplicate, ctxt) ->
  let to_update = no_lazy_storage_id in
  extract_lazy_storage_diff
    ctxt
    Optimized
    storage_type
    init
    ~to_duplicate
    ~to_update
    ~temporary:true
  >>=? fun (init, lazy_storage_diff, ctxt) ->
  unparse_data ctxt Optimized storage_type init
  >>=? fun (unparsed_storage, ctxt) ->
  (* The address of the contract being originated is derived from the
     current origination nonce. *)
  Contract.fresh_contract_from_current_nonce ctxt
  >>?= fun (ctxt, preorigination) ->
  let operation =
    Origination
      {
        credit;
        delegate;
        code;
        unparsed_storage;
        preorigination;
        storage_type;
        storage = init;
      }
  in
  fresh_internal_nonce ctxt >>?= fun (ctxt, nonce) ->
  let source = Destination.Contract (Contract.Originated sc.self) in
  let piop = Internal_operation {source; operation; nonce} in
  let res = {piop; lazy_storage_diff} in
  let gas, ctxt = local_gas_counter_and_outdated_context ctxt in
  return (res, preorigination, ctxt, gas)
(* [unpack ctxt ~ty ~bytes] deserializes [bytes] into a value of type [ty]. *)
(* Deserialize [bytes] into a typed value.  The payload must start with
   the 0x05 tag byte; any decoding or typechecking failure yields
   [None] (after charging the failure cost) rather than an error. *)
let unpack ctxt ~ty ~bytes =
  Gas.consume
    ctxt
    (Script.deserialization_cost_estimated_from_bytes (Bytes.length bytes))
  >>?= fun ctxt ->
  if
    (* A packed value is the 0x05 tag followed by a binary expression. *)
    Compare.Int.(Bytes.length bytes >= 1)
    && Compare.Int.(TzEndian.get_uint8 bytes 0 = 0x05)
  then
    let str = Bytes.sub_string bytes 1 (Bytes.length bytes - 1) in
    match Data_encoding.Binary.of_string_opt Script.expr_encoding str with
    | None ->
        (* Undecodable payload: charge the failure cost, return [None]. *)
        Lwt.return
          ( Gas.consume ctxt (Interp_costs.unpack_failed str) >|? fun ctxt ->
            (None, ctxt) )
    | Some expr -> (
        parse_data
          ctxt
          ~elab_conf:Script_ir_translator_config.(make ~legacy:false ())
          ~allow_forged:false
          ty
          (Micheline.root expr)
        >|= function
        | Ok (value, ctxt) -> ok (Some value, ctxt)
        | Error _ignored ->
            (* Ill-typed payload: also a soft failure. *)
            Gas.consume ctxt (Interp_costs.unpack_failed str) >|? fun ctxt ->
            (None, ctxt))
  else return (None, ctxt)
(* [interp_stack_prefix_preserving_operation f n accu stk] applies [f]
   to the portion of the stack lying below the prefix described by the
   witness [n], then rebuilds the untouched prefix on top of [f]'s
   resulting stack, propagating [f]'s auxiliary [result]. *)
let rec interp_stack_prefix_preserving_operation :
    type a s b t c u d w result.
    (a -> s -> (b * t) * result) ->
    (a, s, b, t, c, u, d, w) stack_prefix_preservation_witness ->
    c ->
    u ->
    (d * w) * result =
 fun f n accu stk ->
  match (n, stk) with
  | KPrefix (_, _, n), rest ->
      (* Recurse under the current cell, then re-attach [accu] on top. *)
      interp_stack_prefix_preserving_operation f n (fst rest) (snd rest)
      |> fun ((v, rest'), result) -> ((accu, (v, rest')), result)
  | KRest, v -> f accu v
(*

   Some auxiliary functions have complex types and must be annotated
   because of GADTs and polymorphic recursion.

   To improve readability, we introduce their types as abbreviations:

*)
(* Instrumentation hook applied to continuations (used by loggers). *)
type ('a, 'b, 'c, 'd) cont_instrumentation =
  ('a, 'b, 'c, 'd) continuation -> ('a, 'b, 'c, 'd) continuation

(* Identity instrumentation: leaves its argument untouched. *)
let id x = x

(* Type of the auxiliary function handling [KMap_exit_body]. *)
type ('a, 'b, 'c, 'e, 'f, 'm, 'n, 'o) kmap_exit_type =
  ('a, 'b, 'e, 'f) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('m * 'n, 'a * 'b, 'o, 'a * 'b) kinstr ->
  ('m * 'n) list ->
  (('m, 'o) map, 'c) ty option ->
  ('m, 'o) map ->
  'm ->
  (('m, 'o) map, 'a * 'b, 'e, 'f) continuation ->
  'o ->
  'a * 'b ->
  ('e * 'f * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function handling [KMap_enter_body]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'j, 'k) kmap_enter_type =
  ('a, 'b * 'c, 'd, 'e) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('j * 'k, 'b * 'c, 'a, 'b * 'c) kinstr ->
  ('j * 'k) list ->
  (('j, 'a) map, 'f) ty option ->
  ('j, 'a) map ->
  (('j, 'a) map, 'b * 'c, 'd, 'e) continuation ->
  'b ->
  'c ->
  ('d * 'e * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function handling [KList_exit_body]. *)
type ('a, 'b, 'c, 'd, 'e, 'i, 'j) klist_exit_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('i, 'a * 'b, 'j, 'a * 'b) kinstr ->
  'i list ->
  'j Script_list.t ->
  ('j Script_list.t, 'e) ty option ->
  int ->
  ('j Script_list.t, 'a * 'b, 'c, 'd) continuation ->
  'j ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function handling [KList_enter_body]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'j) klist_enter_type =
  ('b, 'a * 'c, 'd, 'e) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('j, 'a * 'c, 'b, 'a * 'c) kinstr ->
  'j list ->
  'b Script_list.t ->
  ('b Script_list.t, 'f) ty option ->
  int ->
  ('b Script_list.t, 'a * 'c, 'd, 'e) continuation ->
  'a ->
  'c ->
  ('d * 'e * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function handling [KLoop_in_left]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g) kloop_in_left_type =
  outdated_context * step_constants ->
  local_gas_counter ->
  ('c, 'd, 'e, 'f) continuation ->
  ('a, 'g, 'c, 'd) kinstr ->
  ('b, 'g, 'e, 'f) continuation ->
  ('a, 'b) or_ ->
  'g ->
  ('e * 'f * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function handling [KLoop_in]. *)
type ('a, 'b, 'c, 'r, 'f, 's) kloop_in_type =
  outdated_context * step_constants ->
  local_gas_counter ->
  ('b, 'c, 'r, 'f) continuation ->
  ('a, 's, 'b, 'c) kinstr ->
  ('a, 's, 'r, 'f) continuation ->
  bool ->
  'a * 's ->
  ('r * 'f * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function handling [KIter]. *)
type ('a, 'b, 's, 'r, 'f, 'c) kiter_type =
  ('a, 's, 'r, 'f) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('b, 'a * 's, 'a, 's) kinstr ->
  ('b, 'c) ty option ->
  'b list ->
  ('a, 's, 'r, 'f) continuation ->
  'a ->
  's ->
  ('r * 'f * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function interpreting [IList_map]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, 'i) ilist_map_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('e, 'a * 'b, 'f, 'a * 'b) kinstr ->
  ('f Script_list.t, 'a * 'b, 'g, 'h) kinstr ->
  ('g, 'h, 'c, 'd) continuation ->
  ('f Script_list.t, 'i) ty option ->
  'e Script_list.t ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function interpreting [IList_iter]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'cmp) ilist_iter_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('e, 'a * 'b, 'a, 'b) kinstr ->
  ('e, 'cmp) ty option ->
  ('a, 'b, 'f, 'g) kinstr ->
  ('f, 'g, 'c, 'd) continuation ->
  'e Script_list.t ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function interpreting [ISet_iter]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g) iset_iter_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('e, 'a * 'b, 'a, 'b) kinstr ->
  'e comparable_ty option ->
  ('a, 'b, 'f, 'g) kinstr ->
  ('f, 'g, 'c, 'd) continuation ->
  'e set ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function interpreting [IMap_map]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, 'i, 'j) imap_map_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('e * 'f, 'a * 'b, 'g, 'a * 'b) kinstr ->
  (('e, 'g) map, 'a * 'b, 'h, 'i) kinstr ->
  ('h, 'i, 'c, 'd) continuation ->
  (('e, 'g) map, 'j) ty option ->
  ('e, 'f) map ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function interpreting [IMap_iter]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, 'cmp) imap_iter_type =
  ('a, 'b, 'c, 'd) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('e * 'f, 'a * 'b, 'a, 'b) kinstr ->
  ('e * 'f, 'cmp) ty option ->
  ('a, 'b, 'g, 'h) kinstr ->
  ('g, 'h, 'c, 'd) continuation ->
  ('e, 'f) map ->
  'a * 'b ->
  ('c * 'd * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function interpreting [IMul_teznat]. *)
type ('a, 'b, 'c, 'd, 'e, 'f) imul_teznat_type =
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  Script.location ->
  (Tez.t, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  Tez.t ->
  Script_int.n Script_int.num * 'b ->
  ('e * 'f * outdated_context * local_gas_counter, error trace) result Lwt.t

(* Type of the auxiliary function interpreting [IMul_nattez]. *)
type ('a, 'b, 'c, 'd, 'e, 'f) imul_nattez_type =
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  Script.location ->
  (Tez.t, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  Script_int.n Script_int.num ->
  Tez.t * 'b ->
  ('e * 'f * outdated_context * local_gas_counter, error trace) result Lwt.t

(* Type of the auxiliary function interpreting [ILsl_nat]. *)
type ('a, 'b, 'c, 'd, 'e, 'f) ilsl_nat_type =
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  Script.location ->
  (Script_int.n Script_int.num, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  Script_int.n Script_int.num ->
  Script_int.n Script_int.num * 'b ->
  ('e * 'f * outdated_context * local_gas_counter, error trace) result Lwt.t

(* Type of the auxiliary function interpreting [ILsr_nat]. *)
type ('a, 'b, 'c, 'd, 'e, 'f) ilsr_nat_type =
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  Script.location ->
  (Script_int.n Script_int.num, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  Script_int.n Script_int.num ->
  Script_int.n Script_int.num * 'b ->
  ('e * 'f * outdated_context * local_gas_counter, error trace) result Lwt.t

(* Type of the auxiliary function interpreting [ILsl_bytes]. *)
type ('a, 'b, 'c, 'd, 'e, 'f) ilsl_bytes_type =
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  Script.location ->
  (bytes, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  bytes ->
  Script_int.n Script_int.num * 'b ->
  ('e * 'f * outdated_context * local_gas_counter, error trace) result Lwt.t

(* Type of the auxiliary function interpreting [IFailwith]; wrapped in
   an unboxed record so its polymorphic type can be passed around. *)
type ifailwith_type = {
  ifailwith :
    'a 'ac 'b.
    logger option ->
    outdated_context * step_constants ->
    local_gas_counter ->
    Script.location ->
    ('a, 'ac) ty ->
    'a ->
    ('b, error trace) result Lwt.t;
}
[@@unboxed]

(* Type of the auxiliary function interpreting [IExec]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'g) iexec_type =
  ('a, end_of_stack, 'e, 'f) cont_instrumentation ->
  logger option ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('a, 'b) stack_ty option ->
  ('a, 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  'g ->
  ('g, 'a) lambda * 'b ->
  ('e * 'f * outdated_context * local_gas_counter) tzresult Lwt.t

(* Type of the auxiliary function interpreting [IView]. *)
type ('a, 'b, 'c, 'd, 'e, 'f, 'i, 'o) iview_type =
  ('o, end_of_stack, 'e, 'f) cont_instrumentation ->
  outdated_context * step_constants ->
  local_gas_counter ->
  ('i, 'o) view_signature ->
  ('a, 'b) stack_ty option ->
  ('o option, 'a * 'b, 'c, 'd) kinstr ->
  ('c, 'd, 'e, 'f) continuation ->
  'i ->
  address * ('a * 'b) ->
  ('e * 'f * outdated_context * local_gas_counter) tzresult Lwt.t
|
9951dc6c6b0f5a58b982e816981256ae6daacc043a7b794a0226cc33612a6867 | clash-lang/clash-prelude | BlockRam.hs | |
Copyright : ( C ) 2013 - 2016 , University of Twente ,
2016 - 2017 , Myrtle Software Ltd ,
2017 , Google Inc.
License : BSD2 ( see the file LICENSE )
Maintainer : < >
BlockRAM primitives
= Using RAMs # usingrams #
We will show a rather elaborate example on how you can , and why you might want
to use ' blockRam 's . We will build a \"small\ " CPU+Memory+Program ROM where we
will slowly evolve to using blockRams . Note that the code is /not/ meant as a
de - facto standard on how to do CPU design in CλaSH .
We start with the definition of the Instructions , Register names and machine
codes :
@
{ \-\ # LANGUAGE RecordWildCards , TupleSections , DeriveAnyClass \#-\ }
module CPU where
import Clash . Prelude
type InstrAddr = Unsigned 8
type = Unsigned 5
type Value = Signed 8
data Instruction
= Compute Operator
| Branch Reg Value
| Jump Value
| Load
| Store Reg MemAddr
| Nop
deriving ( Eq , Show )
data = Zero
| PC
| RegA
| RegB
| RegC
| RegD
| deriving ( Eq , Show , , Undefined )
data Operator = Add | Sub | Incr | Imm | CmpGt
deriving ( Eq , Show )
data MachCode
= MachCode
{ inputX : : , inputY : : , result : : , aluCode : : Operator
, ldReg : : , rdAddr : : , wrAddrM : : Maybe , jmpM : : Maybe Value
}
nullCode = MachCode { inputX = Zero , inputY = Zero , result = Zero , aluCode =
, ldReg = Zero , rdAddr = 0 , wrAddrM = Nothing
, jmpM = Nothing
}
@
Next we define the CPU and its ALU :
@
cpu : : -- ^ Register bank
- > ( Value , Instruction ) -- ^ ( Memory output , Current instruction )
- > ( 7 Value
, ( , Maybe ( , Value ) , InstrAddr )
)
cpu regbank ( memOut , instr ) = ( regbank',(rdAddr,(,aluOut ) ' < $ > ' wrAddrM , fromIntegral ipntr ) )
where
-- Current instruction pointer
ipntr = regbank ' ! ! ' PC
-- Decoder
( MachCode { .. } ) = case instr of
Compute op rx ry res - > nullCode { inputX = rx , inputY = ry , result = res , aluCode = op }
Branch cr a - > nullCode { inputX = cr , jmpM = Just a }
Jump a - > nullCode { aluCode = Incr , jmpM = Just a }
Load a r - > nullCode { ldReg = r , a }
Store r a - > nullCode { inputX = r , wrAddrM = Just a }
nullCode
-- ALU
regX = regbank ' ! ! ' inputX
regY = regbank ' ! ! ' inputY
aluOut = alu aluCode regX regY
-- next instruction
nextPC = case jmpM of
Just a | aluOut /= 0 - > ipntr + a
_ - > ipntr + 1
-- update registers
regbank ' = ' replace ' Zero 0
$ ' replace ' PC nextPC
$ ' replace ' result aluOut
$ ' replace ' ldReg memOut
$ regbank
alu Add x y = x + y
alu Sub x y = x - y
alu _ = x + 1
alu Imm x _ = x
alu CmpGt x y = if x > y then 1 else 0
@
We initially create a memory out of simple registers :
@
dataMem : : HiddenClockReset domain gated synchronous
= > Signal domain -- ^ Read address
- > Signal domain ( Maybe ( , Value ) ) -- ^ ( write address , data in )
- > Signal domain Value -- ^ data out
dataMem rd wrM = ' ' dataMemT ( ' replicate ' d32 0 ) ( bundle ( rd , wrM ) )
where
dataMemT mem ( rd , wrM ) = ( mem',dout )
where
dout = mem ' ! ! ' rd
mem ' = case wrM of
Just ( wr , din ) - > ' replace ' mem
@
And then connect everything :
@
system : : ( KnownNat n , HiddenClockReset domain gated synchronous ) = > domain Value
system instrs = memOut
where
memOut = dataMem rdAddr dout
( rdAddr , dout , ipntr ) = ' Clash . Prelude . Mealy.mealyB ' cpu ( ' replicate ' d7 0 ) ( memOut , instr )
instr = ' Clash . Prelude . ROM.asyncRom ' instrs ' < $ > ' ipntr
@
Create a simple program that calculates the GCD of 4 and 6 :
@
-- Compute GCD of 4 and 6
prog = -- 0 : = 4
Compute Incr Zero RegA RegA :>
replicate d3 ( Compute Incr RegA Zero RegA ) + +
Store RegA 0 :>
-- 1 : = 6
Compute Incr Zero RegA RegA :>
replicate d5 ( Compute Incr RegA Zero RegA ) + +
Store RegA 1 :>
-- A : = 4
Load 0 RegA :>
-- B : = 6
Load 1 RegB :>
-- start
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
-- ( a > b )
Compute Sub RegA RegB RegA :>
Jump ( -6 ) :>
-- ( b > a )
Compute Sub RegB RegA RegB :>
Jump (-8 ) :>
-- end
Store RegA 2 :>
Load 2 RegC :>
Nil
@
And test our system :
@
> > > sampleN 31 ( system prog )
[ 0,0,0,0,0,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2 ]
@
to see that our system indeed calculates that the GCD of 6 and 4 is 2 .
= = = Improvement 1 : using @asyncRam@
As you can see , it 's fairly straightforward to build a memory using registers
and read ( ' ! ! ' ) and write ( ' replace ' ) logic . This might however not result in
the most efficient hardware structure , especially when building an ASIC .
Instead it is preferable to use the ' Clash . Prelude . RAM.asyncRam ' function which
has the potential to be translated to a more efficient structure :
@
: : ( KnownNat n , HiddenClockReset domain gated synchronous ) = > domain Value
system2 instrs = memOut
where
memOut = ' Clash . Prelude . RAM.asyncRam ' d32 rdAddr dout
( rdAddr , dout , ipntr ) = ' mealyB ' cpu ( ' replicate ' d7 0 ) ( memOut , instr )
instr = ' Clash . Prelude . ROM.asyncRom ' instrs ' < $ > ' ipntr
@
Again , we can simulate our system and see that it works . This time however ,
we need to disregard the first few output samples , because the initial content of an
' Clash . Prelude . RAM.asyncRam ' is ' undefined ' , and consequently , the first few
output samples are also ' undefined ' . We use the utility function ' printX ' to conveniently
filter out the undefinedness and replace it with the string " X " in the few leading outputs .
@
> > > printX $ sampleN 31 ( prog )
[ X , X , X , X , X,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2 ]
@
= = = Improvement 2 : using @blockRam@
Finally we get to using ' blockRam ' . On FPGAs , ' Clash . Prelude . RAM.asyncRam ' will
be implemented in terms of LUTs , and therefore take up logic resources . FPGAs
also have large(r ) memory structures called /Block , which are preferred ,
especially as the memories we need for our application get bigger . The
' blockRam ' function will be translated to such a /Block RAM/.
One important aspect of Block RAMs have a /synchronous/ read port , meaning that ,
unlike the behaviour of ' Clash . Prelude . RAM.asyncRam ' , given a read address @r@
at time @t@ , the value @v@ in the RAM at address @r@ is only available at time
@t+1@.
For us that means we need to change the design of our CPU . Right now , upon a
load instruction we generate a read address for the memory , and the value at
that read address is immediately available to be put in the register bank .
Because we will be using a BlockRAM , the value is delayed until the next cycle .
We hence need to also delay the register address to which the memory address
is loaded :
@
cpu2 : : ( Vec 7 Value , ) -- ^ ( Register bank , Load reg addr )
- > ( Value , Instruction ) -- ^ ( Memory output , Current instruction )
- > ( ( Vec 7 Value , )
, ( , Maybe ( , Value ) , InstrAddr )
)
cpu2 ( regbank , ldRegD ) ( memOut , instr ) = ( ( regbank',ldRegD'),(rdAddr,(,aluOut ) ' < $ > ' wrAddrM , fromIntegral ipntr ) )
where
-- Current instruction pointer
ipntr = regbank ' ! ! ' PC
-- Decoder
( MachCode { .. } ) = case instr of
Compute op rx ry res - > nullCode { inputX = rx , inputY = ry , result = res , aluCode = op }
Branch cr a - > nullCode { inputX = cr , jmpM = Just a }
Jump a - > nullCode { aluCode = Incr , jmpM = Just a }
Load a r - > nullCode { ldReg = r , a }
Store r a - > nullCode { inputX = r , wrAddrM = Just a }
nullCode
-- ALU
regX = regbank ' ! ! ' inputX
regY = regbank ' ! ! ' inputY
aluOut = alu aluCode regX regY
-- next instruction
nextPC = case jmpM of
Just a | aluOut /= 0 - > ipntr + a
_ - > ipntr + 1
-- update registers
ldRegD ' = ldReg -- Delay the ldReg by 1 cycle
regbank ' = ' replace ' Zero 0
$ ' replace ' PC nextPC
$ ' replace ' result aluOut
$ ' replace ' ldRegD memOut
$ regbank
@
We can now finally instantiate our system with a ' blockRam ' :
@
: : ( KnownNat n , HiddenClockReset domain gated synchronous ) = > domain Value
= memOut
where
memOut = ' blockRam ' ( replicate d32 0 ) rdAddr dout
( rdAddr , dout , ipntr ) = ' mealyB ' cpu2 ( ( ' replicate ' d7 0),Zero ) ( memOut , instr )
instr = ' Clash . Prelude . ROM.asyncRom ' instrs ' < $ > ' ipntr
@
We are , however , not done . We will also need to update our program . The reason
being that values that we try to load in our registers wo n't be loaded into the
register until the next cycle . This is a problem when the next instruction
immediately depended on this memory value . In our case , this was only the case
when the loaded the value , which was stored at address @1@ , into @RegB@.
Our updated program is thus :
@
prog2 = -- 0 : = 4
Compute Incr Zero RegA RegA :>
replicate d3 ( Compute Incr RegA Zero RegA ) + +
Store RegA 0 :>
-- 1 : = 6
Compute Incr Zero RegA RegA :>
replicate d5 ( Compute Incr RegA Zero RegA ) + +
Store RegA 1 :>
-- A : = 4
Load 0 RegA :>
-- B : = 6
Load 1 RegB :>
Nop :> -- Extra NOP
-- start
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
-- ( a > b )
Compute Sub RegA RegB RegA :>
Jump ( -6 ) :>
-- ( b > a )
Compute Sub RegB RegA RegB :>
Jump (-8 ) :>
-- end
Store RegA 2 :>
Load 2 RegC :>
Nil
@
When we simulate our system we see that it works . This time again ,
we need to disregard the first sample , because the initial output of a
' blockRam ' is ' undefined ' . We use the utility function ' printX ' to conveniently
filter out the undefinedness and replace it with the string " X " .
@
> > > printX $ sampleN 33 ( )
[ X,0,0,0,0,0,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2 ]
@
This concludes the short introduction to using ' blockRam ' .
Copyright : (C) 2013-2016, University of Twente,
2016-2017, Myrtle Software Ltd,
2017 , Google Inc.
License : BSD2 (see the file LICENSE)
Maintainer : Christiaan Baaij <>
BlockRAM primitives
= Using RAMs #usingrams#
We will show a rather elaborate example on how you can, and why you might want
to use 'blockRam's. We will build a \"small\" CPU+Memory+Program ROM where we
will slowly evolve to using blockRams. Note that the code is /not/ meant as a
de-facto standard on how to do CPU design in CλaSH.
We start with the definition of the Instructions, Register names and machine
codes:
@
{\-\# LANGUAGE RecordWildCards, TupleSections, DeriveAnyClass \#-\}
module CPU where
import Clash.Prelude
type InstrAddr = Unsigned 8
type MemAddr = Unsigned 5
type Value = Signed 8
data Instruction
= Compute Operator Reg Reg Reg
| Branch Reg Value
| Jump Value
| Load MemAddr Reg
| Store Reg MemAddr
| Nop
deriving (Eq,Show)
data Reg
= Zero
| PC
| RegA
| RegB
| RegC
| RegD
| RegE
deriving (Eq,Show,Enum,Undefined)
data Operator = Add | Sub | Incr | Imm | CmpGt
deriving (Eq,Show)
data MachCode
= MachCode
{ inputX :: Reg
, inputY :: Reg
, result :: Reg
, aluCode :: Operator
, ldReg :: Reg
, rdAddr :: MemAddr
, wrAddrM :: Maybe MemAddr
, jmpM :: Maybe Value
}
nullCode = MachCode { inputX = Zero, inputY = Zero, result = Zero, aluCode = Imm
, ldReg = Zero, rdAddr = 0, wrAddrM = Nothing
, jmpM = Nothing
}
@
Next we define the CPU and its ALU:
@
cpu :: Vec 7 Value -- ^ Register bank
-> (Value,Instruction) -- ^ (Memory output, Current instruction)
-> ( Vec 7 Value
, (MemAddr, Maybe (MemAddr,Value), InstrAddr)
)
cpu regbank (memOut,instr) = (regbank',(rdAddr,(,aluOut) '<$>' wrAddrM,fromIntegral ipntr))
where
-- Current instruction pointer
ipntr = regbank '!!' PC
-- Decoder
(MachCode {..}) = case instr of
Compute op rx ry res -> nullCode {inputX=rx,inputY=ry,result=res,aluCode=op}
Branch cr a -> nullCode {inputX=cr,jmpM=Just a}
Jump a -> nullCode {aluCode=Incr,jmpM=Just a}
Load a r -> nullCode {ldReg=r,rdAddr=a}
Store r a -> nullCode {inputX=r,wrAddrM=Just a}
Nop -> nullCode
-- ALU
regX = regbank '!!' inputX
regY = regbank '!!' inputY
aluOut = alu aluCode regX regY
-- next instruction
nextPC = case jmpM of
Just a | aluOut /= 0 -> ipntr + a
_ -> ipntr + 1
-- update registers
regbank' = 'replace' Zero 0
$ 'replace' PC nextPC
$ 'replace' result aluOut
$ 'replace' ldReg memOut
$ regbank
alu Add x y = x + y
alu Sub x y = x - y
alu Incr x _ = x + 1
alu Imm x _ = x
alu CmpGt x y = if x > y then 1 else 0
@
We initially create a memory out of simple registers:
@
dataMem :: HiddenClockReset domain gated synchronous
=> Signal domain MemAddr -- ^ Read address
-> Signal domain (Maybe (MemAddr,Value)) -- ^ (write address, data in)
-> Signal domain Value -- ^ data out
dataMem rd wrM = 'Clash.Prelude.Mealy.mealy' dataMemT ('replicate' d32 0) (bundle (rd,wrM))
where
dataMemT mem (rd,wrM) = (mem',dout)
where
dout = mem '!!' rd
mem' = case wrM of
Just (wr,din) -> 'replace' wr din mem
_ -> mem
@
And then connect everything:
@
system :: (KnownNat n, HiddenClockReset domain gated synchronous) => Vec n Instruction -> Signal domain Value
system instrs = memOut
where
memOut = dataMem rdAddr dout
(rdAddr,dout,ipntr) = 'Clash.Prelude.Mealy.mealyB' cpu ('replicate' d7 0) (memOut,instr)
instr = 'Clash.Prelude.ROM.asyncRom' instrs '<$>' ipntr
@
Create a simple program that calculates the GCD of 4 and 6:
@
-- Compute GCD of 4 and 6
prog = -- 0 := 4
Compute Incr Zero RegA RegA :>
replicate d3 (Compute Incr RegA Zero RegA) ++
Store RegA 0 :>
-- 1 := 6
Compute Incr Zero RegA RegA :>
replicate d5 (Compute Incr RegA Zero RegA) ++
Store RegA 1 :>
-- A := 4
Load 0 RegA :>
-- B := 6
Load 1 RegB :>
-- start
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
-- (a > b)
Compute Sub RegA RegB RegA :>
Jump (-6) :>
-- (b > a)
Compute Sub RegB RegA RegB :>
Jump (-8) :>
-- end
Store RegA 2 :>
Load 2 RegC :>
Nil
@
And test our system:
@
>>> sampleN 31 (system prog)
[0,0,0,0,0,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2]
@
to see that our system indeed calculates that the GCD of 6 and 4 is 2.
=== Improvement 1: using @asyncRam@
As you can see, it's fairly straightforward to build a memory using registers
and read ('!!') and write ('replace') logic. This might however not result in
the most efficient hardware structure, especially when building an ASIC.
Instead it is preferable to use the 'Clash.Prelude.RAM.asyncRam' function which
has the potential to be translated to a more efficient structure:
@
system2 :: (KnownNat n, HiddenClockReset domain gated synchronous) => Vec n Instruction -> Signal domain Value
system2 instrs = memOut
where
memOut = 'Clash.Prelude.RAM.asyncRam' d32 rdAddr dout
(rdAddr,dout,ipntr) = 'mealyB' cpu ('replicate' d7 0) (memOut,instr)
instr = 'Clash.Prelude.ROM.asyncRom' instrs '<$>' ipntr
@
Again, we can simulate our system and see that it works. This time however,
we need to disregard the first few output samples, because the initial content of an
'Clash.Prelude.RAM.asyncRam' is 'undefined', and consequently, the first few
output samples are also 'undefined'. We use the utility function 'printX' to conveniently
filter out the undefinedness and replace it with the string "X" in the few leading outputs.
@
>>> printX $ sampleN 31 (system2 prog)
[X,X,X,X,X,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2]
@
=== Improvement 2: using @blockRam@
Finally we get to using 'blockRam'. On FPGAs, 'Clash.Prelude.RAM.asyncRam' will
be implemented in terms of LUTs, and therefore take up logic resources. FPGAs
also have large(r) memory structures called /Block RAMs/, which are preferred,
especially as the memories we need for our application get bigger. The
'blockRam' function will be translated to such a /Block RAM/.
One important aspect of Block RAMs have a /synchronous/ read port, meaning that,
unlike the behaviour of 'Clash.Prelude.RAM.asyncRam', given a read address @r@
at time @t@, the value @v@ in the RAM at address @r@ is only available at time
@t+1@.
For us that means we need to change the design of our CPU. Right now, upon a
load instruction we generate a read address for the memory, and the value at
that read address is immediately available to be put in the register bank.
Because we will be using a BlockRAM, the value is delayed until the next cycle.
We hence need to also delay the register address to which the memory address
is loaded:
@
cpu2 :: (Vec 7 Value,Reg) -- ^ (Register bank, Load reg addr)
-> (Value,Instruction) -- ^ (Memory output, Current instruction)
-> ( (Vec 7 Value,Reg)
, (MemAddr, Maybe (MemAddr,Value), InstrAddr)
)
cpu2 (regbank,ldRegD) (memOut,instr) = ((regbank',ldRegD'),(rdAddr,(,aluOut) '<$>' wrAddrM,fromIntegral ipntr))
where
-- Current instruction pointer
ipntr = regbank '!!' PC
-- Decoder
(MachCode {..}) = case instr of
Compute op rx ry res -> nullCode {inputX=rx,inputY=ry,result=res,aluCode=op}
Branch cr a -> nullCode {inputX=cr,jmpM=Just a}
Jump a -> nullCode {aluCode=Incr,jmpM=Just a}
Load a r -> nullCode {ldReg=r,rdAddr=a}
Store r a -> nullCode {inputX=r,wrAddrM=Just a}
Nop -> nullCode
-- ALU
regX = regbank '!!' inputX
regY = regbank '!!' inputY
aluOut = alu aluCode regX regY
-- next instruction
nextPC = case jmpM of
Just a | aluOut /= 0 -> ipntr + a
_ -> ipntr + 1
-- update registers
ldRegD' = ldReg -- Delay the ldReg by 1 cycle
regbank' = 'replace' Zero 0
$ 'replace' PC nextPC
$ 'replace' result aluOut
$ 'replace' ldRegD memOut
$ regbank
@
We can now finally instantiate our system with a 'blockRam':
@
system3 :: (KnownNat n, HiddenClockReset domain gated synchronous) => Vec n Instruction -> Signal domain Value
system3 instrs = memOut
where
memOut = 'blockRam' (replicate d32 0) rdAddr dout
(rdAddr,dout,ipntr) = 'mealyB' cpu2 (('replicate' d7 0),Zero) (memOut,instr)
instr = 'Clash.Prelude.ROM.asyncRom' instrs '<$>' ipntr
@
We are, however, not done. We will also need to update our program. The reason
being that values that we try to load in our registers won't be loaded into the
register until the next cycle. This is a problem when the next instruction
immediately depended on this memory value. In our case, this was only the case
when the loaded the value @6@, which was stored at address @1@, into @RegB@.
Our updated program is thus:
@
prog2 = -- 0 := 4
Compute Incr Zero RegA RegA :>
replicate d3 (Compute Incr RegA Zero RegA) ++
Store RegA 0 :>
-- 1 := 6
Compute Incr Zero RegA RegA :>
replicate d5 (Compute Incr RegA Zero RegA) ++
Store RegA 1 :>
-- A := 4
Load 0 RegA :>
-- B := 6
Load 1 RegB :>
Nop :> -- Extra NOP
-- start
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
-- (a > b)
Compute Sub RegA RegB RegA :>
Jump (-6) :>
-- (b > a)
Compute Sub RegB RegA RegB :>
Jump (-8) :>
-- end
Store RegA 2 :>
Load 2 RegC :>
Nil
@
When we simulate our system we see that it works. This time again,
we need to disregard the first sample, because the initial output of a
'blockRam' is 'undefined'. We use the utility function 'printX' to conveniently
filter out the undefinedness and replace it with the string "X".
@
>>> printX $ sampleN 33 (system3 prog2)
[X,0,0,0,0,0,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2]
@
This concludes the short introduction to using 'blockRam'.
-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE Safe #-}

{-# OPTIONS_HADDOCK show-extensions #-}
-- | Public API: two block-RAM constructors ('blockRam', 'blockRamPow2')
-- and one read\/write-conflict adapter ('readNew').
module Clash.Prelude.BlockRam
  ( -- * BlockRAM synchronised to the system clock
    blockRam
  , blockRamPow2
    -- * Read/Write conflict resolution
  , readNew
  )
where
import GHC.TypeLits (KnownNat, type (^))
import GHC.Stack (HasCallStack, withFrozenCallStack)
import qualified Clash.Explicit.BlockRam as E
import Clash.Signal
import Clash.Sized.Unsigned (Unsigned)
import Clash.Sized.Vector (Vec)
import Clash.XException (Undefined)
$ setup
> > > import Clash . Prelude as C
> > > import qualified Data . List as L
> > > : set -XDataKinds -XRecordWildCards -XTupleSections -XTypeApplications -XFlexibleContexts -XDeriveAnyClass
> > > type InstrAddr = Unsigned 8
> > > type = Unsigned 5
> > > type Value = Signed 8
> > > : {
data = Zero
| PC
| RegA
| RegB
| RegC
| RegD
| deriving ( Eq , Show , , Undefined )
:}
> > > : {
data Operator = Add | Sub | Incr | Imm | CmpGt
deriving ( Eq , Show )
:}
> > > : {
data Instruction
= Compute Operator
| Branch Reg Value
| Jump Value
| Load
| Store Reg MemAddr
| Nop
deriving ( Eq , Show )
:}
> > > : {
data MachCode
= MachCode
{ inputX : : , inputY : : , result : : , aluCode : : Operator
, ldReg : : , rdAddr : : , wrAddrM : : Maybe , jmpM : : Maybe Value
}
:}
> > > : {
nullCode = MachCode { inputX = Zero , inputY = Zero , result = Zero , aluCode =
, ldReg = Zero , rdAddr = 0 , wrAddrM = Nothing
, jmpM = Nothing
}
:}
> > > : {
alu Add x y = x + y
alu Sub x y = x - y
alu _ = x + 1
alu Imm x _ = x
alu CmpGt x y = if x > y then 1 else 0
:}
> > > : {
cpu : : -- ^ Register bank
- > ( Value , Instruction ) -- ^ ( Memory output , Current instruction )
- > ( 7 Value
, ( , Maybe ( , Value),InstrAddr )
)
cpu regbank ( memOut , instr ) = ( regbank',(rdAddr,(,aluOut ) < $ > wrAddrM , fromIntegral ipntr ) )
where
-- Current instruction pointer
ipntr = regbank C. ! ! PC
-- Decoder
( MachCode { .. } ) = case instr of
Compute op rx ry res - > nullCode { inputX = rx , inputY = ry , result = res , aluCode = op }
Branch cr a - > nullCode { inputX = cr , jmpM = Just a }
Jump a - > nullCode { aluCode = Incr , jmpM = Just a }
Load a r - > nullCode { ldReg = r , a }
Store r a - > nullCode { inputX = r , wrAddrM = Just a }
-- ALU
regX = regbank C. ! ! inputX
regY = regbank C. ! ! inputY
aluOut = alu aluCode regX regY
-- next instruction
nextPC = case jmpM of
Just a | aluOut /= 0 - > ipntr + a
_ - > ipntr + 1
-- update registers
regbank ' = replace Zero 0
$ replace PC nextPC
$ replace result aluOut
$ replace ldReg memOut
$ regbank
:}
> > > : {
dataMem
: : HiddenClockReset domain gated synchronous
= > Signal domain domain ( Maybe ( , Value ) )
- > Signal domain Value
dataMem rd wrM = mealy dataMemT ( C.replicate d32 0 ) ( bundle ( rd , wrM ) )
where
dataMemT mem ( rd , wrM ) = ( mem',dout )
where
dout = mem C. ! ! rd
' = case wrM of
Just ( wr , din ) - > replace Nothing - > mem
:}
> > > : {
system
: : ( KnownNat n , HiddenClockReset domain gated synchronous )
= > n Instruction
- > Signal domain Value
system instrs = memOut
where
memOut = dataMem rdAddr dout
( rdAddr , dout , ipntr ) = mealyB cpu ( C.replicate d7 0 ) ( memOut , instr )
instr = asyncRom instrs < $ > ipntr
:}
> > > : {
-- Compute GCD of 4 and 6
prog = -- 0 : = 4
Compute Incr Zero RegA RegA :>
C.replicate d3 ( Compute Incr RegA Zero RegA ) C.++
Store RegA 0 :>
-- 1 : = 6
Compute Incr Zero RegA RegA :>
C.replicate d5 ( Compute Incr RegA Zero RegA ) C.++
Store RegA 1 :>
-- A : = 4
Load 0 RegA :>
-- B : = 6
Load 1 RegB :>
-- start
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
-- ( a > b )
Compute Sub RegA RegB RegA :>
Jump ( -6 ) :>
-- ( b > a )
Compute Sub RegB RegA RegB :>
Jump (-8 ) :>
-- end
Store RegA 2 :>
Load 2 RegC :>
Nil
:}
> > > : {
: : ( KnownNat n , HiddenClockReset domain gated synchronous )
= > n Instruction
- > Signal domain Value
system2 instrs = memOut
where
memOut = asyncRam d32 rdAddr dout
( rdAddr , dout , ipntr ) = mealyB cpu ( C.replicate d7 0 ) ( memOut , instr )
instr = asyncRom instrs < $ > ipntr
:}
> > > : {
cpu2 : : ( Vec 7 Value , ) -- ^ ( Register bank , Load reg addr )
- > ( Value , Instruction ) -- ^ ( Memory output , Current instruction )
- > ( ( Vec 7 Value , )
, ( , Maybe ( , Value),InstrAddr )
)
cpu2 ( regbank , ldRegD ) ( memOut , instr ) = ( ( regbank',ldRegD'),(rdAddr,(,aluOut ) < $ > wrAddrM , fromIntegral ipntr ) )
where
-- Current instruction pointer
ipntr = regbank C. ! ! PC
-- Decoder
( MachCode { .. } ) = case instr of
Compute op rx ry res - > nullCode { inputX = rx , inputY = ry , result = res , aluCode = op }
Branch cr a - > nullCode { inputX = cr , jmpM = Just a }
Jump a - > nullCode { aluCode = Incr , jmpM = Just a }
Load a r - > nullCode { ldReg = r , a }
Store r a - > nullCode { inputX = r , wrAddrM = Just a }
-- ALU
regX = regbank C. ! ! inputX
regY = regbank C. ! ! inputY
aluOut = alu aluCode regX regY
-- next instruction
nextPC = case jmpM of
Just a | aluOut /= 0 - > ipntr + a
_ - > ipntr + 1
-- update registers
ldRegD ' = ldReg -- Delay the ldReg by 1 cycle
regbank ' = replace Zero 0
$ replace PC nextPC
$ replace result aluOut
$ replace ldRegD memOut
$ regbank
:}
> > > : {
: : ( KnownNat n , HiddenClockReset domain gated synchronous )
= > n Instruction
- > Signal domain Value
= memOut
where
memOut = blockRam ( C.replicate d32 0 ) rdAddr dout
( rdAddr , dout , ipntr ) = mealyB cpu2 ( ( C.replicate d7 0),Zero ) ( memOut , instr )
instr = asyncRom instrs < $ > ipntr
:}
> > > : {
prog2 = -- 0 : = 4
Compute Incr Zero RegA RegA :>
C.replicate d3 ( Compute Incr RegA Zero RegA ) C.++
Store RegA 0 :>
-- 1 : = 6
Compute Incr Zero RegA RegA :>
C.replicate d5 ( Compute Incr RegA Zero RegA ) C.++
Store RegA 1 :>
-- A : = 4
Load 0 RegA :>
-- B : = 6
Load 1 RegB :>
Nop :> -- Extra NOP
-- start
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
-- ( a > b )
Compute Sub RegA RegB RegA :>
Jump ( -6 ) :>
-- ( b > a )
Compute Sub RegB RegA RegB :>
Jump (-8 ) :>
-- end
Store RegA 2 :>
Load 2 RegC :>
Nil
:}
>>> import Clash.Prelude as C
>>> import qualified Data.List as L
>>> :set -XDataKinds -XRecordWildCards -XTupleSections -XTypeApplications -XFlexibleContexts -XDeriveAnyClass
>>> type InstrAddr = Unsigned 8
>>> type MemAddr = Unsigned 5
>>> type Value = Signed 8
>>> :{
data Reg
= Zero
| PC
| RegA
| RegB
| RegC
| RegD
| RegE
deriving (Eq,Show,Enum,Undefined)
:}
>>> :{
data Operator = Add | Sub | Incr | Imm | CmpGt
deriving (Eq,Show)
:}
>>> :{
data Instruction
= Compute Operator Reg Reg Reg
| Branch Reg Value
| Jump Value
| Load MemAddr Reg
| Store Reg MemAddr
| Nop
deriving (Eq,Show)
:}
>>> :{
data MachCode
= MachCode
{ inputX :: Reg
, inputY :: Reg
, result :: Reg
, aluCode :: Operator
, ldReg :: Reg
, rdAddr :: MemAddr
, wrAddrM :: Maybe MemAddr
, jmpM :: Maybe Value
}
:}
>>> :{
nullCode = MachCode { inputX = Zero, inputY = Zero, result = Zero, aluCode = Imm
, ldReg = Zero, rdAddr = 0, wrAddrM = Nothing
, jmpM = Nothing
}
:}
>>> :{
alu Add x y = x + y
alu Sub x y = x - y
alu Incr x _ = x + 1
alu Imm x _ = x
alu CmpGt x y = if x > y then 1 else 0
:}
>>> :{
cpu :: Vec 7 Value -- ^ Register bank
-> (Value,Instruction) -- ^ (Memory output, Current instruction)
-> ( Vec 7 Value
, (MemAddr,Maybe (MemAddr,Value),InstrAddr)
)
cpu regbank (memOut,instr) = (regbank',(rdAddr,(,aluOut) <$> wrAddrM,fromIntegral ipntr))
where
-- Current instruction pointer
ipntr = regbank C.!! PC
-- Decoder
(MachCode {..}) = case instr of
Compute op rx ry res -> nullCode {inputX=rx,inputY=ry,result=res,aluCode=op}
Branch cr a -> nullCode {inputX=cr,jmpM=Just a}
Jump a -> nullCode {aluCode=Incr,jmpM=Just a}
Load a r -> nullCode {ldReg=r,rdAddr=a}
Store r a -> nullCode {inputX=r,wrAddrM=Just a}
Nop -> nullCode
-- ALU
regX = regbank C.!! inputX
regY = regbank C.!! inputY
aluOut = alu aluCode regX regY
-- next instruction
nextPC = case jmpM of
Just a | aluOut /= 0 -> ipntr + a
_ -> ipntr + 1
-- update registers
regbank' = replace Zero 0
$ replace PC nextPC
$ replace result aluOut
$ replace ldReg memOut
$ regbank
:}
>>> :{
dataMem
:: HiddenClockReset domain gated synchronous
=> Signal domain MemAddr
-> Signal domain (Maybe (MemAddr,Value))
-> Signal domain Value
dataMem rd wrM = mealy dataMemT (C.replicate d32 0) (bundle (rd,wrM))
where
dataMemT mem (rd,wrM) = (mem',dout)
where
dout = mem C.!! rd
mem' = case wrM of
Just (wr,din) -> replace wr din mem
Nothing -> mem
:}
>>> :{
system
:: (KnownNat n, HiddenClockReset domain gated synchronous)
=> Vec n Instruction
-> Signal domain Value
system instrs = memOut
where
memOut = dataMem rdAddr dout
(rdAddr,dout,ipntr) = mealyB cpu (C.replicate d7 0) (memOut,instr)
instr = asyncRom instrs <$> ipntr
:}
>>> :{
-- Compute GCD of 4 and 6
prog = -- 0 := 4
Compute Incr Zero RegA RegA :>
C.replicate d3 (Compute Incr RegA Zero RegA) C.++
Store RegA 0 :>
-- 1 := 6
Compute Incr Zero RegA RegA :>
C.replicate d5 (Compute Incr RegA Zero RegA) C.++
Store RegA 1 :>
-- A := 4
Load 0 RegA :>
-- B := 6
Load 1 RegB :>
-- start
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
-- (a > b)
Compute Sub RegA RegB RegA :>
Jump (-6) :>
-- (b > a)
Compute Sub RegB RegA RegB :>
Jump (-8) :>
-- end
Store RegA 2 :>
Load 2 RegC :>
Nil
:}
>>> :{
system2
:: (KnownNat n, HiddenClockReset domain gated synchronous)
=> Vec n Instruction
-> Signal domain Value
system2 instrs = memOut
where
memOut = asyncRam d32 rdAddr dout
(rdAddr,dout,ipntr) = mealyB cpu (C.replicate d7 0) (memOut,instr)
instr = asyncRom instrs <$> ipntr
:}
>>> :{
cpu2 :: (Vec 7 Value,Reg) -- ^ (Register bank, Load reg addr)
-> (Value,Instruction) -- ^ (Memory output, Current instruction)
-> ( (Vec 7 Value,Reg)
, (MemAddr,Maybe (MemAddr,Value),InstrAddr)
)
cpu2 (regbank,ldRegD) (memOut,instr) = ((regbank',ldRegD'),(rdAddr,(,aluOut) <$> wrAddrM,fromIntegral ipntr))
where
-- Current instruction pointer
ipntr = regbank C.!! PC
-- Decoder
(MachCode {..}) = case instr of
Compute op rx ry res -> nullCode {inputX=rx,inputY=ry,result=res,aluCode=op}
Branch cr a -> nullCode {inputX=cr,jmpM=Just a}
Jump a -> nullCode {aluCode=Incr,jmpM=Just a}
Load a r -> nullCode {ldReg=r,rdAddr=a}
Store r a -> nullCode {inputX=r,wrAddrM=Just a}
Nop -> nullCode
-- ALU
regX = regbank C.!! inputX
regY = regbank C.!! inputY
aluOut = alu aluCode regX regY
-- next instruction
nextPC = case jmpM of
Just a | aluOut /= 0 -> ipntr + a
_ -> ipntr + 1
-- update registers
ldRegD' = ldReg -- Delay the ldReg by 1 cycle
regbank' = replace Zero 0
$ replace PC nextPC
$ replace result aluOut
$ replace ldRegD memOut
$ regbank
:}
>>> :{
system3
:: (KnownNat n, HiddenClockReset domain gated synchronous)
=> Vec n Instruction
-> Signal domain Value
system3 instrs = memOut
where
memOut = blockRam (C.replicate d32 0) rdAddr dout
(rdAddr,dout,ipntr) = mealyB cpu2 ((C.replicate d7 0),Zero) (memOut,instr)
instr = asyncRom instrs <$> ipntr
:}
>>> :{
prog2 = -- 0 := 4
Compute Incr Zero RegA RegA :>
C.replicate d3 (Compute Incr RegA Zero RegA) C.++
Store RegA 0 :>
-- 1 := 6
Compute Incr Zero RegA RegA :>
C.replicate d5 (Compute Incr RegA Zero RegA) C.++
Store RegA 1 :>
-- A := 4
Load 0 RegA :>
-- B := 6
Load 1 RegB :>
Nop :> -- Extra NOP
-- start
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
-- (a > b)
Compute Sub RegA RegB RegA :>
Jump (-6) :>
-- (b > a)
Compute Sub RegB RegA RegB :>
Jump (-8) :>
-- end
Store RegA 2 :>
Load 2 RegC :>
Nil
:}
-}
-- | Create a blockRAM with space for @n@ elements.
--
-- * __NB__: Read value is delayed by 1 cycle
-- * __NB__: Initial output value is 'undefined'
--
-- @
-- bram40
--   :: 'HiddenClock' domain
--   => 'Signal' domain ('Unsigned' 6)
--   -> 'Signal' domain (Maybe ('Unsigned' 6, 'Clash.Sized.BitVector.Bit'))
--   -> 'Signal' domain 'Clash.Sized.BitVector.Bit'
-- bram40 = 'blockRam' ('Clash.Sized.Vector.replicate' d40 1)
-- @
--
-- Additional helpful information:
--
-- * See "Clash.Prelude.BlockRam#usingrams" for more information on how to use a
-- Block RAM.
-- * Use the adapter 'readNew' for obtaining write-before-read semantics like this: @readNew (blockRam inits) rd wrM@.
blockRam
  :: (Enum addr, HiddenClock domain gated, HasCallStack)
  => Vec n a
  -- ^ Initial content of the BRAM, also determines the size, @n@, of
  -- the BRAM.
  --
  -- __NB__: __MUST__ be a constant.
  -> Signal domain addr -- ^ Read address @r@
  -> Signal domain (Maybe (addr, a))
  -- ^ (write address @w@, value to write)
  -> Signal domain a
  -- ^ Value of the @blockRAM@ at address @r@ from the previous clock
  -- cycle
blockRam = \cnt rd wrM -> withFrozenCallStack
  (hideClock E.blockRam cnt rd wrM)
{-# INLINE blockRam #-}
-- | Create a blockRAM with space for 2^@n@ elements
--
-- * __NB__: Read value is delayed by 1 cycle
-- * __NB__: Initial output value is 'undefined'
--
-- @
-- bram32
--   :: 'HiddenClock' domain
--   => 'Signal' domain ('Unsigned' 5)
--   -> 'Signal' domain (Maybe ('Unsigned' 5, 'Clash.Sized.BitVector.Bit'))
--   -> 'Signal' domain 'Clash.Sized.BitVector.Bit'
-- bram32 = 'blockRamPow2' ('Clash.Sized.Vector.replicate' d32 1)
-- @
--
-- Additional helpful information:
--
-- * See "Clash.Prelude.BlockRam#usingrams" for more information on how to use a
-- Block RAM.
-- * Use the adapter 'readNew' for obtaining write-before-read semantics like this: @readNew (blockRamPow2 inits) rd wrM@.
blockRamPow2
  :: (KnownNat n, HiddenClock domain gated, HasCallStack)
  => Vec (2^n) a
  -- ^ Initial content of the BRAM, also determines the size, @2^n@, of
  -- the BRAM.
  --
  -- __NB__: __MUST__ be a constant.
  -> Signal domain (Unsigned n) -- ^ Read address @r@
  -> Signal domain (Maybe (Unsigned n, a))
  -- ^ (write address @w@, value to write)
  -> Signal domain a
  -- ^ Value of the @blockRAM@ at address @r@ from the previous clock
  -- cycle
blockRamPow2 = \cnt rd wrM -> withFrozenCallStack
  (hideClock E.blockRamPow2 cnt rd wrM)
{-# INLINE blockRamPow2 #-}
-- | Create read-after-write blockRAM from a read-before-write one
-- (synchronised to system clock)
--
-- >>> import Clash.Prelude
-- >>> :t readNew (blockRam (0 :> 1 :> Nil))
-- readNew (blockRam (0 :> 1 :> Nil))
--   :: ...
--      ... =>
--      Signal domain addr
--      -> Signal domain (Maybe (addr, a)) -> Signal domain a
readNew :: (Eq addr, Undefined a, HiddenClockReset domain gated synchronous)
        => (Signal domain addr -> Signal domain (Maybe (addr, a)) -> Signal domain a)
        -- ^ The @ram@ component
        -> Signal domain addr -- ^ Read address @r@
        -> Signal domain (Maybe (addr, a)) -- ^ (Write address @w@, value to write)
        -> Signal domain a
        -- ^ Value of the @ram@ at address @r@ from the previous clock
        -- cycle
readNew = hideClockReset (\clk rst -> E.readNew rst clk)
{-# INLINE readNew #-}
| null | https://raw.githubusercontent.com/clash-lang/clash-prelude/5645d8417ab495696cf4e0293796133c7fe2a9a7/src/Clash/Prelude/BlockRam.hs | haskell | ^ Register bank
^ ( Memory output , Current instruction )
Current instruction pointer
Decoder
ALU
next instruction
update registers
^ Read address
^ ( write address , data in )
^ data out
Compute GCD of 4 and 6
0 : = 4
1 : = 6
A : = 4
B : = 6
start
( a > b )
( b > a )
end
^ ( Register bank , Load reg addr )
^ ( Memory output , Current instruction )
Current instruction pointer
Decoder
ALU
next instruction
update registers
Delay the ldReg by 1 cycle
0 : = 4
1 : = 6
A : = 4
B : = 6
Extra NOP
start
( a > b )
( b > a )
end
^ Register bank
^ (Memory output, Current instruction)
Current instruction pointer
Decoder
ALU
next instruction
update registers
^ Read address
^ (write address, data in)
^ data out
Compute GCD of 4 and 6
0 := 4
1 := 6
A := 4
B := 6
start
(a > b)
(b > a)
end
^ (Register bank, Load reg addr)
^ (Memory output, Current instruction)
Current instruction pointer
Decoder
ALU
next instruction
update registers
Delay the ldReg by 1 cycle
0 := 4
1 := 6
A := 4
B := 6
Extra NOP
start
(a > b)
(b > a)
end
# LANGUAGE BangPatterns #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeOperators #
# LANGUAGE Safe #
# OPTIONS_HADDOCK show-extensions #
* BlockRAM synchronised to the system clock
* Read/Write conflict resolution
^ Register bank
^ ( Memory output , Current instruction )
Current instruction pointer
Decoder
ALU
next instruction
update registers
Compute GCD of 4 and 6
0 : = 4
1 : = 6
A : = 4
B : = 6
start
( a > b )
( b > a )
end
^ ( Register bank , Load reg addr )
^ ( Memory output , Current instruction )
Current instruction pointer
Decoder
ALU
next instruction
update registers
Delay the ldReg by 1 cycle
0 : = 4
1 : = 6
A : = 4
B : = 6
Extra NOP
start
( a > b )
( b > a )
end
^ Register bank
^ (Memory output, Current instruction)
Current instruction pointer
Decoder
ALU
next instruction
update registers
Compute GCD of 4 and 6
0 := 4
1 := 6
A := 4
B := 6
start
(a > b)
(b > a)
end
^ (Register bank, Load reg addr)
^ (Memory output, Current instruction)
Current instruction pointer
Decoder
ALU
next instruction
update registers
Delay the ldReg by 1 cycle
0 := 4
1 := 6
A := 4
B := 6
Extra NOP
start
(a > b)
(b > a)
end
* __NB__: Initial output value is 'undefined'
@
-> 'Signal' domain (Maybe ('Unsigned' 6, 'Clash.Sized.BitVector.Bit'))
-> 'Signal' domain 'Clash.Sized.BitVector.Bit'
@
Additional helpful information:
* See "Clash.Prelude.BlockRam#usingrams" for more information on how to use a
Block RAM.
* Use the adapter 'readNew' for obtaining write-before-read semantics like this: @readNew (blockRam inits) rd wrM@.
__NB__: __MUST__ be a constant.
^ Read address @r@
^ (write address @w@, value to write)
cycle
* __NB__: Initial output value is 'undefined'
@
bram32
-> 'Signal' domain (Maybe ('Unsigned' 5, 'Clash.Sized.BitVector.Bit'))
-> 'Signal' domain 'Clash.Sized.BitVector.Bit'
@
Additional helpful information:
* See "Clash.Prelude.BlockRam#usingrams" for more information on how to use a
Block RAM.
* Use the adapter 'readNew' for obtaining write-before-read semantics like this: @readNew (blockRamPow2 inits) rd wrM@.
__NB__: __MUST__ be a constant.
^ Read address @r@
^ (write address @w@, value to write)
cycle
>>> :t readNew (blockRam (0 :> 1 :> Nil))
readNew (blockRam (0 :> 1 :> Nil))
:: ...
... =>
Signal domain addr
-> Signal domain (Maybe (addr, a)) -> Signal domain a
^ Read address @r@
^ (Write address @w@, value to write)
cycle | |
Copyright : ( C ) 2013 - 2016 , University of Twente ,
2016 - 2017 , Myrtle Software Ltd ,
2017 , Google Inc.
License : BSD2 ( see the file LICENSE )
Maintainer : < >
BlockRAM primitives
= Using RAMs # usingrams #
We will show a rather elaborate example on how you can , and why you might want
to use ' blockRam 's . We will build a \"small\ " CPU+Memory+Program ROM where we
will slowly evolve to using blockRams . Note that the code is /not/ meant as a
de - facto standard on how to do CPU design in CλaSH .
We start with the definition of the Instructions , Register names and machine
codes :
@
{ \-\ # LANGUAGE RecordWildCards , TupleSections , DeriveAnyClass \#-\ }
module CPU where
import Clash . Prelude
type InstrAddr = Unsigned 8
type = Unsigned 5
type Value = Signed 8
data Instruction
= Compute Operator
| Branch Reg Value
| Jump Value
| Load
| Store Reg MemAddr
| Nop
deriving ( Eq , Show )
data = Zero
| PC
| RegA
| RegB
| RegC
| RegD
| deriving ( Eq , Show , , Undefined )
data Operator = Add | Sub | Incr | Imm | CmpGt
deriving ( Eq , Show )
data MachCode
= MachCode
{ inputX : : , inputY : : , result : : , aluCode : : Operator
, ldReg : : , rdAddr : : , wrAddrM : : Maybe , jmpM : : Maybe Value
}
nullCode = MachCode { inputX = Zero , inputY = Zero , result = Zero , aluCode =
, ldReg = Zero , rdAddr = 0 , wrAddrM = Nothing
, jmpM = Nothing
}
@
Next we define the CPU and its ALU :
@
- > ( 7 Value
, ( , Maybe ( , Value ) , InstrAddr )
)
cpu regbank ( memOut , instr ) = ( regbank',(rdAddr,(,aluOut ) ' < $ > ' wrAddrM , fromIntegral ipntr ) )
where
ipntr = regbank ' ! ! ' PC
( MachCode { .. } ) = case instr of
Compute op rx ry res - > nullCode { inputX = rx , inputY = ry , result = res , aluCode = op }
Branch cr a - > nullCode { inputX = cr , jmpM = Just a }
Jump a - > nullCode { aluCode = Incr , jmpM = Just a }
Load a r - > nullCode { ldReg = r , a }
Store r a - > nullCode { inputX = r , wrAddrM = Just a }
nullCode
regX = regbank ' ! ! ' inputX
regY = regbank ' ! ! ' inputY
aluOut = alu aluCode regX regY
nextPC = case jmpM of
Just a | aluOut /= 0 - > ipntr + a
_ - > ipntr + 1
regbank ' = ' replace ' Zero 0
$ ' replace ' PC nextPC
$ ' replace ' result aluOut
$ ' replace ' ldReg memOut
$ regbank
alu Add x y = x + y
alu Sub x y = x - y
alu _ = x + 1
alu Imm x _ = x
alu CmpGt x y = if x > y then 1 else 0
@
We initially create a memory out of simple registers :
@
dataMem : : HiddenClockReset domain gated synchronous
dataMem rd wrM = ' ' dataMemT ( ' replicate ' d32 0 ) ( bundle ( rd , wrM ) )
where
dataMemT mem ( rd , wrM ) = ( mem',dout )
where
dout = mem ' ! ! ' rd
mem ' = case wrM of
Just ( wr , din ) - > ' replace ' mem
@
And then connect everything :
@
system : : ( KnownNat n , HiddenClockReset domain gated synchronous ) = > domain Value
system instrs = memOut
where
memOut = dataMem rdAddr dout
( rdAddr , dout , ipntr ) = ' Clash . Prelude . Mealy.mealyB ' cpu ( ' replicate ' d7 0 ) ( memOut , instr )
instr = ' Clash . Prelude . ROM.asyncRom ' instrs ' < $ > ' ipntr
@
Create a simple program that calculates the GCD of 4 and 6 :
@
Compute Incr Zero RegA RegA :>
replicate d3 ( Compute Incr RegA Zero RegA ) + +
Store RegA 0 :>
Compute Incr Zero RegA RegA :>
replicate d5 ( Compute Incr RegA Zero RegA ) + +
Store RegA 1 :>
Load 0 RegA :>
Load 1 RegB :>
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
Compute Sub RegA RegB RegA :>
Jump ( -6 ) :>
Compute Sub RegB RegA RegB :>
Jump (-8 ) :>
Store RegA 2 :>
Load 2 RegC :>
Nil
@
And test our system :
@
> > > sampleN 31 ( system prog )
[ 0,0,0,0,0,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2 ]
@
to see that our system indeed calculates that the GCD of 6 and 4 is 2 .
= = = Improvement 1 : using @asyncRam@
As you can see , it 's fairly straightforward to build a memory using registers
and read ( ' ! ! ' ) and write ( ' replace ' ) logic . This might however not result in
the most efficient hardware structure , especially when building an ASIC .
Instead it is preferable to use the ' Clash . Prelude . RAM.asyncRam ' function which
has the potential to be translated to a more efficient structure :
@
: : ( KnownNat n , HiddenClockReset domain gated synchronous ) = > domain Value
system2 instrs = memOut
where
memOut = ' Clash . Prelude . RAM.asyncRam ' d32 rdAddr dout
( rdAddr , dout , ipntr ) = ' mealyB ' cpu ( ' replicate ' d7 0 ) ( memOut , instr )
instr = ' Clash . Prelude . ROM.asyncRom ' instrs ' < $ > ' ipntr
@
Again , we can simulate our system and see that it works . This time however ,
we need to disregard the first few output samples , because the initial content of an
' Clash . Prelude . RAM.asyncRam ' is ' undefined ' , and consequently , the first few
output samples are also ' undefined ' . We use the utility function ' printX ' to conveniently
filter out the undefinedness and replace it with the string " X " in the few leading outputs .
@
> > > printX $ sampleN 31 ( prog )
[ X , X , X , X , X,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2 ]
@
= = = Improvement 2 : using @blockRam@
Finally we get to using ' blockRam ' . On FPGAs , ' Clash . Prelude . RAM.asyncRam ' will
be implemented in terms of LUTs , and therefore take up logic resources . FPGAs
also have large(r ) memory structures called /Block , which are preferred ,
especially as the memories we need for our application get bigger . The
' blockRam ' function will be translated to such a /Block RAM/.
One important aspect of Block RAMs have a /synchronous/ read port , meaning that ,
unlike the behaviour of ' Clash . Prelude . RAM.asyncRam ' , given a read address @r@
at time @t@ , the value @v@ in the RAM at address @r@ is only available at time
@t+1@.
For us that means we need to change the design of our CPU . Right now , upon a
load instruction we generate a read address for the memory , and the value at
that read address is immediately available to be put in the register bank .
Because we will be using a BlockRAM , the value is delayed until the next cycle .
We hence need to also delay the register address to which the memory address
is loaded :
@
- > ( ( Vec 7 Value , )
, ( , Maybe ( , Value ) , InstrAddr )
)
cpu2 ( regbank , ldRegD ) ( memOut , instr ) = ( ( regbank',ldRegD'),(rdAddr,(,aluOut ) ' < $ > ' wrAddrM , fromIntegral ipntr ) )
where
ipntr = regbank ' ! ! ' PC
( MachCode { .. } ) = case instr of
Compute op rx ry res - > nullCode { inputX = rx , inputY = ry , result = res , aluCode = op }
Branch cr a - > nullCode { inputX = cr , jmpM = Just a }
Jump a - > nullCode { aluCode = Incr , jmpM = Just a }
Load a r - > nullCode { ldReg = r , a }
Store r a - > nullCode { inputX = r , wrAddrM = Just a }
nullCode
regX = regbank ' ! ! ' inputX
regY = regbank ' ! ! ' inputY
aluOut = alu aluCode regX regY
nextPC = case jmpM of
Just a | aluOut /= 0 - > ipntr + a
_ - > ipntr + 1
regbank ' = ' replace ' Zero 0
$ ' replace ' PC nextPC
$ ' replace ' result aluOut
$ ' replace ' ldRegD memOut
$ regbank
@
We can now finally instantiate our system with a ' blockRam ' :
@
: : ( KnownNat n , HiddenClockReset domain gated synchronous ) = > domain Value
= memOut
where
memOut = ' blockRam ' ( replicate d32 0 ) rdAddr dout
( rdAddr , dout , ipntr ) = ' mealyB ' cpu2 ( ( ' replicate ' d7 0),Zero ) ( memOut , instr )
instr = ' Clash . Prelude . ROM.asyncRom ' instrs ' < $ > ' ipntr
@
We are , however , not done . We will also need to update our program . The reason
being that values that we try to load in our registers wo n't be loaded into the
register until the next cycle . This is a problem when the next instruction
immediately depended on this memory value . In our case , this was only the case
when the loaded the value , which was stored at address @1@ , into @RegB@.
Our updated program is thus :
@
Compute Incr Zero RegA RegA :>
replicate d3 ( Compute Incr RegA Zero RegA ) + +
Store RegA 0 :>
Compute Incr Zero RegA RegA :>
replicate d5 ( Compute Incr RegA Zero RegA ) + +
Store RegA 1 :>
Load 0 RegA :>
Load 1 RegB :>
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
Compute Sub RegA RegB RegA :>
Jump ( -6 ) :>
Compute Sub RegB RegA RegB :>
Jump (-8 ) :>
Store RegA 2 :>
Load 2 RegC :>
Nil
@
When we simulate our system we see that it works . This time again ,
we need to disregard the first sample , because the initial output of a
' blockRam ' is ' undefined ' . We use the utility function ' printX ' to conveniently
filter out the undefinedness and replace it with the string " X " .
@
> > > printX $ sampleN 33 ( )
[ X,0,0,0,0,0,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2 ]
@
This concludes the short introduction to using ' blockRam ' .
Copyright : (C) 2013-2016, University of Twente,
2016-2017, Myrtle Software Ltd,
2017 , Google Inc.
License : BSD2 (see the file LICENSE)
Maintainer : Christiaan Baaij <>
BlockRAM primitives
= Using RAMs #usingrams#
We will show a rather elaborate example on how you can, and why you might want
to use 'blockRam's. We will build a \"small\" CPU+Memory+Program ROM where we
will slowly evolve to using blockRams. Note that the code is /not/ meant as a
de-facto standard on how to do CPU design in CλaSH.
We start with the definition of the Instructions, Register names and machine
codes:
@
{\-\# LANGUAGE RecordWildCards, TupleSections, DeriveAnyClass \#-\}
module CPU where
import Clash.Prelude
type InstrAddr = Unsigned 8
type MemAddr = Unsigned 5
type Value = Signed 8
data Instruction
= Compute Operator Reg Reg Reg
| Branch Reg Value
| Jump Value
| Load MemAddr Reg
| Store Reg MemAddr
| Nop
deriving (Eq,Show)
data Reg
= Zero
| PC
| RegA
| RegB
| RegC
| RegD
| RegE
deriving (Eq,Show,Enum,Undefined)
data Operator = Add | Sub | Incr | Imm | CmpGt
deriving (Eq,Show)
data MachCode
= MachCode
{ inputX :: Reg
, inputY :: Reg
, result :: Reg
, aluCode :: Operator
, ldReg :: Reg
, rdAddr :: MemAddr
, wrAddrM :: Maybe MemAddr
, jmpM :: Maybe Value
}
nullCode = MachCode { inputX = Zero, inputY = Zero, result = Zero, aluCode = Imm
, ldReg = Zero, rdAddr = 0, wrAddrM = Nothing
, jmpM = Nothing
}
@
Next we define the CPU and its ALU:
@
-> ( Vec 7 Value
, (MemAddr, Maybe (MemAddr,Value), InstrAddr)
)
cpu regbank (memOut,instr) = (regbank',(rdAddr,(,aluOut) '<$>' wrAddrM,fromIntegral ipntr))
where
ipntr = regbank '!!' PC
(MachCode {..}) = case instr of
Compute op rx ry res -> nullCode {inputX=rx,inputY=ry,result=res,aluCode=op}
Branch cr a -> nullCode {inputX=cr,jmpM=Just a}
Jump a -> nullCode {aluCode=Incr,jmpM=Just a}
Load a r -> nullCode {ldReg=r,rdAddr=a}
Store r a -> nullCode {inputX=r,wrAddrM=Just a}
Nop -> nullCode
regX = regbank '!!' inputX
regY = regbank '!!' inputY
aluOut = alu aluCode regX regY
nextPC = case jmpM of
Just a | aluOut /= 0 -> ipntr + a
_ -> ipntr + 1
regbank' = 'replace' Zero 0
$ 'replace' PC nextPC
$ 'replace' result aluOut
$ 'replace' ldReg memOut
$ regbank
alu Add x y = x + y
alu Sub x y = x - y
alu Incr x _ = x + 1
alu Imm x _ = x
alu CmpGt x y = if x > y then 1 else 0
@
We initially create a memory out of simple registers:
@
dataMem :: HiddenClockReset domain gated synchronous
dataMem rd wrM = 'Clash.Prelude.Mealy.mealy' dataMemT ('replicate' d32 0) (bundle (rd,wrM))
where
dataMemT mem (rd,wrM) = (mem',dout)
where
dout = mem '!!' rd
mem' = case wrM of
Just (wr,din) -> 'replace' wr din mem
_ -> mem
@
And then connect everything:
@
system :: (KnownNat n, HiddenClockReset domain gated synchronous) => Vec n Instruction -> Signal domain Value
system instrs = memOut
where
memOut = dataMem rdAddr dout
(rdAddr,dout,ipntr) = 'Clash.Prelude.Mealy.mealyB' cpu ('replicate' d7 0) (memOut,instr)
instr = 'Clash.Prelude.ROM.asyncRom' instrs '<$>' ipntr
@
Create a simple program that calculates the GCD of 4 and 6:
@
Compute Incr Zero RegA RegA :>
replicate d3 (Compute Incr RegA Zero RegA) ++
Store RegA 0 :>
Compute Incr Zero RegA RegA :>
replicate d5 (Compute Incr RegA Zero RegA) ++
Store RegA 1 :>
Load 0 RegA :>
Load 1 RegB :>
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
Compute Sub RegA RegB RegA :>
Jump (-6) :>
Compute Sub RegB RegA RegB :>
Jump (-8) :>
Store RegA 2 :>
Load 2 RegC :>
Nil
@
And test our system:
@
>>> sampleN 31 (system prog)
[0,0,0,0,0,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2]
@
to see that our system indeed calculates that the GCD of 6 and 4 is 2.
=== Improvement 1: using @asyncRam@
As you can see, it's fairly straightforward to build a memory using registers
and read ('!!') and write ('replace') logic. This might however not result in
the most efficient hardware structure, especially when building an ASIC.
Instead it is preferable to use the 'Clash.Prelude.RAM.asyncRam' function which
has the potential to be translated to a more efficient structure:
@
system2 :: (KnownNat n, HiddenClockReset domain gated synchronous) => Vec n Instruction -> Signal domain Value
system2 instrs = memOut
where
memOut = 'Clash.Prelude.RAM.asyncRam' d32 rdAddr dout
(rdAddr,dout,ipntr) = 'mealyB' cpu ('replicate' d7 0) (memOut,instr)
instr = 'Clash.Prelude.ROM.asyncRom' instrs '<$>' ipntr
@
Again, we can simulate our system and see that it works. This time however,
we need to disregard the first few output samples, because the initial content of an
'Clash.Prelude.RAM.asyncRam' is 'undefined', and consequently, the first few
output samples are also 'undefined'. We use the utility function 'printX' to conveniently
filter out the undefinedness and replace it with the string "X" in the few leading outputs.
@
>>> printX $ sampleN 31 (system2 prog)
[X,X,X,X,X,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2]
@
=== Improvement 2: using @blockRam@
Finally we get to using 'blockRam'. On FPGAs, 'Clash.Prelude.RAM.asyncRam' will
be implemented in terms of LUTs, and therefore take up logic resources. FPGAs
also have large(r) memory structures called /Block RAMs/, which are preferred,
especially as the memories we need for our application get bigger. The
'blockRam' function will be translated to such a /Block RAM/.
One important aspect of Block RAMs have a /synchronous/ read port, meaning that,
unlike the behaviour of 'Clash.Prelude.RAM.asyncRam', given a read address @r@
at time @t@, the value @v@ in the RAM at address @r@ is only available at time
@t+1@.
For us that means we need to change the design of our CPU. Right now, upon a
load instruction we generate a read address for the memory, and the value at
that read address is immediately available to be put in the register bank.
Because we will be using a BlockRAM, the value is delayed until the next cycle.
We hence need to also delay the register address to which the memory address
is loaded:
@
-> ( (Vec 7 Value,Reg)
, (MemAddr, Maybe (MemAddr,Value), InstrAddr)
)
cpu2 (regbank,ldRegD) (memOut,instr) = ((regbank',ldRegD'),(rdAddr,(,aluOut) '<$>' wrAddrM,fromIntegral ipntr))
where
ipntr = regbank '!!' PC
(MachCode {..}) = case instr of
Compute op rx ry res -> nullCode {inputX=rx,inputY=ry,result=res,aluCode=op}
Branch cr a -> nullCode {inputX=cr,jmpM=Just a}
Jump a -> nullCode {aluCode=Incr,jmpM=Just a}
Load a r -> nullCode {ldReg=r,rdAddr=a}
Store r a -> nullCode {inputX=r,wrAddrM=Just a}
Nop -> nullCode
regX = regbank '!!' inputX
regY = regbank '!!' inputY
aluOut = alu aluCode regX regY
nextPC = case jmpM of
Just a | aluOut /= 0 -> ipntr + a
_ -> ipntr + 1
regbank' = 'replace' Zero 0
$ 'replace' PC nextPC
$ 'replace' result aluOut
$ 'replace' ldRegD memOut
$ regbank
@
We can now finally instantiate our system with a 'blockRam':
@
system3 :: (KnownNat n, HiddenClockReset domain gated synchronous) => Vec n Instruction -> Signal domain Value
system3 instrs = memOut
where
memOut = 'blockRam' (replicate d32 0) rdAddr dout
(rdAddr,dout,ipntr) = 'mealyB' cpu2 (('replicate' d7 0),Zero) (memOut,instr)
instr = 'Clash.Prelude.ROM.asyncRom' instrs '<$>' ipntr
@
We are, however, not done. We will also need to update our program. The reason
being that values that we try to load in our registers won't be loaded into the
register until the next cycle. This is a problem when the next instruction
immediately depended on this memory value. In our case, this was only the case
when the loaded the value @6@, which was stored at address @1@, into @RegB@.
Our updated program is thus:
@
Compute Incr Zero RegA RegA :>
replicate d3 (Compute Incr RegA Zero RegA) ++
Store RegA 0 :>
Compute Incr Zero RegA RegA :>
replicate d5 (Compute Incr RegA Zero RegA) ++
Store RegA 1 :>
Load 0 RegA :>
Load 1 RegB :>
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
Compute Sub RegA RegB RegA :>
Jump (-6) :>
Compute Sub RegB RegA RegB :>
Jump (-8) :>
Store RegA 2 :>
Load 2 RegC :>
Nil
@
When we simulate our system we see that it works. This time again,
we need to disregard the first sample, because the initial output of a
'blockRam' is 'undefined'. We use the utility function 'printX' to conveniently
filter out the undefinedness and replace it with the string "X".
@
>>> printX $ sampleN 33 (system3 prog2)
[X,0,0,0,0,0,4,4,4,4,4,4,4,4,6,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,2]
@
This concludes the short introduction to using 'blockRam'.
-}
# LANGUAGE GADTs #
# LANGUAGE MagicHash #
# LANGUAGE ScopedTypeVariables #
module Clash.Prelude.BlockRam
blockRam
, blockRamPow2
, readNew
)
where
import GHC.TypeLits (KnownNat, type (^))
import GHC.Stack (HasCallStack, withFrozenCallStack)
import qualified Clash.Explicit.BlockRam as E
import Clash.Signal
import Clash.Sized.Unsigned (Unsigned)
import Clash.Sized.Vector (Vec)
import Clash.XException (Undefined)
$ setup
> > > import Clash . Prelude as C
> > > import qualified Data . List as L
> > > : set -XDataKinds -XRecordWildCards -XTupleSections -XTypeApplications -XFlexibleContexts -XDeriveAnyClass
> > > type InstrAddr = Unsigned 8
> > > type = Unsigned 5
> > > type Value = Signed 8
> > > : {
data = Zero
| PC
| RegA
| RegB
| RegC
| RegD
| deriving ( Eq , Show , , Undefined )
:}
> > > : {
data Operator = Add | Sub | Incr | Imm | CmpGt
deriving ( Eq , Show )
:}
> > > : {
data Instruction
= Compute Operator
| Branch Reg Value
| Jump Value
| Load
| Store Reg MemAddr
| Nop
deriving ( Eq , Show )
:}
> > > : {
data MachCode
= MachCode
{ inputX : : , inputY : : , result : : , aluCode : : Operator
, ldReg : : , rdAddr : : , wrAddrM : : Maybe , jmpM : : Maybe Value
}
:}
> > > : {
nullCode = MachCode { inputX = Zero , inputY = Zero , result = Zero , aluCode =
, ldReg = Zero , rdAddr = 0 , wrAddrM = Nothing
, jmpM = Nothing
}
:}
> > > : {
alu Add x y = x + y
alu Sub x y = x - y
alu _ = x + 1
alu Imm x _ = x
alu CmpGt x y = if x > y then 1 else 0
:}
> > > : {
- > ( 7 Value
, ( , Maybe ( , Value),InstrAddr )
)
cpu regbank ( memOut , instr ) = ( regbank',(rdAddr,(,aluOut ) < $ > wrAddrM , fromIntegral ipntr ) )
where
ipntr = regbank C. ! ! PC
( MachCode { .. } ) = case instr of
Compute op rx ry res - > nullCode { inputX = rx , inputY = ry , result = res , aluCode = op }
Branch cr a - > nullCode { inputX = cr , jmpM = Just a }
Jump a - > nullCode { aluCode = Incr , jmpM = Just a }
Load a r - > nullCode { ldReg = r , a }
Store r a - > nullCode { inputX = r , wrAddrM = Just a }
regX = regbank C. ! ! inputX
regY = regbank C. ! ! inputY
aluOut = alu aluCode regX regY
nextPC = case jmpM of
Just a | aluOut /= 0 - > ipntr + a
_ - > ipntr + 1
regbank ' = replace Zero 0
$ replace PC nextPC
$ replace result aluOut
$ replace ldReg memOut
$ regbank
:}
> > > : {
dataMem
: : HiddenClockReset domain gated synchronous
= > Signal domain domain ( Maybe ( , Value ) )
- > Signal domain Value
dataMem rd wrM = mealy dataMemT ( C.replicate d32 0 ) ( bundle ( rd , wrM ) )
where
dataMemT mem ( rd , wrM ) = ( mem',dout )
where
dout = mem C. ! ! rd
' = case wrM of
Just ( wr , din ) - > replace Nothing - > mem
:}
> > > : {
system
: : ( KnownNat n , HiddenClockReset domain gated synchronous )
= > n Instruction
- > Signal domain Value
system instrs = memOut
where
memOut = dataMem rdAddr dout
( rdAddr , dout , ipntr ) = mealyB cpu ( C.replicate d7 0 ) ( memOut , instr )
instr = asyncRom instrs < $ > ipntr
:}
> > > : {
Compute Incr Zero RegA RegA :>
C.replicate d3 ( Compute Incr RegA Zero RegA ) C.++
Store RegA 0 :>
Compute Incr Zero RegA RegA :>
C.replicate d5 ( Compute Incr RegA Zero RegA ) C.++
Store RegA 1 :>
Load 0 RegA :>
Load 1 RegB :>
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
Compute Sub RegA RegB RegA :>
Jump ( -6 ) :>
Compute Sub RegB RegA RegB :>
Jump (-8 ) :>
Store RegA 2 :>
Load 2 RegC :>
Nil
:}
> > > : {
: : ( KnownNat n , HiddenClockReset domain gated synchronous )
= > n Instruction
- > Signal domain Value
system2 instrs = memOut
where
memOut = asyncRam d32 rdAddr dout
( rdAddr , dout , ipntr ) = mealyB cpu ( C.replicate d7 0 ) ( memOut , instr )
instr = asyncRom instrs < $ > ipntr
:}
> > > : {
- > ( ( Vec 7 Value , )
, ( , Maybe ( , Value),InstrAddr )
)
cpu2 ( regbank , ldRegD ) ( memOut , instr ) = ( ( regbank',ldRegD'),(rdAddr,(,aluOut ) < $ > wrAddrM , fromIntegral ipntr ) )
where
ipntr = regbank C. ! ! PC
( MachCode { .. } ) = case instr of
Compute op rx ry res - > nullCode { inputX = rx , inputY = ry , result = res , aluCode = op }
Branch cr a - > nullCode { inputX = cr , jmpM = Just a }
Jump a - > nullCode { aluCode = Incr , jmpM = Just a }
Load a r - > nullCode { ldReg = r , a }
Store r a - > nullCode { inputX = r , wrAddrM = Just a }
regX = regbank C. ! ! inputX
regY = regbank C. ! ! inputY
aluOut = alu aluCode regX regY
nextPC = case jmpM of
Just a | aluOut /= 0 - > ipntr + a
_ - > ipntr + 1
regbank ' = replace Zero 0
$ replace PC nextPC
$ replace result aluOut
$ replace ldRegD memOut
$ regbank
:}
> > > : {
: : ( KnownNat n , HiddenClockReset domain gated synchronous )
= > n Instruction
- > Signal domain Value
= memOut
where
memOut = blockRam ( C.replicate d32 0 ) rdAddr dout
( rdAddr , dout , ipntr ) = mealyB cpu2 ( ( C.replicate d7 0),Zero ) ( memOut , instr )
instr = asyncRom instrs < $ > ipntr
:}
> > > : {
Compute Incr Zero RegA RegA :>
C.replicate d3 ( Compute Incr RegA Zero RegA ) C.++
Store RegA 0 :>
Compute Incr Zero RegA RegA :>
C.replicate d5 ( Compute Incr RegA Zero RegA ) C.++
Store RegA 1 :>
Load 0 RegA :>
Load 1 RegB :>
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
Compute Sub RegA RegB RegA :>
Jump ( -6 ) :>
Compute Sub RegB RegA RegB :>
Jump (-8 ) :>
Store RegA 2 :>
Load 2 RegC :>
Nil
:}
>>> import Clash.Prelude as C
>>> import qualified Data.List as L
>>> :set -XDataKinds -XRecordWildCards -XTupleSections -XTypeApplications -XFlexibleContexts -XDeriveAnyClass
>>> type InstrAddr = Unsigned 8
>>> type MemAddr = Unsigned 5
>>> type Value = Signed 8
>>> :{
data Reg
= Zero
| PC
| RegA
| RegB
| RegC
| RegD
| RegE
deriving (Eq,Show,Enum,Undefined)
:}
>>> :{
data Operator = Add | Sub | Incr | Imm | CmpGt
deriving (Eq,Show)
:}
>>> :{
data Instruction
= Compute Operator Reg Reg Reg
| Branch Reg Value
| Jump Value
| Load MemAddr Reg
| Store Reg MemAddr
| Nop
deriving (Eq,Show)
:}
>>> :{
data MachCode
= MachCode
{ inputX :: Reg
, inputY :: Reg
, result :: Reg
, aluCode :: Operator
, ldReg :: Reg
, rdAddr :: MemAddr
, wrAddrM :: Maybe MemAddr
, jmpM :: Maybe Value
}
:}
>>> :{
nullCode = MachCode { inputX = Zero, inputY = Zero, result = Zero, aluCode = Imm
, ldReg = Zero, rdAddr = 0, wrAddrM = Nothing
, jmpM = Nothing
}
:}
>>> :{
alu Add x y = x + y
alu Sub x y = x - y
alu Incr x _ = x + 1
alu Imm x _ = x
alu CmpGt x y = if x > y then 1 else 0
:}
>>> :{
-> ( Vec 7 Value
, (MemAddr,Maybe (MemAddr,Value),InstrAddr)
)
cpu regbank (memOut,instr) = (regbank',(rdAddr,(,aluOut) <$> wrAddrM,fromIntegral ipntr))
where
ipntr = regbank C.!! PC
(MachCode {..}) = case instr of
Compute op rx ry res -> nullCode {inputX=rx,inputY=ry,result=res,aluCode=op}
Branch cr a -> nullCode {inputX=cr,jmpM=Just a}
Jump a -> nullCode {aluCode=Incr,jmpM=Just a}
Load a r -> nullCode {ldReg=r,rdAddr=a}
Store r a -> nullCode {inputX=r,wrAddrM=Just a}
Nop -> nullCode
regX = regbank C.!! inputX
regY = regbank C.!! inputY
aluOut = alu aluCode regX regY
nextPC = case jmpM of
Just a | aluOut /= 0 -> ipntr + a
_ -> ipntr + 1
regbank' = replace Zero 0
$ replace PC nextPC
$ replace result aluOut
$ replace ldReg memOut
$ regbank
:}
>>> :{
dataMem
:: HiddenClockReset domain gated synchronous
=> Signal domain MemAddr
-> Signal domain (Maybe (MemAddr,Value))
-> Signal domain Value
dataMem rd wrM = mealy dataMemT (C.replicate d32 0) (bundle (rd,wrM))
where
dataMemT mem (rd,wrM) = (mem',dout)
where
dout = mem C.!! rd
mem' = case wrM of
Just (wr,din) -> replace wr din mem
Nothing -> mem
:}
>>> :{
system
:: (KnownNat n, HiddenClockReset domain gated synchronous)
=> Vec n Instruction
-> Signal domain Value
system instrs = memOut
where
memOut = dataMem rdAddr dout
(rdAddr,dout,ipntr) = mealyB cpu (C.replicate d7 0) (memOut,instr)
instr = asyncRom instrs <$> ipntr
:}
>>> :{
Compute Incr Zero RegA RegA :>
C.replicate d3 (Compute Incr RegA Zero RegA) C.++
Store RegA 0 :>
Compute Incr Zero RegA RegA :>
C.replicate d5 (Compute Incr RegA Zero RegA) C.++
Store RegA 1 :>
Load 0 RegA :>
Load 1 RegB :>
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
Compute Sub RegA RegB RegA :>
Jump (-6) :>
Compute Sub RegB RegA RegB :>
Jump (-8) :>
Store RegA 2 :>
Load 2 RegC :>
Nil
:}
>>> :{
system2
:: (KnownNat n, HiddenClockReset domain gated synchronous)
=> Vec n Instruction
-> Signal domain Value
system2 instrs = memOut
where
memOut = asyncRam d32 rdAddr dout
(rdAddr,dout,ipntr) = mealyB cpu (C.replicate d7 0) (memOut,instr)
instr = asyncRom instrs <$> ipntr
:}
>>> :{
-> ( (Vec 7 Value,Reg)
, (MemAddr,Maybe (MemAddr,Value),InstrAddr)
)
cpu2 (regbank,ldRegD) (memOut,instr) = ((regbank',ldRegD'),(rdAddr,(,aluOut) <$> wrAddrM,fromIntegral ipntr))
where
ipntr = regbank C.!! PC
(MachCode {..}) = case instr of
Compute op rx ry res -> nullCode {inputX=rx,inputY=ry,result=res,aluCode=op}
Branch cr a -> nullCode {inputX=cr,jmpM=Just a}
Jump a -> nullCode {aluCode=Incr,jmpM=Just a}
Load a r -> nullCode {ldReg=r,rdAddr=a}
Store r a -> nullCode {inputX=r,wrAddrM=Just a}
Nop -> nullCode
regX = regbank C.!! inputX
regY = regbank C.!! inputY
aluOut = alu aluCode regX regY
nextPC = case jmpM of
Just a | aluOut /= 0 -> ipntr + a
_ -> ipntr + 1
regbank' = replace Zero 0
$ replace PC nextPC
$ replace result aluOut
$ replace ldRegD memOut
$ regbank
:}
>>> :{
system3
:: (KnownNat n, HiddenClockReset domain gated synchronous)
=> Vec n Instruction
-> Signal domain Value
system3 instrs = memOut
where
memOut = blockRam (C.replicate d32 0) rdAddr dout
(rdAddr,dout,ipntr) = mealyB cpu2 ((C.replicate d7 0),Zero) (memOut,instr)
instr = asyncRom instrs <$> ipntr
:}
>>> :{
Compute Incr Zero RegA RegA :>
C.replicate d3 (Compute Incr RegA Zero RegA) C.++
Store RegA 0 :>
Compute Incr Zero RegA RegA :>
C.replicate d5 (Compute Incr RegA Zero RegA) C.++
Store RegA 1 :>
Load 0 RegA :>
Load 1 RegB :>
Compute CmpGt RegA RegB RegC :>
Branch RegC 4 :>
Compute CmpGt RegB RegA RegC :>
Branch RegC 4 :>
Jump 5 :>
Compute Sub RegA RegB RegA :>
Jump (-6) :>
Compute Sub RegB RegA RegB :>
Jump (-8) :>
Store RegA 2 :>
Load 2 RegC :>
Nil
:}
-}
| Create a blockRAM with space for @n@ elements .
* _ _ NB _ _ : Read value is delayed by 1 cycle
bram40
: : ' HiddenClock ' domain
= > ' Signal ' domain ( ' Unsigned ' 6 )
bram40 = ' blockRam ' ( ' Clash.Sized.Vector.replicate ' d40 1 )
-- | Create a blockRAM with space for @n@ elements.
--
-- * __NB__: Read value is delayed by 1 cycle
-- * __NB__: Initial output value is /undefined/
blockRam
  :: (Enum addr, HiddenClock domain gated, HasCallStack)
  => Vec n a
  -- ^ Initial content of the BRAM, also determines the size, @n@, of
  -- the BRAM. __NB__: __MUST__ be a constant.
  -> Signal domain addr
  -- ^ Read address @r@
  -> Signal domain (Maybe (addr, a))
  -- ^ (write address @w@, value to write)
  -> Signal domain a
  -- ^ Value of the @blockRAM@ at address @r@ from the previous clock cycle
blockRam = \cnt rd wrM -> withFrozenCallStack
  (hideClock E.blockRam cnt rd wrM)
{-# INLINE blockRam #-}
| Create a blockRAM with space for 2^@n@ elements
* _ _ NB _ _ : Read value is delayed by 1 cycle
: : ' HiddenClock ' domain
= > ' Signal ' domain ( ' Unsigned ' 5 )
bram32 = ' blockRamPow2 ' ( ' Clash.Sized.Vector.replicate ' d32 1 )
-- | Create a blockRAM with space for 2^@n@ elements.
--
-- * __NB__: Read value is delayed by 1 cycle
-- * __NB__: Initial output value is /undefined/
blockRamPow2
  :: (KnownNat n, HiddenClock domain gated, HasCallStack)
  => Vec (2^n) a
  -- ^ Initial content of the BRAM, also determines the size, @2^n@, of
  -- the BRAM. __NB__: __MUST__ be a constant.
  -> Signal domain (Unsigned n)
  -- ^ Read address @r@
  -> Signal domain (Maybe (Unsigned n, a))
  -- ^ (write address @w@, value to write)
  -> Signal domain a
  -- ^ Value of the @blockRAM@ at address @r@ from the previous clock cycle
blockRamPow2 = \cnt rd wrM -> withFrozenCallStack
  (hideClock E.blockRamPow2 cnt rd wrM)
{-# INLINE blockRamPow2 #-}
| Create read - after - write blockRAM from a read - before - write one ( synchronised to system clock )
> > > import Clash . Prelude
-- | Create a read-after-write blockRAM from a read-before-write one
-- (synchronised to the system clock).
readNew :: (Eq addr, Undefined a, HiddenClockReset domain gated synchronous)
        => (Signal domain addr -> Signal domain (Maybe (addr, a)) -> Signal domain a)
        -- ^ The @ram@ component
        -> Signal domain addr
        -- ^ Read address @r@
        -> Signal domain (Maybe (addr, a))
        -- ^ (Write address @w@, value to write)
        -> Signal domain a
        -- ^ Value of the @ram@ at address @r@ from the previous clock cycle
readNew = hideClockReset (\clk rst -> E.readNew rst clk)
{-# INLINE readNew #-}
|
f027b96775a5de6347bdb21c1d7bdbc4b88885d68c01115aff29a312d20ca4f0 | teamwalnut/graphql-ppx | mutation.ml | open Test_shared
(* GraphQL operation under test.  The [%graphql] extension (graphql-ppx)
   expands this mutation document into a module that exposes, among other
   generated values, [MyQuery.query]: the printed operation string that
   would be sent to a GraphQL server.  The tests below inspect that
   string, so the quoted document itself must not be altered. *)
module MyQuery =
[%graphql
{|
mutation {
mutationWithError {
value {
stringField
}
errors {
field
message
}
}
}
|}]
(* Verify that the generated operation string starts with the "mutation"
   keyword, i.e. graphql-ppx printed a mutation rather than a query. *)
let printed_query () =
  let position =
    try Some (Str.search_forward (Str.regexp "^mutation") MyQuery.query 0)
    with Not_found -> None
  in
  match position with
  | Some 0 -> Pass
  | Some _ | None ->
    Fail "mutation not found as first index of the operation string"

(* Test-suite entries: (description, test thunk). *)
let tests = [ ("Printed query is a mutation", printed_query) ]
| null | https://raw.githubusercontent.com/teamwalnut/graphql-ppx/8276452ebe8d89a748b6b267afc94161650ab620/tests_native/mutation.ml | ocaml | open Test_shared
module MyQuery =
[%graphql
{|
mutation {
mutationWithError {
value {
stringField
}
errors {
field
message
}
}
}
|}]
let printed_query () =
match Str.search_forward (Str.regexp "^mutation") MyQuery.query 0 with
| 0 -> Pass
| (exception Not_found) | _ ->
Fail "mutation not found as first index of the operation string"
let tests = [ ("Printed query is a mutation", printed_query) ]
| |
0ebb9b2b53b13ce710dd1a71140d995003bbb02b339c7a118d962cfe422add10 | exercism/ocaml | space_age.ml | type planet = Mercury | Venus | Earth | Mars
| Jupiter | Saturn | Neptune | Uranus
let age_on _ _ =
failwith "'age_on' is missing"
| null | https://raw.githubusercontent.com/exercism/ocaml/bfd6121f757817865a34db06c3188b5e0ccab518/exercises/practice/space-age/space_age.ml | ocaml | type planet = Mercury | Venus | Earth | Mars
| Jupiter | Saturn | Neptune | Uranus
let age_on _ _ =
failwith "'age_on' is missing"
| |
4c5c24250727314afa2a3c9993b0e6b0fa6393d43f000f376aa0053a38a84a11 | jtuple/riak_zab | riak_zab_backend.erl | %% -------------------------------------------------------------------
%%
%% riak_zab_backend: Copy of riak_kv_backend to avoid requiring
%% entire riak_kv app as a dependency.
%%
%% -------------------------------------------------------------------
%%
%% riak_kv_backend: backend behaviour
%%
%% Copyright (c) 2007-2010 Basho Technologies, Inc.  All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(riak_zab_backend).
-export([behaviour_info/1]).
-export([callback_after/3]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-export([standard_test/2]).
-endif.
-spec behaviour_info(atom()) -> 'undefined' | [{atom(), arity()}].
%% @doc Behaviour definition for riak_zab storage backends: the complete
%% list of callbacks a backend module must export.  The callback tuples
%% had been lost to comment-stripping; reconstructed from the per-callback
%% argument comments and from the calls exercised by standard_test/2 below.
behaviour_info(callbacks) ->
    [{start, 2},        % (Partition, Config)
     {stop, 1},         % (State)
     {get, 2},          % (State, BKey)
     {put, 3},          % (State, BKey, Val)
     {list, 1},         % (State)
     {list_bucket, 2},  % (State, Bucket)
     {delete, 2},       % (State, BKey)
     {is_empty, 1},     % (State)
     {fold, 3},         % (State, Folder, Acc), Folder({B, K}, V, Acc)
     {drop, 1},         % (State)
     {callback, 3}];    % (State, Ref, Msg)
behaviour_info(_Other) ->
    undefined.
%% Queue a callback for the backend after Time ms.
-spec callback_after(integer(), reference(), term()) -> reference().
%% Schedule a {backend_callback, Ref, Msg} command to be delivered to the
%% calling vnode after Time milliseconds, by delegating to riak_core_vnode.
%% The guards reject a non-integer delay or a non-reference tag up front.
callback_after(Time, Ref, Msg) when is_integer(Time), is_reference(Ref) ->
    riak_core_vnode:send_command_after(Time, {backend_callback, Ref, Msg}).
-ifdef(TEST).
%% Generic backend conformance test: runs a fixed sequence of
%% put/get/list/list_bucket/delete/fold/is_empty operations against a
%% freshly started instance of BackendMod and asserts the expected
%% results.  Meant to be shared by the eunit suites of concrete backends.
standard_test(BackendMod, Config) ->
    {ok, S} = BackendMod:start(42, Config),
    %% basic put/get round trips, including a miss
    ?assertEqual(ok, BackendMod:put(S,{<<"b1">>,<<"k1">>},<<"v1">>)),
    ?assertEqual(ok, BackendMod:put(S,{<<"b2">>,<<"k2">>},<<"v2">>)),
    ?assertEqual({ok,<<"v2">>}, BackendMod:get(S,{<<"b2">>,<<"k2">>})),
    ?assertEqual({error, notfound}, BackendMod:get(S, {<<"b1">>,<<"k3">>})),
    %% key listing, per bucket and with accept-all / reject-all filters
    ?assertEqual([{<<"b1">>,<<"k1">>},{<<"b2">>,<<"k2">>}],
                 lists:sort(BackendMod:list(S))),
    ?assertEqual([<<"k2">>], BackendMod:list_bucket(S, <<"b2">>)),
    ?assertEqual([<<"k1">>], BackendMod:list_bucket(S, <<"b1">>)),
    ?assertEqual([<<"k1">>], BackendMod:list_bucket(
                               S, {filter, <<"b1">>, fun(_K) -> true end})),
    ?assertEqual([], BackendMod:list_bucket(
                       S, {filter, <<"b1">>, fun(_K) -> false end})),
    %% the atom '_' lists the bucket names themselves
    BucketList = BackendMod:list_bucket(S, '_'),
    ?assert(lists:member(<<"b1">>, BucketList)),
    ?assert(lists:member(<<"b2">>, BucketList)),
    %% delete removes a binding; fold visits every remaining {BKey, Val}
    ?assertEqual(ok, BackendMod:delete(S,{<<"b2">>,<<"k2">>})),
    ?assertEqual({error, notfound}, BackendMod:get(S, {<<"b2">>, <<"k2">>})),
    ?assertEqual([{<<"b1">>, <<"k1">>}], BackendMod:list(S)),
    Folder = fun(K, V, A) -> [{K,V}|A] end,
    ?assertEqual([{{<<"b1">>,<<"k1">>},<<"v1">>}], BackendMod:fold(S, Folder, [])),
    ?assertEqual(ok, BackendMod:put(S,{<<"b3">>,<<"k3">>},<<"v3">>)),
    ?assertEqual([{{<<"b1">>,<<"k1">>},<<"v1">>},
                  {{<<"b3">>,<<"k3">>},<<"v3">>}], lists:sort(BackendMod:fold(S, Folder, []))),
    %% is_empty flips to true once the remaining keys are deleted
    ?assertEqual(false, BackendMod:is_empty(S)),
    ?assertEqual(ok, BackendMod:delete(S,{<<"b1">>,<<"k1">>})),
    ?assertEqual(ok, BackendMod:delete(S,{<<"b3">>,<<"k3">>})),
    ?assertEqual(true, BackendMod:is_empty(S)),
    ok = BackendMod:stop(S).
-endif. % TEST
| null | https://raw.githubusercontent.com/jtuple/riak_zab/ebd54aa540cf65e49d6e0890005e714ee756914a/src/riak_zab_backend.erl | erlang | -------------------------------------------------------------------
riak_zab_backend: Copy of riak_kv_backend to avoid requiring
entire riak_kv app as a dependency.
-------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
(Partition, Config)
TEST | riak_kv_backend : backend behaviour
Copyright ( c ) 2007 - 2010 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_zab_backend).
-export([behaviour_info/1]).
-export([callback_after/3]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-export([standard_test/2]).
-endif.
-spec behaviour_info(atom()) -> 'undefined' | [{atom(), arity()}].
behaviour_info(callbacks) ->
( State )
( State , BKey )
( State , BKey , )
( State )
( State , Bucket )
( State , BKey )
( State )
( State , Folder , Acc ) , Folder({B , K},V , Acc )
( State )
( State , Ref , Msg ) - >
behaviour_info(_Other) ->
undefined.
Queue a callback for the backend after Time ms .
-spec callback_after(integer(), reference(), term()) -> reference().
callback_after(Time, Ref, Msg) when is_integer(Time), is_reference(Ref) ->
riak_core_vnode:send_command_after(Time, {backend_callback, Ref, Msg}).
-ifdef(TEST).
standard_test(BackendMod, Config) ->
{ok, S} = BackendMod:start(42, Config),
?assertEqual(ok, BackendMod:put(S,{<<"b1">>,<<"k1">>},<<"v1">>)),
?assertEqual(ok, BackendMod:put(S,{<<"b2">>,<<"k2">>},<<"v2">>)),
?assertEqual({ok,<<"v2">>}, BackendMod:get(S,{<<"b2">>,<<"k2">>})),
?assertEqual({error, notfound}, BackendMod:get(S, {<<"b1">>,<<"k3">>})),
?assertEqual([{<<"b1">>,<<"k1">>},{<<"b2">>,<<"k2">>}],
lists:sort(BackendMod:list(S))),
?assertEqual([<<"k2">>], BackendMod:list_bucket(S, <<"b2">>)),
?assertEqual([<<"k1">>], BackendMod:list_bucket(S, <<"b1">>)),
?assertEqual([<<"k1">>], BackendMod:list_bucket(
S, {filter, <<"b1">>, fun(_K) -> true end})),
?assertEqual([], BackendMod:list_bucket(
S, {filter, <<"b1">>, fun(_K) -> false end})),
BucketList = BackendMod:list_bucket(S, '_'),
?assert(lists:member(<<"b1">>, BucketList)),
?assert(lists:member(<<"b2">>, BucketList)),
?assertEqual(ok, BackendMod:delete(S,{<<"b2">>,<<"k2">>})),
?assertEqual({error, notfound}, BackendMod:get(S, {<<"b2">>, <<"k2">>})),
?assertEqual([{<<"b1">>, <<"k1">>}], BackendMod:list(S)),
Folder = fun(K, V, A) -> [{K,V}|A] end,
?assertEqual([{{<<"b1">>,<<"k1">>},<<"v1">>}], BackendMod:fold(S, Folder, [])),
?assertEqual(ok, BackendMod:put(S,{<<"b3">>,<<"k3">>},<<"v3">>)),
?assertEqual([{{<<"b1">>,<<"k1">>},<<"v1">>},
{{<<"b3">>,<<"k3">>},<<"v3">>}], lists:sort(BackendMod:fold(S, Folder, []))),
?assertEqual(false, BackendMod:is_empty(S)),
?assertEqual(ok, BackendMod:delete(S,{<<"b1">>,<<"k1">>})),
?assertEqual(ok, BackendMod:delete(S,{<<"b3">>,<<"k3">>})),
?assertEqual(true, BackendMod:is_empty(S)),
ok = BackendMod:stop(S).
|
e5f60df8840e2c626a99617deef7ee8c1a0f02746e6a5d20021d97e925979d73 | donaldsonjw/bigloo | setup.scm | ;*=====================================================================*/
;*    serrano/prgm/project/bigloo/comptime/Cfa/setup.scm              */
;*    -------------------------------------------------------------    */
;*    Author      :  Manuel Serrano                                    */
;*    Creation    :  Tue Jun 25 14:08:53 1996                          */
;*    Last change :  Mon Nov 11 09:54:14 2013 (serrano)                */
;*    Copyright   :  1996-2013 Manuel Serrano, see LICENSE file        */
;* ------------------------------------------------------------- */
;* We setup the ast for the Cfa. */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module cfa_setup
(include "Tools/trace.sch")
(import engine_param
type_type
type_cache
type_typeof
module_module
tools_shape
tools_error
ast_var
ast_node
cfa_info
cfa_info2
cfa_info3
cfa_approx
cfa_pair)
(export (set-initial-approx! globals)
(generic node-setup! ::node)
(node-setup*! node*)))
;*---------------------------------------------------------------------*/
;* set-initial-approx! ... */
;*---------------------------------------------------------------------*/
;; Attach the initial Cfa approximations to a list of GLOBALS before the
;; fix-point iteration starts: each global's function value is set up via
;; fun-setup!, every formal parameter is widened to reshaped-local and
;; given an initial value approximation, and node-setup! walks the body.
(define (set-initial-approx! globals)
   (trace cfa "================== initial ===========================\n")
   (for-each (lambda (global)
		(trace (cfa 5) "set-initial-approx!: " (shape global)
		       #\Newline)
		(let ((fun (global-value global)))
		   (fun-setup! fun global)
		   (for-each (lambda (local)
				;; widen before setting up the value so the
				;; local carries its Cfa-specific fields
				(widen!::reshaped-local local)
				(variable-value-setup! (local-value local)
						       local))
			     (sfun-args fun)))
		(node-setup! (sfun-body (global-value global))))
	     globals))
;*---------------------------------------------------------------------*/
;* node-setup! ... */
;*---------------------------------------------------------------------*/
(define-generic (node-setup! node::node))
;*---------------------------------------------------------------------*/
;* node-setup! ::atom ... */
;*---------------------------------------------------------------------*/
;; A literal atom is approximated by the static type of its value.
(define-method (node-setup! node::atom)
   (with-access::atom node (value)
      (widen!::atom/Cinfo node
	 (approx (make-type-approx (get-type-atom value))))))
;*---------------------------------------------------------------------*/
;* node-setup! ::kwote ... */
;*---------------------------------------------------------------------*/
;; A quoted constant is approximated by its value's type, but the
;; approximation is marked `top' — presumably because the constant may
;; embed arbitrary sub-structures the Cfa cannot track (TODO confirm).
(define-method (node-setup! node::kwote)
   (with-access::kwote node (value)
      (let ((approx (make-type-approx (get-type-kwote value))))
	 (approx-top?-set! approx #t)
	 (widen!::kwote/Cinfo node
	    (approx approx)))))
;*---------------------------------------------------------------------*/
;* node-setup! ::kwote/node ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! kwote::kwote/node)
(with-access::kwote/node kwote (node value)
(node-setup! node)))
;*---------------------------------------------------------------------*/
;* node-setup! ::var ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::var)
(with-access::var node (variable)
(variable-value-setup! (variable-value variable) variable)
;; this widen! is only for the nice pretting of cfa_show
(cond
((and (local? variable) (not (reshaped-local? variable)))
(widen!::reshaped-local variable))
((and (global? variable) (not (reshaped-global? variable)))
(widen!::reshaped-global variable)))))
;*---------------------------------------------------------------------*/
;* alloc-type? ... */
;* ------------------------------------------------------------- */
;* This predicate returns #t for all types denoting data */
* structures approximated by the cfa . * /
;*---------------------------------------------------------------------*/
(define (alloc-type? type)
   ;; #t iff TYPE denotes a data structure whose allocations the Cfa
   ;; approximates; pairs count only when pair optimization is enabled.
   (cond
      ((or (eq? type *vector*)
	   (eq? type *procedure*)
	   (eq? type *struct*))
       #t)
      ((eq? type *pair*)
       (pair-optim?))
      (else
       #f)))
;*---------------------------------------------------------------------*/
;* variable-value-setup! ... */
;*---------------------------------------------------------------------*/
(define-generic (variable-value-setup! value::value variable::variable))
;*---------------------------------------------------------------------*/
;* variable-value-setup! ::sfun ... */
;*---------------------------------------------------------------------*/
(define-method (variable-value-setup! value::sfun var)
;; we reach this method when setting up a `make-procedure' call
on the second argument of this call .
#unspecified)
;*---------------------------------------------------------------------*/
;* variable-value-setup! ::svar ... */
;*---------------------------------------------------------------------*/
;; Widen a plain Scheme variable's value with its Cfa approximation.
;; For a non-static global holding a tracked allocation type the
;; approximation is forced to `top' — presumably because such a global
;; can be read/written from outside the module (TODO confirm).
(define-method (variable-value-setup! value::svar var::variable)
   (let ((typ (variable-type var)))
      (if (global? var)
	  (let ((value (widen!::svar/Cinfo value
			  (approx (make-type-approx typ)))))
	     (if (and (not (eq? (global-import var) 'static)) (alloc-type? typ))
		 (approx-set-top! (svar/Cinfo-approx value))))
	  (widen!::svar/Cinfo value
	     (approx (make-type-approx typ))))))
;*---------------------------------------------------------------------*/
;* variable-value-setup! ::pre-clo-env ... */
;*---------------------------------------------------------------------*/
(define-method (variable-value-setup! value::pre-clo-env var)
(trace (cfa 5) "Je set un pre-clo-env..." (shape var) #\Newline)
(call-next-method)
(svar/Cinfo-clo-env?-set! (local-value var) #t))
;*---------------------------------------------------------------------*/
;* variable-value-setup! ::sexit ... */
;*---------------------------------------------------------------------*/
(define-method (variable-value-setup! value::sexit var)
(widen!::sexit/Cinfo value
(approx (make-type-approx (variable-type var)))))
;*---------------------------------------------------------------------*/
;* variable-value-setup! ::scnst ... */
;*---------------------------------------------------------------------*/
(define-method (variable-value-setup! value::scnst/Cinfo var)
'already-done)
;*---------------------------------------------------------------------*/
;* variable-value-setup! ::scnst ... */
;*---------------------------------------------------------------------*/
;; Set up a Scheme constant (scnst).  Three cases:
;;  - a constant of this module whose class is sfun/sgfun and whose node
;;    is a pre `make-procedure' application: it denotes a closure, so it
;;    gets a procedure-app approximation built from that node;
;;  - any other global constant: a type approximation forced to `top';
;;  - a local constant: a plain type approximation.
(define-method (variable-value-setup! value::scnst var)
   (trace (cfa 5) "Je setup une scnst: " (shape var) " "
	  (shape (scnst-node value)) #\Newline)
   (if (global? var)
       (if (and (eq? (global-module var) *module*)
		(memq (scnst-class value) '(sfun sgfun))
		(pre-make-procedure-app? (scnst-node value)))
	   ;; this variable holds a closure
	   (let ((node (scnst-node value)))
	      (trace (cfa 5) " et en plus, c'est une closure" #\Newline)
	      (node-setup! node)
	      (widen!::scnst/Cinfo value
		 (approx (make-procedure-app-approx node))))
	   (let ((value (widen!::scnst/Cinfo value
			    (approx (make-type-approx (variable-type var))))))
	      (approx-set-top! (scnst/Cinfo-approx value))))
       (widen!::scnst/Cinfo value
	  (approx (make-type-approx (variable-type var))))))
;*---------------------------------------------------------------------*/
;* variable-value-setup! ::cvar ... */
;*---------------------------------------------------------------------*/
(define-method (variable-value-setup! value::cvar var)
(widen!::cvar/Cinfo value
(approx (make-type-approx (variable-type var))))
(when (alloc-type? (variable-type var))
(approx-set-top! (cvar/Cinfo-approx value))))
;*---------------------------------------------------------------------*/
;* node-setup! ::closure ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::closure)
(internal-error "node-setup!" "Unexpected closure" (shape node)))
;*---------------------------------------------------------------------*/
;* node-setup! ::sequence ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::sequence)
(with-access::sequence node (nodes)
(node-setup*! nodes)))
;*---------------------------------------------------------------------*/
;* node-setup! ::sync ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::sync)
(with-access::sync node (body mutex prelock)
(node-setup! mutex)
(node-setup! prelock)
(node-setup! body)))
;*---------------------------------------------------------------------*/
;* node-setup! ::app ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::app)
(trace (cfa 5) "Je setup une app: " (shape node) #\Newline)
(with-access::app node (fun args)
(node-setup*! args)
(let ((variable (var-variable fun)))
(fun-setup! (variable-value variable) variable))))
;*---------------------------------------------------------------------*/
;* fun-setup! ... */
;*---------------------------------------------------------------------*/
(define-generic (fun-setup! fun::fun var)
(if (and (global? var) (not (reshaped-global? var)))
(widen!::reshaped-global var))
#unspecified)
;*---------------------------------------------------------------------*/
;* fun-setup! ::sfun ... */
;*---------------------------------------------------------------------*/
;; Set up a Scheme function: imported globals become extern-sfun/Cinfo
;; (their approximation is `top' when the function is flagged sfun-top?),
;; everything else becomes intern-sfun/Cinfo with a plain type
;; approximation of the variable's type.
(define-method (fun-setup! fun::sfun var)
   (if (and (global? var) (not (reshaped-global? var)))
       (widen!::reshaped-global var))
   (if (and (global? var) (eq? (global-import var) 'import))
       (let ((approx (make-type-approx (global-type var))))
	  (if (sfun-top? fun) (approx-set-top! approx))
	  (widen!::extern-sfun/Cinfo fun
	     (approx approx)))
       (widen!::intern-sfun/Cinfo fun
	  (approx (make-type-approx (variable-type var))))))
;*---------------------------------------------------------------------*/
;* fun-setup! ::cfun ... */
;*---------------------------------------------------------------------*/
(define-method (fun-setup! fun::cfun var)
(if (not (reshaped-global? var))
(widen!::reshaped-global var))
(let ((approx (make-type-approx (global-type var))))
(if (cfun-top? fun) (approx-set-top! approx))
(widen!::cfun/Cinfo fun
(approx approx))))
;*---------------------------------------------------------------------*/
;* node-setup! ::app-ly ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::app-ly)
(with-access::app-ly node (fun arg)
(node-setup! fun)
(node-setup! arg)
(if *optim-cfa-apply-tracking?*
(widen!::app-ly/Cinfo node
(approx (make-empty-approx)))
(begin
(widen!::app-ly/Cinfo node
(approx (make-type-approx *obj*)))
(approx-set-top! (app-ly/Cinfo-approx node))))))
;*---------------------------------------------------------------------*/
;* node-setup! ::funcall ... */
;*---------------------------------------------------------------------*/
;; Set up a computed call (funcall).  With funcall tracking enabled the
;; result and variadic-argument approximations start empty and are grown
;; by the Cfa; otherwise both are pessimistically *obj*, with the
;; variadic approximation additionally forced to `top'.
(define-method (node-setup! node::funcall)
   (with-access::funcall node (fun args)
      (node-setup! fun)
      (node-setup*! args)
      (if *optim-cfa-funcall-tracking?*
	  (widen!::funcall/Cinfo node
	     (approx (make-empty-approx))
	     (va-approx (make-empty-approx)))
	  (begin
	     (widen!::funcall/Cinfo node
		(approx (make-type-approx *obj*))
		(va-approx (make-type-approx *obj*)))
	     (approx-set-top! (funcall/Cinfo-va-approx node))))))
;*---------------------------------------------------------------------*/
;* node-setup! ::pragma ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::pragma)
(with-access::pragma node (expr* type)
(node-setup*! expr*)
(widen!::pragma/Cinfo node
(approx (make-type-approx type)))
(approx-set-top! (pragma/Cinfo-approx node))))
;*---------------------------------------------------------------------*/
* node - setup ! : : ... * /
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::getfield)
(with-access::getfield node (expr* type)
(node-setup*! expr*)
(widen!::getfield/Cinfo node
(approx (make-type-approx type)))
(approx-set-top! (getfield/Cinfo-approx node))))
;*---------------------------------------------------------------------*/
* node - setup ! : : ... * /
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::setfield)
(with-access::setfield node (expr* type)
(node-setup*! expr*)
(widen!::setfield/Cinfo node
(approx (make-type-approx type)))
(approx-set-top! (setfield/Cinfo-approx node))))
;*---------------------------------------------------------------------*/
;* node-setup! ::new ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::new)
(with-access::new node (expr* type)
(node-setup*! expr*)
(widen!::new/Cinfo node
(approx (make-type-approx type)))
(approx-set-top! (new/Cinfo-approx node))))
;*---------------------------------------------------------------------*/
* node - setup ! : : ... * /
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::instanceof)
(with-access::instanceof node (type expr*)
(node-setup*! expr*)
(widen!::instanceof/Cinfo node
(approx (make-type-approx type)))
(approx-set-top! (instanceof/Cinfo-approx node))))
;*---------------------------------------------------------------------*/
;* node-setup! ::cast-null ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::cast-null)
(with-access::cast-null node (type)
(widen!::cast-null/Cinfo node
(approx (make-type-approx type)))))
;*---------------------------------------------------------------------*/
;* node-setup! ::cast ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::cast)
(with-access::cast node (arg)
(node-setup! arg)))
;*---------------------------------------------------------------------*/
;* node-setup! ::setq ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::setq)
(trace (cfa 5) "Je setup un setq: " (shape node) #\Newline)
(with-access::setq node (var value)
(node-setup! value)
(node-setup! var)
(widen!::setq/Cinfo node
(approx (make-type-approx *unspec*)))))
;*---------------------------------------------------------------------*/
;* node-setup! ::conditional ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::conditional)
(with-access::conditional node (test true false)
(node-setup! test)
(node-setup! true)
(node-setup! false)
(widen!::conditional/Cinfo node
(approx (make-empty-approx)))))
;*---------------------------------------------------------------------*/
;* node-setup! ::fail ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::fail)
(with-access::fail node (type proc msg obj)
(node-setup! proc)
(node-setup! msg)
(node-setup! obj)
(widen!::fail/Cinfo node
(approx (make-type-approx *obj*)))))
;*---------------------------------------------------------------------*/
;* node-setup! ::select ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::select)
(with-access::select node (clauses test)
(node-setup! test)
(for-each (lambda (clause)
(node-setup! (cdr clause)))
clauses)
(widen!::select/Cinfo node
(approx (make-empty-approx)))))
;*---------------------------------------------------------------------*/
;* node-setup! ::let-fun ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::let-fun)
(with-access::let-fun node (body locals)
(for-each (lambda (l)
(widen!::reshaped-local l)
(let ((fun (local-value l)))
(for-each (lambda (local)
(widen!::reshaped-local local)
(variable-value-setup! (local-value local)
local))
(sfun-args fun))
(node-setup! (sfun-body fun))))
locals)
(node-setup! body)))
;*---------------------------------------------------------------------*/
;* node-setup! ::let-var ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::let-var)
(with-access::let-var node (body bindings)
(trace (cfa 5) "let-var setup: " (shape node) #\Newline)
(for-each (lambda (binding)
(let ((var (car binding))
(val (cdr binding)))
(variable-value-setup! (local-value var) var)
;; if the variable is read-only we set it a binding
;; value to improve the approximations which require
;; offset (such as make-procedure, procedure-ref)
(node-setup! val)
(widen!::reshaped-local var
(binding-value (if (eq? (local-access var) 'read)
val
#f)))
(trace (cfa 5) "~~~ let-var setup " (shape val)
" " (find-runtime-type val)
#\Newline)
(trace (cfa 5) " " (shape var) " ... " #\Newline)))
bindings)
(trace (cfa 5) "let-var body: " (shape body) #\Newline)
(node-setup! body)
(trace (cfa 5) "<<< let-var setup...\n")))
;*---------------------------------------------------------------------*/
;* node-setup! ::set-ex-it ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::set-ex-it)
(with-access::set-ex-it node (var body)
(node-setup! body)
(node-setup! var)
(widen!::reshaped-local (var-variable var))
(widen!::set-ex-it/Cinfo node
(approx (make-type-approx *obj*)))
(approx-set-top! (set-ex-it/Cinfo-approx node))))
;*---------------------------------------------------------------------*/
;* node-setup! ::jump-ex-it ... */
;*---------------------------------------------------------------------*/
(define-method (node-setup! node::jump-ex-it)
(with-access::jump-ex-it node (exit value)
(node-setup! exit)
(node-setup! value)
(widen!::jump-ex-it/Cinfo node
(approx (make-type-approx *obj*)))
(approx-set-top! (jump-ex-it/Cinfo-approx node))))
;*---------------------------------------------------------------------*/
;* node-setup*! ... */
;*---------------------------------------------------------------------*/
(define (node-setup*! node*)
   ;; Set up every node of the list NODE*, in order.
   (for-each (lambda (n) (node-setup! n)) node*))
| null | https://raw.githubusercontent.com/donaldsonjw/bigloo/a4d06e409d0004e159ce92b9908719510a18aed5/comptime/Cfa/setup.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* We setup the ast for the Cfa. */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* set-initial-approx! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::atom ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::kwote ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::kwote/node ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::var ... */
*---------------------------------------------------------------------*/
this widen! is only for the nice pretting of cfa_show
*---------------------------------------------------------------------*/
* alloc-type? ... */
* ------------------------------------------------------------- */
* This predicate returns #t for all types denoting data */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* variable-value-setup! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* variable-value-setup! ::sfun ... */
*---------------------------------------------------------------------*/
we reach this method when setting up a `make-procedure' call
*---------------------------------------------------------------------*/
* variable-value-setup! ::svar ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* variable-value-setup! ::pre-clo-env ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* variable-value-setup! ::sexit ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* variable-value-setup! ::scnst ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* variable-value-setup! ::scnst ... */
*---------------------------------------------------------------------*/
this variable holds a closure
*---------------------------------------------------------------------*/
* variable-value-setup! ::cvar ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::closure ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::sequence ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::sync ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::app ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* fun-setup! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* fun-setup! ::sfun ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* fun-setup! ::cfun ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::app-ly ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::funcall ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::pragma ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::new ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::cast-null ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::cast ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::setq ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::conditional ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::fail ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::select ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::let-fun ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::let-var ... */
*---------------------------------------------------------------------*/
if the variable is read-only we set it a binding
value to improve the approximations which require
offset (such as make-procedure, procedure-ref)
*---------------------------------------------------------------------*/
* node-setup! ::set-ex-it ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup! ::jump-ex-it ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* node-setup*! ... */
*---------------------------------------------------------------------*/ | * serrano / prgm / project / bigloo / comptime / Cfa / setup.scm * /
* Author : * /
* Creation : Tue Jun 25 14:08:53 1996 * /
* Last change : Mon Nov 11 09:54:14 2013 ( serrano ) * /
* Copyright : 1996 - 2013 , see LICENSE file * /
(module cfa_setup
(include "Tools/trace.sch")
(import engine_param
type_type
type_cache
type_typeof
module_module
tools_shape
tools_error
ast_var
ast_node
cfa_info
cfa_info2
cfa_info3
cfa_approx
cfa_pair)
(export (set-initial-approx! globals)
(generic node-setup! ::node)
(node-setup*! node*)))
(define (set-initial-approx! globals)
(trace cfa "================== initial ===========================\n")
(for-each (lambda (global)
(trace (cfa 5) "set-initial-approx!: " (shape global)
#\Newline)
(let ((fun (global-value global)))
(fun-setup! fun global)
(for-each (lambda (local)
(widen!::reshaped-local local)
(variable-value-setup! (local-value local)
local))
(sfun-args fun)))
(node-setup! (sfun-body (global-value global))))
globals))
(define-generic (node-setup! node::node))
(define-method (node-setup! node::atom)
(with-access::atom node (value)
(widen!::atom/Cinfo node
(approx (make-type-approx (get-type-atom value))))))
(define-method (node-setup! node::kwote)
(with-access::kwote node (value)
(let ((approx (make-type-approx (get-type-kwote value))))
(approx-top?-set! approx #t)
(widen!::kwote/Cinfo node
(approx approx)))))
(define-method (node-setup! kwote::kwote/node)
(with-access::kwote/node kwote (node value)
(node-setup! node)))
(define-method (node-setup! node::var)
(with-access::var node (variable)
(variable-value-setup! (variable-value variable) variable)
(cond
((and (local? variable) (not (reshaped-local? variable)))
(widen!::reshaped-local variable))
((and (global? variable) (not (reshaped-global? variable)))
(widen!::reshaped-global variable)))))
* structures approximated by the cfa . * /
(define (alloc-type? type)
(cond
((eq? type *vector*) #t)
((eq? type *procedure*) #t)
((eq? type *struct*) #t)
((eq? type *pair*) (pair-optim?))
(else #f)))
(define-generic (variable-value-setup! value::value variable::variable))
(define-method (variable-value-setup! value::sfun var)
   ;; Nothing to set up for an sfun value: fun-setup! is invoked
   ;; directly on the second argument of this call.
   ;; (The original file had this note left as a bare, uncommented
   ;; line in code position, which is not valid Scheme.)
   #unspecified)
;; Scheme variables: typed approximation; non-static globals of an
;; allocated type are conservatively forced to `top'.
(define-method (variable-value-setup! value::svar var::variable)
   (let ((typ (variable-type var)))
      (if (global? var)
          (let ((value (widen!::svar/Cinfo value
                          (approx (make-type-approx typ)))))
             (if (and (not (eq? (global-import var) 'static)) (alloc-type? typ))
                 (approx-set-top! (svar/Cinfo-approx value))))
          (widen!::svar/Cinfo value
             (approx (make-type-approx typ))))))

;; Pre-closure environments: regular svar setup plus the clo-env? mark.
(define-method (variable-value-setup! value::pre-clo-env var)
   (trace (cfa 5) "Je set un pre-clo-env..." (shape var) #\Newline)
   (call-next-method)
   (svar/Cinfo-clo-env?-set! (local-value var) #t))

;; Exit values: plain typed approximation.
(define-method (variable-value-setup! value::sexit var)
   (widen!::sexit/Cinfo value
      (approx (make-type-approx (variable-type var)))))

;; Already widened constants: nothing left to do.
(define-method (variable-value-setup! value::scnst/Cinfo var)
   'already-done)
;; Scheme constants: procedure constants defined in the current module
;; and built by a make-procedure application get a precise procedure
;; approximation; other global constants are typed and forced to `top'.
(define-method (variable-value-setup! value::scnst var)
   (trace (cfa 5) "Je setup une scnst: " (shape var) " "
          (shape (scnst-node value)) #\Newline)
   (if (global? var)
       (if (and (eq? (global-module var) *module*)
                (memq (scnst-class value) '(sfun sgfun))
                (pre-make-procedure-app? (scnst-node value)))
           (let ((node (scnst-node value)))
              (trace (cfa 5) " et en plus, c'est une closure" #\Newline)
              (node-setup! node)
              (widen!::scnst/Cinfo value
                 (approx (make-procedure-app-approx node))))
           ;; global constant of unknown provenance: conservative `top'
           (let ((value (widen!::scnst/Cinfo value
                           (approx (make-type-approx (variable-type var))))))
              (approx-set-top! (scnst/Cinfo-approx value))))
       (widen!::scnst/Cinfo value
          (approx (make-type-approx (variable-type var))))))

;; C variables: typed approximation, `top' for allocated types.
(define-method (variable-value-setup! value::cvar var)
   (widen!::cvar/Cinfo value
      (approx (make-type-approx (variable-type var))))
   (when (alloc-type? (variable-type var))
      (approx-set-top! (cvar/Cinfo-approx value))))
;; Closures must have been removed before the CFA runs.
(define-method (node-setup! node::closure)
   (internal-error "node-setup!" "Unexpected closure" (shape node)))

;; Sequences: walk every sub-node.
(define-method (node-setup! node::sequence)
   (with-access::sequence node (nodes)
      (node-setup*! nodes)))

;; Synchronization blocks: walk mutex, prelock and body.
(define-method (node-setup! node::sync)
   (with-access::sync node (body mutex prelock)
      (node-setup! mutex)
      (node-setup! prelock)
      (node-setup! body)))

;; Applications: walk the arguments, then set up the called function.
(define-method (node-setup! node::app)
   (trace (cfa 5) "Je setup une app: " (shape node) #\Newline)
   (with-access::app node (fun args)
      (node-setup*! args)
      (let ((variable (var-variable fun)))
         (fun-setup! (variable-value variable) variable))))

;; Generic: attach approximation info to a function value, reshaping
;; the owning global on the way.
(define-generic (fun-setup! fun::fun var)
   (if (and (global? var) (not (reshaped-global? var)))
       (widen!::reshaped-global var))
   #unspecified)

;; Scheme functions: imported ones get an extern approximation (typed,
;; possibly `top'); locally known ones get an intern approximation.
(define-method (fun-setup! fun::sfun var)
   (if (and (global? var) (not (reshaped-global? var)))
       (widen!::reshaped-global var))
   (if (and (global? var) (eq? (global-import var) 'import))
       (let ((approx (make-type-approx (global-type var))))
          (if (sfun-top? fun) (approx-set-top! approx))
          (widen!::extern-sfun/Cinfo fun
             (approx approx)))
       (widen!::intern-sfun/Cinfo fun
          (approx (make-type-approx (variable-type var))))))

;; C functions: typed approximation, `top' when the cfun says so.
(define-method (fun-setup! fun::cfun var)
   (if (not (reshaped-global? var))
       (widen!::reshaped-global var))
   (let ((approx (make-type-approx (global-type var))))
      (if (cfun-top? fun) (approx-set-top! approx))
      (widen!::cfun/Cinfo fun
         (approx approx))))

;; apply nodes: empty approximation when apply tracking is enabled,
;; otherwise a conservative `top' object approximation.
(define-method (node-setup! node::app-ly)
   (with-access::app-ly node (fun arg)
      (node-setup! fun)
      (node-setup! arg)
      (if *optim-cfa-apply-tracking?*
          (widen!::app-ly/Cinfo node
             (approx (make-empty-approx)))
          (begin
             (widen!::app-ly/Cinfo node
                (approx (make-type-approx *obj*)))
             (approx-set-top! (app-ly/Cinfo-approx node))))))

;; funcall nodes: same scheme as app-ly, with an extra va-approx slot
;; for the variable-arity case.
(define-method (node-setup! node::funcall)
   (with-access::funcall node (fun args)
      (node-setup! fun)
      (node-setup*! args)
      (if *optim-cfa-funcall-tracking?*
          (widen!::funcall/Cinfo node
             (approx (make-empty-approx))
             (va-approx (make-empty-approx)))
          (begin
             (widen!::funcall/Cinfo node
                (approx (make-type-approx *obj*))
                (va-approx (make-type-approx *obj*)))
             (approx-set-top! (funcall/Cinfo-va-approx node))))))

;; Pragmas: opaque C code, so the typed approximation is forced to `top'.
(define-method (node-setup! node::pragma)
   (with-access::pragma node (expr* type)
      (node-setup*! expr*)
      (widen!::pragma/Cinfo node
         (approx (make-type-approx type)))
      (approx-set-top! (pragma/Cinfo-approx node))))
;; node-setup! ::getfield
;; Field reads: typed approximation forced to `top'.
(define-method (node-setup! node::getfield)
   (with-access::getfield node (expr* type)
      (node-setup*! expr*)
      (widen!::getfield/Cinfo node
         (approx (make-type-approx type)))
      (approx-set-top! (getfield/Cinfo-approx node))))
;; node-setup! ::setfield
;; Field writes: typed approximation forced to `top'.
(define-method (node-setup! node::setfield)
   (with-access::setfield node (expr* type)
      (node-setup*! expr*)
      (widen!::setfield/Cinfo node
         (approx (make-type-approx type)))
      (approx-set-top! (setfield/Cinfo-approx node))))

;; Object instantiations: typed approximation forced to `top'.
(define-method (node-setup! node::new)
   (with-access::new node (expr* type)
      (node-setup*! expr*)
      (widen!::new/Cinfo node
         (approx (make-type-approx type)))
      (approx-set-top! (new/Cinfo-approx node))))
;; node-setup! ::instanceof
;; instanceof tests: typed approximation forced to `top'.
(define-method (node-setup! node::instanceof)
   (with-access::instanceof node (type expr*)
      (node-setup*! expr*)
      (widen!::instanceof/Cinfo node
         (approx (make-type-approx type)))
      (approx-set-top! (instanceof/Cinfo-approx node))))

;; null casts: plain typed approximation.
(define-method (node-setup! node::cast-null)
   (with-access::cast-null node (type)
      (widen!::cast-null/Cinfo node
         (approx (make-type-approx type)))))

;; casts are transparent for the CFA: just walk the argument.
(define-method (node-setup! node::cast)
   (with-access::cast node (arg)
      (node-setup! arg)))

;; assignments: walk value and variable; the result is unspecified.
(define-method (node-setup! node::setq)
   (trace (cfa 5) "Je setup un setq: " (shape node) #\Newline)
   (with-access::setq node (var value)
      (node-setup! value)
      (node-setup! var)
      (widen!::setq/Cinfo node
         (approx (make-type-approx *unspec*)))))

;; conditionals: empty approximation, refined later by the fixpoint.
(define-method (node-setup! node::conditional)
   (with-access::conditional node (test true false)
      (node-setup! test)
      (node-setup! true)
      (node-setup! false)
      (widen!::conditional/Cinfo node
         (approx (make-empty-approx)))))

;; error nodes: conservative object approximation.
(define-method (node-setup! node::fail)
   (with-access::fail node (type proc msg obj)
      (node-setup! proc)
      (node-setup! msg)
      (node-setup! obj)
      (widen!::fail/Cinfo node
         (approx (make-type-approx *obj*)))))

;; case/select: walk the test and every clause body.
(define-method (node-setup! node::select)
   (with-access::select node (clauses test)
      (node-setup! test)
      (for-each (lambda (clause)
                   (node-setup! (cdr clause)))
                clauses)
      (widen!::select/Cinfo node
         (approx (make-empty-approx)))))
;; labels/local functions: widen each local function and its formals,
;; walk the function bodies, then the let-fun body.
(define-method (node-setup! node::let-fun)
   (with-access::let-fun node (body locals)
      (for-each (lambda (l)
                   (widen!::reshaped-local l)
                   (let ((fun (local-value l)))
                      (for-each (lambda (local)
                                   (widen!::reshaped-local local)
                                   (variable-value-setup! (local-value local)
                                                          local))
                                (sfun-args fun))
                      (node-setup! (sfun-body fun))))
                locals)
      (node-setup! body)))

;; let bindings: set up each bound variable and its value; read-only
;; bindings remember their defining expression for later propagation.
(define-method (node-setup! node::let-var)
   (with-access::let-var node (body bindings)
      (trace (cfa 5) "let-var setup: " (shape node) #\Newline)
      (for-each (lambda (binding)
                   (let ((var (car binding))
                         (val (cdr binding)))
                      (variable-value-setup! (local-value var) var)
                      (node-setup! val)
                      (widen!::reshaped-local var
                         (binding-value (if (eq? (local-access var) 'read)
                                            val
                                            #f)))
                      (trace (cfa 5) "~~~ let-var setup " (shape val)
                             " " (find-runtime-type val)
                             #\Newline)
                      (trace (cfa 5) " " (shape var) " ... " #\Newline)))
                bindings)
      (trace (cfa 5) "let-var body: " (shape body) #\Newline)
      (node-setup! body)
      (trace (cfa 5) "<<< let-var setup...\n")))

;; exception handler installation: conservative object approximation.
(define-method (node-setup! node::set-ex-it)
   (with-access::set-ex-it node (var body)
      (node-setup! body)
      (node-setup! var)
      (widen!::reshaped-local (var-variable var))
      (widen!::set-ex-it/Cinfo node
         (approx (make-type-approx *obj*)))
      (approx-set-top! (set-ex-it/Cinfo-approx node))))

;; escaping jump: conservative object approximation.
(define-method (node-setup! node::jump-ex-it)
   (with-access::jump-ex-it node (exit value)
      (node-setup! exit)
      (node-setup! value)
      (widen!::jump-ex-it/Cinfo node
         (approx (make-type-approx *obj*)))
      (approx-set-top! (jump-ex-it/Cinfo-approx node))))
;; Run node-setup! over a list of nodes.
(define (node-setup*! node*)
   (for-each (lambda (node) (node-setup! node)) node*))
|
75a63d4d61f0212d4358ea567840862e282c81e158f82453245aeb313ebfe867 | treep/hactors | Tests.hs |
module Control.Concurrent.Actor.Tests where
import Control.Concurrent.Actor hiding ( receive, spawnReceive )
import Control.Concurrent.Actor.Debug
-- -----------------------------------------------------------------------------
-- * @receive@ is non busy
-- | The actor answers two known queries and one unknown one; the
-- transcript below shows that @receive@ blocks between messages
-- instead of busy-waiting.
testReceive1 :: IO ()
testReceive1 = do
  let handler msg
        | msg == "ok?" = putStrLn "ok"
        | otherwise    = putStrLn "nothing"
  act <- spawnReceive handler
  mapM_ (act !) ["ok?", "ok?", "what?"]
  return ()
-- > testReceive1
-- ThreadId 39 : receiving ...
-- ok
-- ThreadId 39 : receiving ...
-- ok
-- ThreadId 39 : receiving ...
-- nothing
-- ThreadId 39 : receiving ...
-- Thus, the @receive@ function don't perform busy waiting.
-- -----------------------------------------------------------------------------
-- * @tolerant@ handle exceptions
-- | @tolerant@ shields the actor from the exception raised by
-- @tail []@, so later messages are still processed.
testTolerant1 :: IO ()
testTolerant1 = do
  act <- spawnReceive $ \msg ->
    tolerant $ putStrLn (if msg then "ok" else tail [])
  mapM_ (act !) [False, True, True]
  return ()
-- > testTolerant1
-- ThreadId 31 : receiving ...
-- ThreadId 31 : receiving ...
-- ok
-- ThreadId 31 : receiving ...
-- ok
-- ThreadId 31 : receiving ...
| null | https://raw.githubusercontent.com/treep/hactors/149730838d67414c86ee4a1d741054598ab148f6/Control/Concurrent/Actor/Tests.hs | haskell | -----------------------------------------------------------------------------
* @receive@ is non busy
> testReceive1
ok
ok
nothing
Thus, the @receive@ function don't perform busy waiting.
-----------------------------------------------------------------------------
* @tolerant@ handle exceptions
> testTolerant1
ok
ok |
module Control.Concurrent.Actor.Tests where
import Control.Concurrent.Actor hiding ( receive, spawnReceive )
import Control.Concurrent.Actor.Debug
testReceive1 :: IO ()
testReceive1 = do
act <- spawnReceive $
\msg -> case msg of
"ok?" -> putStrLn "ok"
_ -> putStrLn "nothing"
act ! "ok?"
act ! "ok?"
act ! "what?"
return ()
ThreadId 39 : receiving ...
ThreadId 39 : receiving ...
ThreadId 39 : receiving ...
ThreadId 39 : receiving ...
testTolerant1 :: IO ()
testTolerant1 = do
act <- spawnReceive $
\msg -> tolerant $ if msg then putStrLn "ok" else putStrLn $ tail []
act ! False
act ! True
act ! True
return ()
ThreadId 31 : receiving ...
ThreadId 31 : receiving ...
ThreadId 31 : receiving ...
ThreadId 31 : receiving ...
|
47155eec2db195348da0ec72e1700f6f4210ce7d86871897047752d59829b054 | apache/couchdb-couch | couch_compress_tests.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_compress_tests).
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
%% Canonical test term plus its exact binary encodings under each
%% compression scheme supported by couch_compress.
-define(TERM, {[{a, 1}, {b, 2}, {c, 3}, {d, 4}, {e, 5}]}).
%% term_to_binary(?TERM), uncompressed external term format (tag 131).
-define(NONE, <<131,104,1,108,0,0,0,5,104,2,100,0,1,97,97,1,
    104,2,100,0,1,98,97,2,104,2,100,0,1,99,97,3,104,2,100,0,
    1,100,97,4,104,2,100,0,1,101,97,5,106>>).
%% deflate-compressed encoding (compression level 9).
-define(DEFLATE, <<131,80,0,0,0,48,120,218,203,96,204,97,96,
    96,96,205,96,74,97,96,76,76,100,4,211,73,137,76,96,58,57,
    145,25,76,167,36,178,128,233,212,68,214,44,0,212,169,9,51>>).
%% snappy-compressed encoding.
-define(SNAPPY, <<1,49,64,131,104,1,108,0,0,0,5,104,2,100,0,
    1,97,97,1,104,1,8,8,98,97,2,5,8,8,99,97,3,5,8,44,100,97,
    4,104,2,100,0,1,101,97,5,106>>).
%% Deliberately invalid payload (unknown compression tag).
-define(CORRUPT, <<2,12,85,06>>).
%% compress/2 must produce the exact expected encoding for each scheme.
compress_test_() ->
    [
        ?_assertEqual(?NONE, couch_compress:compress(?TERM, none)),
        ?_assertEqual(?DEFLATE, couch_compress:compress(?TERM, {deflate, 9})),
        ?_assertEqual(?SNAPPY, couch_compress:compress(?TERM, snappy))
    ].

%% decompress/1 must round-trip every encoding back to the original
%% term and raise on corrupt input.
decompress_test_() ->
    [
        ?_assertEqual(?TERM, couch_compress:decompress(?NONE)),
        ?_assertEqual(?TERM, couch_compress:decompress(?DEFLATE)),
        ?_assertEqual(?TERM, couch_compress:decompress(?SNAPPY)),
        ?_assertError(invalid_compression, couch_compress:decompress(?CORRUPT))
    ].

%% compress/2 on already-compressed input must re-encode to the target
%% scheme.
recompress_test_() ->
    [
        ?_assertEqual(?DEFLATE, couch_compress:compress(?NONE, {deflate, 9})),
        ?_assertEqual(?SNAPPY, couch_compress:compress(?NONE, snappy)),
        ?_assertEqual(?NONE, couch_compress:compress(?DEFLATE, none)),
        ?_assertEqual(?SNAPPY, couch_compress:compress(?DEFLATE, snappy)),
        ?_assertEqual(?NONE, couch_compress:compress(?SNAPPY, none)),
        ?_assertEqual(?DEFLATE, couch_compress:compress(?SNAPPY, {deflate, 9}))
    ].

%% is_compressed/2 must recognise exactly the matching scheme and
%% raise on corrupt input.
is_compressed_test_() ->
    [
        ?_assert(couch_compress:is_compressed(?NONE, none)),
        ?_assert(couch_compress:is_compressed(?DEFLATE, {deflate, 9})),
        ?_assert(couch_compress:is_compressed(?SNAPPY, snappy)),
        ?_assertNot(couch_compress:is_compressed(?NONE, {deflate, 0})),
        ?_assertNot(couch_compress:is_compressed(?NONE, {deflate, 9})),
        ?_assertNot(couch_compress:is_compressed(?NONE, snappy)),
        ?_assertNot(couch_compress:is_compressed(?DEFLATE, none)),
        ?_assertNot(couch_compress:is_compressed(?DEFLATE, snappy)),
        ?_assertNot(couch_compress:is_compressed(?SNAPPY, none)),
        ?_assertNot(couch_compress:is_compressed(?SNAPPY, {deflate, 9})),
        ?_assertError(invalid_compression,
            couch_compress:is_compressed(?CORRUPT, none)),
        ?_assertError(invalid_compression,
            couch_compress:is_compressed(?CORRUPT, {deflate, 9})),
        ?_assertError(invalid_compression,
            couch_compress:is_compressed(?CORRUPT, snappy))
    ].
| null | https://raw.githubusercontent.com/apache/couchdb-couch/21c8d37ac6ee1a7fed1de1f54f95a4d3cd9f5248/test/couch_compress_tests.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(couch_compress_tests).
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-define(TERM, {[{a, 1}, {b, 2}, {c, 3}, {d, 4}, {e, 5}]}).
-define(NONE, <<131,104,1,108,0,0,0,5,104,2,100,0,1,97,97,1,
104,2,100,0,1,98,97,2,104,2,100,0,1,99,97,3,104,2,100,0,
1,100,97,4,104,2,100,0,1,101,97,5,106>>).
-define(DEFLATE, <<131,80,0,0,0,48,120,218,203,96,204,97,96,
96,96,205,96,74,97,96,76,76,100,4,211,73,137,76,96,58,57,
145,25,76,167,36,178,128,233,212,68,214,44,0,212,169,9,51>>).
-define(SNAPPY, <<1,49,64,131,104,1,108,0,0,0,5,104,2,100,0,
1,97,97,1,104,1,8,8,98,97,2,5,8,8,99,97,3,5,8,44,100,97,
4,104,2,100,0,1,101,97,5,106>>).
-define(CORRUPT, <<2,12,85,06>>).
compress_test_() ->
[
?_assertEqual(?NONE, couch_compress:compress(?TERM, none)),
?_assertEqual(?DEFLATE, couch_compress:compress(?TERM, {deflate, 9})),
?_assertEqual(?SNAPPY, couch_compress:compress(?TERM, snappy))
].
decompress_test_() ->
[
?_assertEqual(?TERM, couch_compress:decompress(?NONE)),
?_assertEqual(?TERM, couch_compress:decompress(?DEFLATE)),
?_assertEqual(?TERM, couch_compress:decompress(?SNAPPY)),
?_assertError(invalid_compression, couch_compress:decompress(?CORRUPT))
].
recompress_test_() ->
[
?_assertEqual(?DEFLATE, couch_compress:compress(?NONE, {deflate, 9})),
?_assertEqual(?SNAPPY, couch_compress:compress(?NONE, snappy)),
?_assertEqual(?NONE, couch_compress:compress(?DEFLATE, none)),
?_assertEqual(?SNAPPY, couch_compress:compress(?DEFLATE, snappy)),
?_assertEqual(?NONE, couch_compress:compress(?SNAPPY, none)),
?_assertEqual(?DEFLATE, couch_compress:compress(?SNAPPY, {deflate, 9}))
].
is_compressed_test_() ->
[
?_assert(couch_compress:is_compressed(?NONE, none)),
?_assert(couch_compress:is_compressed(?DEFLATE, {deflate, 9})),
?_assert(couch_compress:is_compressed(?SNAPPY, snappy)),
?_assertNot(couch_compress:is_compressed(?NONE, {deflate, 0})),
?_assertNot(couch_compress:is_compressed(?NONE, {deflate, 9})),
?_assertNot(couch_compress:is_compressed(?NONE, snappy)),
?_assertNot(couch_compress:is_compressed(?DEFLATE, none)),
?_assertNot(couch_compress:is_compressed(?DEFLATE, snappy)),
?_assertNot(couch_compress:is_compressed(?SNAPPY, none)),
?_assertNot(couch_compress:is_compressed(?SNAPPY, {deflate, 9})),
?_assertError(invalid_compression,
couch_compress:is_compressed(?CORRUPT, none)),
?_assertError(invalid_compression,
couch_compress:is_compressed(?CORRUPT, {deflate, 9})),
?_assertError(invalid_compression,
couch_compress:is_compressed(?CORRUPT, snappy))
].
|
4309fc34e5cb718367f593c7d651274e8589af86215da93fd6905ade0508af7e | martijnbastiaan/doctest-parallel | Foo.hs | # LANGUAGE TemplateHaskell #
module TemplateHaskell.Foo where
import Language.Haskell.TH
import Text.Printf
-- | Report an error.
--
-- >>> :set -XTemplateHaskell
-- >>> $(logError "Something bad happened!")
-- ERROR <interactive>: Something bad happened!
-- | Splice that prints an error message prefixed with the file name of
-- the splice site (taken from the compile-time 'location').
logError :: String -> Q Exp
logError msg = do
  loc <- location
  let s :: String
      s = printf "ERROR %s: %s" (loc_filename loc) msg
  [| putStrLn s |]
| null | https://raw.githubusercontent.com/martijnbastiaan/doctest-parallel/f70d6a1c946cc0ada88571b90a39a7cd4d065452/test/integration/TemplateHaskell/Foo.hs | haskell | | Report an error.
>>> :set -XTemplateHaskell
ERROR <interactive>: Something bad happened! | # LANGUAGE TemplateHaskell #
module TemplateHaskell.Foo where
import Language.Haskell.TH
import Text.Printf
> > > $ ( logError " Something bad happened ! " )
logError :: String -> Q Exp
logError msg = do
loc <- location
let s = (printf "ERROR %s: %s" (loc_filename loc) msg) :: String
[| putStrLn s |]
|
d91be8e362d04b1a04004158f962985c75c12a9f5f78623c903dfb69c20d9b7f | fortytools/holumbus | Search.hs | -- ----------------------------------------------------------------------------
{- |
  Module     : SimpleSearch
  Copyright  : Copyright (C) 2007, 2008 Timo B. Huebel
  License    : MIT

  Maintainer : Timo B. Huebel ()
  Stability  : experimental
  Portability: portable
  Version    : 0.5

  A simple example of Holumbus, providing a command line search with the
  default query language.
-}
-- ----------------------------------------------------------------------------
{-# OPTIONS -fno-warn-type-defaults #-}
module Main where
import System.IO
import System.Environment
import System.Exit
import System.Console.Editline.Readline
import System.Console.GetOpt
import System.CPUTime
import Text.Printf
import Control.Parallel.Strategies
import Char
import Data.Maybe
import Data.Function
import Data.Binary () -- nothing used
import Text.XML.HXT.Arrow.Pickle () -- nothing used
import Control.Monad
import qualified Data.List as L
import qualified Data.Map as M
import qualified Data.IntMap as IM
import qualified Data.IntSet as IS
import Text.XML.HXT.DOM.Unicode
import Holumbus.Index.Inverted.Memory (Inverted)
import Holumbus.Index.Inverted.OneFile (Persistent)
import Holumbus.Index.Documents (Documents)
import Holumbus.Index.Common
import Holumbus.Query.Processor
import Holumbus.Query.Ranking
import Holumbus.Query.Fuzzy
import Holumbus.Query.Result
import Holumbus.Query.Language.Grammar
import Holumbus.Query.Language.Parser
import Holumbus.Query.Distribution.Protocol
import Holumbus.Query.Distribution.Client
import Hayoo.Common
-- | Command-line options recognised by the program.
data Flag = Index String      -- ^ Load the index from this file.
          | Documents String  -- ^ Load the documents from this file.
          | Server String     -- ^ Distribute queries to this host.
          | Compress          -- ^ Compress transmitted results.
          | Verbose           -- ^ Be more verbose.
          | Version           -- ^ Print the version and exit.
          | Help deriving (Show, Eq)

-- | Program version string.
version :: String
version = "0.5"
-- | Parse the command line and start either a local search session
-- (index file given) or a distributed one (query servers given).
main :: IO ()
main =
  do argv <- getArgs
     flags <- commandLineOpts argv
     -- informational flags terminate immediately
     if Version `elem` flags then (putStrLn version) >> (exitWith ExitSuccess) else return ()
     if Help `elem` flags then usage [] >> (exitWith ExitSuccess) else return ()
     verbose <- return (Verbose `elem` flags)
     compress <- return (Compress `elem` flags)
     doc <- return (filter isDocuments flags)
     if L.null doc then usage ["No documents file given!\n"] else return ()
     if length doc > 1 then usage ["Only one documents file allowed!\n"] else return ()
     idx <- return (filter isIndex flags)
     srv <- return (filter isServer flags)
     -- local and remote modes are mutually exclusive
     if not (L.null idx) && not (L.null srv) then usage ["Cannot use local index and remote index at the same time!\n"] else return ()
     if L.null idx
       then do
         if L.null srv then usage ["No query server specified!\n"] else return ()
         startupDistributed verbose (head doc) (map fromServer srv) compress
       else do
         if L.null idx then usage ["No index file given!\n"] else return ()
         if length idx > 1 then usage ["Only one index file allowed!\n"] else return ()
         if compress then usage ["Compression not avaliable for local index!\n"] else return ()
         startupLocal verbose (head idx) (head doc)
-- | Does this flag carry an index file?
isIndex :: Flag -> Bool
isIndex f = case f of
  Index _ -> True
  _       -> False

-- | Does this flag carry a documents file?
isDocuments :: Flag -> Bool
isDocuments f = case f of
  Documents _ -> True
  _           -> False

-- | Does this flag name a query server?
isServer :: Flag -> Bool
isServer f = case f of
  Server _ -> True
  _        -> False

-- | Extract the host from a 'Server' flag (empty string otherwise).
fromServer :: Flag -> Server
fromServer f = case f of
  Server s -> s
  _        -> ""
-- | Walk through the whole index to remove laziness.
walkIndex :: HolIndex i => i -> Integer
walkIndex i = L.foldl' (\r c -> L.foldl' sumPositions r (allWords i c)) 0 (contexts i)
where
sumPositions r (_, o) = IM.fold (\d r' -> IS.fold (\p r'' -> r'' + fromIntegral p) r' d) r o
-- | Startup using local index.
startupLocal :: Bool -> Flag -> Flag -> IO ()
startupLocal v (Index idxFile) (Documents docFile) =
do
putStrLn "Loading index..."
idx <- (loadFromFile idxFile) :: IO Persistent
return (rnf idx)
if v then putStrLn ("Collected position total of " ++ (show $ walkIndex idx)) else return ()
putStrLn ("Loaded " ++ show (sizeWords idx) ++ " words")
putStrLn "Loading documents..."
doc <- (loadFromFile docFile) :: IO (Documents FunctionInfo)
putStrLn ("Loaded " ++ show (sizeDocs doc) ++ " documents ")
answerQueries (localQuery idx doc) v
startupLocal _ _ _ = usage ["Internal error!\n"]
-- | Startup using remote index.
startupDistributed :: Bool -> Flag -> [Server] -> Bool -> IO ()
startupDistributed v (Documents docFile) srvs compr =
do
putStrLn "Loading documents..."
doc <- (loadFromFile docFile) :: IO (Documents Int)
return (rnf doc)
putStrLn ("Loaded " ++ show (sizeDocs doc) ++ " documents ")
answerQueries (distributedQuery doc srvs compr) v
startupDistributed _ _ _ _ = usage ["Internal error!\n"]
-- | Create the configuration for the query processor.
processCfg :: ProcessConfig
processCfg = ProcessConfig (FuzzyConfig True True 1.0 germanReplacements) True 100
-- | Perform a query on a local index.
localQuery :: (HolIndex i, HolDocuments d a) => i -> d a -> Query -> IO (Result a)
localQuery i d q = return (processQuery processCfg i d q)
-- | Perform a query on a remote index.
distributedQuery :: HolDocuments d a => d a -> [Server] -> Bool -> Query -> IO (Result a)
distributedQuery d s c q = processDistributed cfg d q
where
cfg = DistributedConfig s c processCfg
-- | Print usage information; exits successfully when called without
-- errors, otherwise reports them on stderr and exits with failure.
usage :: [String] -> IO a
usage errs =
  if L.null errs
    then do hPutStrLn stdout use
            exitWith ExitSuccess
    else do hPutStrLn stderr (concat errs ++ "\n" ++ use)
            exitWith (ExitFailure (-1))
  where
  header = "SimpleSearch - A simple command-line search using the Holumbus library.\n\n" ++
           "Usage: SimpleSearch [OPTIONS]"
  use = usageInfo header options
-- | Parse command-line arguments into flags; any parse error or
-- stray positional argument aborts via 'usage'.
commandLineOpts :: [String] -> IO [Flag]
commandLineOpts argv =
  case getOpt Permute options argv of
    (o, [], [] ) -> return o
    (_, _, errs) -> usage errs
-- | GetOpt descriptors for all supported command-line options.
options :: [OptDescr Flag]
options = [ Option "i" ["index"] (ReqArg Index "FILE") "Loads index from FILE"
          , Option "d" ["documents"] (ReqArg Documents "FILE") "Loads documents from FILE"
          , Option "s" ["server"] (ReqArg Server "HOST") "Distribute queries using HOST"
          , Option "c" ["compress"] (NoArg Compress) "Use compression for transmitting results"
          , Option "v" ["verbose"] (NoArg Verbose) "Be more verbose"
          , Option "V" ["version"] (NoArg Version) "Output version and exit"
          , Option "?" ["help"] (NoArg Help) "Output this help and exit"
          ]
-- | Read-eval-print loop: prompt for a query, feed it through the
-- supplied processor @f@, rank the result and print it. Lines
-- beginning with @:@ are internal commands.
answerQueries ::(Query -> IO (Result a)) -> Bool -> IO ()
answerQueries f verbose =
  do q <- readline ("Enter query (type :? for help) > ")
     if isNothing q then answerQueries f verbose else
       do n <- return (fst $ utf8ToUnicode (fromJust q))
          addHistory n
          answerQueries' n
  where
  answerQueries' :: String -> IO ()
  answerQueries' "" = answerQueries f verbose
  answerQueries' (':':xs) =
    do internalCommand xs
       answerQueries f verbose
  answerQueries' q =
    do pr <- return (parseQuery q)
       if verbose then putStrLn ("Query: \n" ++ (show pr) ++ "\n") else return ()
       either printError makeQuery pr
       answerQueries f verbose
    where
    printError err = putStrLn ("Problem parsing query: " ++ err)
    makeQuery pq =
      do t1 <- getCPUTime
         r <- f pq -- This is where the magic happens!
         rr <- return (rank rankCfg r)
         printDocHits (docHits rr)
         putStrLn ""
         printWordHits (wordHits rr)
         t2 <- getCPUTime
         d <- return ((fromIntegral (t2 - t1) / 1000000000000) :: Float)
         ds <- return (printf "%.4f" d)
         putStrLn ""
         putStrLn ("Query processed in " ++ ds ++ " sec")
      where
      -- ranking gives more weight to hits in titles and keywords
      rankCfg = RankConfig (docRankWeightedByCount weights) (wordRankWeightedByCount weights)
        where
        weights = [("title", 0.8), ("keywords", 0.6), ("headlines", 0.4), ("content", 0.2)]
-- | Dispatch a @:@-prefixed command: @:q@ quits, @:?@ prints help,
-- anything else is reported as unknown.
internalCommand :: String -> IO ()
internalCommand cmd = case cmd of
  "q" -> exitWith ExitSuccess
  "?" -> putStrLn "" >> printHelp
  _   -> putStrLn "Unknown command!"
-- | Print the document hits sorted by score, then the hit count.
printDocHits :: DocHits a -> IO ()
printDocHits h =
  do putStrLn "Result:"
     printHits' (L.sortBy (compare `on` (docScore . fst . snd)) $ IM.toList h)
     putStrLn ""
     putStrLn ("Found " ++ (show (IM.size h)) ++ " documents")
  where
  printHits' [] = return ()
  printHits' ((_, (di, _)):xs) =
    do putStr (title $ document di)
       putStr " Score: "
       putStrLn (show $ docScore di)
       putStrLn (uri $ document di)
       printHits' xs
       return ()
-- | Print the possible completions with their occurrence counts.
printWordHits :: WordHits -> IO ()
printWordHits h =
  do putStrLn "Completions:"
     d <- return (L.sortBy (compare `on` snd) (map (\(c, (_, o)) -> (c, M.fold (\m r -> r + IM.size m) 0 o)) (M.toList h)))
     putStrLn (foldr (\(c, s) r -> r ++ c ++ " (" ++ (show s) ++ ") ") "" d)
     putStrLn ""
     putStrLn ("Found " ++ (show (M.size h)) ++ " possible completions")
-- | Print an explanation of the query language and the REPL commands.
printHelp :: IO ()
printHelp =
  do putStrLn "Holumbus treats single words as prefix terms and will give you possible completions."
     putStrLn "Words are interpreted case insensitive. Phrases and exact matches (case-sensitive)"
     putStrLn "can be specified by using quotes (i.e. \"Foo Bar\" will match this exact sequence)."
     putStrLn "Terms just separated by space will be treated implicitly as AND terms."
     putStrLn "Other operators have to be specified explicitly. Avaliable operators are: AND, OR, NOT"
     putStrLn "Priority can be influenced by round parantheses. If unsure about spelling, a single"
     putStrLn "word can be preceeded by ~ to make a fuzzy query."
     putStrLn "The contexts to search can be restricted with the : operator (seperate them with , )."
     putStrLn "Example: firstcontext,secondcontext:(foo OR bar) NOT foobar"
     putStrLn "This will search for documents containing \"foo\" or \"bar\" in the contexts named"
     putStrLn "\"firstcontext\" and \"secondcontext\" and no \"foobar\" in the all contexts."
     putStrLn ""
     putStrLn "Use :q to exit and :? to show this help."
     return ()
| null | https://raw.githubusercontent.com/fortytools/holumbus/4b2f7b832feab2715a4d48be0b07dca018eaa8e8/searchengine/examples/hayoo/source/Search.hs | haskell | ----------------------------------------------------------------------------
----------------------------------------------------------------------------
# OPTIONS -fno-warn-type-defaults #
nothing used
nothing used
| Walk through the whole index to remove laziness.
| Startup using local index.
| Startup using remote index.
| Create the configuration for the query processor.
| Perform a query on a local index.
| Perform a query on a remote index.
This is where the magic happens! |
|
Module : SimpleSearch
Copyright : Copyright ( C ) 2007 , 2008
License : MIT
Maintainer : ( )
Stability : experimental
Portability : portable
Version : 0.5
A simple example of Holumbus , providing a command line search with the
default query language .
Module : SimpleSearch
Copyright : Copyright (C) 2007, 2008 Timo B. Huebel
License : MIT
Maintainer : Timo B. Huebel ()
Stability : experimental
Portability: portable
Version : 0.5
A simple example of Holumbus, providing a command line search with the
default query language.
-}
module Main where
import System.IO
import System.Environment
import System.Exit
import System.Console.Editline.Readline
import System.Console.GetOpt
import System.CPUTime
import Text.Printf
import Control.Parallel.Strategies
import Char
import Data.Maybe
import Data.Function
import Control.Monad
import qualified Data.List as L
import qualified Data.Map as M
import qualified Data.IntMap as IM
import qualified Data.IntSet as IS
import Text.XML.HXT.DOM.Unicode
import Holumbus.Index.Inverted.Memory (Inverted)
import Holumbus.Index.Inverted.OneFile (Persistent)
import Holumbus.Index.Documents (Documents)
import Holumbus.Index.Common
import Holumbus.Query.Processor
import Holumbus.Query.Ranking
import Holumbus.Query.Fuzzy
import Holumbus.Query.Result
import Holumbus.Query.Language.Grammar
import Holumbus.Query.Language.Parser
import Holumbus.Query.Distribution.Protocol
import Holumbus.Query.Distribution.Client
import Hayoo.Common
data Flag = Index String
| Documents String
| Server String
| Compress
| Verbose
| Version
| Help deriving (Show, Eq)
version :: String
version = "0.5"
main :: IO ()
main =
do
argv <- getArgs
flags <- commandLineOpts argv
if Version `elem` flags then (putStrLn version) >> (exitWith ExitSuccess) else return ()
if Help `elem` flags then usage [] >> (exitWith ExitSuccess) else return ()
verbose <- return (Verbose `elem` flags)
compress <- return (Compress `elem` flags)
doc <- return (filter isDocuments flags)
if L.null doc then usage ["No documents file given!\n"] else return ()
if length doc > 1 then usage ["Only one documents file allowed!\n"] else return ()
idx <- return (filter isIndex flags)
srv <- return (filter isServer flags)
if not (L.null idx) && not (L.null srv) then usage ["Cannot use local index and remote index at the same time!\n"] else return ()
if L.null idx then do
if L.null srv then usage ["No query server specified!\n"] else return ()
startupDistributed verbose (head doc) (map fromServer srv) compress
else do
if L.null idx then usage ["No index file given!\n"] else return ()
if length idx > 1 then usage ["Only one index file allowed!\n"] else return ()
if compress then usage ["Compression not avaliable for local index!\n"] else return ()
startupLocal verbose (head idx) (head doc)
isIndex :: Flag -> Bool
isIndex (Index _) = True
isIndex _ = False
isDocuments :: Flag -> Bool
isDocuments (Documents _) = True
isDocuments _ = False
isServer :: Flag -> Bool
isServer (Server _) = True
isServer _ = False
fromServer :: Flag -> Server
fromServer (Server s) = s
fromServer _ = ""
walkIndex :: HolIndex i => i -> Integer
walkIndex i = L.foldl' (\r c -> L.foldl' sumPositions r (allWords i c)) 0 (contexts i)
where
sumPositions r (_, o) = IM.fold (\d r' -> IS.fold (\p r'' -> r'' + fromIntegral p) r' d) r o
startupLocal :: Bool -> Flag -> Flag -> IO ()
startupLocal v (Index idxFile) (Documents docFile) =
do
putStrLn "Loading index..."
idx <- (loadFromFile idxFile) :: IO Persistent
return (rnf idx)
if v then putStrLn ("Collected position total of " ++ (show $ walkIndex idx)) else return ()
putStrLn ("Loaded " ++ show (sizeWords idx) ++ " words")
putStrLn "Loading documents..."
doc <- (loadFromFile docFile) :: IO (Documents FunctionInfo)
putStrLn ("Loaded " ++ show (sizeDocs doc) ++ " documents ")
answerQueries (localQuery idx doc) v
startupLocal _ _ _ = usage ["Internal error!\n"]
startupDistributed :: Bool -> Flag -> [Server] -> Bool -> IO ()
startupDistributed v (Documents docFile) srvs compr =
do
putStrLn "Loading documents..."
doc <- (loadFromFile docFile) :: IO (Documents Int)
return (rnf doc)
putStrLn ("Loaded " ++ show (sizeDocs doc) ++ " documents ")
answerQueries (distributedQuery doc srvs compr) v
startupDistributed _ _ _ _ = usage ["Internal error!\n"]
processCfg :: ProcessConfig
processCfg = ProcessConfig (FuzzyConfig True True 1.0 germanReplacements) True 100
localQuery :: (HolIndex i, HolDocuments d a) => i -> d a -> Query -> IO (Result a)
localQuery i d q = return (processQuery processCfg i d q)
distributedQuery :: HolDocuments d a => d a -> [Server] -> Bool -> Query -> IO (Result a)
distributedQuery d s c q = processDistributed cfg d q
where
cfg = DistributedConfig s c processCfg
usage :: [String] -> IO a
usage errs =
if L.null errs then do
hPutStrLn stdout use
exitWith ExitSuccess
else do
hPutStrLn stderr (concat errs ++ "\n" ++ use)
exitWith (ExitFailure (-1))
where
header = "SimpleSearch - A simple command-line search using the Holumbus library.\n\n" ++
"Usage: SimpleSearch [OPTIONS]"
use = usageInfo header options
commandLineOpts :: [String] -> IO [Flag]
commandLineOpts argv =
case getOpt Permute options argv of
(o, [], [] ) -> return o
(_, _, errs) -> usage errs
options :: [OptDescr Flag]
options = [ Option "i" ["index"] (ReqArg Index "FILE") "Loads index from FILE"
, Option "d" ["documents"] (ReqArg Documents "FILE") "Loads documents from FILE"
, Option "s" ["server"] (ReqArg Server "HOST") "Distribute queries using HOST"
, Option "c" ["compress"] (NoArg Compress) "Use compression for transmitting results"
, Option "v" ["verbose"] (NoArg Verbose) "Be more verbose"
, Option "V" ["version"] (NoArg Version) "Output version and exit"
, Option "?" ["help"] (NoArg Help) "Output this help and exit"
]
answerQueries ::(Query -> IO (Result a)) -> Bool -> IO ()
answerQueries f verbose =
do
q <- readline ("Enter query (type :? for help) > ")
if isNothing q then answerQueries f verbose else
do
n <- return (fst $ utf8ToUnicode (fromJust q))
addHistory n
answerQueries' n
where
answerQueries' :: String -> IO ()
answerQueries' "" = answerQueries f verbose
answerQueries' (':':xs) =
do
internalCommand xs
answerQueries f verbose
answerQueries' q =
do
pr <- return (parseQuery q)
if verbose then putStrLn ("Query: \n" ++ (show pr) ++ "\n") else return ()
either printError makeQuery pr
answerQueries f verbose
where
printError err = putStrLn ("Problem parsing query: " ++ err)
makeQuery pq =
do
t1 <- getCPUTime
rr <- return (rank rankCfg r)
printDocHits (docHits rr)
putStrLn ""
printWordHits (wordHits rr)
t2 <- getCPUTime
d <- return ((fromIntegral (t2 - t1) / 1000000000000) :: Float)
ds <- return (printf "%.4f" d)
putStrLn ""
putStrLn ("Query processed in " ++ ds ++ " sec")
where
rankCfg = RankConfig (docRankWeightedByCount weights) (wordRankWeightedByCount weights)
where
weights = [("title", 0.8), ("keywords", 0.6), ("headlines", 0.4), ("content", 0.2)]
internalCommand :: String -> IO ()
internalCommand "q" = exitWith ExitSuccess
internalCommand "?" =
do
putStrLn ""
printHelp
internalCommand _ =
do
putStrLn "Unknown command!"
printDocHits :: DocHits a -> IO ()
printDocHits h =
do
putStrLn "Result:"
printHits' (L.sortBy (compare `on` (docScore . fst . snd)) $ IM.toList h)
putStrLn ""
putStrLn ("Found " ++ (show (IM.size h)) ++ " documents")
where
printHits' [] = return ()
printHits' ((_, (di, _)):xs) =
do
putStr (title $ document di)
putStr " Score: "
putStrLn (show $ docScore di)
putStrLn (uri $ document di)
printHits' xs
return ()
printWordHits :: WordHits -> IO ()
printWordHits h =
do
putStrLn "Completions:"
d <- return (L.sortBy (compare `on` snd) (map (\(c, (_, o)) -> (c, M.fold (\m r -> r + IM.size m) 0 o)) (M.toList h)))
putStrLn (foldr (\(c, s) r -> r ++ c ++ " (" ++ (show s) ++ ") ") "" d)
putStrLn ""
putStrLn ("Found " ++ (show (M.size h)) ++ " possible completions")
printHelp :: IO ()
printHelp =
do
putStrLn "Holumbus treats single words as prefix terms and will give you possible completions."
putStrLn "Words are interpreted case insensitive. Phrases and exact matches (case-sensitive)"
putStrLn "can be specified by using quotes (i.e. \"Foo Bar\" will match this exact sequence)."
putStrLn "Terms just separated by space will be treated implicitly as AND terms."
putStrLn "Other operators have to be specified explicitly. Avaliable operators are: AND, OR, NOT"
putStrLn "Priority can be influenced by round parantheses. If unsure about spelling, a single"
putStrLn "word can be preceeded by ~ to make a fuzzy query."
putStrLn "The contexts to search can be restricted with the : operator (seperate them with , )."
putStrLn "Example: firstcontext,secondcontext:(foo OR bar) NOT foobar"
putStrLn "This will search for documents containing \"foo\" or \"bar\" in the contexts named"
putStrLn "\"firstcontext\" and \"secondcontext\" and no \"foobar\" in the all contexts."
putStrLn ""
putStrLn "Use :q to exit and :? to show this help."
return ()
|
6eddf2964df7514848a3712d81d20b3911935fb2338d56cccfe787fbaa827e39 | flotsfacetieux/cl-entity-system | packages.lisp | (in-package #:cl-user)
(defpackage #:cl-entity-system
(:use #:cl :alexandria)
(:nicknames #:cl-es)
(:export ;;#:entity
;;#:entity-id
;;#:entity-group
#:defcomponent
#:component
#:system
;;#:system-entity-manager
;;#:dt-accumulator
;;#:dt-max
;;#:update
;; Entity Manager
#:entity-manager
#:entities
;;#:components-types
#:lowest-unassigned-id
#:make-entity
#:add-component
#:del-component
#:del-components
#:entity-component
#:entity-components
#:remove-entity
#:find-entities
#:find-entities-of
#:find-components
;;#:find-entities-single-type
;;#:find-entities-by-group
;;#:find-entities-multiple-types
))
(defpackage #:cl-entity-system-tests
(:use #:cl #:lisp-unit #:cl-es)
(:nicknames #:cl-es-tests)
(:export #:do-tests))
| null | https://raw.githubusercontent.com/flotsfacetieux/cl-entity-system/6f4e00722f1a666c563e457bfbd7c03924f9e0b8/packages.lisp | lisp | #:entity
#:entity-id
#:entity-group
#:system-entity-manager
#:dt-accumulator
#:dt-max
#:update
Entity Manager
#:components-types
#:find-entities-single-type
#:find-entities-by-group
#:find-entities-multiple-types | (in-package #:cl-user)
(defpackage #:cl-entity-system
(:use #:cl :alexandria)
(:nicknames #:cl-es)
#:defcomponent
#:component
#:system
#:entity-manager
#:entities
#:lowest-unassigned-id
#:make-entity
#:add-component
#:del-component
#:del-components
#:entity-component
#:entity-components
#:remove-entity
#:find-entities
#:find-entities-of
#:find-components
))
(defpackage #:cl-entity-system-tests
(:use #:cl #:lisp-unit #:cl-es)
(:nicknames #:cl-es-tests)
(:export #:do-tests))
|
b8091686167fc94b8699366a1bb48cc64682130bd5770ff903905e419f1bb9ea | spechub/Hets | Formula.hs | |
Module : ./CASL / Formula.hs
Description : Parser for CASL terms and formulae
Copyright : ( c ) , Uni Bremen 2002 - 2006
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
parse terms and formulae
Module : ./CASL/Formula.hs
Description : Parser for CASL terms and formulae
Copyright : (c) Christian Maeder, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
parse terms and formulae
-}
/
from 25 March 2001
C.2.1 Basic Specifications with Subsorts
remarks :
when - else - TERMs are non - mixfix ,
because when - else has lowest precedence .
C.3.1 Precedence
Sorted ( or casted ) terms are not directly recognized ,
because " u v : s " may be " u ( v : s ) " or " ( u v):s "
No term or formula may start with a parenthesized argument list that
includes commas .
The arguments of qualified ops or can only be given by a following
parenthesized argument list .
Braced or bracketed term - lists including commas stem from a possible
% list - annotation or ( for brackets ) from compound lists .
C.6.3 Literal syntax for lists
` % list b1__b2 , c , f ' .
b1 must contain at least one open brace or bracket ( " { " or [ " )
and all brackets must be balanced in " b1 b2 " ( the empty list ) .
all parsers are paramterized with a list containing additional
keywords
/
from 25 March 2001
C.2.1 Basic Specifications with Subsorts
remarks:
when-else-TERMs are non-mixfix,
because when-else has lowest precedence.
C.3.1 Precedence
Sorted (or casted) terms are not directly recognized,
because "u v : s" may be "u (v:s)" or "(u v):s"
No term or formula may start with a parenthesized argument list that
includes commas.
The arguments of qualified ops or preds can only be given by a following
parenthesized argument list.
Braced or bracketed term-lists including commas stem from a possible
%list-annotation or (for brackets) from compound lists.
C.6.3 Literal syntax for lists
`%list b1__b2, c, f'.
b1 must contain at least one open brace or bracket ("{" or [")
and all brackets must be balanced in "b1 b2" (the empty list).
all parsers are paramterized with a String list containing additional
keywords
-}
module CASL.Formula
( term
, mixTerm
, primFormula
, genPrimFormula
, formula
, anColon
, varDecl
, varDecls
, opSort
, opFunSort
, opType
, predType
, predUnitType
, qualPredName
, implKey
, ifKey
) where
import Common.AnnoState
import Common.Id
import Common.Keywords
import Common.Lexer
import Common.Parsec
import Common.Token
import CASL.AS_Basic_CASL
import Text.ParserCombinators.Parsec
simpleTerm :: [String] -> AParser st (TERM f)
simpleTerm k = fmap Mixfix_token $ pToken
( scanFloat
<|> scanString
<|> scanQuotedChar
<|> scanDotWords
<|> reserved (k ++ casl_reserved_fwords) scanAnyWords
<|> reserved (k ++ casl_reserved_fops) scanAnySigns
<|> placeS
<?> "id/literal" )
restTerms :: TermParser f => [String] -> AParser st [TERM f]
restTerms k = (tryItemEnd startKeyword >> return [])
<|> restTerm k <:> restTerms k
<|> return []
startTerm :: TermParser f => [String] -> AParser st (TERM f)
startTerm k =
parenTerm <|> braceTerm <|> bracketTerm <|> try (addAnnos >> simpleTerm k)
restTerm :: TermParser f => [String] -> AParser st (TERM f)
restTerm k = startTerm k <|> typedTerm k <|> castedTerm k
mixTerm :: TermParser f => [String] -> AParser st (TERM f)
mixTerm k = fmap ExtTERM (termParser True) <|> do
l <- startTerm k <:> restTerms k
return $ if isSingle l then head l else Mixfix_term l
-- | when-else terms
term :: TermParser f => [String] -> AParser st (TERM f)
term k = do
t <- mixTerm k
option t $ do
w <- asKey whenS
f <- formula k
e <- asKey elseS
r <- term k
return $ Conditional t f r $ toRange w [] e
anColon :: AParser st Token
anColon = wrapAnnos colonST
typedTerm :: [String] -> AParser st (TERM f)
typedTerm k = do
c <- colonT
t <- sortId k
return $ Mixfix_sorted_term t $ tokPos c
castedTerm :: [String] -> AParser st (TERM f)
castedTerm k = do
c <- asT
t <- sortId k
return $ Mixfix_cast t $ tokPos c
terms :: TermParser f => [String] -> AParser st ([TERM f], [Token])
terms k = term k `separatedBy` anComma
qualVarName :: Token -> AParser st (TERM f)
qualVarName o = do
v <- asKey varS
i <- varId []
c <- colonT
s <- sortId [] << addAnnos
p <- cParenT
return $ Qual_var i s $ toRange o [v, c] p
qualOpName :: Token -> AParser st (TERM f)
qualOpName = fmap (`mkAppl` []) . qualOpSymb
qualOpSymb :: Token -> AParser st OP_SYMB
qualOpSymb o = do
v <- asKey opS
i <- parseId []
c <- anColon
t <- opType [] << addAnnos
p <- cParenT
return $ Qual_op_name i t $ toRange o [v, c] p
opSort :: [String] -> GenParser Char st (Bool, Id, Range)
opSort k = fmap (\ s -> (False, s, nullRange)) (sortId k) <|> do
q <- quMarkT
s <- sortId k
return (True, s, tokPos q)
opFunSort :: [String] -> [Id] -> [Token] -> GenParser Char st OP_TYPE
opFunSort k ts ps = do
a <- pToken (string funS)
(b, s, qs) <- opSort k
return $ Op_type (if b then Partial else Total) ts s
$ appRange (catRange $ ps ++ [a]) qs
opType :: [String] -> AParser st OP_TYPE
opType k = do
(b, s, p) <- opSort k
if b then return $ Op_type Partial [] s p else do
c <- crossT
(ts, ps) <- sortId k `separatedBy` crossT
opFunSort k (s : ts) (c : ps)
<|> opFunSort k [s] []
<|> return (Op_type Total [] s nullRange)
parenTerm :: TermParser f => AParser st (TERM f)
parenTerm = do
o <- wrapAnnos oParenT
qualVarName o <|> qualOpName o <|> qualPredName [] o <|> do
(ts, ps) <- terms []
c <- addAnnos >> cParenT
return (Mixfix_parenthesized ts $ toRange o ps c)
braceTerm :: TermParser f => AParser st (TERM f)
braceTerm = do
o <- wrapAnnos oBraceT
(ts, ps) <- option ([], []) $ terms []
c <- addAnnos >> cBraceT
return $ Mixfix_braced ts $ toRange o ps c
bracketTerm :: TermParser f => AParser st (TERM f)
bracketTerm = do
o <- wrapAnnos oBracketT
(ts, ps) <- option ([], []) $ terms []
c <- addAnnos >> cBracketT
return $ Mixfix_bracketed ts $ toRange o ps c
quant :: AParser st (QUANTIFIER, Token)
quant = choice (map (\ (q, s) -> do
t <- asKey s
return (q, t))
[ (Unique_existential, existsS ++ exMark)
, (Existential, existsS)
, (Universal, forallS) ])
<?> "quantifier"
varDecls :: [String] -> AParser st ([VAR_DECL], [Token])
varDecls ks = separatedBy (varDecl ks) anSemiOrComma
data VarsQualOpOrPred =
VarDecls [VAR_DECL] [Token]
| BoundOp OP_NAME OP_TYPE
| BoundPred PRED_NAME PRED_TYPE
varDeclsOrQual :: [String] -> AParser st VarsQualOpOrPred
varDeclsOrQual k =
fmap (uncurry VarDecls) (varDecls k)
<|> do
o <- oParenT
do Qual_op_name i t _ <- qualOpSymb o
return $ BoundOp i t
<|> do
Qual_pred_name i t _ <- qualPredSymb k o
return $ BoundPred i t
quantFormula :: TermParser f => [String] -> AParser st (FORMULA f)
quantFormula k = do
(q, p) <- quant
vdq <- varDeclsOrQual k
d <- dotT
f <- formula k
return $ case vdq of
VarDecls vs ps -> Quantification q vs f $ toRange p ps d
BoundOp o t -> QuantOp o t f
BoundPred i t -> QuantPred i t f
varDecl :: [String] -> AParser st VAR_DECL
varDecl k = do
(vs, ps) <- varId k `separatedBy` anComma
c <- colonT
s <- sortId k
return $ Var_decl vs s (catRange ps `appRange` tokPos c)
predType :: [String] -> AParser st PRED_TYPE
predType k = do
(ts, ps) <- sortId k `separatedBy` crossT
return (Pred_type ts (catRange ps))
<|> predUnitType
predUnitType :: GenParser Char st PRED_TYPE
predUnitType = do
o <- oParenT
c <- cParenT
return $ Pred_type [] (tokPos o `appRange` tokPos c)
qualPredName :: [String] -> Token -> AParser st (TERM f)
qualPredName k = fmap Mixfix_qual_pred . qualPredSymb k
qualPredSymb :: [String] -> Token -> AParser st PRED_SYMB
qualPredSymb k o = do
v <- asKey predS
i <- parseId k
c <- colonT
s <- predType k << addAnnos
p <- cParenT
return $ Qual_pred_name i s $ toRange o [v, c] p
parenFormula :: TermParser f => [String] -> AParser st (FORMULA f)
parenFormula k = oParenT << addAnnos >>= clParenFormula k
clParenFormula :: TermParser f => [String] -> Token -> AParser st (FORMULA f)
clParenFormula k o = do
q <- qualPredName [] o <|> qualVarName o <|> qualOpName o
l <- restTerms [] -- optional arguments
termFormula k $ if null l then q else Mixfix_term $ q : l
<|> do
f <- formula k << addAnnos
case f of
Mixfix_formula t -> do
c <- cParenT
l <- restTerms k
let tt = Mixfix_parenthesized [t] $ toRange o [] c
ft = if null l then tt else Mixfix_term $ tt : l
termFormula k ft -- commas are not allowed
_ -> cParenT >> return f
termFormula :: TermParser f => [String] -> TERM f -> AParser st (FORMULA f)
termFormula k t = do
e <- asKey exEqual
r <- term k
return $ Equation t Existl r $ tokPos e
<|> do
tryString exEqual
unexpected $ "sign following " ++ exEqual
<|> do
e <- equalT
r <- term k
return $ Equation t Strong r $ tokPos e
<|> do
e <- asKey inS
s <- sortId k
return $ Membership t s $ tokPos e
<|> return (Mixfix_formula t)
primFormula :: TermParser f => [String] -> AParser st (FORMULA f)
primFormula = genPrimFormula (termParser False)
genPrimFormula :: TermParser f => AParser st f -> [String]
-> AParser st (FORMULA f)
genPrimFormula p k = do
f <- p
return $ ExtFORMULA f
<|> primCASLFormula k
primCASLFormula :: TermParser f => [String] -> AParser st (FORMULA f)
primCASLFormula k = do
c <- asKey trueS
return . Atom True . Range $ tokenRange c
<|> do
c <- asKey falseS
return . Atom False . Range $ tokenRange c
<|> do
c <- asKey defS
t <- term k
return . Definedness t $ tokPos c
<|> do
c <- asKey notS <|> asKey negS <?> "\"not\""
f <- primFormula k
return $ Negation f $ tokPos c
<|> parenFormula k
<|> quantFormula k
<|> (term k >>= termFormula k)
andKey :: AParser st Token
andKey = asKey lAnd
orKey :: AParser st Token
orKey = asKey lOr
andOrFormula :: TermParser f => [String] -> AParser st (FORMULA f)
andOrFormula k = primFormula k >>= optAndOr k
optAndOr :: TermParser f => [String] -> FORMULA f -> AParser st (FORMULA f)
optAndOr k f = do
c <- andKey
(fs, ps) <- primFormula k `separatedBy` andKey
return $ Junction Con (f : fs) $ catRange $ c : ps
<|> do
c <- orKey
(fs, ps) <- primFormula k `separatedBy` orKey
return $ Junction Dis (f : fs) $ catRange $ c : ps
<|> return f
implKey :: AParser st Token
implKey = asKey implS
ifKey :: AParser st Token
ifKey = asKey ifS
formula :: TermParser f => [String] -> AParser st (FORMULA f)
formula k = andOrFormula k >>= optImplForm k
optImplForm :: TermParser f => [String] -> FORMULA f -> AParser st (FORMULA f)
optImplForm k f = do
c <- implKey
(fs, ps) <- andOrFormula k `separatedBy` implKey
return $ makeImpl Implication (f : fs) $ catPosAux $ c : ps
<|> do
c <- ifKey
(fs, ps) <- andOrFormula k `separatedBy` ifKey
return $ makeIf (f : fs) $ catPosAux $ c : ps
<|> do
c <- asKey equivS
g <- andOrFormula k
return $ Relation f Equivalence g $ tokPos c
<|> return f
makeImpl :: Relation -> [FORMULA f] -> [Pos] -> FORMULA f
makeImpl b l p = case (l, p) of
([f, g], _) -> Relation f b g (Range p)
(f : r, c : q) -> Relation f b (makeImpl b r q) (Range [c])
_ -> error "makeImpl got illegal argument"
makeIf :: [FORMULA f] -> [Pos] -> FORMULA f
makeIf l p = makeImpl RevImpl (reverse l) $ reverse p
| null | https://raw.githubusercontent.com/spechub/Hets/af7b628a75aab0d510b8ae7f067a5c9bc48d0f9e/CASL/Formula.hs | haskell | | when-else terms
optional arguments
commas are not allowed | |
Module : ./CASL / Formula.hs
Description : Parser for CASL terms and formulae
Copyright : ( c ) , Uni Bremen 2002 - 2006
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
parse terms and formulae
Module : ./CASL/Formula.hs
Description : Parser for CASL terms and formulae
Copyright : (c) Christian Maeder, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
parse terms and formulae
-}
/
from 25 March 2001
C.2.1 Basic Specifications with Subsorts
remarks :
when - else - TERMs are non - mixfix ,
because when - else has lowest precedence .
C.3.1 Precedence
Sorted ( or casted ) terms are not directly recognized ,
because " u v : s " may be " u ( v : s ) " or " ( u v):s "
No term or formula may start with a parenthesized argument list that
includes commas .
The arguments of qualified ops or can only be given by a following
parenthesized argument list .
Braced or bracketed term - lists including commas stem from a possible
% list - annotation or ( for brackets ) from compound lists .
C.6.3 Literal syntax for lists
` % list b1__b2 , c , f ' .
b1 must contain at least one open brace or bracket ( " { " or [ " )
and all brackets must be balanced in " b1 b2 " ( the empty list ) .
all parsers are paramterized with a list containing additional
keywords
/
from 25 March 2001
C.2.1 Basic Specifications with Subsorts
remarks:
when-else-TERMs are non-mixfix,
because when-else has lowest precedence.
C.3.1 Precedence
Sorted (or casted) terms are not directly recognized,
because "u v : s" may be "u (v:s)" or "(u v):s"
No term or formula may start with a parenthesized argument list that
includes commas.
The arguments of qualified ops or preds can only be given by a following
parenthesized argument list.
Braced or bracketed term-lists including commas stem from a possible
%list-annotation or (for brackets) from compound lists.
C.6.3 Literal syntax for lists
`%list b1__b2, c, f'.
b1 must contain at least one open brace or bracket ("{" or [")
and all brackets must be balanced in "b1 b2" (the empty list).
all parsers are paramterized with a String list containing additional
keywords
-}
module CASL.Formula
( term
, mixTerm
, primFormula
, genPrimFormula
, formula
, anColon
, varDecl
, varDecls
, opSort
, opFunSort
, opType
, predType
, predUnitType
, qualPredName
, implKey
, ifKey
) where
import Common.AnnoState
import Common.Id
import Common.Keywords
import Common.Lexer
import Common.Parsec
import Common.Token
import CASL.AS_Basic_CASL
import Text.ParserCombinators.Parsec
simpleTerm :: [String] -> AParser st (TERM f)
simpleTerm k = fmap Mixfix_token $ pToken
( scanFloat
<|> scanString
<|> scanQuotedChar
<|> scanDotWords
<|> reserved (k ++ casl_reserved_fwords) scanAnyWords
<|> reserved (k ++ casl_reserved_fops) scanAnySigns
<|> placeS
<?> "id/literal" )
restTerms :: TermParser f => [String] -> AParser st [TERM f]
restTerms k = (tryItemEnd startKeyword >> return [])
<|> restTerm k <:> restTerms k
<|> return []
startTerm :: TermParser f => [String] -> AParser st (TERM f)
startTerm k =
parenTerm <|> braceTerm <|> bracketTerm <|> try (addAnnos >> simpleTerm k)
restTerm :: TermParser f => [String] -> AParser st (TERM f)
restTerm k = startTerm k <|> typedTerm k <|> castedTerm k
mixTerm :: TermParser f => [String] -> AParser st (TERM f)
mixTerm k = fmap ExtTERM (termParser True) <|> do
l <- startTerm k <:> restTerms k
return $ if isSingle l then head l else Mixfix_term l
term :: TermParser f => [String] -> AParser st (TERM f)
term k = do
t <- mixTerm k
option t $ do
w <- asKey whenS
f <- formula k
e <- asKey elseS
r <- term k
return $ Conditional t f r $ toRange w [] e
anColon :: AParser st Token
anColon = wrapAnnos colonST
typedTerm :: [String] -> AParser st (TERM f)
typedTerm k = do
c <- colonT
t <- sortId k
return $ Mixfix_sorted_term t $ tokPos c
castedTerm :: [String] -> AParser st (TERM f)
castedTerm k = do
c <- asT
t <- sortId k
return $ Mixfix_cast t $ tokPos c
terms :: TermParser f => [String] -> AParser st ([TERM f], [Token])
terms k = term k `separatedBy` anComma
qualVarName :: Token -> AParser st (TERM f)
qualVarName o = do
v <- asKey varS
i <- varId []
c <- colonT
s <- sortId [] << addAnnos
p <- cParenT
return $ Qual_var i s $ toRange o [v, c] p
qualOpName :: Token -> AParser st (TERM f)
qualOpName = fmap (`mkAppl` []) . qualOpSymb
qualOpSymb :: Token -> AParser st OP_SYMB
qualOpSymb o = do
v <- asKey opS
i <- parseId []
c <- anColon
t <- opType [] << addAnnos
p <- cParenT
return $ Qual_op_name i t $ toRange o [v, c] p
opSort :: [String] -> GenParser Char st (Bool, Id, Range)
opSort k = fmap (\ s -> (False, s, nullRange)) (sortId k) <|> do
q <- quMarkT
s <- sortId k
return (True, s, tokPos q)
opFunSort :: [String] -> [Id] -> [Token] -> GenParser Char st OP_TYPE
opFunSort k ts ps = do
a <- pToken (string funS)
(b, s, qs) <- opSort k
return $ Op_type (if b then Partial else Total) ts s
$ appRange (catRange $ ps ++ [a]) qs
opType :: [String] -> AParser st OP_TYPE
opType k = do
(b, s, p) <- opSort k
if b then return $ Op_type Partial [] s p else do
c <- crossT
(ts, ps) <- sortId k `separatedBy` crossT
opFunSort k (s : ts) (c : ps)
<|> opFunSort k [s] []
<|> return (Op_type Total [] s nullRange)
parenTerm :: TermParser f => AParser st (TERM f)
parenTerm = do
o <- wrapAnnos oParenT
qualVarName o <|> qualOpName o <|> qualPredName [] o <|> do
(ts, ps) <- terms []
c <- addAnnos >> cParenT
return (Mixfix_parenthesized ts $ toRange o ps c)
braceTerm :: TermParser f => AParser st (TERM f)
braceTerm = do
o <- wrapAnnos oBraceT
(ts, ps) <- option ([], []) $ terms []
c <- addAnnos >> cBraceT
return $ Mixfix_braced ts $ toRange o ps c
bracketTerm :: TermParser f => AParser st (TERM f)
bracketTerm = do
o <- wrapAnnos oBracketT
(ts, ps) <- option ([], []) $ terms []
c <- addAnnos >> cBracketT
return $ Mixfix_bracketed ts $ toRange o ps c
quant :: AParser st (QUANTIFIER, Token)
quant = choice (map (\ (q, s) -> do
t <- asKey s
return (q, t))
[ (Unique_existential, existsS ++ exMark)
, (Existential, existsS)
, (Universal, forallS) ])
<?> "quantifier"
varDecls :: [String] -> AParser st ([VAR_DECL], [Token])
varDecls ks = separatedBy (varDecl ks) anSemiOrComma
data VarsQualOpOrPred =
VarDecls [VAR_DECL] [Token]
| BoundOp OP_NAME OP_TYPE
| BoundPred PRED_NAME PRED_TYPE
varDeclsOrQual :: [String] -> AParser st VarsQualOpOrPred
varDeclsOrQual k =
fmap (uncurry VarDecls) (varDecls k)
<|> do
o <- oParenT
do Qual_op_name i t _ <- qualOpSymb o
return $ BoundOp i t
<|> do
Qual_pred_name i t _ <- qualPredSymb k o
return $ BoundPred i t
quantFormula :: TermParser f => [String] -> AParser st (FORMULA f)
quantFormula k = do
(q, p) <- quant
vdq <- varDeclsOrQual k
d <- dotT
f <- formula k
return $ case vdq of
VarDecls vs ps -> Quantification q vs f $ toRange p ps d
BoundOp o t -> QuantOp o t f
BoundPred i t -> QuantPred i t f
varDecl :: [String] -> AParser st VAR_DECL
varDecl k = do
(vs, ps) <- varId k `separatedBy` anComma
c <- colonT
s <- sortId k
return $ Var_decl vs s (catRange ps `appRange` tokPos c)
predType :: [String] -> AParser st PRED_TYPE
predType k = do
(ts, ps) <- sortId k `separatedBy` crossT
return (Pred_type ts (catRange ps))
<|> predUnitType
predUnitType :: GenParser Char st PRED_TYPE
predUnitType = do
o <- oParenT
c <- cParenT
return $ Pred_type [] (tokPos o `appRange` tokPos c)
qualPredName :: [String] -> Token -> AParser st (TERM f)
qualPredName k = fmap Mixfix_qual_pred . qualPredSymb k
qualPredSymb :: [String] -> Token -> AParser st PRED_SYMB
qualPredSymb k o = do
v <- asKey predS
i <- parseId k
c <- colonT
s <- predType k << addAnnos
p <- cParenT
return $ Qual_pred_name i s $ toRange o [v, c] p
parenFormula :: TermParser f => [String] -> AParser st (FORMULA f)
parenFormula k = oParenT << addAnnos >>= clParenFormula k
clParenFormula :: TermParser f => [String] -> Token -> AParser st (FORMULA f)
clParenFormula k o = do
q <- qualPredName [] o <|> qualVarName o <|> qualOpName o
termFormula k $ if null l then q else Mixfix_term $ q : l
<|> do
f <- formula k << addAnnos
case f of
Mixfix_formula t -> do
c <- cParenT
l <- restTerms k
let tt = Mixfix_parenthesized [t] $ toRange o [] c
ft = if null l then tt else Mixfix_term $ tt : l
_ -> cParenT >> return f
termFormula :: TermParser f => [String] -> TERM f -> AParser st (FORMULA f)
termFormula k t = do
e <- asKey exEqual
r <- term k
return $ Equation t Existl r $ tokPos e
<|> do
tryString exEqual
unexpected $ "sign following " ++ exEqual
<|> do
e <- equalT
r <- term k
return $ Equation t Strong r $ tokPos e
<|> do
e <- asKey inS
s <- sortId k
return $ Membership t s $ tokPos e
<|> return (Mixfix_formula t)
primFormula :: TermParser f => [String] -> AParser st (FORMULA f)
primFormula = genPrimFormula (termParser False)
genPrimFormula :: TermParser f => AParser st f -> [String]
-> AParser st (FORMULA f)
genPrimFormula p k = do
f <- p
return $ ExtFORMULA f
<|> primCASLFormula k
primCASLFormula :: TermParser f => [String] -> AParser st (FORMULA f)
primCASLFormula k = do
c <- asKey trueS
return . Atom True . Range $ tokenRange c
<|> do
c <- asKey falseS
return . Atom False . Range $ tokenRange c
<|> do
c <- asKey defS
t <- term k
return . Definedness t $ tokPos c
<|> do
c <- asKey notS <|> asKey negS <?> "\"not\""
f <- primFormula k
return $ Negation f $ tokPos c
<|> parenFormula k
<|> quantFormula k
<|> (term k >>= termFormula k)
andKey :: AParser st Token
andKey = asKey lAnd
orKey :: AParser st Token
orKey = asKey lOr
andOrFormula :: TermParser f => [String] -> AParser st (FORMULA f)
andOrFormula k = primFormula k >>= optAndOr k
optAndOr :: TermParser f => [String] -> FORMULA f -> AParser st (FORMULA f)
optAndOr k f = do
c <- andKey
(fs, ps) <- primFormula k `separatedBy` andKey
return $ Junction Con (f : fs) $ catRange $ c : ps
<|> do
c <- orKey
(fs, ps) <- primFormula k `separatedBy` orKey
return $ Junction Dis (f : fs) $ catRange $ c : ps
<|> return f
implKey :: AParser st Token
implKey = asKey implS
ifKey :: AParser st Token
ifKey = asKey ifS
formula :: TermParser f => [String] -> AParser st (FORMULA f)
formula k = andOrFormula k >>= optImplForm k
optImplForm :: TermParser f => [String] -> FORMULA f -> AParser st (FORMULA f)
optImplForm k f = do
c <- implKey
(fs, ps) <- andOrFormula k `separatedBy` implKey
return $ makeImpl Implication (f : fs) $ catPosAux $ c : ps
<|> do
c <- ifKey
(fs, ps) <- andOrFormula k `separatedBy` ifKey
return $ makeIf (f : fs) $ catPosAux $ c : ps
<|> do
c <- asKey equivS
g <- andOrFormula k
return $ Relation f Equivalence g $ tokPos c
<|> return f
makeImpl :: Relation -> [FORMULA f] -> [Pos] -> FORMULA f
makeImpl b l p = case (l, p) of
([f, g], _) -> Relation f b g (Range p)
(f : r, c : q) -> Relation f b (makeImpl b r q) (Range [c])
_ -> error "makeImpl got illegal argument"
makeIf :: [FORMULA f] -> [Pos] -> FORMULA f
makeIf l p = makeImpl RevImpl (reverse l) $ reverse p
|
0490a6b0cb35b0c408525b7bdff2f4564cc29ef9331d595fd3446f501f3156af | clj-bots/pin-ctrl | implementation.clj | (ns clj-bots.pin-ctrl-simulator.implementation
(:require [clj-bots.pin-ctrl
[protocols :as pcp]
[implementation :as impl]]
[clojure.core.async :as async :refer [chan >!! <!! go <! >! go-loop]]))
;; This stuff really needs to be part of the core library
(def available-modes
#{:input :output :ain :pwm})
;; Here we're going to declare some of the things we'll need in the implementation that we'd rather leave at
;; the end of this namespace for logical flow.
(declare pin-mode writeable-pin? ok-val?)
;; In addition to the standard protocol functions, we also need something which let's us set the state of
;; _input_ input pins for the purposes of simulation, since (for obvious reasons) this is not supported via
;; the standard protocols.
(defprotocol PSimControl
"Protocol for simulation control functions"
(set-state! [this pin-n val] "Set the state of a digital or analog input pin"))
;; And now the implementation.
(defrecord SimBoard
[pin-state pin-modes edge-channels config]
pcp/PBoard
(init! [b] b)
(available-pin-modes [_] (:pin-modes config))
(pin-modes [_] @pin-modes)
pcp/POverwireBoard
(reset-board! [_]
(if (= (:board-class config) :overwire)
(do (reset! pin-modes)
(reset! pin-state))
(println "This option is not available for onboard boards")))
pcp/PPinConfigure
(set-mode! [_ pin-n mode]
(swap! pin-modes assoc pin-n mode))
pcp/PReadablePin
(read-value [board pin-n mode]
(get (deref (:pin-state board)) pin-n))
pcp/PWriteablePin
(write-value! [board pin-n mode val]
(swap! pin-state assoc pin-n val))
pcp/PEdgeDetectablePin
(set-edge! [board pin-n edge ch]
(let [match (case edge
(:none "edge") #{}
(:rising "rising") #{:high}
(:falling "falling") #{:low}
(:both "both") #{:high :low})]
(add-watch pin-state
May have to have board ids involved in this scheme for overwire ;
; should have global registry of these board ids XXX
(symbol (str "edge-detection-watch-simulator" pin-n))
(fn [_ _ _ new-val]
(when (match new-val)
(>!! ch new-val))))))
PSimControl
(set-state! [board pin-n val]
(assert (ok-val? board pin-n val)
(str "The value " val " is not an acceptable value for pins of type " (pin-mode board pin-n)))
(swap! pin-state assoc pin-n)))
;; Now we'll flesh out some of the various reading/writing functions
(defn- pin-mode
[board pin-n]
(get (pcp/pin-modes board) pin-n))
(defn writeable-pin?
[board pin-n]
(#{:output :pwm} (pin-mode board pin-n)))
(defn ok-val?
[board pin-n val]
(case (pin-mode board pin-n)
:output (#{0 1 \0 \1 :high :low} val)
:pwm #(and (<= 0 %) (>= 1 %))
false))
;; How we create new boards
(defn sim-board
[config]
(SimBoard.
(atom {})
(atom {})
(atom {})
config))
(defn random-config
[n-pins]
{:pin-modes
(into {}
(for [i (range n-pins)]
[[(rand-nth [:P8 :P9]) i]
(filterv
(fn [_] (> (rand) 0.68))
available-modes)]))})
;; And register the implementation
(def implementation
(reify
Object
(toString [_]
"<BoardSimulator>")
pcp/PPinCtrlImplementation
(create-board [_ config]
(sim-board config))
(default-config [_]
(pcp/default-config 100))))
(impl/register-implementation :simulator implementation)
| null | https://raw.githubusercontent.com/clj-bots/pin-ctrl/1d4aac384061bbb0cb5e01a4ddc51fc0e432c576/src/clj_bots/pin_ctrl_simulator/implementation.clj | clojure | This stuff really needs to be part of the core library
Here we're going to declare some of the things we'll need in the implementation that we'd rather leave at
the end of this namespace for logical flow.
In addition to the standard protocol functions, we also need something which let's us set the state of
_input_ input pins for the purposes of simulation, since (for obvious reasons) this is not supported via
the standard protocols.
And now the implementation.
should have global registry of these board ids XXX
Now we'll flesh out some of the various reading/writing functions
How we create new boards
And register the implementation | (ns clj-bots.pin-ctrl-simulator.implementation
(:require [clj-bots.pin-ctrl
[protocols :as pcp]
[implementation :as impl]]
[clojure.core.async :as async :refer [chan >!! <!! go <! >! go-loop]]))
(def available-modes
#{:input :output :ain :pwm})
(declare pin-mode writeable-pin? ok-val?)
(defprotocol PSimControl
"Protocol for simulation control functions"
(set-state! [this pin-n val] "Set the state of a digital or analog input pin"))
(defrecord SimBoard
[pin-state pin-modes edge-channels config]
pcp/PBoard
(init! [b] b)
(available-pin-modes [_] (:pin-modes config))
(pin-modes [_] @pin-modes)
pcp/POverwireBoard
(reset-board! [_]
(if (= (:board-class config) :overwire)
(do (reset! pin-modes)
(reset! pin-state))
(println "This option is not available for onboard boards")))
pcp/PPinConfigure
(set-mode! [_ pin-n mode]
(swap! pin-modes assoc pin-n mode))
pcp/PReadablePin
(read-value [board pin-n mode]
(get (deref (:pin-state board)) pin-n))
pcp/PWriteablePin
(write-value! [board pin-n mode val]
(swap! pin-state assoc pin-n val))
pcp/PEdgeDetectablePin
(set-edge! [board pin-n edge ch]
(let [match (case edge
(:none "edge") #{}
(:rising "rising") #{:high}
(:falling "falling") #{:low}
(:both "both") #{:high :low})]
(add-watch pin-state
(symbol (str "edge-detection-watch-simulator" pin-n))
(fn [_ _ _ new-val]
(when (match new-val)
(>!! ch new-val))))))
PSimControl
(set-state! [board pin-n val]
(assert (ok-val? board pin-n val)
(str "The value " val " is not an acceptable value for pins of type " (pin-mode board pin-n)))
(swap! pin-state assoc pin-n)))
(defn- pin-mode
[board pin-n]
(get (pcp/pin-modes board) pin-n))
(defn writeable-pin?
[board pin-n]
(#{:output :pwm} (pin-mode board pin-n)))
(defn ok-val?
[board pin-n val]
(case (pin-mode board pin-n)
:output (#{0 1 \0 \1 :high :low} val)
:pwm #(and (<= 0 %) (>= 1 %))
false))
(defn sim-board
[config]
(SimBoard.
(atom {})
(atom {})
(atom {})
config))
(defn random-config
[n-pins]
{:pin-modes
(into {}
(for [i (range n-pins)]
[[(rand-nth [:P8 :P9]) i]
(filterv
(fn [_] (> (rand) 0.68))
available-modes)]))})
(def implementation
(reify
Object
(toString [_]
"<BoardSimulator>")
pcp/PPinCtrlImplementation
(create-board [_ config]
(sim-board config))
(default-config [_]
(pcp/default-config 100))))
(impl/register-implementation :simulator implementation)
|
8d10067b8a1aef8dde0bcfa137f572caab989c95921ed7893e14f2debfdddffa | victornicolet/parsynt | Normalize.ml | *
This file is part of Parsynt .
Author : < >
Parsynt is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with . If not , see < / > .
This file is part of Parsynt.
Author: Victor Nicolet <>
Parsynt is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Parsynt is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Parsynt. If not, see </>.
*)
open AcTerm
open Base
open ResourceModel
open Term
open TermUtils
open Typ
open Unfold
open Utils
(* A condition is a conjunction of boolean terms. *)
type cond = term list
(* Multi-branch conditional normal form: a list of (guard, branch) pairs. *)
type mcnf = (cond * term) list
(* MCNF with an additional array of auxiliary terms per branch. *)
type mcnf_extended = (cond * term * term Array.t) list
(* Unfoldings indexed by an int map (IM), together with the MCNF views
computed from the symbolic and the initial unfoldings. *)
type normalized_unfolding = {
u : unfolding IM.t;
from_symb_mcnf : mcnf_extended IM.t;
from_init_mcnf : mcnf IM.t;
}
(* A binary operator together with a surrounding term context. *)
type local_context = Binop.t * term
type collect_result = { context_tree : term; aux_exprs : term Map.M(String).t }
(** Collect result: represents an expression (the context tree) where
all the expressions that need to be collected for parallelization
have been replaced in the tree by a variable, and the variable name
maps to the expression in [aux_exprs]. *)
(* [is_cr_aux t cr] is true iff [t] is a variable naming one of the
auxiliary expressions collected in [cr.aux_exprs]. *)
let is_cr_aux (t : term) (cr : collect_result) =
match t.texpr with EVar (Var v) -> Option.is_some (Map.find cr.aux_exprs v.vname) | _ -> false
(* [has_cr_aux t cr] is true iff some subterm of [t] is an auxiliary
variable of [cr]; folds over the term with ( || ). *)
let has_cr_aux (t : term) (cr : collect_result) =
let init = false in
let join = ( || ) in
let case _ t = if is_cr_aux t cr then Some true else None in
Transform.recurse { init; join; case } t
(* Keep only the "skeleton" of an expression around the collected
auxiliaries: auxiliary variables of [cr] become holes tagged 0, and
subterms containing no auxiliary become holes tagged 1. Subterms whose
type cannot be computed get a TTop-typed hole. *)
let skeletize (cr : collect_result) : term -> term =
let tf _ t =
let ttyp = match type_of t with Ok x -> x | Error _ -> TTop in
if is_cr_aux t cr then Some (mk_hole ~attrs:t.tattrs (ttyp, 0))
else if not (has_cr_aux t cr) then Some (mk_hole ~attrs:t.tattrs (ttyp, 1))
else None
in
Transform.transform tf
(* ============================================================================== *)
(* Normalization rules. *)
Associativity / commutativity
(* Rebuild the term as an AC-normalized tree, choosing the association
minimizing [fcost]; the rewrite continuation argument is unused. *)
let ac_opt_rule (fcost : term -> term -> int) : (term -> term) -> term -> term =
fun _ e -> rebuild_tree_AC fcost (to_ac e)
(* RULES *)
(* Distributivity, applied outward-in: when op1 distributes over op2,
rewrite (a op2 b) op1 c into (a op1 c) op2 (b op1 c) (and symmetrically
when the op2-node is the right operand), recursing with [f] on the new
children. *)
let distrib_rule (f : term -> term) (t : term) =
match t.texpr with
| EBin (op1, t1, t2) -> (
match (t1.texpr, t2.texpr) with
| EBin (op2, a, b), _ when distributes op1 op2 ->
mk_bin (f (mk_bin a op1 t2)) op2 (f (mk_bin b op1 t2))
| _, EBin (op2, a, b) when distributes op1 op2 ->
mk_bin (f (mk_bin t1 op1 a)) op2 (f (mk_bin t1 op1 b))
| _ -> t)
| _ -> t
(* Push unary operators one level down:
- arithmetic negation through + and through max/min (dualizing them),
through * (negating only the left factor: -(a*b) = (-a)*b) and
through - (-(a-b) = b-a); double negation cancels;
- boolean negation through and/or by De Morgan; double not cancels.
The continuation [f] renormalizes the new children. *)
let unary_down_rule (f : term -> term) (t : term) =
match t.texpr with
| EUn (Neg, x) -> (
match x.texpr with
| EBin (Plus, a, b) -> mk_add (f (mk_opp a)) (f (mk_opp b))
| EBin (Max, a, b) -> mk_min (f (mk_opp a)) (f (mk_opp b))
| EBin (Min, a, b) -> mk_max (f (mk_opp a)) (f (mk_opp b))
| EBin (Times, a, b) -> mk_mul (f (mk_opp a)) b
| EBin (Minus, a, b) -> mk_sub b a
| EUn (Neg, a) -> a
| _ -> t)
| EUn (Not, x) -> (
match x.texpr with
| EBin (And, a, b) -> mk_or (f (mk_not a)) (f (mk_not b))
| EBin (Or, a, b) -> mk_and (f (mk_not a)) (f (mk_not b))
| EUn (Not, a) -> a
| _ -> t)
| _ -> t
(* Factorization, the converse of distribution: when op1 distributes over
op2, rewrite (a op1 c) op2 (b op1 c) into (a op2 b) op1 c, matching the
shared factor [c] on either side (equality modulo AC, via ACES). When
only one operand carries the factor and op1 has an identity e0, the
bare operand is treated as (factor op1 e0). The last case refactors a
shared subterm across a right-nested op2-chain.
NOTE(review): in the identity cases the residue is built as (e0 op2 b),
which equals (b op2 e0) only for commutative op2 - presumably
guaranteed in this AC setting; confirm against [distributes]. *)
let factor_rule (f : term -> term) (t : term) =
match t.texpr with
| EBin (op2, t1, t2) -> (
match (t1.texpr, t2.texpr) with
| EBin (op1, a, c), EBin (op1', b, c')
when Binop.(op1 = op1') && distributes op1 op2 && ACES.(c = c') ->
mk_bin (f (mk_bin a op2 b)) op1 c
| EBin (op1, c, a), EBin (op1', c', b)
when Binop.(op1 = op1') && distributes op1 op2 && ACES.(c = c') ->
mk_bin c op1 (f (mk_bin a op2 b))
| _, EBin (op1, c', b) when has_ident op1 && distributes op1 op2 && ACES.(t1 = c') ->
let e0 = mk_term (Option.value_exn (ident_elt op1)) in
mk_bin t1 op1 (f (mk_bin e0 op2 b))
| EBin (op1, c', b), _ when has_ident op1 && distributes op1 op2 && ACES.(t2 = c') ->
let e0 = mk_term (Option.value_exn (ident_elt op1)) in
mk_bin t2 op1 (f (mk_bin e0 op2 b))
(* (a1 op1 b1) op2 ((a2 op1 b2) op2 t3) --> (a op1 (b1 op2 b2)) op2 t3
when a1/a2 (or b1/b2) coincide. *)
| EBin (op1, a1, b1), EBin (op2', { texpr = EBin (op1', a2, b2); _ }, t3)
when Binop.(op2 = op2') && Binop.(op1 = op1') && distributes op2 op1 ->
if ACES.(a1 = a2) then mk_bin (mk_bin (f a1) op1 (f (mk_bin b1 op2 b2))) op2 (f t3)
else if ACES.(b1 = b2) then mk_bin (mk_bin (f b1) op1 (f (mk_bin a1 op2 a2))) op2 (f t3)
else t
| _ -> t)
| _ -> t
(* Eliminate max/min under a comparison by splitting it into a
conjunction or disjunction of plain comparisons:
max(a,b) > c <=> a > c or b > c c > max(a,b) <=> c > a and c > b
max(a,b) < c <=> a < c and b < c c < max(a,b) <=> c < a or c < b
min(a,b) > c <=> a > c and b > c c > min(a,b) <=> c > a or c > b
min(a,b) < c <=> a < c or b < c c < min(a,b) <=> c < a and c < b
The rewrite continuation [f] is applied to each new comparison. *)
let compar_max_rule (f : term -> term) (t : term) =
  match t.texpr with
  | EBin (o1, t1, t2) -> (
      match (t1.texpr, t2.texpr) with
      | EBin (Max, a, b), _ when Binop.(o1 = Gt || o1 = Ge) ->
          let e1 = f (mk_bin a o1 t2) in
          let e2 = f (mk_bin b o1 t2) in
          mk_or e1 e2
      | _, EBin (Max, a, b) when Binop.(o1 = Gt || o1 = Ge) ->
          let e1 = f (mk_bin t1 o1 a) in
          let e2 = f (mk_bin t1 o1 b) in
          mk_and e1 e2
      | EBin (Max, a, b), _ when Binop.(o1 = Lt || o1 = Le) ->
          let e1 = f (mk_bin a o1 t2) in
          let e2 = f (mk_bin b o1 t2) in
          mk_and e1 e2
      | _, EBin (Max, a, b) when Binop.(o1 = Lt || o1 = Le) ->
          let e1 = f (mk_bin t1 o1 a) in
          let e2 = f (mk_bin t1 o1 b) in
          mk_or e1 e2
      (* min(a,b) > c -> a > c and b > c *)
      | EBin (Min, a, b), _ when Binop.(o1 = Gt || o1 = Ge) ->
          let e1 = f (mk_bin a o1 t2) in
          let e2 = f (mk_bin b o1 t2) in
          mk_and e1 e2
      (* c > min(a,b) -> c > a or c > b *)
      | _, EBin (Min, a, b) when Binop.(o1 = Gt || o1 = Ge) ->
          let e1 = f (mk_bin t1 o1 a) in
          let e2 = f (mk_bin t1 o1 b) in
          mk_or e1 e2
      (* min(a,b) < c -> a < c or b < c *)
      | EBin (Min, a, b), _ when Binop.(o1 = Lt || o1 = Le) ->
          let e1 = f (mk_bin a o1 t2) in
          let e2 = f (mk_bin b o1 t2) in
          mk_or e1 e2
      (* c < min(a,b) -> c < a and c < b.
         BUG FIX: this case previously produced a disjunction (mk_or),
         which is strictly weaker than c < min(a,b). *)
      | _, EBin (Min, a, b) when Binop.(o1 = Lt || o1 = Le) ->
          let e1 = f (mk_bin t1 o1 a) in
          let e2 = f (mk_bin t1 o1 b) in
          mk_and e1 e2
      | _ -> t)
  | _ -> t
let identity_rule (_ : term -> term) (t : term) = t
(* Lift conditionals (EIte) above binary and unary operators:
two conditional operands are merged into a single conditional when
their guards are equal or negations of each other, and nested into two
conditionals otherwise; an operator applied to one conditional operand
is pushed into both of its branches. The continuation [f] renormalizes
the rebuilt subterms. *)
let cond_norm_rule (f : term -> term) (t : term) =
match t.texpr with
| EBin (o1, t1, t2) -> (
match (t1.texpr, t2.texpr) with
| EIte (c, e1, e2), EIte (c', e1', e2') when Terms.(c = c') ->
let e11 = f (mk_bin e1 o1 e1') in
let e22 = f (mk_bin e2 o1 e2') in
mk_ite (f c) e11 e22
| EIte (c, e1, e2), EIte (c', e1', e2') when Terms.(mk_not c = c') ->
let e12 = f (mk_bin e1 o1 e2') in
let e21 = f (mk_bin e2 o1 e1') in
mk_ite (f c) e12 e21
| EIte (c, e1, e2), EIte (c', e1', e2') when Terms.(c = mk_not c') ->
let e12 = f (mk_bin e1 o1 e2') in
let e21 = f (mk_bin e2 o1 e1') in
mk_ite (f c) e21 e12
| EIte (c, e1, e2), EIte (c', e1', e2') when not Terms.(c = c') ->
let e11 = f (mk_bin e1 o1 e1') in
let e12 = f (mk_bin e1 o1 e2') in
let e21 = f (mk_bin e2 o1 e1') in
let e22 = f (mk_bin e2 o1 e2') in
let c = f c in
let c' = f c' in
mk_ite c (mk_ite c' e11 e12) (mk_ite c' e21 e22)
| _, EIte (c, e1', e2') ->
let ee1 = f (mk_bin t1 o1 e1') in
let ee2 = f (mk_bin t1 o1 e2') in
mk_ite (f c) ee1 ee2
| EIte (c, e1, e2), _ ->
let e1e = f (mk_bin e1 o1 t2) in
let e2e = f (mk_bin e2 o1 t2) in
mk_ite (f c) e1e e2e
| _ -> t)
| EUn (u, t1) -> (
match t1.texpr with
| EIte (c, e1, e2) ->
let e1' = f (mk_un u e1) in
let e2' = f (mk_un u e2) in
mk_ite c e1' e2'
| _ -> t)
(* Disabled case (kept for reference):
| EIte (cond, et, ef) when is_bool t ->
mk_or (mk_and (f cond) (f et)) (f ef) *)
| _ -> t
(* Converse of [cond_norm_rule] for one operator: factor a common operand
out of the two branches of a conditional,
(if c (a op k) (b op k)) -> (if c a b) op k
and symmetrically when the shared operand sits on the left. *)
let cond_fact_rule (f : term -> term) (t : term) =
match t.texpr with
| EIte (cnd, t1, t2) -> (
match (t1.texpr, t2.texpr) with
| EBin (o, a, c), EBin (o', b, c') when Binop.(o = o') && Terms.(c = c') ->
let ca = f (mk_ite cnd a b) in
mk_bin ca o c
| EBin (o, c, a), EBin (o', c', b) when Binop.(o = o') && Terms.(c = c') ->
let ca = f (mk_ite cnd a b) in
mk_bin c o ca
| _ -> t)
| _ -> t
(* A term is in "conditional normal form" when it is a conditional whose
operator subterms contain no nested conditional: [check] demands that
every EBin/EUn/EList node pass [check_anti], which detects an EIte
anywhere underneath. Terms that are not conditionals are not normal. *)
let is_cond_normal (e : term) =
let rec check e =
Transform.recurse
{
init = true;
join = ( && );
case =
(fun _ t ->
match t.texpr with EBin _ | EUn _ | EList _ -> Some (not (check_anti t)) | _ -> None);
}
e
and check_anti e =
Transform.recurse
{
init = false;
case = (fun _ t -> match t.texpr with EIte _ -> Some true | _ -> None);
join = (fun a b -> a || b);
}
e
in
match e.texpr with EIte _ -> check e | _ -> false
(* Apply [f] at conditional nodes during a transform traversal.
NOTE(review): despite the name, the selector fires on EIte nodes and
hands the conditional itself to [f]; whether [f] then only touches the
non-conditional parts depends on Transform.transform's traversal
semantics - confirm against the Transform module. *)
let apply_to_noncond (f : term -> term) : term -> term =
let sel _ e = match e.texpr with EIte _ -> Some (f e) | _ -> None in
Transform.transform sel
let cond_normal_rules (t : term) = Transform.apply_bottom_up_rule cond_norm_rule t
(* Rewrite [term] so as to minimize the cost of the [costly] resources:
apply the distribution / factorization / conditional / comparison /
unary rules bottom-up with the cost comparison [fcost] as tie breaker,
factorize the costly subterms, rebuild a cost-optimal AC tree, and
finish with cheap simplifications. If the term is a conditional already
in normal form, the rules are applied only below the conditionals. *)
let minimize_cost (costly : resource list) (term : term) : term =
let fcost e1 e2 = compare_dc_cost (dc_cost_of_term costly e1) (dc_cost_of_term costly e2) in
let rules e =
rebuild_tree_AC fcost
@@ factorize (terms_of_resources costly)
@@ (Transform.apply_bottom_up_rules
[
distrib_rule;
factor_rule;
cond_norm_rule;
cond_fact_rule;
compar_max_rule;
unary_down_rule;
ac_opt_rule fcost;
identity_rule;
]
fcost)
e
in
let rules e = simplify_easy @@ rules e in
let term = to_list_normal_form term in
match term.texpr with
| EIte _ -> if is_cond_normal term then apply_to_noncond rules term else rules term
| _ -> rules term
(* Main entry points. *)
(* Main normalization entry point: put conditionals in normal form and,
when a non-empty list of costly resources is supplied, additionally
minimize their cost. *)
let normalize ?(costly : resource list = []) (t : term) =
  let normalized = cond_normal_rules t in
  match costly with
  | [] -> normalized
  | _ :: _ -> minimize_cost costly normalized
(* Orient every comparison as Lt/Le: a > b becomes b < a, a >= b becomes
b <= a, and a negated comparison becomes the complementary oriented
one. The rule set is applied until a fixpoint (bounded passes): a first
pass can expose new redexes, e.g. a comparison rewritten under a
not-yet-visited negation.
BUG FIX: the bound was 0, so only a single pass ever ran and such
nested redexes were never normalized; use the same bound as [to_dnf]. *)
let norm_comparison t =
  let rule t =
    match t.texpr with
    | EBin (Gt, t1, t2) -> mk_bin t2 Lt t1
    | EBin (Ge, t1, t2) -> mk_bin t2 Le t1
    | EUn (Not, { texpr = EBin (Lt, t1, t2); _ }) -> mk_bin t2 Le t1
    | EUn (Not, { texpr = EBin (Le, t1, t2); _ }) -> mk_bin t2 Lt t1
    | EUn (Not, { texpr = EBin (Ge, t1, t2); _ }) -> mk_bin t1 Lt t2
    | EUn (Not, { texpr = EBin (Gt, t1, t2); _ }) -> mk_bin t1 Le t2
    | _ -> t
  in
  let rec apply_until_stable k t =
    let t' = Transform.apply_rule rule t in
    if ACES.equal t' t || k <= 0 then t' else apply_until_stable (k - 1) t'
  in
  apply_until_stable 100 t
(* Weaken [t] under the hypotheses [hyp]: both are first normalized with
[norm_comparison]; then every hypothesis of shape a < b, a <= b,
a => b or a or b contributes a substitution (a, b), applied in [t]
modulo AC. *)
let weaken ~(hyp : term list) (t : term) : term =
let hyp = List.map ~f:norm_comparison hyp in
let t = norm_comparison t in
let subs =
let f t = match t.texpr with EBin ((Lt | Le | Impl | Or), a, b) -> [ (a, b) ] | _ -> [] in
List.concat (List.map ~f hyp)
in
apply_substs_ac subs t
(* Convert a boolean term to disjunctive normal form and return the list
of its disjuncts: push negations inward (De Morgan on and/or,
complementation of comparisons), distribute conjunction over
disjunction, iterate to a fixpoint (at most 100 passes), then flatten
the top-level disjunction into a list.
FIX: De Morgan for a negated disjunction was missing, so terms
containing not (a or b) were never fully converted to DNF. *)
let to_dnf t =
  let rule t0 =
    match t0.texpr with
    | EUn (Not, t1) -> (
        match t1.texpr with
        | EBin (And, t2, t3) -> mk_or (mk_not t2) (mk_not t3)
        | EBin (Or, t2, t3) -> mk_and (mk_not t2) (mk_not t3)
        | EBin (Lt, t2, t3) -> mk_bin t2 Ge t3
        | EBin (Le, t2, t3) -> mk_bin t2 Gt t3
        | EBin (Gt, t2, t3) -> mk_bin t2 Le t3
        | EBin (Ge, t2, t3) -> mk_bin t2 Lt t3
        | _ -> t0)
    | EBin (And, t1, t2) -> (
        match (t1.texpr, t2.texpr) with
        | EBin (Or, t3, t4), _ -> mk_or (mk_and t3 t2) (mk_and t4 t2)
        | _, EBin (Or, t3, t4) -> mk_or (mk_and t1 t3) (mk_and t1 t4)
        | _ -> t0)
    | _ -> t0
  in
  let rec apply_until_stable k t =
    let t' = Transform.apply_rule rule t in
    if ACES.equal t' t || k <= 0 then t' else apply_until_stable (k - 1) t'
  in
  let t1 = apply_until_stable 100 t in
  (* Flatten the top-level disjunction into the list of its disjuncts. *)
  let brk_or t =
    let rd : term list Transform.recursor =
      {
        init = [];
        join = ( @ );
        case =
          (fun f t ->
            match t.texpr with EBin (Or, t1, t2) -> Some (f t1 @ f t2) | _ -> Some [ t ]);
      }
    in
    match Transform.recurse rd t with _ :: _ as t' -> t' | _ -> [ t ]
  in
  brk_or t1
(* Convert a boolean term to conjunctive normal form and return the list
of its conjuncts: express conditionals with and/or, push negations
inward (AcTerm.not_rule), distribute disjunction over conjunction,
fold constants, and flatten the top-level conjunction into a list.
BUG FIX: the right-hand distribution case matched a nested Or instead
of a nested And, rewriting t1 or (a or b) into the non-equivalent
(t1 or a) and (t1 or b). It now matches t1 or (a and b). *)
let to_cnf tl =
  let if_rule t =
    match t.texpr with
    | EIte (c, tt, tf) -> mk_and (mk_or tt tf) (mk_and (mk_or (mk_not c) tt) (mk_or c tf))
    | _ -> t
  in
  let dist_and_or t =
    match t.texpr with
    | EBin (Or, t1, t2) -> (
        match (t1.texpr, t2.texpr) with
        | EBin (And, a, b), _ -> mk_bin (mk_bin a Or t2) And (mk_bin b Or t2)
        | _, EBin (And, a, b) -> mk_bin (mk_bin t1 Or a) And (mk_bin t1 Or b)
        | _ -> t)
    | _ -> t
  in
  let brk_and t =
    let rd : term list Transform.recursor =
      {
        init = [];
        join = ( @ );
        case =
          (fun f t ->
            match t.texpr with EBin (And, t1, t2) -> Some (f t1 @ f t2) | _ -> Some [ t ]);
      }
    in
    match Transform.recurse rd t with _ :: _ as t' -> t' | _ -> [ t ]
  in
  Transform.(
    apply_rule if_rule --> apply_rule AcTerm.not_rule --> apply_rule dist_and_or
    --> eval_const_parts --> brk_and)
    tl
(* Convert a term to multi-branch conditional normal form: normalize it,
then walk its conditional spine, accumulating for each leaf the CNF of
the guards met (positively on then-branches, negated on else-branches). *)
let to_mcnf (t : term) =
let rec gather cnd_path e =
match e.texpr with
| EIte (c, e1, e2) -> gather (cnd_path @ to_cnf c) e1 @ gather (cnd_path @ to_cnf (mk_not c)) e2
| _ -> [ (cnd_path, e) ]
in
gather [] (normalize t)
(* Renormalize every branch expression of an MCNF, leaving guards intact. *)
let normalize_branches_mcnf ?(costly = []) (emcnf : mcnf) =
  let norm_branch (cond, branch) = (cond, normalize ~costly branch) in
  List.map ~f:norm_branch emcnf
(* Pretty-print an MCNF, one "guard-tuple |-> branch" line per branch;
each guard list is shown as a tuple term. *)
let pp_mcnf (formt : Formatter.t) (mcnf : (term list * term) list) =
List.iter
~f:(fun (cond, e) ->
Fmt.(
pf formt "@[<hov 2>‣ %a@;@[%a@]@;%a@]@." (box TermPp.pp_term) cond
(styled (`Fg `Red) string)
" ⟼ " (box TermPp.pp_term) e))
(List.map ~f:(fun (el, e) -> (mk_term (ETuple el), e)) mcnf)
(* Pretty-print an extended MCNF: each line shows the conjunction of
guards, the branch expression and the array of auxiliary terms.
FIX: removed a useless identity [List.map] over the input list. *)
let pp_mcnf_ext (formt : Formatter.t) (mcnf : (term list * term * term Array.t) list) =
  Fmt.(
    list (fun formt (cond, e, e0) ->
        pf formt "@[<hov 2>‣ %a@;%a@;%a@;⟅%a⟆@]@."
          (box (list ~sep:TermPp.sep_and TermPp.pp_term))
          cond
          (styled (`Fg `Red) string)
          " ⟼ " (box TermPp.pp_term) e
          (box (array ~sep:vert TermPp.pp_term))
          e0))
    formt mcnf
let mcnf_branch_type (e : mcnf) = match e with (_, eb) :: _ -> type_of eb | [] -> Ok TTop
(* Collect the distinct guard conjuncts of an MCNF in order of first
occurrence; a conjunct that is AC-equal to an already-collected one or
to its negation is skipped. *)
let unique_conjuncts (e : mcnf) =
let cfield uc c = List.mem uc c ~equal:(fun a b -> ACES.(a = b || t_not a = b || a = t_not b)) in
let f uc (cl, _) =
let f' uc c = if cfield uc c then uc else uc @ [ c ] in
List.fold_left ~f:f' ~init:uc cl
in
List.fold_left ~f ~init:[] e
(* Rewrite every max/min into an explicit conditional, recursing into the
operands first: max(x,y) -> if x >= y then x else y, and dually
min(x,y) -> if x >= y then y else x. *)
let blast_max : term -> term =
let _mutate f t =
match t.texpr with
| EBin (Max, x, y) ->
let x' = f x in
let y' = f y in
Some (mk_ite (mk_bin x' Ge y') x' y')
| EBin (Min, x, y) ->
let x' = f x in
let y' = f y in
Some (mk_ite (mk_bin x' Ge y') y' x')
| _ -> None
in
Transform.transform _mutate
(* Converse of [blast_max]: recognize conditionals of the shape
if a >= b then a else b (and the Le/Lt and swapped-branch variants)
and rebuild them as max/min; other conditionals are rebuilt unchanged
after recursing into guard and branches. *)
let rebuild_max : term -> term =
let _mutate f t =
match t.texpr with
| EIte (c, a, b) -> (
let c = f c in
let a = f a in
let b = f b in
match c.texpr with
| EBin (comp, a', b') when Binop.(comp = Ge || comp = Gt) ->
if ACES.(a = a' && b = b') then Some (mk_max a b)
else if ACES.(a = b' && b = a') then Some (mk_min a b)
else Some (mk_ite c a b)
| EBin (comp, a', b') when Binop.(comp = Le || comp = Lt) ->
if ACES.(a = a' && b = b') then Some (mk_min a b)
else if ACES.(a = b' && b = a') then Some (mk_max a b)
else Some (mk_ite c a b)
| _ -> Some (mk_ite c a b))
| _ -> None
in
Transform.transform _mutate
| null | https://raw.githubusercontent.com/victornicolet/parsynt/d3f530923c0c75537b92c2930eb882921f38268c/src/lang/Normalize.ml | ocaml | ==============================================================================
Normalization rules.
RULES
min(a,b) > c -> a > c and b > c
c > min(a,b) -> a > c or b > c
min(a,b) < c -> a < c or b < c
c < min(a,b) -> c< a and c < b
Main entry points. | *
This file is part of Parsynt .
Author : < >
Parsynt is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with . If not , see < / > .
This file is part of Parsynt.
Author: Victor Nicolet <>
Parsynt is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Parsynt is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Parsynt. If not, see </>.
*)
open AcTerm
open Base
open ResourceModel
open Term
open TermUtils
open Typ
open Unfold
open Utils
type cond = term list
type mcnf = (cond * term) list
type mcnf_extended = (cond * term * term Array.t) list
type normalized_unfolding = {
u : unfolding IM.t;
from_symb_mcnf : mcnf_extended IM.t;
from_init_mcnf : mcnf IM.t;
}
type local_context = Binop.t * term
type collect_result = { context_tree : term; aux_exprs : term Map.M(String).t }
* Collect result : represents an expression ( the context tree ) where
all the expressions that need to be collected for paralellelization
have been replaced in the tree by a variable and the variable name
maps to the expression in aux_exprs .
all the expressions that need to be collected for paralellelization
have been replaced in the tree by a variable and the variable name
maps to the expression in aux_exprs.
*)
let is_cr_aux (t : term) (cr : collect_result) =
match t.texpr with EVar (Var v) -> Option.is_some (Map.find cr.aux_exprs v.vname) | _ -> false
let has_cr_aux (t : term) (cr : collect_result) =
let init = false in
let join = ( || ) in
let case _ t = if is_cr_aux t cr then Some true else None in
Transform.recurse { init; join; case } t
let skeletize (cr : collect_result) : term -> term =
let tf _ t =
let ttyp = match type_of t with Ok x -> x | Error _ -> TTop in
if is_cr_aux t cr then Some (mk_hole ~attrs:t.tattrs (ttyp, 0))
else if not (has_cr_aux t cr) then Some (mk_hole ~attrs:t.tattrs (ttyp, 1))
else None
in
Transform.transform tf
Associativity / commutativity
let ac_opt_rule (fcost : term -> term -> int) : (term -> term) -> term -> term =
fun _ e -> rebuild_tree_AC fcost (to_ac e)
let distrib_rule (f : term -> term) (t : term) =
match t.texpr with
| EBin (op1, t1, t2) -> (
match (t1.texpr, t2.texpr) with
| EBin (op2, a, b), _ when distributes op1 op2 ->
mk_bin (f (mk_bin a op1 t2)) op2 (f (mk_bin b op1 t2))
| _, EBin (op2, a, b) when distributes op1 op2 ->
mk_bin (f (mk_bin t1 op1 a)) op2 (f (mk_bin t1 op1 b))
| _ -> t)
| _ -> t
let unary_down_rule (f : term -> term) (t : term) =
match t.texpr with
| EUn (Neg, x) -> (
match x.texpr with
| EBin (Plus, a, b) -> mk_add (f (mk_opp a)) (f (mk_opp b))
| EBin (Max, a, b) -> mk_min (f (mk_opp a)) (f (mk_opp b))
| EBin (Min, a, b) -> mk_max (f (mk_opp a)) (f (mk_opp b))
| EBin (Times, a, b) -> mk_mul (f (mk_opp a)) b
| EBin (Minus, a, b) -> mk_sub b a
| EUn (Neg, a) -> a
| _ -> t)
| EUn (Not, x) -> (
match x.texpr with
| EBin (And, a, b) -> mk_or (f (mk_not a)) (f (mk_not b))
| EBin (Or, a, b) -> mk_and (f (mk_not a)) (f (mk_not b))
| EUn (Not, a) -> a
| _ -> t)
| _ -> t
let factor_rule (f : term -> term) (t : term) =
match t.texpr with
| EBin (op2, t1, t2) -> (
match (t1.texpr, t2.texpr) with
| EBin (op1, a, c), EBin (op1', b, c')
when Binop.(op1 = op1') && distributes op1 op2 && ACES.(c = c') ->
mk_bin (f (mk_bin a op2 b)) op1 c
| EBin (op1, c, a), EBin (op1', c', b)
when Binop.(op1 = op1') && distributes op1 op2 && ACES.(c = c') ->
mk_bin c op1 (f (mk_bin a op2 b))
| _, EBin (op1, c', b) when has_ident op1 && distributes op1 op2 && ACES.(t1 = c') ->
let e0 = mk_term (Option.value_exn (ident_elt op1)) in
mk_bin t1 op1 (f (mk_bin e0 op2 b))
| EBin (op1, c', b), _ when has_ident op1 && distributes op1 op2 && ACES.(t2 = c') ->
let e0 = mk_term (Option.value_exn (ident_elt op1)) in
mk_bin t2 op1 (f (mk_bin e0 op2 b))
( a1 or b1 ) and ( ( a2 or b2 ) and t3 ) -- > ( a1 or ( b1 and b2 ) ) and t3
| EBin (op1, a1, b1), EBin (op2', { texpr = EBin (op1', a2, b2); _ }, t3)
when Binop.(op2 = op2') && Binop.(op1 = op1') && distributes op2 op1 ->
if ACES.(a1 = a2) then mk_bin (mk_bin (f a1) op1 (f (mk_bin b1 op2 b2))) op2 (f t3)
else if ACES.(b1 = b2) then mk_bin (mk_bin (f b1) op1 (f (mk_bin a1 op2 a2))) op2 (f t3)
else t
| _ -> t)
| _ -> t
(* Eliminate max/min under a comparison by splitting it into a
conjunction or disjunction of plain comparisons (duplicate copy of
this file's rule; kept in sync).
BUG FIX in the last case: c < min(a,b) <=> c < a AND c < b; it
previously produced a disjunction, which is strictly weaker. *)
let compar_max_rule (f : term -> term) (t : term) =
  match t.texpr with
  | EBin (o1, t1, t2) -> (
      match (t1.texpr, t2.texpr) with
      | EBin (Max, a, b), _ when Binop.(o1 = Gt || o1 = Ge) ->
          let e1 = f (mk_bin a o1 t2) in
          let e2 = f (mk_bin b o1 t2) in
          mk_or e1 e2
      | _, EBin (Max, a, b) when Binop.(o1 = Gt || o1 = Ge) ->
          let e1 = f (mk_bin t1 o1 a) in
          let e2 = f (mk_bin t1 o1 b) in
          mk_and e1 e2
      | EBin (Max, a, b), _ when Binop.(o1 = Lt || o1 = Le) ->
          let e1 = f (mk_bin a o1 t2) in
          let e2 = f (mk_bin b o1 t2) in
          mk_and e1 e2
      | _, EBin (Max, a, b) when Binop.(o1 = Lt || o1 = Le) ->
          let e1 = f (mk_bin t1 o1 a) in
          let e2 = f (mk_bin t1 o1 b) in
          mk_or e1 e2
      | EBin (Min, a, b), _ when Binop.(o1 = Gt || o1 = Ge) ->
          let e1 = f (mk_bin a o1 t2) in
          let e2 = f (mk_bin b o1 t2) in
          mk_and e1 e2
      | _, EBin (Min, a, b) when Binop.(o1 = Gt || o1 = Ge) ->
          let e1 = f (mk_bin t1 o1 a) in
          let e2 = f (mk_bin t1 o1 b) in
          mk_or e1 e2
      | EBin (Min, a, b), _ when Binop.(o1 = Lt || o1 = Le) ->
          let e1 = f (mk_bin a o1 t2) in
          let e2 = f (mk_bin b o1 t2) in
          mk_or e1 e2
      | _, EBin (Min, a, b) when Binop.(o1 = Lt || o1 = Le) ->
          let e1 = f (mk_bin t1 o1 a) in
          let e2 = f (mk_bin t1 o1 b) in
          mk_and e1 e2
      | _ -> t)
  | _ -> t
let identity_rule (_ : term -> term) (t : term) = t
let cond_norm_rule (f : term -> term) (t : term) =
match t.texpr with
| EBin (o1, t1, t2) -> (
match (t1.texpr, t2.texpr) with
| EIte (c, e1, e2), EIte (c', e1', e2') when Terms.(c = c') ->
let e11 = f (mk_bin e1 o1 e1') in
let e22 = f (mk_bin e2 o1 e2') in
mk_ite (f c) e11 e22
| EIte (c, e1, e2), EIte (c', e1', e2') when Terms.(mk_not c = c') ->
let e12 = f (mk_bin e1 o1 e2') in
let e21 = f (mk_bin e2 o1 e1') in
mk_ite (f c) e12 e21
| EIte (c, e1, e2), EIte (c', e1', e2') when Terms.(c = mk_not c') ->
let e12 = f (mk_bin e1 o1 e2') in
let e21 = f (mk_bin e2 o1 e1') in
mk_ite (f c) e21 e12
| EIte (c, e1, e2), EIte (c', e1', e2') when not Terms.(c = c') ->
let e11 = f (mk_bin e1 o1 e1') in
let e12 = f (mk_bin e1 o1 e2') in
let e21 = f (mk_bin e2 o1 e1') in
let e22 = f (mk_bin e2 o1 e2') in
let c = f c in
let c' = f c' in
mk_ite c (mk_ite c' e11 e12) (mk_ite c' e21 e22)
| _, EIte (c, e1', e2') ->
let ee1 = f (mk_bin t1 o1 e1') in
let ee2 = f (mk_bin t1 o1 e2') in
mk_ite (f c) ee1 ee2
| EIte (c, e1, e2), _ ->
let e1e = f (mk_bin e1 o1 t2) in
let e2e = f (mk_bin e2 o1 t2) in
mk_ite (f c) e1e e2e
| _ -> t)
| EUn (u, t1) -> (
match t1.texpr with
| EIte (c, e1, e2) ->
let e1' = f (mk_un u e1) in
let e2' = f (mk_un u e2) in
mk_ite c e1' e2'
| _ -> t)
| EIte(cond , et , ef ) when is_bool t - >
( mk_and ( f cond ) ( f et ) ) ( f ef )
mk_or (mk_and (f cond) (f et)) (f ef) *)
| _ -> t
let cond_fact_rule (f : term -> term) (t : term) =
match t.texpr with
| EIte (cnd, t1, t2) -> (
match (t1.texpr, t2.texpr) with
| EBin (o, a, c), EBin (o', b, c') when Binop.(o = o') && Terms.(c = c') ->
let ca = f (mk_ite cnd a b) in
mk_bin ca o c
| EBin (o, c, a), EBin (o', c', b) when Binop.(o = o') && Terms.(c = c') ->
let ca = f (mk_ite cnd a b) in
mk_bin c o ca
| _ -> t)
| _ -> t
let is_cond_normal (e : term) =
let rec check e =
Transform.recurse
{
init = true;
join = ( && );
case =
(fun _ t ->
match t.texpr with EBin _ | EUn _ | EList _ -> Some (not (check_anti t)) | _ -> None);
}
e
and check_anti e =
Transform.recurse
{
init = false;
case = (fun _ t -> match t.texpr with EIte _ -> Some true | _ -> None);
join = (fun a b -> a || b);
}
e
in
match e.texpr with EIte _ -> check e | _ -> false
let apply_to_noncond (f : term -> term) : term -> term =
let sel _ e = match e.texpr with EIte _ -> Some (f e) | _ -> None in
Transform.transform sel
let cond_normal_rules (t : term) = Transform.apply_bottom_up_rule cond_norm_rule t
let minimize_cost (costly : resource list) (term : term) : term =
let fcost e1 e2 = compare_dc_cost (dc_cost_of_term costly e1) (dc_cost_of_term costly e2) in
let rules e =
rebuild_tree_AC fcost
@@ factorize (terms_of_resources costly)
@@ (Transform.apply_bottom_up_rules
[
distrib_rule;
factor_rule;
cond_norm_rule;
cond_fact_rule;
compar_max_rule;
unary_down_rule;
ac_opt_rule fcost;
identity_rule;
]
fcost)
e
in
let rules e = simplify_easy @@ rules e in
let term = to_list_normal_form term in
match term.texpr with
| EIte _ -> if is_cond_normal term then apply_to_noncond rules term else rules term
| _ -> rules term
let normalize ?(costly : resource list = []) (t : term) =
if List.is_empty costly then cond_normal_rules t
else
let t' = cond_normal_rules t in
minimize_cost costly t'
(* Orient every comparison as Lt/Le (duplicate copy; kept in sync).
BUG FIX: the stabilization bound was 0, so the rule set ran a single
pass only and redexes exposed by that pass were never rewritten. *)
let norm_comparison t =
  let rule t =
    match t.texpr with
    | EBin (Gt, t1, t2) -> mk_bin t2 Lt t1
    | EBin (Ge, t1, t2) -> mk_bin t2 Le t1
    | EUn (Not, { texpr = EBin (Lt, t1, t2); _ }) -> mk_bin t2 Le t1
    | EUn (Not, { texpr = EBin (Le, t1, t2); _ }) -> mk_bin t2 Lt t1
    | EUn (Not, { texpr = EBin (Ge, t1, t2); _ }) -> mk_bin t1 Lt t2
    | EUn (Not, { texpr = EBin (Gt, t1, t2); _ }) -> mk_bin t1 Le t2
    | _ -> t
  in
  let rec apply_until_stable k t =
    let t' = Transform.apply_rule rule t in
    if ACES.equal t' t || k <= 0 then t' else apply_until_stable (k - 1) t'
  in
  apply_until_stable 100 t
let weaken ~(hyp : term list) (t : term) : term =
let hyp = List.map ~f:norm_comparison hyp in
let t = norm_comparison t in
let subs =
let f t = match t.texpr with EBin ((Lt | Le | Impl | Or), a, b) -> [ (a, b) ] | _ -> [] in
List.concat (List.map ~f hyp)
in
apply_substs_ac subs t
(* Convert a boolean term to DNF and return its disjuncts (duplicate
copy; kept in sync).
FIX: De Morgan for a negated disjunction was missing, so terms
containing not (a or b) were never fully converted to DNF. *)
let to_dnf t =
  let rule t0 =
    match t0.texpr with
    | EUn (Not, t1) -> (
        match t1.texpr with
        | EBin (And, t2, t3) -> mk_or (mk_not t2) (mk_not t3)
        | EBin (Or, t2, t3) -> mk_and (mk_not t2) (mk_not t3)
        | EBin (Lt, t2, t3) -> mk_bin t2 Ge t3
        | EBin (Le, t2, t3) -> mk_bin t2 Gt t3
        | EBin (Gt, t2, t3) -> mk_bin t2 Le t3
        | EBin (Ge, t2, t3) -> mk_bin t2 Lt t3
        | _ -> t0)
    | EBin (And, t1, t2) -> (
        match (t1.texpr, t2.texpr) with
        | EBin (Or, t3, t4), _ -> mk_or (mk_and t3 t2) (mk_and t4 t2)
        | _, EBin (Or, t3, t4) -> mk_or (mk_and t1 t3) (mk_and t1 t4)
        | _ -> t0)
    | _ -> t0
  in
  let rec apply_until_stable k t =
    let t' = Transform.apply_rule rule t in
    if ACES.equal t' t || k <= 0 then t' else apply_until_stable (k - 1) t'
  in
  let t1 = apply_until_stable 100 t in
  let brk_or t =
    let rd : term list Transform.recursor =
      {
        init = [];
        join = ( @ );
        case =
          (fun f t ->
            match t.texpr with EBin (Or, t1, t2) -> Some (f t1 @ f t2) | _ -> Some [ t ]);
      }
    in
    match Transform.recurse rd t with _ :: _ as t' -> t' | _ -> [ t ]
  in
  brk_or t1
(* Convert a boolean term to CNF and return its conjuncts (duplicate
copy; kept in sync).
BUG FIX: the right-hand distribution case matched a nested Or instead
of a nested And, rewriting t1 or (a or b) into the non-equivalent
(t1 or a) and (t1 or b). It now matches t1 or (a and b). *)
let to_cnf tl =
  let if_rule t =
    match t.texpr with
    | EIte (c, tt, tf) -> mk_and (mk_or tt tf) (mk_and (mk_or (mk_not c) tt) (mk_or c tf))
    | _ -> t
  in
  let dist_and_or t =
    match t.texpr with
    | EBin (Or, t1, t2) -> (
        match (t1.texpr, t2.texpr) with
        | EBin (And, a, b), _ -> mk_bin (mk_bin a Or t2) And (mk_bin b Or t2)
        | _, EBin (And, a, b) -> mk_bin (mk_bin t1 Or a) And (mk_bin t1 Or b)
        | _ -> t)
    | _ -> t
  in
  let brk_and t =
    let rd : term list Transform.recursor =
      {
        init = [];
        join = ( @ );
        case =
          (fun f t ->
            match t.texpr with EBin (And, t1, t2) -> Some (f t1 @ f t2) | _ -> Some [ t ]);
      }
    in
    match Transform.recurse rd t with _ :: _ as t' -> t' | _ -> [ t ]
  in
  Transform.(
    apply_rule if_rule --> apply_rule AcTerm.not_rule --> apply_rule dist_and_or
    --> eval_const_parts --> brk_and)
    tl
let to_mcnf (t : term) =
let rec gather cnd_path e =
match e.texpr with
| EIte (c, e1, e2) -> gather (cnd_path @ to_cnf c) e1 @ gather (cnd_path @ to_cnf (mk_not c)) e2
| _ -> [ (cnd_path, e) ]
in
gather [] (normalize t)
let normalize_branches_mcnf ?(costly = []) (emcnf : mcnf) =
List.map ~f:(fun (cond, e') -> (cond, normalize ~costly e')) emcnf
let pp_mcnf (formt : Formatter.t) (mcnf : (term list * term) list) =
List.iter
~f:(fun (cond, e) ->
Fmt.(
pf formt "@[<hov 2>‣ %a@;@[%a@]@;%a@]@." (box TermPp.pp_term) cond
(styled (`Fg `Red) string)
" ⟼ " (box TermPp.pp_term) e))
(List.map ~f:(fun (el, e) -> (mk_term (ETuple el), e)) mcnf)
(* Pretty-print an extended MCNF (duplicate copy; kept in sync).
FIX: removed a useless identity [List.map] over the input list. *)
let pp_mcnf_ext (formt : Formatter.t) (mcnf : (term list * term * term Array.t) list) =
  Fmt.(
    list (fun formt (cond, e, e0) ->
        pf formt "@[<hov 2>‣ %a@;%a@;%a@;⟅%a⟆@]@."
          (box (list ~sep:TermPp.sep_and TermPp.pp_term))
          cond
          (styled (`Fg `Red) string)
          " ⟼ " (box TermPp.pp_term) e
          (box (array ~sep:vert TermPp.pp_term))
          e0))
    formt mcnf
let mcnf_branch_type (e : mcnf) = match e with (_, eb) :: _ -> type_of eb | [] -> Ok TTop
let unique_conjuncts (e : mcnf) =
let cfield uc c = List.mem uc c ~equal:(fun a b -> ACES.(a = b || t_not a = b || a = t_not b)) in
let f uc (cl, _) =
let f' uc c = if cfield uc c then uc else uc @ [ c ] in
List.fold_left ~f:f' ~init:uc cl
in
List.fold_left ~f ~init:[] e
let blast_max : term -> term =
let _mutate f t =
match t.texpr with
| EBin (Max, x, y) ->
let x' = f x in
let y' = f y in
Some (mk_ite (mk_bin x' Ge y') x' y')
| EBin (Min, x, y) ->
let x' = f x in
let y' = f y in
Some (mk_ite (mk_bin x' Ge y') y' x')
| _ -> None
in
Transform.transform _mutate
let rebuild_max : term -> term =
let _mutate f t =
match t.texpr with
| EIte (c, a, b) -> (
let c = f c in
let a = f a in
let b = f b in
match c.texpr with
| EBin (comp, a', b') when Binop.(comp = Ge || comp = Gt) ->
if ACES.(a = a' && b = b') then Some (mk_max a b)
else if ACES.(a = b' && b = a') then Some (mk_min a b)
else Some (mk_ite c a b)
| EBin (comp, a', b') when Binop.(comp = Le || comp = Lt) ->
if ACES.(a = a' && b = b') then Some (mk_min a b)
else if ACES.(a = b' && b = a') then Some (mk_max a b)
else Some (mk_ite c a b)
| _ -> Some (mk_ite c a b))
| _ -> None
in
Transform.transform _mutate
|
f95e6ac0e20325a62a165b4b9d2b7e65d2be3bcde7ee8069c0c84eab5cd8b4b8 | flodihn/NextGen | libstd_sup.erl | %%----------------------------------------------------------------------
@author < >
%% @doc
This is the supervisor for the standard library ' ' .
%% @end
%%----------------------------------------------------------------------
-module(libstd_sup).
-behaviour(supervisor).
-export([
start_link/0,
init/1
]).
%% @doc Start the supervisor, registered locally as ?MODULE.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%% @doc Supervisor callback. one_for_one strategy allowing at most 10
%% restarts within 30 seconds. Two permanent children are supervised:
%% the libstd server (libstd_srv started with the libstd_std_impl
%% backend) and the object supervisor (obj_sup). Both get a 2000 ms
%% shutdown grace period; callback modules are declared `dynamic'.
init([]) ->
RestartStrategy = one_for_one,
MaxRestarts = 10,
MaxSecondsBetweenRestarts = 30,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
Restart = permanent,
Shutdown = 2000,
%% Child specs: {Id, {M, F, A}, Restart, Shutdown, Type, Modules}.
LibStd = {'libstd', {libstd_srv, start_link, [libstd_std_impl]},
Restart, Shutdown, worker, dynamic},
ObjSup = {'ObjectSupervisor', {obj_sup, start_link, []},
Restart, Shutdown, supervisor, dynamic},
{ok, {SupFlags, [LibStd, ObjSup]}}.
| null | https://raw.githubusercontent.com/flodihn/NextGen/3da1c3ee0d8f658383bdf5fccbdd49ace3cdb323/AreaServer/src/libstd/libstd_sup.erl | erlang | ----------------------------------------------------------------------
@doc
@end
---------------------------------------------------------------------- | @author < >
This is the supervisor for the standard library ' ' .
-module(libstd_sup).
-behaviour(supervisor).
-export([
start_link/0,
init/1
]).
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
RestartStrategy = one_for_one,
MaxRestarts = 10,
MaxSecondsBetweenRestarts = 30,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
Restart = permanent,
Shutdown = 2000,
LibStd = {'libstd', {libstd_srv, start_link, [libstd_std_impl]},
Restart, Shutdown, worker, dynamic},
ObjSup = {'ObjectSupervisor', {obj_sup, start_link, []},
Restart, Shutdown, supervisor, dynamic},
{ok, {SupFlags, [LibStd, ObjSup]}}.
|
f8b6a433bb2324d1b8bab92c27f9739fa7f9fa6f498edcaf74d72cd36cf5e392 | lwhjp/racket-asm | register.rkt | #lang racket/base
(require racklog
"operand.rkt")
(provide (all-defined-out))
(define %general-register
(let ([regs '((0 al ax eax rax)
(3 bl bx ebx rbx)
(1 cl cx ecx rcx)
(2 dl dx edx rdx)
(6 sil si esi rsi)
(7 dil di edi rdi)
(5 bpl bp ebp rbp)
(4 spl sp esp rsp)
(8 r8b r8w r8d r8)
(9 r9b r9w r9d r9)
(10 r10b r10w r10d r10)
(11 r11b r11w r11d r11)
(12 r12b r12w r12d r12)
(13 r13b r13w r13d r13)
(14 r14b r14w r14d r14)
(15 r15b r15w r15d r15))])
(%rel (name width code b w d q)
[((general-register name width code))
(%member (list code b w d q) regs)
(%or (%and (%= name b) (%= width 8))
(%and (%= name w) (%= width 16))
(%and (%= name d) (%= width 32))
(%and (%= name q) (%= width 64)))]
[((general-register 'ah 8 4))]
[((general-register 'bh 8 7))]
[((general-register 'ch 8 5))]
[((general-register 'dh 8 6))])))
;; Facts for the six 16-bit segment registers and their encodings.
(define %segment-register
(%rel ()
[((segment-register 'cs 16 1))]
[((segment-register 'ds 16 3))]
[((segment-register 'es 16 0))]
[((segment-register 'fs 16 4))]
[((segment-register 'gs 16 5))]
[((segment-register 'ss 16 2))]))
;; Relates a register operand to the CPU modes where it is usable:
;; any general register in 64-bit mode; in 32-bit mode only widths
;; 8/16/32 that are encodable without a REX prefix; 16-bit mode defers
;; to the 32-bit rule.
(define %register-mode
(%rel (r n w c)
[((general-register n w c) 64)]
[(r 32) (%= r (general-register n w c))
(%member w '(8 16 32))
(%register-with-rex r #f)]
[(r 16) (%register-mode r 32)]))
;; Relates a register to whether a REX prefix is needed: codes 0-7 of
;; width 16/32/64 are encodable with or without REX (flag left
;; unconstrained); codes 8-15 require REX. For 8-bit registers,
;; ah/bh/ch/dh are only encodable WITHOUT REX, sil/dil/bpl/spl only
;; WITH REX, and codes 0-3 (al/bl/cl/dl) either way.
(define %register-with-rex
(%rel (n w c)
[((general-register n w c) (_)) (%member w '(16 32 64))
(%member c '(0 1 2 3 4 5 6 7))]
[((general-register n w c) #t) (%member c '(8 9 10 11 12 13 14 15))]
[((general-register n 8 c) #f) (%member (cons c n)
'((4 . ah)
(7 . bh)
(5 . ch)
(6 . dh)))]
[((general-register n 8 c) #t) (%member (cons c n)
'((6 . sil)
(7 . dil)
(5 . bpl)
(4 . spl)))]
[((general-register n 8 c) (_)) (%member c '(0 1 2 3))]))
| null | https://raw.githubusercontent.com/lwhjp/racket-asm/57abd235fcb8c7505990f8e9731c01c716324ee5/x86/private/register.rkt | racket | #lang racket/base
(require racklog
"operand.rkt")
(provide (all-defined-out))
(define %general-register
(let ([regs '((0 al ax eax rax)
(3 bl bx ebx rbx)
(1 cl cx ecx rcx)
(2 dl dx edx rdx)
(6 sil si esi rsi)
(7 dil di edi rdi)
(5 bpl bp ebp rbp)
(4 spl sp esp rsp)
(8 r8b r8w r8d r8)
(9 r9b r9w r9d r9)
(10 r10b r10w r10d r10)
(11 r11b r11w r11d r11)
(12 r12b r12w r12d r12)
(13 r13b r13w r13d r13)
(14 r14b r14w r14d r14)
(15 r15b r15w r15d r15))])
(%rel (name width code b w d q)
[((general-register name width code))
(%member (list code b w d q) regs)
(%or (%and (%= name b) (%= width 8))
(%and (%= name w) (%= width 16))
(%and (%= name d) (%= width 32))
(%and (%= name q) (%= width 64)))]
[((general-register 'ah 8 4))]
[((general-register 'bh 8 7))]
[((general-register 'ch 8 5))]
[((general-register 'dh 8 6))])))
(define %segment-register
(%rel ()
[((segment-register 'cs 16 1))]
[((segment-register 'ds 16 3))]
[((segment-register 'es 16 0))]
[((segment-register 'fs 16 4))]
[((segment-register 'gs 16 5))]
[((segment-register 'ss 16 2))]))
(define %register-mode
(%rel (r n w c)
[((general-register n w c) 64)]
[(r 32) (%= r (general-register n w c))
(%member w '(8 16 32))
(%register-with-rex r #f)]
[(r 16) (%register-mode r 32)]))
(define %register-with-rex
(%rel (n w c)
[((general-register n w c) (_)) (%member w '(16 32 64))
(%member c '(0 1 2 3 4 5 6 7))]
[((general-register n w c) #t) (%member c '(8 9 10 11 12 13 14 15))]
[((general-register n 8 c) #f) (%member (cons c n)
'((4 . ah)
(7 . bh)
(5 . ch)
(6 . dh)))]
[((general-register n 8 c) #t) (%member (cons c n)
'((6 . sil)
(7 . dil)
(5 . bpl)
(4 . spl)))]
[((general-register n 8 c) (_)) (%member c '(0 1 2 3))]))
| |
9e4823558c168135a5782a59cabdd58830792de49c869dace8c1a78c51b605ba | discus-lang/ddc | Compounds.hs | {-# OPTIONS_HADDOCK hide #-}
-- | Utilities for constructing and destructing compound expressions.
--
For the annotated version of the AST .
--
module DDC.Core.Exp.Annot.Compounds
( module DDC.Type.Exp.Simple.Compounds
-- * Annotations
, annotOfExp
, mapAnnotOfExp
-- * Lambdas
, xLAMs
, xLams
, makeXLamFlags
, takeXLAMs
, takeXLams
, takeXLamFlags
-- ** Parameters
, bindOfParam
, typeOfParam
, replaceTypeOfParam
, ParamTVB(..)
, takeXLamParamTVB
, splitParamOfType
, makeTFunParams
-- * Applications
, xApps
, makeXAppsWithAnnots
, takeXApps
, takeXApps1
, takeXAppsAsList
, takeXAppsWithAnnots
, takeXConApps
, takeXPrimApps
, takeXNameApps
-- ** Arguments
, takeRType
, takeRTerm
, takeRWitness
, takeRImplicit
, takeExpFromArg
, takeExpsFromArgs
-- * Lets
, xLets, xLetsAnnot
, splitXLets, splitXLetsAnnot
, bindsOfLets
, specBindsOfLets
, valwitBindsOfLets
-- * Alternatives
, patOfAlt
, takeDaConOfAlt
-- * Patterns
, bindsOfPat
-- * Casts
, makeRuns
-- * Witnesses
, wApp
, wApps
, annotOfWitness
, takeWAppsAsList
, takePrimWiConApps
-- * Data Constructors
, xUnit, dcUnit
, takeNameOfDaConPrim
, takeNameOfDaConBound
, takeBaseCtorNameOfDaCon
-- * Bound Variables
, takeBoundOfExp
, takeNameOfExp)
where
import DDC.Core.Exp.Annot.Exp
import DDC.Core.Exp.DaCon
import DDC.Type.Exp.Simple.Compounds
import Data.Maybe (catMaybes)
-- Annotations ----------------------------------------------------------------
-- | Take the outermost annotation from an expression.
--   Every constructor of `Exp` carries its annotation in the first field,
--   so this is total.
annotOfExp :: Exp a n -> a
annotOfExp (XVar  a _)     = a
annotOfExp (XAbs  a _ _)   = a
annotOfExp (XApp  a _ _)   = a
annotOfExp (XLet  a _ _)   = a
annotOfExp (XAtom a _)     = a
annotOfExp (XCase a _ _)   = a
annotOfExp (XCast a _ _)   = a
-- | Apply a function to the annotation of an expression.
--   Only the outermost annotation is transformed; sub-expressions are
--   left untouched.
mapAnnotOfExp :: (a -> a) -> Exp a n -> Exp a n
mapAnnotOfExp f (XVar  a u)     = XVar  (f a) u
mapAnnotOfExp f (XAbs  a b x)   = XAbs  (f a) b x
mapAnnotOfExp f (XApp  a x1 x2) = XApp  (f a) x1 x2
mapAnnotOfExp f (XLet  a lt x)  = XLet  (f a) lt x
mapAnnotOfExp f (XAtom a t)     = XAtom (f a) t
mapAnnotOfExp f (XCase a x as)  = XCase (f a) x as
mapAnnotOfExp f (XCast a c x)   = XCast (f a) c x
-- Lambdas ---------------------------------------------------------------------
-- | Make some nested type lambdas.
--   The first binder in the list becomes the outermost lambda.
xLAMs :: a -> [Bind n] -> Exp a n -> Exp a n
xLAMs a bs x
        = foldr (XLAM a) x bs
-- | Make some nested value or witness lambdas.
--   The first binder in the list becomes the outermost lambda.
xLams :: a -> [Bind n] -> Exp a n -> Exp a n
xLams a bs x
        = foldr (XLam a) x bs
-- | Split type lambdas from the front of an expression,
--   or `Nothing` if there aren't any.
--   Binders are returned outermost-first.
takeXLAMs :: Exp a n -> Maybe ([Bind n], Exp a n)
takeXLAMs xx
 = let go bs (XLAM _ b x) = go (b:bs) x
       go bs x            = (reverse bs, x)
   in case go [] xx of
        ([], _) -> Nothing
        (bs, body) -> Just (bs, body)
-- | Split nested value or witness lambdas from the front of an expression,
--   or `Nothing` if there aren't any.
--   Binders are returned outermost-first.
takeXLams :: Exp a n -> Maybe ([Bind n], Exp a n)
takeXLams xx
 = let go bs (XLam _ b x) = go (b:bs) x
       go bs x            = (reverse bs, x)
   in case go [] xx of
        ([], _) -> Nothing
        (bs, body) -> Just (bs, body)
-- | Make some nested lambda abstractions,
--   using a flag to indicate whether the lambda is a
--   level-1 (True), or level-0 (False) binder.
makeXLamFlags :: a -> [(Bool, Bind n)] -> Exp a n -> Exp a n
makeXLamFlags a fbs x
 = foldr (\(f, b) x'
            -> if f then XLAM a b x'
                    else XLam a b x')
        x fbs
-- | Split nested lambdas from the front of an expression,
--   with a flag indicating whether the lambda was a level-1 (True),
--   or level-0 (False) binder.
--   Inverse of `makeXLamFlags`; binders are returned outermost-first.
takeXLamFlags :: Exp a n -> Maybe ([(Bool, Bind n)], Exp a n)
takeXLamFlags xx
 = let go bs (XLAM _ b x) = go ((True, b):bs) x
       go bs (XLam _ b x) = go ((False, b):bs) x
       go bs x            = (reverse bs, x)
   in case go [] xx of
        ([], _) -> Nothing
        (bs, body) -> Just (bs, body)
-- Parameters -----------------------------------------------------------------
-- | Take the binder of a parameter.
bindOfParam :: Param n -> Bind n
bindOfParam mm
 = case mm of
        MType b -> b
        MTerm b -> b
        MImplicit b -> b
-- | Take the type of a parameter.
typeOfParam :: Param n -> Type n
typeOfParam mm
        = typeOfBind $ bindOfParam mm
-- | Replace the type of a parameter, keeping its sort (type/term/implicit).
replaceTypeOfParam :: Type n -> Param n -> Param n
replaceTypeOfParam t mm
 = case mm of
        MType b -> MType $ replaceTypeOfBind t b
        MTerm b -> MTerm $ replaceTypeOfBind t b
        MImplicit b -> MImplicit $ replaceTypeOfBind t b
-- | Parameters of a function.
--   `ParamBox` records a `box` cast wrapped around the body, rather than
--   an actual binder.
data ParamTVB n
        = ParamType (Bind n)
        | ParamValue (Bind n)
        | ParamBox
        deriving Show
-- | Take the parameters of a function,
--   or `Nothing` if there aren't any.
--   Parameters are returned outermost-first.
takeXLamParamTVB :: Exp a n -> Maybe ([ParamTVB n], Exp a n)
takeXLamParamTVB xx
 = let go bs (XLAM _ b x) = go (ParamType b : bs) x
       go bs (XLam _ b x) = go (ParamValue b : bs) x
       go bs (XCast _ CastBox x) = go (ParamBox : bs) x
       go bs x = (reverse bs, x)
   in case go [] xx of
        ([], _) -> Nothing
        (bs, body) -> Just (bs, body)
-- | Given the type of an abstraction,
--   determine how the abstraction parameterises its body.
--
--   Returns the sort (type/term/witness), the mode
--   (explicit/implicit/elaborate), the binder and the body type,
--   or `Nothing` if the type is not a function or forall type.
--   For the arrow cases there is no binder in the type itself,
--   so a `BNone` binder carrying the parameter type is produced.
splitParamOfType
        :: Type n
        -> Maybe (ParamSort, ParamMode, Bind n, Type n)
splitParamOfType tt
 = case tt of
        TApp (TApp (TCon tycon) t1) t2
          -> case tycon of
                TyConSpec TcConFunExplicit
                  -> Just (ParamSortTerm, ParamModeExplicit, BNone t1, t2)
                TyConSpec TcConFunImplicit
                  -> Just (ParamSortTerm, ParamModeImplicit, BNone t1, t2)
                TyConWitness TwConImpl
                  -> Just (ParamSortWitness, ParamModeExplicit, BNone t1, t2)
                _ -> Nothing
        TForall b t2
          -> Just (ParamSortType, ParamModeElaborate, b, t2)
        _ -> Nothing
-- | Construct a function type from a list of parameter types and the
--   return type.
--   `MType` parameters become foralls, `MTerm` explicit arrows and
--   `MImplicit` implicit arrows, nested left-to-right.
makeTFunParams :: [Param n] -> Type n -> Type n
makeTFunParams msParam tResult
 = tFuns' msParam
 where
        tFuns' []
         = tResult
        tFuns' (m : ms)
         = case m of
                MType b
                  -> TForall b (tFuns' ms)
                MTerm b
                  -> (TCon $ TyConSpec TcConFunExplicit)
                        `tApps` [typeOfBind b, tFuns' ms]
                MImplicit b
                  -> (TCon $ TyConSpec TcConFunImplicit)
                        `tApps` [typeOfBind b, tFuns' ms]
-- Applications ---------------------------------------------------------------
-- | Build sequence of value applications.
--   The first argument in the list is applied first (innermost).
xApps :: a -> Exp a n -> [Arg a n] -> Exp a n
xApps a t1 ts = foldl (XApp a) t1 ts
-- | Build sequence of applications.
--   Similar to `xApps` but also takes a list of annotations for
--   the `XApp` constructors.
makeXAppsWithAnnots :: Exp a n -> [(Arg a n, a)] -> Exp a n
makeXAppsWithAnnots f xas
 = case xas of
        [] -> f
        (arg, a) : as -> makeXAppsWithAnnots (XApp a f arg) as
-- | Flatten an application into the function part and its arguments.
--
--   Returns `Nothing` if there is no outer application.
takeXApps :: Exp a n -> Maybe (Exp a n, [Arg a n])
takeXApps xx
 = case takeXAppsAsList xx of
        (_, []) -> Nothing
        (x1, as) -> Just (x1, as)
-- | Flatten an application into the function part and its arguments, if any.
--   If the expression is not an application, it is returned unchanged with
--   an empty argument list.  Arguments come back in left-to-right order.
--
--   Uses an accumulator so flattening is linear in the number of arguments,
--   rather than the quadratic repeated-append of the naive version.
takeXAppsAsList :: Exp a n -> (Exp a n, [Arg a n])
takeXAppsAsList xx
 = go xx []
 where  -- Walk down the application spine, consing each argument so the
        -- innermost (leftmost) argument ends up at the head of the list.
        go (XApp _ x1 x2) acc   = go x1 (x2 : acc)
        go x              acc   = (x, acc)
-- | Flatten an application into the function part and its arguments.
--
--   This is like `takeXApps` above, except we know there is at least one argument,
--   so the result does not need to be wrapped in `Maybe`.
takeXApps1 :: Exp a n -> Arg a n -> (Exp a n, [Arg a n])
takeXApps1 x1 x2
 = case takeXApps x1 of
        Nothing -> (x1, [x2])
        Just (x11, x12s) -> (x11, x12s ++ [x2])
-- | Destruct sequence of applications.
--   Similar to `takeXAppsAsList` but also keeps the annotation of each
--   `XApp` node alongside its argument, for later reconstruction with
--   `makeXAppsWithAnnots`.
--
--   Uses an accumulator so flattening is linear in the number of arguments,
--   rather than the quadratic repeated-append of the naive version.
takeXAppsWithAnnots :: Exp a n -> (Exp a n, [(Arg a n, a)])
takeXAppsWithAnnots xx
 = go xx []
 where  go (XApp a f arg) acc   = go f ((arg, a) : acc)
        go x              acc   = (x, acc)
-- | Flatten an application of an ambient primitive into the primitive name
--   and its arguments.
--
--   Returns `Nothing` if the expression isn't a primitive application.
takeXPrimApps :: Exp a n -> Maybe (Prim, [Arg a n])
takeXPrimApps xx
 = case takeXApps xx of
        Just (XPrim _ p, as) -> Just (p, as)
        _ -> Nothing
-- | Flatten an application of a named function into the variable
--   and its arguments.
--
--   Returns `Nothing` if the expression isn't such an application.
takeXNameApps :: Exp a n -> Maybe (n, [Arg a n])
takeXNameApps xx
 = case takeXApps xx of
        Just (XVar _ (UName p), as) -> Just (p, as)
        _ -> Nothing
-- | Flatten an application of a data constructor into the constructor
--   and its arguments.
--
--   Returns `Nothing` if the expression isn't a constructor application.
takeXConApps :: Exp a n -> Maybe (DaCon n (Type n), [Arg a n])
takeXConApps xx
 = case takeXApps xx of
        Just (XCon _ dc, as) -> Just (dc, as)
        _ -> Nothing
-- Arguments ------------------------------------------------------------------
-- | Take the type of a type argument, if it is one.
takeRType :: Arg a n -> Maybe (Type n)
takeRType aa
 = case aa of
        RType t -> Just t
        _ -> Nothing
-- | Take a witness from an argument, if it is one.
takeRWitness :: Arg a n -> Maybe (Witness a n)
takeRWitness aa
 = case aa of
        RWitness w -> Just w
        _ -> Nothing
-- | Take the term from an argument, if it is one.
takeRTerm :: Arg a n -> Maybe (Exp a n)
takeRTerm aa
 = case aa of
        RTerm x -> Just x
        _ -> Nothing
-- | Take the inner argument from an implicit argument, if it is one.
takeRImplicit :: Arg a n -> Maybe (Arg a n)
takeRImplicit aa
 = case aa of
        RImplicit x -> Just x
        _ -> Nothing
-- | Take the expression from a `RTerm` or `RImplicit` argument,
--   looking through any nesting of `RImplicit` wrappers.
takeExpFromArg :: Arg a n -> Maybe (Exp a n)
takeExpFromArg aa
 = case aa of
        RTerm x -> Just x
        RImplicit a -> takeExpFromArg a
        _ -> Nothing
-- | Take the expressions from any `RTerm` or `RImplicit` arguments,
--   discarding type and witness arguments.
takeExpsFromArgs :: [Arg a n] -> [Exp a n]
takeExpsFromArgs
 = catMaybes . map takeExpFromArg
-- Lets -----------------------------------------------------------------------
-- | Wrap some let-bindings around an expression.
--   The first group in the list becomes the outermost binding.
xLets :: a -> [Lets a n] -> Exp a n -> Exp a n
xLets a lts x
 = foldr (XLet a) x lts
-- | Wrap some let-bindings around an expression, with individual annotations.
xLetsAnnot :: [(Lets a n, a)] -> Exp a n -> Exp a n
xLetsAnnot lts x
 = foldr (\(l, a) x' -> XLet a l x') x lts
-- | Split let-bindings from the front of an expression, if any.
--   Inverse of `xLets`; groups are returned outermost-first.
splitXLets :: Exp a n -> ([Lets a n], Exp a n)
splitXLets xx
 = case xx of
        XLet _ lts x
          -> let (lts', x') = splitXLets x
             in (lts : lts', x')
        _ -> ([], xx)
-- | Split let-bindings from the front of an expression, with annotations.
--   Inverse of `xLetsAnnot`.
splitXLetsAnnot :: Exp a n -> ([(Lets a n, a)], Exp a n)
splitXLetsAnnot xx
 = case xx of
        XLet a lts x
          -> let (lts', x') = splitXLetsAnnot x
             in ((lts, a) : lts', x')
        _ -> ([], xx)
-- | Take the binds of a `Lets`.
--
--   The level-1 and level-0 binders are returned separately.
bindsOfLets :: Lets a n -> ([Bind n], [Bind n])
bindsOfLets ll
 = case ll of
        LLet b _ -> ([], [b])
        LRec bxs -> ([], map fst bxs)
        LPrivate bs _ bbs -> (bs, bbs)
-- | Like `bindsOfLets` but only take the spec (level-1) binders.
specBindsOfLets :: Lets a n -> [Bind n]
specBindsOfLets ll
 = case ll of
        LLet _ _ -> []
        LRec _ -> []
        LPrivate bs _ _ -> bs
-- | Like `bindsOfLets` but only take the value and witness (level-0) binders.
valwitBindsOfLets :: Lets a n -> [Bind n]
valwitBindsOfLets ll
 = case ll of
        LLet b _ -> [b]
        LRec bxs -> map fst bxs
        LPrivate _ _ bs -> bs
-- Alternatives ---------------------------------------------------------------
-- | Take the pattern of an alternative.
patOfAlt :: Alt a n -> Pat n
patOfAlt (AAlt pat _) = pat
-- | Take the constructor name of an alternative, if there is one.
--   Default alternatives have no constructor.
takeDaConOfAlt :: Alt a n -> Maybe (DaCon n (Type n))
takeDaConOfAlt aa
 = case aa of
        AAlt (PData dc _) _ -> Just dc
        _ -> Nothing
-- Patterns -------------------------------------------------------------------
-- | Take the binds of a `Pat`.
--   Default patterns bind nothing.
bindsOfPat :: Pat n -> [Bind n]
bindsOfPat pp
 = case pp of
        PDefault -> []
        PData _ bs -> bs
-- Casts ----------------------------------------------------------------------
-- | Wrap an expression in the given number of 'run' casts.
--   NOTE(review): assumes the count is non-negative -- a negative count
--   would recurse forever; confirm callers never pass one.
makeRuns :: a -> Int -> Exp a n -> Exp a n
makeRuns _a 0 x = x
makeRuns a n x = XCast a CastRun (makeRuns a (n - 1) x)
-- Witnesses ------------------------------------------------------------------
-- | Construct a witness application.
wApp :: a -> Witness a n -> Witness a n -> Witness a n
wApp = WApp
-- | Construct a sequence of witness applications.
--   The first witness in the list is applied first (innermost).
wApps :: a -> Witness a n -> [Witness a n] -> Witness a n
wApps a = foldl (wApp a)
-- | Take the annotation from a witness.
--   Every constructor carries its annotation in the first field.
annotOfWitness :: Witness a n -> a
annotOfWitness ww
 = case ww of
        WVar a _ -> a
        WCon a _ -> a
        WApp a _ _ -> a
        WType a _ -> a
-- | Flatten an application into the function parts and arguments, if any.
--   The head witness is the first element of the result list, followed by
--   its arguments in left-to-right order.
--
--   Uses an accumulator so flattening is linear in the number of arguments,
--   rather than the quadratic repeated-append of the naive version.
takeWAppsAsList :: Witness a n -> [Witness a n]
takeWAppsAsList ww
 = go ww []
 where  go (WApp _ w1 w2) acc   = go w1 (w2 : acc)
        go w              acc   = w : acc
-- | Flatten an application of a witness into the witness constructor
--   name and its arguments.
--
--   Returns nothing if there is no witness constructor in head position,
--   or if the constructor in head position is not a named (bound) one.
takePrimWiConApps :: Witness a n -> Maybe (n, [Witness a n])
takePrimWiConApps ww
 = case takeWAppsAsList ww of
        WCon _ wc : args | WiConBound (UName n) _ <- wc
          -> Just (n, args)
        _ -> Nothing
-- Units -----------------------------------------------------------------------
-- | Construct a value of unit type.
xUnit :: a -> Exp a n
xUnit a = XCon a dcUnit
-- Bound Variables -------------------------------------------------------------
-- | Pull a variable out of an expression, if the expression is exactly
--   a variable occurrence.
takeBoundOfExp :: Exp a n -> Maybe (Bound n)
takeBoundOfExp xx
 = case xx of
        -- Should this look through casts?
        XVar _ b -> Just b
        _ -> Nothing
-- | Extract user variable name out of an expression, if the expression
--   is a variable with a named (rather than anonymous) bound.
takeNameOfExp :: Exp a n -> Maybe n
takeNameOfExp xx
 = takeBoundOfExp xx >>= takeNameOfBound
| null | https://raw.githubusercontent.com/discus-lang/ddc/2baa1b4e2d43b6b02135257677671a83cb7384ac/src/s1/ddc-core/DDC/Core/Exp/Annot/Compounds.hs | haskell | # OPTIONS_HADDOCK hide #
| Utilities for constructing and destructing compound expressions.
* Annotations
* Lambdas
** Parameters
* Applications
** Arguments
* Lets
* Alternatives
* Patterns
* Casts
* Witnesses
* Data Constructors
* Bound Variables
Annotations ----------------------------------------------------------------
| Take the outermost annotation from an expression.
| Apply a function to the annotation of an expression.
Lambdas ---------------------------------------------------------------------
| Make some nested type lambdas.
| Make some nested value or witness lambdas.
| Split type lambdas from the front of an expression,
or `Nothing` if there aren't any.
| Split nested value or witness lambdas from the front of an expression,
or `Nothing` if there aren't any.
| Make some nested lambda abstractions,
using a flag to indicate whether the lambda is a
level-1 (True), or level-0 (False) binder.
| Split nested lambdas from the front of an expression,
with a flag indicating whether the lambda was a level-1 (True),
or level-0 (False) binder.
Parameters -----------------------------------------------------------------
| Take the binder of a parameter.
| Take the type of a parameter.
| Replace the type of a parameter.
| Parameters of a function.
| Take the parameters of a function.
| Given the type of an abstraction,
determine how the abstraction parameterises its body.
| Construct a function type from a list of parameter types and the
return type.
Applications ---------------------------------------------------------------
| Build sequence of value applications.
| Build sequence of applications.
| Flatten an application into the function part and its arguments.
Returns `Nothing` if there is no outer application.
| Flatten an application into the function part and its arguments, if any.
| Flatten an application into the function part and its arguments.
| Destruct sequence of applications.
Similar to `takeXAppsAsList` but also keeps annotations for later.
| Flatten an application of an ambient primitive into the primitive name
and its arguments.
Returns `Nothing` if the expression isn't a primitive application.
| Flatten an application of a named function into the variable
and its arguments.
Returns `Nothing` if the expression isn't such an application.
| Flatten an application of a data constructor into the constructor
and its arguments.
Returns `Nothing` if the expression isn't a constructor application.
Arguments ------------------------------------------------------------------
| Take the type of a type argument, if it is one.
| Take a witness from an argument, if it is one.
| Take a witness from an argument, if it is one.
| Take a witness from an argument, if it is one.
| Take the expression from a `RTerm` or `RImplicit argument.
| Take any expression arguments
Lets -----------------------------------------------------------------------
| Wrap some let-bindings around an expression.
| Wrap some let-bindings around an expression, with individual annotations.
| Split let-bindings from the front of an expression, if any.
| Split let-bindings from the front of an expression, with annotations.
| Take the binds of a `Lets`.
The level-1 and level-0 binders are returned separately.
| Like `bindsOfLets` but only take the spec (level-1) binders.
| Like `bindsOfLets` but only take the value and witness (level-0) binders.
Alternatives ---------------------------------------------------------------
| Take the pattern of an alternative.
| Take the constructor name of an alternative, if there is one.
Patterns -------------------------------------------------------------------
| Take the binds of a `Pat`.
Casts ----------------------------------------------------------------------
| Wrap an expression in the given number of 'run' casts.
Witnesses ------------------------------------------------------------------
| Construct a witness application
| Construct a sequence of witness applications
| Take the annotation from a witness.
| Flatten an application into the function parts and arguments, if any.
| Flatten an application of a witness into the witness constructor
name and its arguments.
Returns nothing if there is no witness constructor in head position.
Units -----------------------------------------------------------------------
| Construct a value of unit type.
Bound Variables -------------------------------------------------------------
| Pull a variable out of an expression
Should this look through casts?
| Extract user variable out of an expression | For the annotated version of the AST .
module DDC.Core.Exp.Annot.Compounds
( module DDC.Type.Exp.Simple.Compounds
, annotOfExp
, mapAnnotOfExp
, xLAMs
, xLams
, makeXLamFlags
, takeXLAMs
, takeXLams
, takeXLamFlags
, bindOfParam
, typeOfParam
, replaceTypeOfParam
, ParamTVB(..)
, takeXLamParamTVB
, splitParamOfType
, makeTFunParams
, xApps
, makeXAppsWithAnnots
, takeXApps
, takeXApps1
, takeXAppsAsList
, takeXAppsWithAnnots
, takeXConApps
, takeXPrimApps
, takeXNameApps
, takeRType
, takeRTerm
, takeRWitness
, takeRImplicit
, takeExpFromArg
, takeExpsFromArgs
, xLets, xLetsAnnot
, splitXLets, splitXLetsAnnot
, bindsOfLets
, specBindsOfLets
, valwitBindsOfLets
, patOfAlt
, takeDaConOfAlt
, bindsOfPat
, makeRuns
, wApp
, wApps
, annotOfWitness
, takeWAppsAsList
, takePrimWiConApps
, xUnit, dcUnit
, takeNameOfDaConPrim
, takeNameOfDaConBound
, takeBaseCtorNameOfDaCon
, takeBoundOfExp
, takeNameOfExp)
where
import DDC.Core.Exp.Annot.Exp
import DDC.Core.Exp.DaCon
import DDC.Type.Exp.Simple.Compounds
import Data.Maybe (catMaybes)
annotOfExp :: Exp a n -> a
annotOfExp xx
= case xx of
XVar a _ -> a
XAbs a _ _ -> a
XApp a _ _ -> a
XLet a _ _ -> a
XAtom a _ -> a
XCase a _ _ -> a
XCast a _ _ -> a
mapAnnotOfExp :: (a -> a) -> Exp a n -> Exp a n
mapAnnotOfExp f xx
= case xx of
XVar a u -> XVar (f a) u
XAbs a b x -> XAbs (f a) b x
XApp a x1 x2 -> XApp (f a) x1 x2
XLet a lt x -> XLet (f a) lt x
XAtom a t -> XAtom (f a) t
XCase a x as -> XCase (f a) x as
XCast a c x -> XCast (f a) c x
xLAMs :: a -> [Bind n] -> Exp a n -> Exp a n
xLAMs a bs x
= foldr (XLAM a) x bs
xLams :: a -> [Bind n] -> Exp a n -> Exp a n
xLams a bs x
= foldr (XLam a) x bs
takeXLAMs :: Exp a n -> Maybe ([Bind n], Exp a n)
takeXLAMs xx
= let go bs (XLAM _ b x) = go (b:bs) x
go bs x = (reverse bs, x)
in case go [] xx of
([], _) -> Nothing
(bs, body) -> Just (bs, body)
takeXLams :: Exp a n -> Maybe ([Bind n], Exp a n)
takeXLams xx
= let go bs (XLam _ b x) = go (b:bs) x
go bs x = (reverse bs, x)
in case go [] xx of
([], _) -> Nothing
(bs, body) -> Just (bs, body)
makeXLamFlags :: a -> [(Bool, Bind n)] -> Exp a n -> Exp a n
makeXLamFlags a fbs x
= foldr (\(f, b) x'
-> if f then XLAM a b x'
else XLam a b x')
x fbs
takeXLamFlags :: Exp a n -> Maybe ([(Bool, Bind n)], Exp a n)
takeXLamFlags xx
= let go bs (XLAM _ b x) = go ((True, b):bs) x
go bs (XLam _ b x) = go ((False, b):bs) x
go bs x = (reverse bs, x)
in case go [] xx of
([], _) -> Nothing
(bs, body) -> Just (bs, body)
bindOfParam :: Param n -> Bind n
bindOfParam mm
= case mm of
MType b -> b
MTerm b -> b
MImplicit b -> b
typeOfParam :: Param n -> Type n
typeOfParam mm
= typeOfBind $ bindOfParam mm
replaceTypeOfParam :: Type n -> Param n -> Param n
replaceTypeOfParam t mm
= case mm of
MType b -> MType $ replaceTypeOfBind t b
MTerm b -> MTerm $ replaceTypeOfBind t b
MImplicit b -> MImplicit $ replaceTypeOfBind t b
data ParamTVB n
= ParamType (Bind n)
| ParamValue (Bind n)
| ParamBox
deriving Show
takeXLamParamTVB :: Exp a n -> Maybe ([ParamTVB n], Exp a n)
takeXLamParamTVB xx
= let go bs (XLAM _ b x) = go (ParamType b : bs) x
go bs (XLam _ b x) = go (ParamValue b : bs) x
go bs (XCast _ CastBox x) = go (ParamBox : bs) x
go bs x = (reverse bs, x)
in case go [] xx of
([], _) -> Nothing
(bs, body) -> Just (bs, body)
splitParamOfType
:: Type n
-> Maybe (ParamSort, ParamMode, Bind n, Type n)
splitParamOfType tt
= case tt of
TApp (TApp (TCon tycon) t1) t2
-> case tycon of
TyConSpec TcConFunExplicit
-> Just (ParamSortTerm, ParamModeExplicit, BNone t1, t2)
TyConSpec TcConFunImplicit
-> Just (ParamSortTerm, ParamModeImplicit, BNone t1, t2)
TyConWitness TwConImpl
-> Just (ParamSortWitness, ParamModeExplicit, BNone t1, t2)
_ -> Nothing
TForall b t2
-> Just (ParamSortType, ParamModeElaborate, b, t2)
_ -> Nothing
makeTFunParams :: [Param n] -> Type n -> Type n
makeTFunParams msParam tResult
= tFuns' msParam
where
tFuns' []
= tResult
tFuns' (m : ms)
= case m of
MType b
-> TForall b (tFuns' ms)
MTerm b
-> (TCon $ TyConSpec TcConFunExplicit)
`tApps` [typeOfBind b, tFuns' ms]
MImplicit b
-> (TCon $ TyConSpec TcConFunImplicit)
`tApps` [typeOfBind b, tFuns' ms]
xApps :: a -> Exp a n -> [Arg a n] -> Exp a n
xApps a t1 ts = foldl (XApp a) t1 ts
Similar to ` xApps ` but also takes list of annotations for
the ` XApp ` constructors .
makeXAppsWithAnnots :: Exp a n -> [(Arg a n, a)] -> Exp a n
makeXAppsWithAnnots f xas
= case xas of
[] -> f
(arg, a) : as -> makeXAppsWithAnnots (XApp a f arg) as
takeXApps :: Exp a n -> Maybe (Exp a n, [Arg a n])
takeXApps xx
= case takeXAppsAsList xx of
(_, []) -> Nothing
(x1, as) -> Just (x1, as)
takeXAppsAsList :: Exp a n -> (Exp a n, [Arg a n])
takeXAppsAsList xx
= case xx of
XApp _ x1 x2
-> let (f', args') = takeXAppsAsList x1
in (f', args' ++ [x2])
_ -> (xx, [])
This is like ` takeXApps ` above , except we know there is at least one argument .
takeXApps1 :: Exp a n -> Arg a n -> (Exp a n, [Arg a n])
takeXApps1 x1 x2
= case takeXApps x1 of
Nothing -> (x1, [x2])
Just (x11, x12s) -> (x11, x12s ++ [x2])
takeXAppsWithAnnots :: Exp a n -> (Exp a n, [(Arg a n, a)])
takeXAppsWithAnnots xx
= case xx of
XApp a f arg
-> let (f', args') = takeXAppsWithAnnots f
in (f', args' ++ [(arg,a)])
_ -> (xx, [])
takeXPrimApps :: Exp a n -> Maybe (Prim, [Arg a n])
takeXPrimApps xx
= case takeXApps xx of
Just (XPrim _ p, as) -> Just (p, as)
_ -> Nothing
takeXNameApps :: Exp a n -> Maybe (n, [Arg a n])
takeXNameApps xx
= case takeXApps xx of
Just (XVar _ (UName p), as) -> Just (p, as)
_ -> Nothing
takeXConApps :: Exp a n -> Maybe (DaCon n (Type n), [Arg a n])
takeXConApps xx
= case takeXApps xx of
Just (XCon _ dc, as) -> Just (dc, as)
_ -> Nothing
takeRType :: Arg a n -> Maybe (Type n)
takeRType aa
= case aa of
RType t -> Just t
_ -> Nothing
takeRWitness :: Arg a n -> Maybe (Witness a n)
takeRWitness aa
= case aa of
RWitness w -> Just w
_ -> Nothing
takeRTerm :: Arg a n -> Maybe (Exp a n)
takeRTerm aa
= case aa of
RTerm x -> Just x
_ -> Nothing
takeRImplicit :: Arg a n -> Maybe (Arg a n)
takeRImplicit aa
= case aa of
RImplicit x -> Just x
_ -> Nothing
takeExpFromArg :: Arg a n -> Maybe (Exp a n)
takeExpFromArg aa
= case aa of
RTerm x -> Just x
RImplicit a -> takeExpFromArg a
_ -> Nothing
takeExpsFromArgs :: [Arg a n] -> [Exp a n]
takeExpsFromArgs
= catMaybes . map takeExpFromArg
xLets :: a -> [Lets a n] -> Exp a n -> Exp a n
xLets a lts x
= foldr (XLet a) x lts
xLetsAnnot :: [(Lets a n, a)] -> Exp a n -> Exp a n
xLetsAnnot lts x
= foldr (\(l, a) x' -> XLet a l x') x lts
splitXLets :: Exp a n -> ([Lets a n], Exp a n)
splitXLets xx
= case xx of
XLet _ lts x
-> let (lts', x') = splitXLets x
in (lts : lts', x')
_ -> ([], xx)
splitXLetsAnnot :: Exp a n -> ([(Lets a n, a)], Exp a n)
splitXLetsAnnot xx
= case xx of
XLet a lts x
-> let (lts', x') = splitXLetsAnnot x
in ((lts, a) : lts', x')
_ -> ([], xx)
bindsOfLets :: Lets a n -> ([Bind n], [Bind n])
bindsOfLets ll
= case ll of
LLet b _ -> ([], [b])
LRec bxs -> ([], map fst bxs)
LPrivate bs _ bbs -> (bs, bbs)
specBindsOfLets :: Lets a n -> [Bind n]
specBindsOfLets ll
= case ll of
LLet _ _ -> []
LRec _ -> []
LPrivate bs _ _ -> bs
valwitBindsOfLets :: Lets a n -> [Bind n]
valwitBindsOfLets ll
= case ll of
LLet b _ -> [b]
LRec bxs -> map fst bxs
LPrivate _ _ bs -> bs
patOfAlt :: Alt a n -> Pat n
patOfAlt (AAlt pat _) = pat
takeDaConOfAlt :: Alt a n -> Maybe (DaCon n (Type n))
takeDaConOfAlt aa
= case aa of
AAlt (PData dc _) _ -> Just dc
_ -> Nothing
bindsOfPat :: Pat n -> [Bind n]
bindsOfPat pp
= case pp of
PDefault -> []
PData _ bs -> bs
makeRuns :: a -> Int -> Exp a n -> Exp a n
makeRuns _a 0 x = x
makeRuns a n x = XCast a CastRun (makeRuns a (n - 1) x)
wApp :: a -> Witness a n -> Witness a n -> Witness a n
wApp = WApp
wApps :: a -> Witness a n -> [Witness a n] -> Witness a n
wApps a = foldl (wApp a)
annotOfWitness :: Witness a n -> a
annotOfWitness ww
= case ww of
WVar a _ -> a
WCon a _ -> a
WApp a _ _ -> a
WType a _ -> a
takeWAppsAsList :: Witness a n -> [Witness a n]
takeWAppsAsList ww
= case ww of
WApp _ w1 w2 -> takeWAppsAsList w1 ++ [w2]
_ -> [ww]
takePrimWiConApps :: Witness a n -> Maybe (n, [Witness a n])
takePrimWiConApps ww
= case takeWAppsAsList ww of
WCon _ wc : args | WiConBound (UName n) _ <- wc
-> Just (n, args)
_ -> Nothing
xUnit :: a -> Exp a n
xUnit a = XCon a dcUnit
takeBoundOfExp :: Exp a n -> Maybe (Bound n)
takeBoundOfExp xx
= case xx of
XVar _ b -> Just b
_ -> Nothing
takeNameOfExp :: Exp a n -> Maybe n
takeNameOfExp xx
= takeBoundOfExp xx >>= takeNameOfBound
|
a4d4c95b0dec23ef81285d9dd25fd4cc0305cbddc5912e4f2a3ec141bbf0d4c9 | Incanus3/ExiL | generic-node.lisp | (in-package :exil-rete)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; RETE is the algorithm that matches facts of the knowledge base of an
;; expert system against conditions of its productions (inferences rules).
;; Without this algorithm, matching of each rule against each set of facts
;; would have unfeasible computational complexity even for medium-sized expert
;; systems.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; this is needed in order to compare lists of exil objects
;; all these lists should be sets according to exil-equal-p, as they're all
;; updated using pushnew :test #'exil-equal-p, or similar
(defmethod exil-equal-p ((list1 list) (list2 list))
  ;; NOTE(review): this checks only that every element of list1 occurs in
  ;; list2 (a subset test), not full set equality -- presumably the lists
  ;; compared here always have matching sizes, or the test is applied in
  ;; both directions; confirm before relying on it as set equality.
  (every (lambda (object) (member object list2 :test #'exil-equal-p)) list1))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; basic RETE network node: knows only its children; (in)activations are
;; propagated downwards through the network via activate-children /
;; inactivate-children
(defclass node () ((children :accessor children :initform ())))
(defgeneric add-child (node child))
;; for top node, called by add-wme, for others, called by their parents
(defgeneric activate (node object)
  (:documentation "handles various node activations"))
(defgeneric activate-children (node object))
;; for top node, called by remove-wme for others by their parents
(defgeneric inactivate (node object))
(defgeneric inactivate-children (node object))
;; adds child unless already present; returns the node for chaining
(defmethod add-child ((node node) (child node))
  (pushnew child (children node))
  node)
;; propagate an activation to every child
(defmethod activate-children ((node node) object)
  (dolist (child (children node))
    (activate child object)))
;; propagate an inactivation to every child
(defmethod inactivate-children ((node node) object)
  (dolist (child (children node))
    (inactivate child object)))
;; default inactivation behaviour: just pass the object on; subclasses
;; that store state override this to also clean up their memory
(defmethod inactivate ((node node) object)
  (inactivate-children node object))
;; every node that stores some info is subclass (usualy indirect) of memory-node
;; every node that stores some info is a subclass (usually indirect) of
;; memory-node; stored items are kept as a set under the supplied test
(defclass memory-node (node) ((items :accessor items :initform ())))
(defgeneric add-item (memory-node item &optional test)
  (:documentation "adds new item to node's memory"))
(defgeneric ext-add-item (memory-node item &optional test)
  (:documentation "adds new item to node's memory, as a second value returns
true if the item wasn't there yet"))
(defgeneric delete-item (memory-node item &optional test)
  (:documentation "removes item from node's memory"))
;; silently keeps memory unchanged when an equal item is already stored
(defmethod add-item ((node memory-node) item &optional (test #'exil-equal-p))
  (pushnew item (items node) :test test))
(defmethod ext-add-item ((node memory-node) item &optional (test #'exil-equal-p))
  "pushes item to node's items, if it isn't already there, returns true if items were altered"
  ;; ext-pushnew returns (values new-list altered-p); expose only the flag
  (nth-value 1 (ext-pushnew item (items node) :test test)))
;; destructively removes all items equal to item under test
(defmethod delete-item ((node memory-node) item &optional (test #'exil-equal-p))
  (setf (items node) (delete item (items node) :test test)))
;; DEBUG:
;; when true, every node (in)activation is traced to standard output;
;; implemented as :before methods so tracing never alters node behaviour
(defvar *debug-rete* nil)
(defmethod activate :before (node object)
  (when *debug-rete*
    (format t "~%~a~% activated by ~a" node object)))
(defmethod inactivate :before (node object)
  (when *debug-rete*
    (format t "~%~a~% inactivated by ~a" node object)))
| null | https://raw.githubusercontent.com/Incanus3/ExiL/de0f7c37538cecb7032cc1f2aa070524b0bc048d/src/rete/rete-nodes/generic-node.lisp | lisp |
RETE is the algorithm that matches facts of the knowledge base of an
expert system against conditions of its productions (inferences rules).
Without this algorithm, matching of each rule against each set of facts
would have unfeasible computational complexity even for medium-sized expert
systems.
this is needed in order to compare lists of exil objects
all these lists should be sets according to exil-equal-p, as they're all
for top node, called by add-wme, for others, called by their parents
for top node, called by remove-wme for others by their parents
every node that stores some info is subclass (usualy indirect) of memory-node
DEBUG: | (in-package :exil-rete)
updated using pusnew : test # ' exil - equal - p , or similar
(defmethod exil-equal-p ((list1 list) (list2 list))
(every (lambda (object) (member object list2 :test #'exil-equal-p)) list1))
(defclass node () ((children :accessor children :initform ())))
(defgeneric add-child (node child))
(defgeneric activate (node object)
(:documentation "handles various node activations"))
(defgeneric activate-children (node object))
(defgeneric inactivate (node object))
(defgeneric inactivate-children (node object))
(defmethod add-child ((node node) (child node))
(pushnew child (children node))
node)
(defmethod activate-children ((node node) object)
(dolist (child (children node))
(activate child object)))
(defmethod inactivate-children ((node node) object)
(dolist (child (children node))
(inactivate child object)))
(defmethod inactivate ((node node) object)
(inactivate-children node object))
(defclass memory-node (node) ((items :accessor items :initform ())))
(defgeneric add-item (memory-node item &optional test)
(:documentation "adds new item to node's memory"))
(defgeneric ext-add-item (memory-node item &optional test)
(:documentation "adds new item to node's memory, as a second value returns
true if the item wasn't there yet"))
(defgeneric delete-item (memory-node item &optional test)
(:documentation "removes item from node's memory"))
(defmethod add-item ((node memory-node) item &optional (test #'exil-equal-p))
(pushnew item (items node) :test test))
(defmethod ext-add-item ((node memory-node) item &optional (test #'exil-equal-p))
"pushes item to node's items, if it isn't already there, returns true if items were altered"
(nth-value 1 (ext-pushnew item (items node) :test test)))
(defmethod delete-item ((node memory-node) item &optional (test #'exil-equal-p))
(setf (items node) (delete item (items node) :test test)))
(defvar *debug-rete* nil)
(defmethod activate :before (node object)
(when *debug-rete*
(format t "~%~a~% activated by ~a" node object)))
(defmethod inactivate :before (node object)
(when *debug-rete*
(format t "~%~a~% inactivated by ~a" node object)))
|
9a9766485758a334c62933cae340b0d8b208584459b03edf521d22986795a4b5 | ztellman/sleight | project.clj | (defproject sleight "0.2.2"
:description "whole-program transformations for clojure"
:dependencies []
:profiles {:dev {:dependencies [[org.clojure/clojure "1.9.0-alpha15"]
[riddley "0.1.12"]]}}
:plugins [[lein-sleight "0.2.2"]]
:sleight {:default {:transforms [sleight.transform-test/make-odd]}
:identity {:transforms []}}
:test-selectors {:make-odd :make-odd
:default (complement :make-odd)})
| null | https://raw.githubusercontent.com/ztellman/sleight/788d7c11164713cc393058cc8fc5f6ec9c441d7f/project.clj | clojure | (defproject sleight "0.2.2"
:description "whole-program transformations for clojure"
:dependencies []
:profiles {:dev {:dependencies [[org.clojure/clojure "1.9.0-alpha15"]
[riddley "0.1.12"]]}}
:plugins [[lein-sleight "0.2.2"]]
:sleight {:default {:transforms [sleight.transform-test/make-odd]}
:identity {:transforms []}}
:test-selectors {:make-odd :make-odd
:default (complement :make-odd)})
| |
5ca08cba65f0af8fa63404e129c3117a955dc96265671654d12c4c09b0d02cd4 | haskellari/edit-distance | STUArray.hs | # LANGUAGE PatternGuards , ScopedTypeVariables , BangPatterns , FlexibleContexts #
module Text.EditDistance.STUArray (
levenshteinDistance, levenshteinDistanceWithLengths, restrictedDamerauLevenshteinDistance, restrictedDamerauLevenshteinDistanceWithLengths
) where
import Text.EditDistance.EditCosts
import Text.EditDistance.MonadUtilities
import Text.EditDistance.ArrayUtilities
import Control.Monad hiding (foldM)
import Control.Monad.ST
import Data.Array.ST
levenshteinDistance :: EditCosts -> String -> String -> Int
levenshteinDistance !costs str1 str2 = levenshteinDistanceWithLengths costs str1_len str2_len str1 str2
where
str1_len = length str1
str2_len = length str2
levenshteinDistanceWithLengths :: EditCosts -> Int -> Int -> String -> String -> Int
levenshteinDistanceWithLengths !costs !str1_len !str2_len str1 str2 = runST (levenshteinDistanceST costs str1_len str2_len str1 str2)
levenshteinDistanceST :: EditCosts -> Int -> Int -> String -> String -> ST s Int
levenshteinDistanceST !costs !str1_len !str2_len str1 str2 = do
-- Create string arrays
str1_array <- stringToArray str1 str1_len
str2_array <- stringToArray str2 str2_len
-- Create array of costs for a single row. Say we index costs by (i, j) where i is the column index and j the row index.
Rows correspond to characters of str2 and columns to characters of str1 . We can get away with just storing a single
row of costs at a time , but we use two because it turns out to be faster
start_cost_row <- newArray_ (0, str1_len) :: ST s (STUArray s Int Int)
start_cost_row' <- newArray_ (0, str1_len) :: ST s (STUArray s Int Int)
read_str1 <- unsafeReadArray' str1_array
read_str2 <- unsafeReadArray' str2_array
Fill out the first row ( j = 0 )
_ <- (\f -> foldM f (1, 0) str1) $ \(i, deletion_cost) col_char -> let deletion_cost' = deletion_cost + deletionCost costs col_char in unsafeWriteArray start_cost_row i deletion_cost' >> return (i + 1, deletion_cost')
Fill out the remaining rows ( j > = 1 )
(_, final_row, _) <- (\f -> foldM f (0, start_cost_row, start_cost_row') [1..str2_len]) $ \(!insertion_cost, !cost_row, !cost_row') !j -> do
row_char <- read_str2 j
Initialize the first element of the row ( i = 0 )
let insertion_cost' = insertion_cost + insertionCost costs row_char
unsafeWriteArray cost_row' 0 insertion_cost'
Fill the remaining elements of the row ( i > = 1 )
loopM_ 1 str1_len $ \(!i) -> do
col_char <- read_str1 i
left_up <- unsafeReadArray cost_row (i - 1)
left <- unsafeReadArray cost_row' (i - 1)
here_up <- unsafeReadArray cost_row i
let here = standardCosts costs row_char col_char left left_up here_up
unsafeWriteArray cost_row' i here
return (insertion_cost', cost_row', cost_row)
-- Return an actual answer
unsafeReadArray final_row str1_len
restrictedDamerauLevenshteinDistance :: EditCosts -> String -> String -> Int
restrictedDamerauLevenshteinDistance !costs str1 str2 = restrictedDamerauLevenshteinDistanceWithLengths costs str1_len str2_len str1 str2
where
str1_len = length str1
str2_len = length str2
restrictedDamerauLevenshteinDistanceWithLengths :: EditCosts -> Int -> Int -> String -> String -> Int
restrictedDamerauLevenshteinDistanceWithLengths !costs !str1_len !str2_len str1 str2 = runST (restrictedDamerauLevenshteinDistanceST costs str1_len str2_len str1 str2)
restrictedDamerauLevenshteinDistanceST :: EditCosts -> Int -> Int -> String -> String -> ST s Int
restrictedDamerauLevenshteinDistanceST !costs str1_len str2_len str1 str2 = do
-- Create string arrays
str1_array <- stringToArray str1 str1_len
str2_array <- stringToArray str2 str2_len
-- Create array of costs for a single row. Say we index costs by (i, j) where i is the column index and j the row index.
Rows correspond to characters of str2 and columns to characters of str1 . We can get away with just storing two
rows of costs at a time , but I use three because it turns out to be faster
cost_row <- newArray_ (0, str1_len) :: ST s (STUArray s Int Int)
read_str1 <- unsafeReadArray' str1_array
read_str2 <- unsafeReadArray' str2_array
Fill out the first row ( j = 0 )
_ <- (\f -> foldM f (1, 0) str1) $ \(i, deletion_cost) col_char -> let deletion_cost' = deletion_cost + deletionCost costs col_char in unsafeWriteArray cost_row i deletion_cost' >> return (i + 1, deletion_cost')
if (str2_len == 0)
then unsafeReadArray cost_row str1_len
else do
-- We defer allocation of these arrays to here because they aren't used in the other branch
cost_row' <- newArray_ (0, str1_len) :: ST s (STUArray s Int Int)
cost_row'' <- newArray_ (0, str1_len) :: ST s (STUArray s Int Int)
Fill out the second row ( j = 1 )
row_char <- read_str2 1
Initialize the first element of the row ( i = 0 )
let zero = insertionCost costs row_char
unsafeWriteArray cost_row' 0 zero
Fill the remaining elements of the row ( i > = 1 )
loopM_ 1 str1_len (firstRowColWorker read_str1 row_char cost_row cost_row')
Fill out the remaining rows ( j > = 2 )
(_, _, final_row, _, _) <- foldM (restrictedDamerauLevenshteinDistanceSTRowWorker costs str1_len read_str1 read_str2) (zero, cost_row, cost_row', cost_row'', row_char) [2..str2_len]
-- Return an actual answer
unsafeReadArray final_row str1_len
where
# INLINE firstRowColWorker #
firstRowColWorker read_str1 !row_char !cost_row !cost_row' !i = do
col_char <- read_str1 i
left_up <- unsafeReadArray cost_row (i - 1)
left <- unsafeReadArray cost_row' (i - 1)
here_up <- unsafeReadArray cost_row i
let here = standardCosts costs row_char col_char left left_up here_up
unsafeWriteArray cost_row' i here
# INLINE restrictedDamerauLevenshteinDistanceSTRowWorker #
restrictedDamerauLevenshteinDistanceSTRowWorker :: EditCosts -> Int
-> (Int -> ST s Char) -> (Int -> ST s Char) -- String array accessors
-> (Int, STUArray s Int Int, STUArray s Int Int, STUArray s Int Int, Char) -> Int -- Incoming rows of the matrix in recency order
-> ST s (Int, STUArray s Int Int, STUArray s Int Int, STUArray s Int Int, Char) -- Outgoing rows of the matrix in recency order
restrictedDamerauLevenshteinDistanceSTRowWorker !costs !str1_len read_str1 read_str2 (!insertion_cost, !cost_row, !cost_row', !cost_row'', !prev_row_char) !j = do
row_char <- read_str2 j
Initialize the first element of the row ( i = 0 )
zero_up <- unsafeReadArray cost_row' 0
let insertion_cost' = insertion_cost + insertionCost costs row_char
unsafeWriteArray cost_row'' 0 insertion_cost'
Initialize the second element of the row ( i = 1 )
when (str1_len > 0) $ do
col_char <- read_str1 1
one_up <- unsafeReadArray cost_row' 1
let one = standardCosts costs row_char col_char insertion_cost' zero_up one_up
unsafeWriteArray cost_row'' 1 one
Fill the remaining elements of the row ( i > = 2 )
loopM_ 2 str1_len (colWorker row_char)
return (insertion_cost', cost_row', cost_row'', cost_row, row_char)
where
colWorker !row_char !i = do
prev_col_char <- read_str1 (i - 1)
col_char <- read_str1 i
left_left_up_up <- unsafeReadArray cost_row (i - 2)
left_up <- unsafeReadArray cost_row' (i - 1)
left <- unsafeReadArray cost_row'' (i - 1)
here_up <- unsafeReadArray cost_row' i
let here_standard_only = standardCosts costs row_char col_char left left_up here_up
here = if prev_row_char == col_char && prev_col_char == row_char
then here_standard_only `min` (left_left_up_up + transpositionCost costs col_char row_char)
else here_standard_only
unsafeWriteArray cost_row'' i here
# INLINE standardCosts #
standardCosts :: EditCosts -> Char -> Char -> Int -> Int -> Int -> Int
standardCosts !costs !row_char !col_char !cost_left !cost_left_up !cost_up = deletion_cost `min` insertion_cost `min` subst_cost
where
deletion_cost = cost_left + deletionCost costs col_char
insertion_cost = cost_up + insertionCost costs row_char
subst_cost = cost_left_up + if row_char == col_char then 0 else substitutionCost costs col_char row_char
| null | https://raw.githubusercontent.com/haskellari/edit-distance/5521afd4f4966a947499a16cfc7ce6d9e0a028ee/Text/EditDistance/STUArray.hs | haskell | Create string arrays
Create array of costs for a single row. Say we index costs by (i, j) where i is the column index and j the row index.
Return an actual answer
Create string arrays
Create array of costs for a single row. Say we index costs by (i, j) where i is the column index and j the row index.
We defer allocation of these arrays to here because they aren't used in the other branch
Return an actual answer
String array accessors
Incoming rows of the matrix in recency order
Outgoing rows of the matrix in recency order | # LANGUAGE PatternGuards , ScopedTypeVariables , BangPatterns , FlexibleContexts #
module Text.EditDistance.STUArray (
levenshteinDistance, levenshteinDistanceWithLengths, restrictedDamerauLevenshteinDistance, restrictedDamerauLevenshteinDistanceWithLengths
) where
import Text.EditDistance.EditCosts
import Text.EditDistance.MonadUtilities
import Text.EditDistance.ArrayUtilities
import Control.Monad hiding (foldM)
import Control.Monad.ST
import Data.Array.ST
levenshteinDistance :: EditCosts -> String -> String -> Int
levenshteinDistance !costs str1 str2 = levenshteinDistanceWithLengths costs str1_len str2_len str1 str2
where
str1_len = length str1
str2_len = length str2
levenshteinDistanceWithLengths :: EditCosts -> Int -> Int -> String -> String -> Int
levenshteinDistanceWithLengths !costs !str1_len !str2_len str1 str2 = runST (levenshteinDistanceST costs str1_len str2_len str1 str2)
levenshteinDistanceST :: EditCosts -> Int -> Int -> String -> String -> ST s Int
levenshteinDistanceST !costs !str1_len !str2_len str1 str2 = do
str1_array <- stringToArray str1 str1_len
str2_array <- stringToArray str2 str2_len
Rows correspond to characters of str2 and columns to characters of str1 . We can get away with just storing a single
row of costs at a time , but we use two because it turns out to be faster
start_cost_row <- newArray_ (0, str1_len) :: ST s (STUArray s Int Int)
start_cost_row' <- newArray_ (0, str1_len) :: ST s (STUArray s Int Int)
read_str1 <- unsafeReadArray' str1_array
read_str2 <- unsafeReadArray' str2_array
Fill out the first row ( j = 0 )
_ <- (\f -> foldM f (1, 0) str1) $ \(i, deletion_cost) col_char -> let deletion_cost' = deletion_cost + deletionCost costs col_char in unsafeWriteArray start_cost_row i deletion_cost' >> return (i + 1, deletion_cost')
Fill out the remaining rows ( j > = 1 )
(_, final_row, _) <- (\f -> foldM f (0, start_cost_row, start_cost_row') [1..str2_len]) $ \(!insertion_cost, !cost_row, !cost_row') !j -> do
row_char <- read_str2 j
Initialize the first element of the row ( i = 0 )
let insertion_cost' = insertion_cost + insertionCost costs row_char
unsafeWriteArray cost_row' 0 insertion_cost'
Fill the remaining elements of the row ( i > = 1 )
loopM_ 1 str1_len $ \(!i) -> do
col_char <- read_str1 i
left_up <- unsafeReadArray cost_row (i - 1)
left <- unsafeReadArray cost_row' (i - 1)
here_up <- unsafeReadArray cost_row i
let here = standardCosts costs row_char col_char left left_up here_up
unsafeWriteArray cost_row' i here
return (insertion_cost', cost_row', cost_row)
unsafeReadArray final_row str1_len
restrictedDamerauLevenshteinDistance :: EditCosts -> String -> String -> Int
restrictedDamerauLevenshteinDistance !costs str1 str2 = restrictedDamerauLevenshteinDistanceWithLengths costs str1_len str2_len str1 str2
where
str1_len = length str1
str2_len = length str2
restrictedDamerauLevenshteinDistanceWithLengths :: EditCosts -> Int -> Int -> String -> String -> Int
restrictedDamerauLevenshteinDistanceWithLengths !costs !str1_len !str2_len str1 str2 = runST (restrictedDamerauLevenshteinDistanceST costs str1_len str2_len str1 str2)
restrictedDamerauLevenshteinDistanceST :: EditCosts -> Int -> Int -> String -> String -> ST s Int
restrictedDamerauLevenshteinDistanceST !costs str1_len str2_len str1 str2 = do
str1_array <- stringToArray str1 str1_len
str2_array <- stringToArray str2 str2_len
Rows correspond to characters of str2 and columns to characters of str1 . We can get away with just storing two
rows of costs at a time , but I use three because it turns out to be faster
cost_row <- newArray_ (0, str1_len) :: ST s (STUArray s Int Int)
read_str1 <- unsafeReadArray' str1_array
read_str2 <- unsafeReadArray' str2_array
Fill out the first row ( j = 0 )
_ <- (\f -> foldM f (1, 0) str1) $ \(i, deletion_cost) col_char -> let deletion_cost' = deletion_cost + deletionCost costs col_char in unsafeWriteArray cost_row i deletion_cost' >> return (i + 1, deletion_cost')
if (str2_len == 0)
then unsafeReadArray cost_row str1_len
else do
cost_row' <- newArray_ (0, str1_len) :: ST s (STUArray s Int Int)
cost_row'' <- newArray_ (0, str1_len) :: ST s (STUArray s Int Int)
Fill out the second row ( j = 1 )
row_char <- read_str2 1
Initialize the first element of the row ( i = 0 )
let zero = insertionCost costs row_char
unsafeWriteArray cost_row' 0 zero
Fill the remaining elements of the row ( i > = 1 )
loopM_ 1 str1_len (firstRowColWorker read_str1 row_char cost_row cost_row')
Fill out the remaining rows ( j > = 2 )
(_, _, final_row, _, _) <- foldM (restrictedDamerauLevenshteinDistanceSTRowWorker costs str1_len read_str1 read_str2) (zero, cost_row, cost_row', cost_row'', row_char) [2..str2_len]
unsafeReadArray final_row str1_len
where
# INLINE firstRowColWorker #
firstRowColWorker read_str1 !row_char !cost_row !cost_row' !i = do
col_char <- read_str1 i
left_up <- unsafeReadArray cost_row (i - 1)
left <- unsafeReadArray cost_row' (i - 1)
here_up <- unsafeReadArray cost_row i
let here = standardCosts costs row_char col_char left left_up here_up
unsafeWriteArray cost_row' i here
# INLINE restrictedDamerauLevenshteinDistanceSTRowWorker #
restrictedDamerauLevenshteinDistanceSTRowWorker :: EditCosts -> Int
restrictedDamerauLevenshteinDistanceSTRowWorker !costs !str1_len read_str1 read_str2 (!insertion_cost, !cost_row, !cost_row', !cost_row'', !prev_row_char) !j = do
row_char <- read_str2 j
Initialize the first element of the row ( i = 0 )
zero_up <- unsafeReadArray cost_row' 0
let insertion_cost' = insertion_cost + insertionCost costs row_char
unsafeWriteArray cost_row'' 0 insertion_cost'
Initialize the second element of the row ( i = 1 )
when (str1_len > 0) $ do
col_char <- read_str1 1
one_up <- unsafeReadArray cost_row' 1
let one = standardCosts costs row_char col_char insertion_cost' zero_up one_up
unsafeWriteArray cost_row'' 1 one
Fill the remaining elements of the row ( i > = 2 )
loopM_ 2 str1_len (colWorker row_char)
return (insertion_cost', cost_row', cost_row'', cost_row, row_char)
where
colWorker !row_char !i = do
prev_col_char <- read_str1 (i - 1)
col_char <- read_str1 i
left_left_up_up <- unsafeReadArray cost_row (i - 2)
left_up <- unsafeReadArray cost_row' (i - 1)
left <- unsafeReadArray cost_row'' (i - 1)
here_up <- unsafeReadArray cost_row' i
let here_standard_only = standardCosts costs row_char col_char left left_up here_up
here = if prev_row_char == col_char && prev_col_char == row_char
then here_standard_only `min` (left_left_up_up + transpositionCost costs col_char row_char)
else here_standard_only
unsafeWriteArray cost_row'' i here
# INLINE standardCosts #
standardCosts :: EditCosts -> Char -> Char -> Int -> Int -> Int -> Int
standardCosts !costs !row_char !col_char !cost_left !cost_left_up !cost_up = deletion_cost `min` insertion_cost `min` subst_cost
where
deletion_cost = cost_left + deletionCost costs col_char
insertion_cost = cost_up + insertionCost costs row_char
subst_cost = cost_left_up + if row_char == col_char then 0 else substitutionCost costs col_char row_char
|
6fab48fab85d319d58fd5494d70e714af8537133058e799f55b87a41a107bdd6 | ocaml-multicore/kcas | tx_stack.ml | module Loc = Kcas.Loc
module Tx = Kcas.Tx
type 'a t = 'a list Loc.t
let create () = Loc.make []
let is_empty s = s |> Tx.get_as @@ ( == ) []
let push s x = Tx.modify s @@ List.cons x
let pop s =
Tx.update_as List.hd s @@ function [] -> raise Exit | _ :: xs -> xs
| null | https://raw.githubusercontent.com/ocaml-multicore/kcas/ecf1658e905cacae567ecd03a79011a32a9932e6/test/tx_stack.ml | ocaml | module Loc = Kcas.Loc
module Tx = Kcas.Tx
type 'a t = 'a list Loc.t
let create () = Loc.make []
let is_empty s = s |> Tx.get_as @@ ( == ) []
let push s x = Tx.modify s @@ List.cons x
let pop s =
Tx.update_as List.hd s @@ function [] -> raise Exit | _ :: xs -> xs
| |
065e6fd1d9ddfcca923f62f15189cb7faa4ea983d72d091867dcd51aeb709156 | valderman/aoc21 | Dec8.hs | # LANGUAGE FlexibleInstances #
import Common
import Data.Function (on)
import Data.List (sort, sortBy, (\\), intersect)
import Data.Maybe (fromJust)
data Configuration = Configuration
{ patterns :: [String]
, output :: [String]
}
instance Input Configuration where
readInput str = Configuration
{ patterns = map sort $ words pat
, output = map sort $ words out
}
where
(pat, '|':out) = break (== '|') str
instance Input [Configuration] where
readInput = map readInput . lines
isUnique :: String -> Bool
isUnique = flip elem [2, 3, 4, 7] . length
mapConnections :: Configuration -> [(Char, Char)]
mapConnections (Configuration ps _) =
[ (a, 'a')
, (b, 'b')
, (c, 'c')
, (d, 'd')
, (e, 'e')
, (f, 'f')
, (g, 'g')
]
where
-- Lord, forgive me, for I don't know what I'm doing
nonUnique = filter (not . isUnique) ps
[one, seven, four, eight] = sortBy (compare `on` length) $ filter isUnique ps
three = single $ filter ((== 2) . length . (\\ seven)) nonUnique
bottomRightVertical = single $ filter (\x -> length x == 1 && not (x `elem` [[e],[d]])) $ map (eight \\) nonUnique
a = single $ seven \\ four
b = single $ four \\ (seven ++ three)
c = single $ one `intersect` bottomRightVertical
d = single $ (intersect three four) \\ one
e = single $ eight \\ (three ++ four)
f = single $ one \\ bottomRightVertical
g = single $ three \\ (seven ++ four)
decode :: Configuration -> [(Char, Char)] -> Configuration
decode (Configuration ps out) connections = Configuration ps [ sort $ map (fromJust . flip lookup connections) signal | signal <- out]
outputNumber :: Configuration -> Int
outputNumber (Configuration _ out) = read $ map lookupDigit out
lookupDigit :: String -> Char
lookupDigit s = maybe (error $ "digit not found: " ++ s) id $ lookup s digitMap
digitMap :: [(String, Char)]
digitMap =
[ ("abcefg", '0')
, ("cf", '1')
, ("acdeg", '2')
, ("acdfg", '3')
, ("bcdf", '4')
, ("abdfg", '5')
, ("abdefg", '6')
, ("acf", '7')
, ("abcdefg", '8')
, ("abcdfg", '9')
]
solve :: Configuration -> Int
solve c = outputNumber $ decode c (mapConnections c)
part1 :: [Configuration] -> String
part1 = show . length . concatMap (filter isUnique . output)
part2 :: [Configuration] -> String
part2 = show . sum . map solve
main :: IO ()
main = both part1 part2
| null | https://raw.githubusercontent.com/valderman/aoc21/84528b4358c14087adf080fff000128c81753a18/Dec8.hs | haskell | Lord, forgive me, for I don't know what I'm doing | # LANGUAGE FlexibleInstances #
import Common
import Data.Function (on)
import Data.List (sort, sortBy, (\\), intersect)
import Data.Maybe (fromJust)
data Configuration = Configuration
{ patterns :: [String]
, output :: [String]
}
instance Input Configuration where
readInput str = Configuration
{ patterns = map sort $ words pat
, output = map sort $ words out
}
where
(pat, '|':out) = break (== '|') str
instance Input [Configuration] where
readInput = map readInput . lines
isUnique :: String -> Bool
isUnique = flip elem [2, 3, 4, 7] . length
mapConnections :: Configuration -> [(Char, Char)]
mapConnections (Configuration ps _) =
[ (a, 'a')
, (b, 'b')
, (c, 'c')
, (d, 'd')
, (e, 'e')
, (f, 'f')
, (g, 'g')
]
where
nonUnique = filter (not . isUnique) ps
[one, seven, four, eight] = sortBy (compare `on` length) $ filter isUnique ps
three = single $ filter ((== 2) . length . (\\ seven)) nonUnique
bottomRightVertical = single $ filter (\x -> length x == 1 && not (x `elem` [[e],[d]])) $ map (eight \\) nonUnique
a = single $ seven \\ four
b = single $ four \\ (seven ++ three)
c = single $ one `intersect` bottomRightVertical
d = single $ (intersect three four) \\ one
e = single $ eight \\ (three ++ four)
f = single $ one \\ bottomRightVertical
g = single $ three \\ (seven ++ four)
decode :: Configuration -> [(Char, Char)] -> Configuration
decode (Configuration ps out) connections = Configuration ps [ sort $ map (fromJust . flip lookup connections) signal | signal <- out]
outputNumber :: Configuration -> Int
outputNumber (Configuration _ out) = read $ map lookupDigit out
lookupDigit :: String -> Char
lookupDigit s = maybe (error $ "digit not found: " ++ s) id $ lookup s digitMap
digitMap :: [(String, Char)]
digitMap =
[ ("abcefg", '0')
, ("cf", '1')
, ("acdeg", '2')
, ("acdfg", '3')
, ("bcdf", '4')
, ("abdfg", '5')
, ("abdefg", '6')
, ("acf", '7')
, ("abcdefg", '8')
, ("abcdfg", '9')
]
solve :: Configuration -> Int
solve c = outputNumber $ decode c (mapConnections c)
part1 :: [Configuration] -> String
part1 = show . length . concatMap (filter isUnique . output)
part2 :: [Configuration] -> String
part2 = show . sum . map solve
main :: IO ()
main = both part1 part2
|
82d4edf1a2130be7a20318e6495becb33ab2f2857fd1d33f1ad2738cfc41d4ce | ghcjs/jsaddle-dom | GlobalPerformance.hs | # LANGUAGE PatternSynonyms #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.GlobalPerformance
(getPerformance, GlobalPerformance(..), gTypeGlobalPerformance,
IsGlobalPerformance, toGlobalPerformance)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/GlobalPerformance.performance Mozilla GlobalPerformance.performance documentation >
getPerformance ::
(MonadDOM m, IsGlobalPerformance self) => self -> m Performance
getPerformance self
= liftDOM
(((toGlobalPerformance self) ^. js "performance") >>=
fromJSValUnchecked)
| null | https://raw.githubusercontent.com/ghcjs/jsaddle-dom/5f5094277d4b11f3dc3e2df6bb437b75712d268f/src/JSDOM/Generated/GlobalPerformance.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures # | # LANGUAGE PatternSynonyms #
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.GlobalPerformance
(getPerformance, GlobalPerformance(..), gTypeGlobalPerformance,
IsGlobalPerformance, toGlobalPerformance)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/GlobalPerformance.performance Mozilla GlobalPerformance.performance documentation >
getPerformance ::
(MonadDOM m, IsGlobalPerformance self) => self -> m Performance
getPerformance self
= liftDOM
(((toGlobalPerformance self) ^. js "performance") >>=
fromJSValUnchecked)
|
5cce4bf6cdd4dfe7bd607431aabb8dc53336ec547dc2044a103376306eb8e47d | bjorng/wings | wpc_bend.erl | %%
%% wpc_bend.erl --
%%
%% Plug-in for bending vertices
%%
Copyright ( c ) 2005 - 2011
%%
%% See the file "license.terms" for information on usage and redistribution
%% of this file, and for a DISCLAIMER OF ALL WARRANTIES.
%%
%% $Id$
%%
-module(wpc_bend).
-export([init/0,menu/2,command/2]).
-import(lists, [foldl/3]).
-include_lib("wings/src/wings.hrl").
-define(HUGE, 1.0E307).
%% Uncomment the following line to turn on debugging.
%% -define(DEBUG_BEND, 1).
fixed_length or fixed_radius
rodCenter,
rodNormal, % normal along the rod
rodLength, % length from center to top
bendCenter, % used with fixed_radius
bendNormal, % normal from rod to bendCenter
( rotation axis )
posHeightClamp, % straight lines past this height
negHeightClamp}). % straight lines past this height
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Exported functions
%%
init() ->
true.
menu({vertex}, Menu) ->
Menu ++ [separator,
{?__(1,"Bend"),{bend,fun adv_submenu_noclamp/2}},
{?__(2,"Bend Clamped"),{bend,fun adv_submenu_clamped/2}}];
menu(_,Menu) -> Menu.
command({vertex,{bend,Type}}, St) ->
bend_cmd(Type, St);
command(_,_) -> next.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Create the menus
%%
submenu_items(1, Clamped) ->
{{plastic_bend,Clamped},
{'ASK',{[{point, ?__(1,"Pick rod center")},
{point, ?__(2,"Pick rod top")},
{axis, ?__(3,"Pick bend normal")}]
++ submenu_clamp(Clamped),[],[]}}};
submenu_items(2, Clamped) ->
{{pivot_bend,Clamped},
{'ASK',{[{point, ?__(4,"Pick rod center")},
{point, ?__(5,"Pick rod top")},
{axis, ?__(6,"Pick pivot axis")},
{point, ?__(7,"Pick pivot location")}]
++ submenu_clamp(Clamped),[],[]}}};
submenu_items(3, Clamped) ->
{{toprad_bend,Clamped},
{'ASK',{[{point, ?__(8,"Pick rod center")},
{point, ?__(9,"Pick rod top")},
{axis, ?__(10,"Pick bend normal")}]
++ submenu_clamp(Clamped),[],[]}}}.
submenu_clamp(Clamped) ->
case Clamped of
clamped ->
[{point, ?__(1,"Pick Top Clamp Point")},
{point, ?__(2,"Pick Bottom Clamp Point")}];
noclamp ->
[]
end.
adv_submenu_noclamp(help, _) ->
{?__(1,"Plastic Bend"),
?__(2,"Pivot Bend"),
?__(3,"TopRad Bend")};
adv_submenu_noclamp(Button, NS) ->
wings_menu:build_command(submenu_items(Button, noclamp), NS).
adv_submenu_clamped(help, _) ->
{?__(1,"Clamped Plastic Bend"),
?__(2,"Clamped Pivot Bend"),
?__(3,"Clamped TopRad Bend")};
adv_submenu_clamped(Button, NS) ->
wings_menu:build_command(submenu_items(Button, clamped), NS).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Respond to commands
%%
bend_cmd({Mode, {'ASK',Ask}}, St) ->
wings:ask(Ask, St, fun (AskResult, St0) ->
bend_ask_callback({Mode, AskResult}, St0)
end);
%%% For repeat drag cmds
bend_cmd({Mode, Data}, St) ->
bend_ask_callback({Mode, Data}, St).
%%
%%
bend_ask_callback({{plastic_bend,noclamp},
{RodCenter, RodTop, BendNormal}}, St) ->
BD = bend_setup(fixed_length, RodCenter, RodTop, BendNormal),
bend_verts(BD, St);
bend_ask_callback({{pivot_bend,noclamp},
{RodCenter, RodTop, PivotNormal, PivotPoint}}, St) ->
BD = bend_setup(fixed_radius, RodCenter, RodTop,
PivotNormal, PivotPoint),
bend_verts(BD, St);
bend_ask_callback({{toprad_bend,noclamp},
{RodCenter, RodTop, BendNormal}}, St) ->
BD = bend_setup(fixed_radius, RodCenter, RodTop, BendNormal),
bend_verts(BD, St);
%%
Clamped
%%
bend_ask_callback({{plastic_bend,clamped},
{RodCenter, RodTop, BendNormal,
PosClamp, NegClamp}}, St) ->
BD1 = bend_setup(fixed_length, RodCenter, RodTop, BendNormal),
BD2 = bend_setup_clamps(BD1, PosClamp, NegClamp),
bend_verts(BD2, St);
bend_ask_callback({{pivot_bend,clamped},
{RodCenter, RodTop, PivotNormal, PivotPoint,
PosClamp, NegClamp}}, St) ->
BD1 = bend_setup(fixed_radius, RodCenter, RodTop,
PivotNormal, PivotPoint),
BD2 = bend_setup_clamps(BD1, PosClamp, NegClamp),
bend_verts(BD2, St);
bend_ask_callback({{toprad_bend,clamped},
{RodCenter, RodTop, BendNormal,
PosClamp, NegClamp}}, St) ->
BD1 = bend_setup(fixed_radius, RodCenter, RodTop, BendNormal),
BD2 = bend_setup_clamps(BD1, PosClamp, NegClamp),
bend_verts(BD2, St).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Setup the #bend_data() record
%%
bend_setup(DragMode, RodCenter, RodTop, BendNormal) ->
Rod = e3d_vec:sub(RodTop, RodCenter),
BN = e3d_vec:norm(BendNormal),
RNoff = e3d_vec:mul(BN,e3d_vec:dot(Rod,BN)), %% off-axis component
RN = e3d_vec:norm(e3d_vec:sub(Rod,RNoff)),
PN = e3d_vec:norm(e3d_vec:cross(RN,BN)),
RL = e3d_vec:dot(Rod, RN),
BC = e3d_vec:add(RodCenter, e3d_vec:mul(BendNormal,RL)), %% toprad mode
#bend_data{dragMode = DragMode,
rodCenter = RodCenter,
rodNormal = RN,
rodLength = RL,
bendCenter = BC,
bendNormal = BN,
pivotNormal = PN,
posHeightClamp = +?HUGE,
negHeightClamp = -?HUGE}.
bend_setup(DragMode, RodCenter, RodTop, PivotNormal, PivotPoint) ->
Rod = e3d_vec:sub(RodTop, RodCenter),
VC = e3d_vec:sub(RodCenter,PivotPoint),
PN = e3d_vec:norm(PivotNormal),
BC = PivotPoint,
BNoff = e3d_vec:mul(PN,e3d_vec:dot(VC,PN)), %% off-axis component
BN = e3d_vec:norm(e3d_vec:sub(VC,BNoff)),
RN = e3d_vec:norm(e3d_vec:cross(PN,BN)),
RL = e3d_vec:dot(Rod, RN),
#bend_data{dragMode = DragMode,
rodCenter = RodCenter,
rodNormal = RN,
rodLength = RL,
bendCenter = BC,
bendNormal = BN,
pivotNormal = PN,
posHeightClamp = +?HUGE,
negHeightClamp = -?HUGE}.
%% Fill in the clamp heights of an existing #bend_data{} record.
%% The two clamp points are projected onto the rod axis to obtain signed
%% heights relative to the rod center; the heights are then ordered so that
%% posHeightClamp >= negHeightClamp.  When both clamps project to the same
%% height, a symmetric band [-|h|, +|h|] is used instead.
bend_setup_clamps(#bend_data{rodCenter=RC, rodNormal=RN}=BD,
                  PosClamp, NegClamp) ->
    PV = e3d_vec:sub(PosClamp, RC),
    NV = e3d_vec:sub(NegClamp, RC),
    PC = e3d_vec:dot(RN, PV),
    NC = e3d_vec:dot(RN, NV),
    PC2 = if
              PC == NC -> +abs(PC);
              PC < NC  -> NC;
              true     -> PC
          end,
    NC2 = if
              NC == PC -> -abs(NC);
              NC > PC  -> PC;
              true     -> NC
          end,
    %% Record-update syntax keeps every other field as-is; the original
    %% rebuilt the record by copying each field explicitly.
    BD#bend_data{posHeightClamp = PC2,
                 negHeightClamp = NC2}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Drag and iterate through the vertices
%%
%% Validate the bend configuration and start the interactive drag.
%% A zero rod length means the projections in bend_vertex/3 would divide by
%% zero and no bend is possible, so report an error instead.
bend_verts(BendData, St) ->
    case BendData#bend_data.rodLength of
        0.0 ->
            wpa:error_msg(?__(1,"Configuration does not result in bending"));
        _ ->
            %% FIXME
            %% Run a test call. If you don't do this before
            %% iterating, and there's an error, it locks up
            %% the mouse under linux.
            bend_vertex({1.0, 1.0, 1.0}, 45.0, BendData),
            wings_drag:fold(fun(Vs, We) ->
                                    bend_verts(BendData, Vs, We)
                            end, [angle], St)
    end.
%% Build the {Vs,Fun} pair consumed by wings_drag:fold/3 for one object.
%% Vs0 is a gb_set of selected vertex ids (see gb_sets:to_list/1 below);
%% We is the winged-edge object the vertices belong to.
bend_verts(BendData, Vs0, We) ->
    Vs = gb_sets:to_list(Vs0),
    %% Pair each vertex id with its current position: VsPos = [{V,Vpos}].
    VsPos = wings_util:add_vpos(Vs, We),
    %% The drag fun receives the current drag parameters as a list ([Angle],
    %% matching the [angle] unit list passed to wings_drag:fold/3) plus an
    %% accumulator, and prepends a {Vertex,NewPosition} pair for every
    %% selected vertex.
    Fun = fun([Angle], A) ->
                  foldl(fun({V,Vpos}, VsAcc) ->
                                [{V,bend_vertex(Vpos,Angle,BendData)}|VsAcc]
                        end, A, VsPos)
          end,
    {Vs,Fun}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% The Main Function.
%%
%% The return value is the new position. {X,Y,Z}
%%
%% Degenerate rod: nothing to bend, return the position unchanged.
bend_vertex(Pos, _, #bend_data{rodLength = 0.0}) ->
    Pos;
%% Angle 0.0 in fixed_length mode would divide by zero when computing the
%% bend center (RL/Radians below), and means "no bend" anyway.
bend_vertex(Pos, 0.0, #bend_data{dragMode = fixed_length}) ->
    Pos;
%% Map one vertex position to its bent position for the given drag Angle
%% (in degrees).  The return value is the new position, {X,Y,Z}.
bend_vertex(Pos, Angle, #bend_data{dragMode = DragMode,
                                   rodCenter = RC,
                                   rodNormal = RN,
                                   rodLength = RL,
                                   bendCenter = BC,
                                   bendNormal = BN,
                                   pivotNormal = PN,
                                   posHeightClamp = PC,
                                   negHeightClamp = NC}=BD) ->
    maybe_print_bend_data(Angle, BD),
    Radians = Angle * (math:pi()/180.0),
    %% fixed_length: the bend radius RL/Radians shrinks as the angle grows so
    %% the rod keeps its length; fixed_radius: use the precomputed center.
    Center = case DragMode of
                 fixed_length -> e3d_vec:add(RC,e3d_vec:mul(BN, RL/Radians));
                 fixed_radius -> BC
             end,
    V = e3d_vec:sub(Pos,Center),
    %% Decompose the offset from the bend center into the local frame:
    %% Mr along the rod, Mb along the bend normal, Mp along the pivot normal.
    Mr = e3d_vec:dot(V, RN),
    Mb = e3d_vec:dot(V, BN),
    Mp = e3d_vec:dot(V, PN),
    if
        Mr > PC ->
            %% Above the positive clamp: rotate only up to the clamp height,
            %% then extend the remainder (Rem) as a straight line tangent to
            %% the bend at that height.
            Rem = Mr - PC,
            MyAngle = Radians * (PC/RL),
            CosA = math:cos(MyAngle),
            SinA = math:sin(MyAngle),
            e3d_vec:add([Center,
                         e3d_vec:mul(PN, Mp),
                         e3d_vec:mul(BN, CosA * +Mb),
                         e3d_vec:mul(RN, SinA * -Mb),
                         e3d_vec:mul(BN, SinA * +Rem),
                         e3d_vec:mul(RN, CosA * +Rem)]);
        Mr < NC ->
            %% Below the negative clamp: same idea, extending downward.
            Rem = NC - Mr,
            MyAngle = Radians * (NC/RL),
            CosA = math:cos(MyAngle),
            SinA = math:sin(MyAngle),
            e3d_vec:add([Center,
                         e3d_vec:mul(PN, Mp),
                         e3d_vec:mul(BN, CosA * +Mb),
                         e3d_vec:mul(RN, SinA * -Mb),
                         e3d_vec:mul(BN, SinA * -Rem),
                         e3d_vec:mul(RN, CosA * -Rem)]);
        true ->
            %% Within the clamps: pure rotation about the bend center, with
            %% the rotation angle proportional to the height along the rod.
            MyAngle = Radians * (Mr/RL),
            CosA = math:cos(MyAngle),
            SinA = math:sin(MyAngle),
            %% The final RN term below was garbled in the source (only the
            %% bare text "BN is backwards" survived); it is reconstructed to
            %% mirror the identical rotation terms in the two clamped
            %% branches above.
            e3d_vec:add([Center,
                         e3d_vec:mul(PN, Mp),
                         e3d_vec:mul(BN, CosA * +Mb),
                         e3d_vec:mul(RN, SinA * -Mb)]) %% BN is backwards
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Utilities
%%
%% Debug helpers: compiled to a no-op unless DEBUG_BEND is defined.
-ifndef(DEBUG_BEND).
maybe_print_bend_data(_, _) -> ok.
-else.
%% Print the current drag angle followed by the full #bend_data{} record.
maybe_print_bend_data(Angle, BD) ->
    io:format("Angle = ~p: ", [Angle]),
    print_bend_data(BD).

%% Dump every field of a #bend_data{} record to standard output.
print_bend_data(#bend_data{dragMode = DragMode,
                           rodCenter = RC,
                           rodNormal = RN,
                           rodLength = RL,
                           bendCenter = BC,
                           bendNormal = BN,
                           pivotNormal = PN,
                           posHeightClamp = PC,
                           negHeightClamp = NC}) ->
    case DragMode of
        fixed_length -> io:format("fixed_length\n");
        fixed_radius -> io:format("fixed_radius\n");
        _ -> io:format("bad drag_mode\n")
    end,
    io:format(" RC [~p, ~p, ~p] BC [~p, ~p, ~p]\n" ++
              " RN [~p, ~p, ~p]\n" ++
              " BN [~p, ~p, ~p]\n" ++
              " PN [~p, ~p, ~p]\n", vectorsToArray([RC,BC,RN,BN,PN])),
    io:format(" rodLength = ~p\n", [RL]),
    io:format(" posHeightClamp = ~p\n", [PC]),
    io:format(" negHeightClamp = ~p\n", [NC]).

%% Flatten a list of {X,Y,Z} vectors into a flat component list suitable as
%% an io:format argument list.
vectorsToArray(Vectors) ->
    vectorsToArray(Vectors, []).

%% Tail-recursive accumulation: components are pushed in reverse order so a
%% single final lists:reverse/1 restores them (the original appended with
%% Acc ++ [X,Y,Z], which is quadratic in the number of vectors).
vectorsToArray([{X,Y,Z}|T], Acc) ->
    vectorsToArray(T, [Z,Y,X|Acc]);
vectorsToArray([], Acc) ->
    lists:reverse(Acc).
-endif.
| null | https://raw.githubusercontent.com/bjorng/wings/dec64a500220359dbc552600af486be47c45d301/plugins_src/commands/wpc_bend.erl | erlang |
wpc_bend.erl --
Plug-in for bending vertices
See the file "license.terms" for information on usage and redistribution
of this file, and for a DISCLAIMER OF ALL WARRANTIES.
$Id$
Uncomment the following line to turn on debugging.
-define(DEBUG_BEND, 1).
normal along the rod
length from center to top
used with fixed_radius
normal from rod to bendCenter
straight lines past this height
straight lines past this height
Exported functions
Create the menus
Respond to commands
For repeat drag cmds
Setup the #bend_data() record
off-axis component
toprad mode
off-axis component
Drag and iterate through the vertices
Run a test call. If you don't do this before
iterating, and there's an error, it locks up
the mouse under linux.
The return value is the new position. {X,Y,Z}
Map to bending axes
| Copyright ( c ) 2005 - 2011
-module(wpc_bend).
-export([init/0,menu/2,command/2]).
-import(lists, [foldl/3]).
-include_lib("wings/src/wings.hrl").
-define(HUGE, 1.0E307).
fixed_length or fixed_radius
rodCenter,
( rotation axis )
init() ->
true.
menu({vertex}, Menu) ->
Menu ++ [separator,
{?__(1,"Bend"),{bend,fun adv_submenu_noclamp/2}},
{?__(2,"Bend Clamped"),{bend,fun adv_submenu_clamped/2}}];
menu(_,Menu) -> Menu.
command({vertex,{bend,Type}}, St) ->
bend_cmd(Type, St);
command(_,_) -> next.
submenu_items(1, Clamped) ->
{{plastic_bend,Clamped},
{'ASK',{[{point, ?__(1,"Pick rod center")},
{point, ?__(2,"Pick rod top")},
{axis, ?__(3,"Pick bend normal")}]
++ submenu_clamp(Clamped),[],[]}}};
submenu_items(2, Clamped) ->
{{pivot_bend,Clamped},
{'ASK',{[{point, ?__(4,"Pick rod center")},
{point, ?__(5,"Pick rod top")},
{axis, ?__(6,"Pick pivot axis")},
{point, ?__(7,"Pick pivot location")}]
++ submenu_clamp(Clamped),[],[]}}};
submenu_items(3, Clamped) ->
{{toprad_bend,Clamped},
{'ASK',{[{point, ?__(8,"Pick rod center")},
{point, ?__(9,"Pick rod top")},
{axis, ?__(10,"Pick bend normal")}]
++ submenu_clamp(Clamped),[],[]}}}.
submenu_clamp(Clamped) ->
case Clamped of
clamped ->
[{point, ?__(1,"Pick Top Clamp Point")},
{point, ?__(2,"Pick Bottom Clamp Point")}];
noclamp ->
[]
end.
adv_submenu_noclamp(help, _) ->
{?__(1,"Plastic Bend"),
?__(2,"Pivot Bend"),
?__(3,"TopRad Bend")};
adv_submenu_noclamp(Button, NS) ->
wings_menu:build_command(submenu_items(Button, noclamp), NS).
adv_submenu_clamped(help, _) ->
{?__(1,"Clamped Plastic Bend"),
?__(2,"Clamped Pivot Bend"),
?__(3,"Clamped TopRad Bend")};
adv_submenu_clamped(Button, NS) ->
wings_menu:build_command(submenu_items(Button, clamped), NS).
bend_cmd({Mode, {'ASK',Ask}}, St) ->
wings:ask(Ask, St, fun (AskResult, St0) ->
bend_ask_callback({Mode, AskResult}, St0)
end);
bend_cmd({Mode, Data}, St) ->
bend_ask_callback({Mode, Data}, St).
bend_ask_callback({{plastic_bend,noclamp},
{RodCenter, RodTop, BendNormal}}, St) ->
BD = bend_setup(fixed_length, RodCenter, RodTop, BendNormal),
bend_verts(BD, St);
bend_ask_callback({{pivot_bend,noclamp},
{RodCenter, RodTop, PivotNormal, PivotPoint}}, St) ->
BD = bend_setup(fixed_radius, RodCenter, RodTop,
PivotNormal, PivotPoint),
bend_verts(BD, St);
bend_ask_callback({{toprad_bend,noclamp},
{RodCenter, RodTop, BendNormal}}, St) ->
BD = bend_setup(fixed_radius, RodCenter, RodTop, BendNormal),
bend_verts(BD, St);
Clamped
bend_ask_callback({{plastic_bend,clamped},
{RodCenter, RodTop, BendNormal,
PosClamp, NegClamp}}, St) ->
BD1 = bend_setup(fixed_length, RodCenter, RodTop, BendNormal),
BD2 = bend_setup_clamps(BD1, PosClamp, NegClamp),
bend_verts(BD2, St);
bend_ask_callback({{pivot_bend,clamped},
{RodCenter, RodTop, PivotNormal, PivotPoint,
PosClamp, NegClamp}}, St) ->
BD1 = bend_setup(fixed_radius, RodCenter, RodTop,
PivotNormal, PivotPoint),
BD2 = bend_setup_clamps(BD1, PosClamp, NegClamp),
bend_verts(BD2, St);
bend_ask_callback({{toprad_bend,clamped},
{RodCenter, RodTop, BendNormal,
PosClamp, NegClamp}}, St) ->
BD1 = bend_setup(fixed_radius, RodCenter, RodTop, BendNormal),
BD2 = bend_setup_clamps(BD1, PosClamp, NegClamp),
bend_verts(BD2, St).
bend_setup(DragMode, RodCenter, RodTop, BendNormal) ->
Rod = e3d_vec:sub(RodTop, RodCenter),
BN = e3d_vec:norm(BendNormal),
RN = e3d_vec:norm(e3d_vec:sub(Rod,RNoff)),
PN = e3d_vec:norm(e3d_vec:cross(RN,BN)),
RL = e3d_vec:dot(Rod, RN),
#bend_data{dragMode = DragMode,
rodCenter = RodCenter,
rodNormal = RN,
rodLength = RL,
bendCenter = BC,
bendNormal = BN,
pivotNormal = PN,
posHeightClamp = +?HUGE,
negHeightClamp = -?HUGE}.
bend_setup(DragMode, RodCenter, RodTop, PivotNormal, PivotPoint) ->
Rod = e3d_vec:sub(RodTop, RodCenter),
VC = e3d_vec:sub(RodCenter,PivotPoint),
PN = e3d_vec:norm(PivotNormal),
BC = PivotPoint,
BN = e3d_vec:norm(e3d_vec:sub(VC,BNoff)),
RN = e3d_vec:norm(e3d_vec:cross(PN,BN)),
RL = e3d_vec:dot(Rod, RN),
#bend_data{dragMode = DragMode,
rodCenter = RodCenter,
rodNormal = RN,
rodLength = RL,
bendCenter = BC,
bendNormal = BN,
pivotNormal = PN,
posHeightClamp = +?HUGE,
negHeightClamp = -?HUGE}.
bend_setup_clamps(#bend_data{rodCenter=RC, rodNormal=RN}=BD,
PosClamp, NegClamp) ->
PV = e3d_vec:sub(PosClamp, RC),
NV = e3d_vec:sub(NegClamp, RC),
PC = e3d_vec:dot(RN, PV),
NC = e3d_vec:dot(RN, NV),
PC2 = if
PC == NC -> +abs(PC);
PC < NC -> NC;
true -> PC
end,
NC2 = if
NC == PC -> -abs(NC);
NC > PC -> PC;
true -> NC
end,
#bend_data{dragMode = BD#bend_data.dragMode,
rodCenter = RC,
rodNormal = RN,
rodLength = BD#bend_data.rodLength,
bendCenter = BD#bend_data.bendCenter,
bendNormal = BD#bend_data.bendNormal,
pivotNormal = BD#bend_data.pivotNormal,
posHeightClamp = PC2,
negHeightClamp = NC2}.
bend_verts(BendData, St) ->
case BendData#bend_data.rodLength of
0.0 ->
wpa:error_msg(?__(1,"Configuration does not result in bending"));
_ ->
FIXME
bend_vertex({1.0, 1.0, 1.0}, 45.0, BendData),
wings_drag:fold(fun(Vs, We) ->
bend_verts(BendData, Vs, We)
end, [angle], St)
end.
bend_verts(BendData, Vs0, We) ->
Vs = gb_sets:to_list(Vs0),
VsPos = wings_util:add_vpos(Vs, We),
Fun = fun([Angle], A) ->
foldl(fun({V,Vpos}, VsAcc) ->
[{V,bend_vertex(Vpos,Angle,BendData)}|VsAcc]
end, A, VsPos)
end,
{Vs,Fun}.
The Main Function .
bend_vertex(Pos, _, #bend_data{rodLength = 0.0}) ->
Pos;
bend_vertex(Pos, 0.0, #bend_data{dragMode = fixed_length}) ->
Pos;
bend_vertex(Pos, Angle, #bend_data{dragMode = DragMode,
rodCenter = RC,
rodNormal = RN,
rodLength = RL,
bendCenter = BC,
bendNormal = BN,
pivotNormal = PN,
posHeightClamp = PC,
negHeightClamp = NC}=BD) ->
maybe_print_bend_data(Angle, BD),
Radians = Angle * (math:pi()/180.0),
Center = case DragMode of
fixed_length -> e3d_vec:add(RC,e3d_vec:mul(BN, RL/Radians));
fixed_radius -> BC
end,
V = e3d_vec:sub(Pos,Center),
Mr = e3d_vec:dot(V, RN),
Mb = e3d_vec:dot(V, BN),
Mp = e3d_vec:dot(V, PN),
if
Mr > PC ->
Rem = Mr - PC,
MyAngle = Radians * (PC/RL),
CosA = math:cos(MyAngle),
SinA = math:sin(MyAngle),
e3d_vec:add([Center,
e3d_vec:mul(PN, Mp),
e3d_vec:mul(BN, CosA * +Mb),
e3d_vec:mul(RN, SinA * -Mb),
e3d_vec:mul(BN, SinA * +Rem),
e3d_vec:mul(RN, CosA * +Rem)]);
Mr < NC ->
Rem = NC - Mr,
MyAngle = Radians * (NC/RL),
CosA = math:cos(MyAngle),
SinA = math:sin(MyAngle),
e3d_vec:add([Center,
e3d_vec:mul(PN, Mp),
e3d_vec:mul(BN, CosA * +Mb),
e3d_vec:mul(RN, SinA * -Mb),
e3d_vec:mul(BN, SinA * -Rem),
e3d_vec:mul(RN, CosA * -Rem)]);
true ->
MyAngle = Radians * (Mr/RL),
CosA = math:cos(MyAngle),
SinA = math:sin(MyAngle),
e3d_vec:add([Center,
e3d_vec:mul(PN, Mp),
e3d_vec:mul(BN, CosA * +Mb),
BN is backwards
end.
Utilities
-ifndef(DEBUG_BEND).
maybe_print_bend_data(_, _) -> ok.
-else.
maybe_print_bend_data(Angle, BD) ->
io:format("Angle = ~p: ", [Angle]),
print_bend_data(BD).
print_bend_data(#bend_data{dragMode = DragMode,
rodCenter = RC,
rodNormal = RN,
rodLength = RL,
bendCenter = BC,
bendNormal = BN,
pivotNormal = PN,
posHeightClamp = PC,
negHeightClamp = NC}) ->
case DragMode of
fixed_length -> io:format("fixed_length\n");
fixed_radius -> io:format("fixed_radius\n");
_ -> io:format("bad drag_mode\n")
end,
io:format(" RC [~p, ~p, ~p] BC [~p, ~p, ~p]\n" ++
" RN [~p, ~p, ~p]\n" ++
" BN [~p, ~p, ~p]\n" ++
" PN [~p, ~p, ~p]\n", vectorsToArray([RC,BC,RN,BN,PN])),
io:format(" rodLength = ~p\n", [RL]),
io:format(" posHeightClamp = ~p\n", [PC]),
io:format(" negHeightClamp = ~p\n", [NC]).
vectorsToArray(Vectors) ->
vectorsToArray(Vectors, []).
vectorsToArray([{X,Y,Z}|T], Acc) ->
vectorsToArray(T, Acc ++ [X,Y,Z]);
vectorsToArray([], Acc) ->
Acc.
-endif.
|
aa10d326a4575c9709dd28b994d360af2f88e18bcc50d7b173054afe8f746b74 | tfausak/blunt | Style.hs | {-# LANGUAGE OverloadedStrings #-}
module Blunt.App.Style where
import Clay
import Data.Monoid ((<>))
import qualified Data.Text.Lazy as Text
style :: Text.Text
style = renderWith compact [] css
css :: Css
css = do
html <> body ?
do backgroundColor "#f5f5f5"
color "#151515"
fontFamily [] [sansSerif]
lineHeight (em 1.5)
sym margin nil
sym padding nil
body ?
do boxSizing borderBox
sym2 margin nil auto
maxWidth (em 40)
padding nil (em 1.5) (em 1.5) (em 1.5)
h1 ?
do color "#90a959"
fontSize (em 2)
fontWeight bold
lineHeight (em 3)
sym margin nil
textAlign (alignSide sideCenter)
dl ? do sym margin nil
dt ? do marginTop (em 1.5)
dd ? do sym margin nil
input <> ".output" ?
do border solid (px 1) "#e0e0e0"
boxSizing borderBox
fontFamily [] [monospace]
fontSize (em 1)
width (pct 100)
input ?
do height (em 3)
lineHeight (em 3)
sym2 padding nil (em 0.75)
".output" ?
do sym padding (em 0.75)
whiteSpace preWrap
p ? do margin (em 1.5) nil nil nil
| null | https://raw.githubusercontent.com/tfausak/blunt/a44af0e06138bae94d7fd51330eb5602e7f3d5a3/library/Blunt/App/Style.hs | haskell | # LANGUAGE OverloadedStrings # |
module Blunt.App.Style where
import Clay
import Data.Monoid ((<>))
import qualified Data.Text.Lazy as Text
style :: Text.Text
style = renderWith compact [] css
css :: Css
css = do
html <> body ?
do backgroundColor "#f5f5f5"
color "#151515"
fontFamily [] [sansSerif]
lineHeight (em 1.5)
sym margin nil
sym padding nil
body ?
do boxSizing borderBox
sym2 margin nil auto
maxWidth (em 40)
padding nil (em 1.5) (em 1.5) (em 1.5)
h1 ?
do color "#90a959"
fontSize (em 2)
fontWeight bold
lineHeight (em 3)
sym margin nil
textAlign (alignSide sideCenter)
dl ? do sym margin nil
dt ? do marginTop (em 1.5)
dd ? do sym margin nil
input <> ".output" ?
do border solid (px 1) "#e0e0e0"
boxSizing borderBox
fontFamily [] [monospace]
fontSize (em 1)
width (pct 100)
input ?
do height (em 3)
lineHeight (em 3)
sym2 padding nil (em 0.75)
".output" ?
do sym padding (em 0.75)
whiteSpace preWrap
p ? do margin (em 1.5) nil nil nil
|
406800c18c68d01d690a3b028eec09588dd8021da74ce9eb733c0902afac1693 | snapframework/io-streams | Concurrent.hs | module System.IO.Streams.Tests.Concurrent (tests) where
------------------------------------------------------------------------------
import Control.Concurrent
import Control.Monad
import Prelude hiding (lines, read, takeWhile, unlines, unwords, unwords, words)
import qualified System.IO.Streams as Streams
import qualified System.IO.Streams.Concurrent as Streams
import System.IO.Streams.Tests.Common
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit hiding (Test)
import Test.QuickCheck hiding (output)
import Test.QuickCheck.Monadic
------------------------------------------------------------------------------
tests :: [Test]
tests = [ testMakeChanPipe
, testConcurrentMerge
, testConcurrentMergeException
, testInputOutput
]
------------------------------------------------------------------------------
testMakeChanPipe :: Test
testMakeChanPipe = testProperty "concurrent/makeChanPipe" $
monadicIO $
forAllM arbitrary prop
where
prop :: [Int] -> PropertyM IO ()
prop l = liftQ $ do
(is, os) <- Streams.makeChanPipe
_ <- forkIO $ Streams.writeList l os >> Streams.write Nothing os
Streams.toList is >>= assertEqual "makeChanPipe" l
------------------------------------------------------------------------------
testConcurrentMerge :: Test
testConcurrentMerge = testCase "concurrent/concurrentMerge" $ do
mvars <- replicateM nthreads newEmptyMVar
chans <- replicateM nthreads newChan
let firstMVar = head mvars
mapM_ (forkIO . ring) $ zip3 mvars (take nthreads $ drop 1 $ cycle mvars)
chans
inputs <- mapM Streams.chanToInput chans
resultMVar <- newEmptyMVar
_ <- forkIO (Streams.concurrentMerge inputs >>= Streams.toList
>>= putMVar resultMVar)
putMVar firstMVar 0
result <- takeMVar resultMVar
assertEqual "concurrent merge" [0..10] result
where
maxval = 10 :: Int
nthreads = 4 :: Int
ring (prev, next, chan) = loop
where
loop = do x <- takeMVar prev
if x > maxval
then do writeChan chan Nothing
putMVar next x
else do writeChan chan $ Just x
threadDelay 100000
putMVar next $! x + 1
loop
------------------------------------------------------------------------------
testConcurrentMergeException :: Test
testConcurrentMergeException =
testCase "concurrent/concurrentMerge/exception" $ do
inp <- Streams.makeInputStream (error "bad") >>=
Streams.concurrentMerge . (:[])
expectExceptionH (Streams.toList inp)
------------------------------------------------------------------------------
testInputOutput :: Test
testInputOutput = testCase "concurrent/input-output" $ do
is <- Streams.fromList [1..10::Int]
chan <- newChan
is' <- Streams.chanToInput chan
Streams.inputToChan is chan
Streams.toList is' >>= assertEqual "input-output" [1..10]
| null | https://raw.githubusercontent.com/snapframework/io-streams/ae692fee732adea9fe843fb3efe0dd43a6993844/test/System/IO/Streams/Tests/Concurrent.hs | haskell | ----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
---------------------------------------------------------------------------- | module System.IO.Streams.Tests.Concurrent (tests) where
import Control.Concurrent
import Control.Monad
import Prelude hiding (lines, read, takeWhile, unlines, unwords, unwords, words)
import qualified System.IO.Streams as Streams
import qualified System.IO.Streams.Concurrent as Streams
import System.IO.Streams.Tests.Common
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit hiding (Test)
import Test.QuickCheck hiding (output)
import Test.QuickCheck.Monadic
tests :: [Test]
tests = [ testMakeChanPipe
, testConcurrentMerge
, testConcurrentMergeException
, testInputOutput
]
testMakeChanPipe :: Test
testMakeChanPipe = testProperty "concurrent/makeChanPipe" $
monadicIO $
forAllM arbitrary prop
where
prop :: [Int] -> PropertyM IO ()
prop l = liftQ $ do
(is, os) <- Streams.makeChanPipe
_ <- forkIO $ Streams.writeList l os >> Streams.write Nothing os
Streams.toList is >>= assertEqual "makeChanPipe" l
testConcurrentMerge :: Test
testConcurrentMerge = testCase "concurrent/concurrentMerge" $ do
mvars <- replicateM nthreads newEmptyMVar
chans <- replicateM nthreads newChan
let firstMVar = head mvars
mapM_ (forkIO . ring) $ zip3 mvars (take nthreads $ drop 1 $ cycle mvars)
chans
inputs <- mapM Streams.chanToInput chans
resultMVar <- newEmptyMVar
_ <- forkIO (Streams.concurrentMerge inputs >>= Streams.toList
>>= putMVar resultMVar)
putMVar firstMVar 0
result <- takeMVar resultMVar
assertEqual "concurrent merge" [0..10] result
where
maxval = 10 :: Int
nthreads = 4 :: Int
ring (prev, next, chan) = loop
where
loop = do x <- takeMVar prev
if x > maxval
then do writeChan chan Nothing
putMVar next x
else do writeChan chan $ Just x
threadDelay 100000
putMVar next $! x + 1
loop
testConcurrentMergeException :: Test
testConcurrentMergeException =
testCase "concurrent/concurrentMerge/exception" $ do
inp <- Streams.makeInputStream (error "bad") >>=
Streams.concurrentMerge . (:[])
expectExceptionH (Streams.toList inp)
testInputOutput :: Test
testInputOutput = testCase "concurrent/input-output" $ do
is <- Streams.fromList [1..10::Int]
chan <- newChan
is' <- Streams.chanToInput chan
Streams.inputToChan is chan
Streams.toList is' >>= assertEqual "input-output" [1..10]
|
b7f6c9c042cb81e06b11228bcb91a0d2b83ccd730cbc8c03a425b8e93605f256 | static-analysis-engineering/codehawk | bCHARMAssemblyBlock.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2021 - 2023 Aarno Labs , LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2021-2023 Aarno Labs, LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHPretty
(* chutil *)
open CHXmlDocument
(* bchlib *)
open BCHLibTypes
(* bchlibarm32 *)
open BCHARMTypes
* [ make_arm_assembly_block ~ctxt faddr succ ] returns a new basic
block with context [ ctxt ] , function address [ faddr ] , first address [ ] ,
last address [ ] , and successors [ succ ] .
block with context [ctxt], function address [faddr], first address [baddr],
last address [laddr], and successors [succ].*)
val make_arm_assembly_block:
inline context , other function first
-> doubleword_int (* function address *)
-> doubleword_int (* first address of the basic block *)
-> doubleword_int (* last address of the basic block *)
-> ctxt_iaddress_t list (* addresses of successor blocks *)
-> arm_assembly_block_int
(** [make_ctxt_arm_assembly_block ctxt blk succ] returns a new arm basic block
that is a copy of [blk] with added context [ctxt] and added successors
[succ]. This can be used to create an inlined block.*)
val make_ctxt_arm_assembly_block:
context_t (* new context to be prepended *)
-> arm_assembly_block_int
-> ctxt_iaddress_t list (* new successor blocks *)
-> arm_assembly_block_int
* [ update_arm_assembly_block_successors block s_old s_new ] returns a new
assembly basic block that is identical to [ block ] except that successor
[ s_old ] is replaced by ( possibly multiple ) successors [ s_new ] .
assembly basic block that is identical to [block] except that successor
[s_old] is replaced by (possibly multiple) successors [s_new].*)
val update_arm_assembly_block_successors:
arm_assembly_block_int
-> ctxt_iaddress_t
-> ctxt_iaddress_t list
-> arm_assembly_block_int
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/c1b3158e0d73cda7cfc10d75f6173f4297991a82/CodeHawk/CHB/bchlibarm32/bCHARMAssemblyBlock.mli | ocaml | chutil
bchlib
bchlibarm32
function address
first address of the basic block
last address of the basic block
addresses of successor blocks
* [make_ctxt_arm_assembly_block ctxt blk succ] returns a new arm basic block
that is a copy of [blk] with added context [ctxt] and added successors
[succ]. This can be used to create an inlined block.
new context to be prepended
new successor blocks | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2021 - 2023 Aarno Labs , LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2021-2023 Aarno Labs, LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHPretty
open CHXmlDocument
open BCHLibTypes
open BCHARMTypes
* [ make_arm_assembly_block ~ctxt faddr succ ] returns a new basic
block with context [ ctxt ] , function address [ faddr ] , first address [ ] ,
last address [ ] , and successors [ succ ] .
block with context [ctxt], function address [faddr], first address [baddr],
last address [laddr], and successors [succ].*)
val make_arm_assembly_block:
inline context , other function first
-> arm_assembly_block_int
val make_ctxt_arm_assembly_block:
-> arm_assembly_block_int
-> arm_assembly_block_int
* [ update_arm_assembly_block_successors block s_old s_new ] returns a new
assembly basic block that is identical to [ block ] except that successor
[ s_old ] is replaced by ( possibly multiple ) successors [ s_new ] .
assembly basic block that is identical to [block] except that successor
[s_old] is replaced by (possibly multiple) successors [s_new].*)
val update_arm_assembly_block_successors:
arm_assembly_block_int
-> ctxt_iaddress_t
-> ctxt_iaddress_t list
-> arm_assembly_block_int
|
3d042a56796d92b11d46cea336b8bf6a964f833e1c1f800fa4b7cc5604960a6d | apache/dubbo-erlang | dubbo_java_type_defined.erl | %%------------------------------------------------------------------------------
Licensed to the Apache Software Foundation ( ASF ) under one or more
%% contributor license agreements. See the NOTICE file distributed with
%% this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License , Version 2.0
( the " License " ) ; you may not use this file except in compliance with
the License . You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%------------------------------------------------------------------------------
-module(dubbo_java_type_defined).
-include("java_type.hrl").
%% API
-export([get_list/0]).
get_list() ->
[
{java_stack_trace_element, <<"java.lang.StackTraceElement">>, record_info(fields, java_stack_trace_element)},
{java_null_pointer_exception, <<"java.lang.NullPointerException">>, record_info(fields, java_null_pointer_exception)},
{java_runtime_exception, <<"java.lang.RuntimeException">>, record_info(fields, java_runtime_exception)},
{index_out_bounds_exception, <<"java.lang.IndexOutOfBoundsException">>, record_info(fields, index_out_bounds_exception)},
{java_string_index_out_bounds_exception, <<"java.lang.StringIndexOutOfBoundsException">>, record_info(fields, java_string_index_out_bounds_exception)},
{java_array_index_out_bounds_exception, <<"java.lang.ArrayIndexOutOfBoundsException">>, record_info(fields, java_array_index_out_bounds_exception)},
{java_arithmetic_exception, <<"java.lang.ArithmeticException">>, record_info(fields, java_arithmetic_exception)},
{java_class_cast_exception, <<"java.lang.ClassCastException">>, record_info(fields, java_class_cast_exception)},
{java_nio_provider_not_found_exception, <<"java.nio.file.ProviderNotFoundException">>, record_info(fields, java_nio_provider_not_found_exception)},
{java_security_exception, <<"java.lang.SecurityException">>, record_info(fields, java_security_exception)},
{java_annotation_type_mismatch_exception, <<"java.lang.AnnotationTypeMismatchException">>, record_info(fields, java_annotation_type_mismatch_exception)},
{dubbo_rpc_exception, <<"org.apache.dubbo.rpc.RpcException">>, record_info(fields, dubbo_rpc_exception)},
{java_enum_constant_not_present_exception, <<"java.lang.EnumConstantNotPresentException">>, record_info(fields, java_enum_constant_not_present_exception)},
{java_no_such_element_exception, <<"java.util.NoSuchElementException">>, record_info(fields, java_no_such_element_exception)},
{java_input_mismatch_exception, <<"java.util.InputMismatchException">>, record_info(fields, java_input_mismatch_exception)},
{dubbo_hessian_exception, <<"com.alibaba.com.caucho.hessian.HessianException">>, record_info(fields, dubbo_hessian_exception)},
{java_wrong_method_type_exception, <<"java.lang.invoke.WrongMethodTypeException">>, record_info(fields, java_wrong_method_type_exception)},
{java_incomplete_annotation_exception, <<"java.lang.annotation.IncompleteAnnotationException">>, record_info(fields, java_incomplete_annotation_exception)},
{java_malformed_parameters_exception, <<"java.lang.reflect.MalformedParametersException">>, record_info(fields, java_malformed_parameters_exception)},
{java_undeclared_throwable_exception, <<"java.lang.reflect.UndeclaredThrowableException">>, record_info(fields, java_undeclared_throwable_exception)},
{dubbo_no_such_property_exception, <<"org.apache.dubbo.common.bytecode.NoSuchPropertyException">>, record_info(fields, dubbo_no_such_property_exception)},
{java_mirrored_types_exception, <<"javax.lang.model.type.MirroredTypesException">>, record_info(fields, java_mirrored_types_exception)},
{dubbo_no_such_method_exception, <<"org.apache.dubbo.common.bytecode.NoSuchMethodException">>, record_info(fields, dubbo_no_such_method_exception)},
{java_unchecked_io_exception, <<"java.io.UncheckedIOException">>, record_info(fields, java_unchecked_io_exception)},
{java_illegal_monitor_state_exception, <<"java.lang.IllegalMonitorStateException">>, record_info(fields, java_illegal_monitor_state_exception)},
{java_negative_array_size_exception, <<"java.lang.NegativeArraySizeException">>, record_info(fields, java_negative_array_size_exception)},
{java_unsupported_operation_exception, <<"java.lang.UnsupportedOperationException">>, record_info(fields, java_unsupported_operation_exception)},
{java_empty_stack_exception, <<"java.util.EmptyStackException">>, record_info(fields, java_empty_stack_exception)},
{java_illegal_state_exception, <<"java.lang.IllegalStateException">>, record_info(fields, java_illegal_state_exception)},
{java_datetime_exception, <<"java.time.DateTimeException">>, record_info(fields, java_datetime_exception)},
{java_completion_exception, <<"java.util.concurrent.CompletionException">>, record_info(fields, java_completion_exception)},
{java_malformed_parameterized_type_exception, <<"java.lang.reflect.MalformedParameterizedTypeException">>, record_info(fields, java_malformed_parameterized_type_exception)},
{dubbo_service_generic_exception, <<"org.apache.dubbo.rpc.service.GenericException">>, record_info(fields, dubbo_service_generic_exception)},
{java_illegal_argument_exception, <<"java.lang.IllegalArgumentException">>, record_info(fields, java_illegal_argument_exception)},
{java_missing_resource_pointer_exception, <<"java.util.MissingResourceException">>, record_info(fields, java_missing_resource_pointer_exception)}
]. | null | https://raw.githubusercontent.com/apache/dubbo-erlang/24e0c1a9028b50d2e9e05e3fe26f4e3335384acc/src/dubbo_java_type_defined.erl | erlang | ------------------------------------------------------------------------------
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------------------------------------------
API | Licensed to the Apache Software Foundation ( ASF ) under one or more
The ASF licenses this file to You under the Apache License , Version 2.0
( the " License " ) ; you may not use this file except in compliance with
the License . You may obtain a copy of the License at
distributed under the License is distributed on an " AS IS " BASIS ,
-module(dubbo_java_type_defined).
-include("java_type.hrl").
-export([get_list/0]).
%% API
%% Returns the type mapping used by the hessian (de)serialiser: one
%% entry {RecordTag, JavaClassName, RecordFields} per supported Java
%% class.  The record definitions come from java_type.hrl via
%% record_info/2 (resolved at compile time).
%% (Fix: removed a stray "|" dataset-extraction artefact after the
%% closing "].", which made the file unparsable.)
get_list() ->
  [
    {java_stack_trace_element, <<"java.lang.StackTraceElement">>, record_info(fields, java_stack_trace_element)},
    {java_null_pointer_exception, <<"java.lang.NullPointerException">>, record_info(fields, java_null_pointer_exception)},
    {java_runtime_exception, <<"java.lang.RuntimeException">>, record_info(fields, java_runtime_exception)},
    {index_out_bounds_exception, <<"java.lang.IndexOutOfBoundsException">>, record_info(fields, index_out_bounds_exception)},
    {java_string_index_out_bounds_exception, <<"java.lang.StringIndexOutOfBoundsException">>, record_info(fields, java_string_index_out_bounds_exception)},
    {java_array_index_out_bounds_exception, <<"java.lang.ArrayIndexOutOfBoundsException">>, record_info(fields, java_array_index_out_bounds_exception)},
    {java_arithmetic_exception, <<"java.lang.ArithmeticException">>, record_info(fields, java_arithmetic_exception)},
    {java_class_cast_exception, <<"java.lang.ClassCastException">>, record_info(fields, java_class_cast_exception)},
    {java_nio_provider_not_found_exception, <<"java.nio.file.ProviderNotFoundException">>, record_info(fields, java_nio_provider_not_found_exception)},
    {java_security_exception, <<"java.lang.SecurityException">>, record_info(fields, java_security_exception)},
    {java_annotation_type_mismatch_exception, <<"java.lang.AnnotationTypeMismatchException">>, record_info(fields, java_annotation_type_mismatch_exception)},
    {dubbo_rpc_exception, <<"org.apache.dubbo.rpc.RpcException">>, record_info(fields, dubbo_rpc_exception)},
    {java_enum_constant_not_present_exception, <<"java.lang.EnumConstantNotPresentException">>, record_info(fields, java_enum_constant_not_present_exception)},
    {java_no_such_element_exception, <<"java.util.NoSuchElementException">>, record_info(fields, java_no_such_element_exception)},
    {java_input_mismatch_exception, <<"java.util.InputMismatchException">>, record_info(fields, java_input_mismatch_exception)},
    {dubbo_hessian_exception, <<"com.alibaba.com.caucho.hessian.HessianException">>, record_info(fields, dubbo_hessian_exception)},
    {java_wrong_method_type_exception, <<"java.lang.invoke.WrongMethodTypeException">>, record_info(fields, java_wrong_method_type_exception)},
    {java_incomplete_annotation_exception, <<"java.lang.annotation.IncompleteAnnotationException">>, record_info(fields, java_incomplete_annotation_exception)},
    {java_malformed_parameters_exception, <<"java.lang.reflect.MalformedParametersException">>, record_info(fields, java_malformed_parameters_exception)},
    {java_undeclared_throwable_exception, <<"java.lang.reflect.UndeclaredThrowableException">>, record_info(fields, java_undeclared_throwable_exception)},
    {dubbo_no_such_property_exception, <<"org.apache.dubbo.common.bytecode.NoSuchPropertyException">>, record_info(fields, dubbo_no_such_property_exception)},
    {java_mirrored_types_exception, <<"javax.lang.model.type.MirroredTypesException">>, record_info(fields, java_mirrored_types_exception)},
    {dubbo_no_such_method_exception, <<"org.apache.dubbo.common.bytecode.NoSuchMethodException">>, record_info(fields, dubbo_no_such_method_exception)},
    {java_unchecked_io_exception, <<"java.io.UncheckedIOException">>, record_info(fields, java_unchecked_io_exception)},
    {java_illegal_monitor_state_exception, <<"java.lang.IllegalMonitorStateException">>, record_info(fields, java_illegal_monitor_state_exception)},
    {java_negative_array_size_exception, <<"java.lang.NegativeArraySizeException">>, record_info(fields, java_negative_array_size_exception)},
    {java_unsupported_operation_exception, <<"java.lang.UnsupportedOperationException">>, record_info(fields, java_unsupported_operation_exception)},
    {java_empty_stack_exception, <<"java.util.EmptyStackException">>, record_info(fields, java_empty_stack_exception)},
    {java_illegal_state_exception, <<"java.lang.IllegalStateException">>, record_info(fields, java_illegal_state_exception)},
    {java_datetime_exception, <<"java.time.DateTimeException">>, record_info(fields, java_datetime_exception)},
    {java_completion_exception, <<"java.util.concurrent.CompletionException">>, record_info(fields, java_completion_exception)},
    {java_malformed_parameterized_type_exception, <<"java.lang.reflect.MalformedParameterizedTypeException">>, record_info(fields, java_malformed_parameterized_type_exception)},
    {dubbo_service_generic_exception, <<"org.apache.dubbo.rpc.service.GenericException">>, record_info(fields, dubbo_service_generic_exception)},
    {java_illegal_argument_exception, <<"java.lang.IllegalArgumentException">>, record_info(fields, java_illegal_argument_exception)},
    {java_missing_resource_pointer_exception, <<"java.util.MissingResourceException">>, record_info(fields, java_missing_resource_pointer_exception)}
  ].
fc2b107fdfbc011f1e4ff28f08e8c9f639dd4b07b3555a698ec3ccbc85af26b8 | nuvla/api-server | credential_infrastructure_service_registry_lifecycle_test.clj | (ns sixsq.nuvla.server.resources.credential-infrastructure-service-registry-lifecycle-test
(:require
[clojure.data.json :as json]
[clojure.test :refer [deftest is use-fixtures]]
[peridot.core :refer [content-type header request session]]
[sixsq.nuvla.server.app.params :as p]
[sixsq.nuvla.server.middleware.authn-info :refer [authn-info-header]]
[sixsq.nuvla.server.resources.credential :as credential]
[sixsq.nuvla.server.resources.credential-template :as cred-tpl]
[sixsq.nuvla.server.resources.credential-template-infrastructure-service-registry
:as cred-tpl-registry]
[sixsq.nuvla.server.resources.lifecycle-test-utils :as ltu]
[sixsq.nuvla.server.util.metadata-test-utils :as mdtu]))
;; Start the test HTTP server once for every test in this namespace.
(use-fixtures :once ltu/with-test-server-fixture)

;; Collection endpoint for credential resources.
(def base-uri (str p/service-context credential/resource-type))
;; Resource metadata must be registered for the registry credential
;; resource/template pair.
(deftest check-metadata
  (mdtu/check-metadata-exists (str credential/resource-type "-" cred-tpl-registry/resource-url)))
;; End-to-end lifecycle of a docker-registry credential: creation from
;; the template, visibility/ACL checks, content verification, the
;; "check" action, and deletion.
;; (Fix: restored the ";;" marker on the "creating a new credential as
;; anon" comment, which had been stripped and left as bare symbols.)
(deftest lifecycle
  (let [session (-> (ltu/ring-app)
                    session
                    (content-type "application/json"))
        session-admin (header session authn-info-header "group/nuvla-admin group/nuvla-admin group/nuvla-user group/nuvla-anon")
        session-user (header session authn-info-header "user/jane user/jane group/nuvla-user group/nuvla-anon")
        session-anon (header session authn-info-header "user/unknown user/unknown group/nuvla-anon")
        name-attr "name"
        description-attr "description"
        tags-attr ["one", "two"]
        username-value "my-username"
        password-value "my-password"
        parent-value "infrastructure-service/alpha"
        href (str cred-tpl/resource-type "/" cred-tpl-registry/method)
        template-url (str p/service-context cred-tpl/resource-type "/" cred-tpl-registry/method)
        template (-> session-admin
                     (request template-url)
                     (ltu/body->edn)
                     (ltu/is-status 200)
                     (ltu/body))
        create-import-no-href {:template (ltu/strip-unwanted-attrs template)}
        create-import-href {:name name-attr
                            :description description-attr
                            :tags tags-attr
                            :template {:href href
                                       :parent parent-value
                                       :username username-value
                                       :password password-value}}]

    ;; admin/user query should succeed but be empty (no credentials created yet)
    (doseq [session [session-admin session-user]]
      (-> session
          (request base-uri)
          (ltu/body->edn)
          (ltu/is-status 200)
          (ltu/is-count zero?)
          (ltu/is-operation-present :add)
          (ltu/is-operation-absent :delete)
          (ltu/is-operation-absent :edit)))

    ;; anonymous credential collection query should not succeed
    (-> session-anon
        (request base-uri)
        (ltu/body->edn)
        (ltu/is-status 403))

    ;; creating a new credential without reference will fail for all types of users
    (doseq [session [session-admin session-user session-anon]]
      (-> session
          (request base-uri
                   :request-method :post
                   :body (json/write-str create-import-no-href))
          (ltu/body->edn)
          (ltu/is-status 400)))

    ;; creating a new credential as anon will fail; expect 400 because href can not be accessed
    (-> session-anon
        (request base-uri
                 :request-method :post
                 :body (json/write-str create-import-href))
        (ltu/body->edn)
        (ltu/is-status 400))

    ;; create a credential as a normal user
    (let [resp (-> session-user
                   (request base-uri
                            :request-method :post
                            :body (json/write-str create-import-href))
                   (ltu/body->edn)
                   (ltu/is-status 201))
          id (ltu/body-resource-id resp)
          uri (-> resp
                  (ltu/location))
          abs-uri (str p/service-context uri)]

      ;; resource id and the uri (location) should be the same
      (is (= id uri))

      ;; admin/user should be able to see and delete credential
      (doseq [session [session-admin session-user]]
        (-> session
            (request abs-uri)
            (ltu/body->edn)
            (ltu/is-status 200)
            (ltu/is-operation-present :delete)
            (ltu/is-operation-present :edit)
            (ltu/is-operation-present :check)))

      ;; ensure credential contains correct information
      (let [credential (-> session-user
                           (request abs-uri)
                           (ltu/body->edn)
                           (ltu/is-status 200))
            {:keys [name description tags
                    username password
                    parent]} (ltu/body credential)]
        (is (= name name-attr))
        (is (= description description-attr))
        (is (= tags tags-attr))
        (is (= username username-value))
        (is (= password password-value))
        (is (= parent parent-value))

        ;; ensure that the check action works
        (let [op-url (ltu/get-op credential "check")
              check-url (str p/service-context op-url)]
          (-> session-user
              (request check-url
                       :request-method :post)
              (ltu/body->edn)
              (ltu/is-status 202))))

      ;; delete the credential
      (-> session-user
          (request abs-uri
                   :request-method :delete)
          (ltu/body->edn)
          (ltu/is-status 200)))))
| null | https://raw.githubusercontent.com/nuvla/api-server/a64a61b227733f1a0a945003edf5abaf5150a15c/code/test/sixsq/nuvla/server/resources/credential_infrastructure_service_registry_lifecycle_test.clj | clojure | admin/user query should succeed but be empty (no credentials created yet)
anonymous credential collection query should not succeed
creating a new credential without reference will fail for all types of users
expect 400 because href can not be accessed
create a credential as a normal user
resource id and the uri (location) should be the same
admin/user should be able to see and delete credential
ensure credential contains correct information
ensure that the check action works
delete the credential | (ns sixsq.nuvla.server.resources.credential-infrastructure-service-registry-lifecycle-test
(:require
[clojure.data.json :as json]
[clojure.test :refer [deftest is use-fixtures]]
[peridot.core :refer [content-type header request session]]
[sixsq.nuvla.server.app.params :as p]
[sixsq.nuvla.server.middleware.authn-info :refer [authn-info-header]]
[sixsq.nuvla.server.resources.credential :as credential]
[sixsq.nuvla.server.resources.credential-template :as cred-tpl]
[sixsq.nuvla.server.resources.credential-template-infrastructure-service-registry
:as cred-tpl-registry]
[sixsq.nuvla.server.resources.lifecycle-test-utils :as ltu]
[sixsq.nuvla.server.util.metadata-test-utils :as mdtu]))
;; Start the test HTTP server once for every test in this namespace.
(use-fixtures :once ltu/with-test-server-fixture)

;; Collection endpoint for credential resources.
(def base-uri (str p/service-context credential/resource-type))
;; Resource metadata must be registered for the registry credential
;; resource/template pair.
(deftest check-metadata
  (mdtu/check-metadata-exists (str credential/resource-type "-" cred-tpl-registry/resource-url)))
;; End-to-end lifecycle of a docker-registry credential: creation from
;; the template, visibility/ACL checks, content verification, the
;; "check" action, and deletion.
(deftest lifecycle
  (let [session (-> (ltu/ring-app)
                    session
                    (content-type "application/json"))
        session-admin (header session authn-info-header "group/nuvla-admin group/nuvla-admin group/nuvla-user group/nuvla-anon")
        session-user (header session authn-info-header "user/jane user/jane group/nuvla-user group/nuvla-anon")
        session-anon (header session authn-info-header "user/unknown user/unknown group/nuvla-anon")
        name-attr "name"
        description-attr "description"
        tags-attr ["one", "two"]
        username-value "my-username"
        password-value "my-password"
        parent-value "infrastructure-service/alpha"
        href (str cred-tpl/resource-type "/" cred-tpl-registry/method)
        template-url (str p/service-context cred-tpl/resource-type "/" cred-tpl-registry/method)
        template (-> session-admin
                     (request template-url)
                     (ltu/body->edn)
                     (ltu/is-status 200)
                     (ltu/body))
        create-import-no-href {:template (ltu/strip-unwanted-attrs template)}
        create-import-href {:name name-attr
                            :description description-attr
                            :tags tags-attr
                            :template {:href href
                                       :parent parent-value
                                       :username username-value
                                       :password password-value}}]

    ;; admin/user query should succeed but be empty (no credentials created yet)
    (doseq [session [session-admin session-user]]
      (-> session
          (request base-uri)
          (ltu/body->edn)
          (ltu/is-status 200)
          (ltu/is-count zero?)
          (ltu/is-operation-present :add)
          (ltu/is-operation-absent :delete)
          (ltu/is-operation-absent :edit)))

    ;; anonymous credential collection query should not succeed
    (-> session-anon
        (request base-uri)
        (ltu/body->edn)
        (ltu/is-status 403))

    ;; creating a new credential without reference will fail for all types of users
    (doseq [session [session-admin session-user session-anon]]
      (-> session
          (request base-uri
                   :request-method :post
                   :body (json/write-str create-import-no-href))
          (ltu/body->edn)
          (ltu/is-status 400)))

    ;; creating a new credential as anon will fail; expect 400 because href can not be accessed
    (-> session-anon
        (request base-uri
                 :request-method :post
                 :body (json/write-str create-import-href))
        (ltu/body->edn)
        (ltu/is-status 400))

    ;; create a credential as a normal user
    (let [resp (-> session-user
                   (request base-uri
                            :request-method :post
                            :body (json/write-str create-import-href))
                   (ltu/body->edn)
                   (ltu/is-status 201))
          id (ltu/body-resource-id resp)
          uri (-> resp
                  (ltu/location))
          abs-uri (str p/service-context uri)]

      ;; resource id and the uri (location) should be the same
      (is (= id uri))

      ;; admin/user should be able to see and delete credential
      (doseq [session [session-admin session-user]]
        (-> session
            (request abs-uri)
            (ltu/body->edn)
            (ltu/is-status 200)
            (ltu/is-operation-present :delete)
            (ltu/is-operation-present :edit)
            (ltu/is-operation-present :check)))

      ;; ensure credential contains correct information
      (let [credential (-> session-user
                           (request abs-uri)
                           (ltu/body->edn)
                           (ltu/is-status 200))
            {:keys [name description tags
                    username password
                    parent]} (ltu/body credential)]
        (is (= name name-attr))
        (is (= description description-attr))
        (is (= tags tags-attr))
        (is (= username username-value))
        (is (= password password-value))
        (is (= parent parent-value))

        ;; ensure that the check action works
        (let [op-url (ltu/get-op credential "check")
              check-url (str p/service-context op-url)]
          (-> session-user
              (request check-url
                       :request-method :post)
              (ltu/body->edn)
              (ltu/is-status 202))))

      ;; delete the credential
      (-> session-user
          (request abs-uri
                   :request-method :delete)
          (ltu/body->edn)
          (ltu/is-status 200)))))
|
67d3a981fb1a3d0a5dcc1cca5523f723b18c3aea79b206944d6447d500574468 | falsetru/htdp | 20.2.3.scm | (define-struct ir (name price))
; filter1 : (X Y -> boolean) (listof X) Y -> (listof X)
; Generic filter: keeps every element e of alon for which
; (rel-op e t) holds.  Structural recursion on alon.
(define (filter1 rel-op alon t)
  (cond
    [(empty? alon) empty]
    [(rel-op (first alon) t)
     (cons (first alon)
           (filter1 rel-op (rest alon) t))]
    [else
     (filter1 rel-op (rest alon) t)]))
; filter-non-car : (listof ir) -> (listof ir)
; Removes every inventory record whose name is 'car.
(define (filter-non-car xs)
  (filter1 ne-ir? xs 'car))

; ne-ir? : ir symbol -> boolean
; True when the record's name differs from the symbol t.
(define (ne-ir? ir t)
  (not (symbol=? (ir-name ir) t)))
(require rackunit)
(require rackunit/text-ui)
; rackunit tests for filter-non-car.
(define filter-non-car-tests
  (test-suite "Test for filter-non-car"
    (test-case ""
      ; An empty inventory stays empty.
      (check-equal? (filter-non-car empty) empty)
      (define ir-list (list (make-ir 'car 10)
                            (make-ir 'toy 2)
                            (make-ir 'pc 6)
                            (make-ir 'car 11)))
      (define non-car-ir-list (filter-non-car ir-list))
      ; Both 'car records are dropped; order of the rest is preserved.
      (check-equal? (length non-car-ir-list) 2)
      (check-equal? (ir-name (first non-car-ir-list)) 'toy)
      (check-equal? (ir-name (second non-car-ir-list)) 'pc)
      ; An inventory of only 'car records filters to empty.
      (check-equal? (filter-non-car (list (make-ir 'car 12)
                                          (make-ir 'car 11)))
                    empty)
      )
    ))

(run-tests filter-non-car-tests)
| null | https://raw.githubusercontent.com/falsetru/htdp/4cdad3b999f19b89ff4fa7561839cbcbaad274df/20/20.2.3.scm | scheme | (define-struct ir (name price))
(define (filter1 rel-op alon t)
(cond
[(empty? alon) empty]
[(rel-op (first alon) t)
(cons (first alon)
(filter1 rel-op (rest alon) t))]
[else
(filter1 rel-op (rest alon) t)]))
(define (filter-non-car xs)
(filter1 ne-ir? xs 'car))
(define (ne-ir? ir t)
(not (symbol=? (ir-name ir) t)))
(require rackunit)
(require rackunit/text-ui)
(define filter-non-car-tests
(test-suite "Test for filter-non-car"
(test-case ""
(check-equal? (filter-non-car empty) empty)
(define ir-list (list (make-ir 'car 10)
(make-ir 'toy 2)
(make-ir 'pc 6)
(make-ir 'car 11)))
(define non-car-ir-list (filter-non-car ir-list))
(check-equal? (length non-car-ir-list) 2)
(check-equal? (ir-name (first non-car-ir-list)) 'toy)
(check-equal? (ir-name (second non-car-ir-list)) 'pc)
(check-equal? (filter-non-car (list (make-ir 'car 12)
(make-ir 'car 11)))
empty)
)
))
(run-tests filter-non-car-tests)
| |
1b5832d20f472810a1ee53889ea734dfdc30f4a9586c4988f424ce8d8a303cd9 | processone/grapherl | graph_utils.erl | -module(graph_utils).
-export([get_args/2
,get_args/3
,open_port/2
,get_socket/2
,to_binary/1
,mean/1
,gauge/1
,counter/1
,binary_to_realNumber/1
,run_threads/3
]).
-include_lib("grapherl.hrl").
%%%===================================================================
%%% API functions
%%%===================================================================
%% TL : TupleList
%% Gets the values of the keys in ParamList from the 2-tuple list TL.
%% (Fix: restored the "%%" markers on two comment lines that had been
%% stripped, leaving bare text in the source.)
get_args(TL, ParamList) ->
  get_args(TL, ParamList, false).

%% Returns {ok, Values} with Values in the order of ParamList.  When a
%% key is missing: {error_params_missing, Reason} if Default is false,
%% otherwise {ok, Default}.
get_args(TL, ParamList, Default) ->
  try get_args0(TL, ParamList, []) of
    Params -> {ok, Params}
  catch
    error:Error ->
      case Default of
        false ->
          {error_params_missing, Error};
        _ ->
          {ok, Default}
      end
  end.

%% helper func for get_args/3; fails with badmatch when a key is absent
get_args0(_TL, [], Acc) ->
  lists:reverse(Acc);
get_args0(TL, [Key|T], Acc) ->
  {Key, Val} = lists:keyfind(Key, 1, TL),
  get_args0(TL, T, [Val| Acc]).
%% NOT IN USE
%% Opens a UDP socket or a listening TCP socket on Port.
open_port(udp, Port) ->
  gen_udp:open(Port, [{active, true}, binary]);
open_port(tcp, Port) ->
  gen_tcp:listen(Port, [binary, {packet, 4}, {active, false},
                        {reuseaddr, true}] ).

%% Scans a list of socket property lists and returns {ok, Socket} for
%% the first entry whose {type, Type} tuple matches the requested Type;
%% {error, false} when no entry matches.
get_socket(_Type, []) ->
  {error, false};
get_socket(Type, [Sock | L]) ->
  case lists:keyfind(type,1,Sock) of
    {type, Type} ->
      {socket, Socket} = lists:keyfind(socket, 1, Sock),
      {ok, Socket};
    _ ->
      get_socket(Type, L)
  end.
%% Coerces a scalar term (binary, integer, float, string/list or atom)
%% to a binary.  Guards are mutually exclusive, so clause order does
%% not affect behaviour.
to_binary(V) when is_integer(V) -> erlang:integer_to_binary(V);
to_binary(V) when is_float(V) -> erlang:float_to_binary(V);
to_binary(V) when is_atom(V) -> erlang:atom_to_binary(V, utf8);
to_binary(V) when is_list(V) -> erlang:list_to_binary(V);
to_binary(V) when is_binary(V) -> V.
%% mean/1: arithmetic mean of a list of numbers (or binaries encoding
%% numbers).  Returns integer 0 for an empty list, otherwise the mean
%% encoded as a binary via float_to_binary/1.
%% NOTE(review): the empty-list return (integer 0) has a different type
%% than the non-empty return (binary) -- confirm callers handle both.
mean(List) ->
  mean(List, 0, 0).

mean([], 0, _) -> 0;
mean([], Count, Acc) ->
  float_to_binary(Acc/Count * 1.0);
mean([E | Rest], Count, Acc) when is_binary(E) ->
  %% Decode a binary element in place, then re-process it.
  mean([binary_to_realNumber(E) | Rest], Count, Acc);
mean([E | Rest], Count, Acc) when is_integer(E) orelse is_float(E) ->
  mean(Rest, Count + 1, E + Acc).

%% gauge/1: mean of a list of gauge samples; the list is expected to
%% contain binary elements.  A sample prefixed with "+" or "-" is a
%% delta applied to the previous absolute value (see process_gauge/2).
gauge(List) ->
  gauge(List, 0, 0, 0).

gauge([], 0, _, _) -> 0;
gauge([], Count, _Prev, Acc) ->
  float_to_binary(Acc/Count * 1.0);
gauge([E | Rest], Count, Prev, Acc) when is_binary(E) ->
  gauge([process_gauge(E, Prev) | Rest], Count, Prev, Acc);
gauge([E | Rest], Count, _Prev, Acc) when is_integer(E) orelse is_float(E) ->
  gauge(Rest, Count + 1, E, E + Acc).

%% Decodes one gauge sample; "+N"/"-N" are deltas relative to Prev
%% (binary_to_realNumber already yields a negative number for "-N",
%% hence Prev + ValR in both branches).
process_gauge(Val, Prev) ->
  ValR = binary_to_realNumber(Val),
  case Val of
    <<"+", _R/binary>> -> Prev + ValR;
    <<"-", _R/binary>> -> Prev + ValR;
    _ -> ValR
  end.

%% counter/1: sum of a list of numbers (or binaries encoding numbers).
%% Returns integer 0 for an empty list, otherwise the sum as a binary.
counter(List) ->
  counter(List, 0).

counter([], 0) -> 0;
counter([], Acc) -> realNumber_to_binary(Acc);
counter([E | Rest], Acc) when is_binary(E) ->
  counter([binary_to_realNumber(E) | Rest], Acc);
counter([E | Rest], Acc) when is_integer(E) orelse is_float(E) ->
  counter(Rest, E + Acc).

%% Decodes a binary as an integer, falling back to float.
binary_to_realNumber(Num) ->
  try binary_to_integer(Num) catch _:_ -> binary_to_float(Num) end.

%% Encodes a number as a binary; float_to_binary/1 raises badarg for an
%% integer argument, in which case we fall back to integer_to_binary/1.
realNumber_to_binary(Num) ->
  try float_to_binary(Num) catch _:_ -> integer_to_binary(Num) end.
%% Applies Fun to every element of Args, running at most N monitored
%% worker processes concurrently, then waits for the remaining workers.
%% Always returns ok.
%% N    : number of worker processes allowed to run concurrently
%% Args : [Arg1, ...]
%% Fun  : fun(Arg) -> op end
run_threads(Threads, Args, Fun) ->
  run_threads(Threads, 0, Args, Fun).

run_threads(_Tot_threads, Busy_threads, [], _Fun) ->
  wait(Busy_threads);
run_threads(Tot_threads, Busy_threads, [Arg | Rest], Fun) ->
  if
    Busy_threads =:= Tot_threads ->
      %% Pool is full: wait for one worker to finish before spawning
      %% the next.  (BUG FIX: the original guard also required
      %% Rest =/= [], which allowed the last argument to spawn a
      %% Tot_threads + 1'th concurrent worker.)
      wait(1),
      run_threads(Tot_threads, Busy_threads - 1, [Arg | Rest], Fun);
    true ->
      {_Pid, _Ref} = erlang:spawn_monitor(fun() -> Fun(Arg) end),
      run_threads(Tot_threads, Busy_threads + 1, Rest, Fun)
  end.

%% Waits for N 'DOWN' messages from the monitored workers; gives up
%% after 10s of silence.  NOTE(review): slow workers are then silently
%% abandoned -- confirm this best-effort semantics is intended.
wait(0) ->
  ok;
wait(N) ->
  receive
    {'DOWN', _, _, _, _} ->
      wait(N - 1)
  after
    10000 -> ok
  end.
| null | https://raw.githubusercontent.com/processone/grapherl/a45174ff9ffc8b68877b6985d6d05c4fbb868620/grapherl/apps/grapherl/src/graph_utils.erl | erlang | ===================================================================
API functions
===================================================================
TL : TupleList
NOT IN USE
the list is expected to contains binary elements
N : numbers of thread
Fun : fun({Key,Value}) -> op end | -module(graph_utils).
-export([get_args/2
,get_args/3
,open_port/2
,get_socket/2
,to_binary/1
,mean/1
,gauge/1
,counter/1
,binary_to_realNumber/1
,run_threads/3
]).
-include_lib("grapherl.hrl").
get specified params in ParamList from TL
%% Gets the values of the keys in ParamList from the 2-tuple list TL.
get_args(TL, ParamList) ->
  get_args(TL, ParamList, false).

%% Returns {ok, Values} with Values in the order of ParamList.  When a
%% key is missing: {error_params_missing, Reason} if Default is false,
%% otherwise {ok, Default}.
get_args(TL, ParamList, Default) ->
  try get_args0(TL, ParamList, []) of
    Params -> {ok, Params}
  catch
    error:Error ->
      case Default of
        false ->
          {error_params_missing, Error};
        _ ->
          {ok, Default}
      end
  end.

%% helper func for get_args/3; fails with badmatch when a key is absent
%% (fix: restored the "%%" marker this comment line had lost)
get_args0(_TL, [], Acc) ->
  lists:reverse(Acc);
get_args0(TL, [Key|T], Acc) ->
  {Key, Val} = lists:keyfind(Key, 1, TL),
  get_args0(TL, T, [Val| Acc]).
%% NOT IN USE
%% Opens a UDP socket or a listening TCP socket on Port.
open_port(udp, Port) ->
  gen_udp:open(Port, [{active, true}, binary]);
open_port(tcp, Port) ->
  gen_tcp:listen(Port, [binary, {packet, 4}, {active, false},
                        {reuseaddr, true}] ).

%% Scans a list of socket property lists and returns {ok, Socket} for
%% the first entry whose {type, Type} tuple matches; {error, false}
%% when no entry matches.
get_socket(_Type, []) ->
  {error, false};
get_socket(Type, [Sock | L]) ->
  case lists:keyfind(type,1,Sock) of
    {type, Type} ->
      {socket, Socket} = lists:keyfind(socket, 1, Sock),
      {ok, Socket};
    _ ->
      get_socket(Type, L)
  end.

%% Coerces a scalar term (binary, integer, float, string/list or atom)
%% to a binary.
to_binary(Val) when is_binary(Val) ->
  Val;
to_binary(Val) when is_integer(Val) ->
  erlang:integer_to_binary(Val);
to_binary(Val) when is_float(Val) ->
  erlang:float_to_binary(Val);
to_binary(Val) when is_list(Val) ->
  erlang:list_to_binary(Val);
to_binary(Val) when is_atom(Val) ->
  erlang:atom_to_binary(Val, utf8).

%% mean/1: arithmetic mean of a list of numbers (or binaries encoding
%% numbers).  Returns integer 0 for an empty list, otherwise the mean
%% as a binary via float_to_binary/1.
%% NOTE(review): the empty-list return (integer 0) has a different type
%% than the non-empty return (binary) -- confirm callers handle both.
mean(List) ->
  mean(List, 0, 0).

mean([], 0, _) -> 0;
mean([], Count, Acc) ->
  float_to_binary(Acc/Count * 1.0);
mean([E | Rest], Count, Acc) when is_binary(E) ->
  %% Decode a binary element in place, then re-process it.
  mean([binary_to_realNumber(E) | Rest], Count, Acc);
mean([E | Rest], Count, Acc) when is_integer(E) orelse is_float(E) ->
  mean(Rest, Count + 1, E + Acc).

%% gauge/1: mean of a list of gauge samples (binaries); "+N"/"-N"
%% samples are deltas applied to the previous absolute value.
gauge(List) ->
  gauge(List, 0, 0, 0).

gauge([], 0, _, _) -> 0;
gauge([], Count, _Prev, Acc) ->
  float_to_binary(Acc/Count * 1.0);
gauge([E | Rest], Count, Prev, Acc) when is_binary(E) ->
  gauge([process_gauge(E, Prev) | Rest], Count, Prev, Acc);
gauge([E | Rest], Count, _Prev, Acc) when is_integer(E) orelse is_float(E) ->
  gauge(Rest, Count + 1, E, E + Acc).

%% Decodes one gauge sample; binary_to_realNumber already yields a
%% negative number for "-N", hence Prev + ValR in both delta branches.
process_gauge(Val, Prev) ->
  ValR = binary_to_realNumber(Val),
  case Val of
    <<"+", _R/binary>> -> Prev + ValR;
    <<"-", _R/binary>> -> Prev + ValR;
    _ -> ValR
  end.

%% counter/1: sum of a list of numbers (or binaries encoding numbers).
%% Returns integer 0 for an empty list, otherwise the sum as a binary.
counter(List) ->
  counter(List, 0).

counter([], 0) -> 0;
counter([], Acc) -> realNumber_to_binary(Acc);
counter([E | Rest], Acc) when is_binary(E) ->
  counter([binary_to_realNumber(E) | Rest], Acc);
counter([E | Rest], Acc) when is_integer(E) orelse is_float(E) ->
  counter(Rest, E + Acc).

%% Decodes a binary as an integer, falling back to float.
binary_to_realNumber(Num) ->
  try binary_to_integer(Num) catch _:_ -> binary_to_float(Num) end.

%% Encodes a number as a binary; float_to_binary/1 raises badarg for an
%% integer, in which case we fall back to integer_to_binary/1.
realNumber_to_binary(Num) ->
  try float_to_binary(Num) catch _:_ -> integer_to_binary(Num) end.
%% Applies Fun to every element of Args, running at most N monitored
%% worker processes concurrently, then waits for the remaining workers.
%% Always returns ok.
%% Args : [Arg1, ...]
%% (Fix: restored the "%%" marker on the stripped comment line above.)
run_threads(Threads, Args, Fun) ->
  run_threads(Threads, 0, Args, Fun).

run_threads(_Tot_threads, Busy_threads, [], _Fun) ->
  wait(Busy_threads);
run_threads(Tot_threads, Busy_threads, [Arg | Rest], Fun) ->
  if
    Busy_threads =:= Tot_threads ->
      %% Pool is full: wait for one worker to finish before spawning
      %% the next.  (BUG FIX: the original guard also required
      %% Rest =/= [], which allowed the last argument to spawn a
      %% Tot_threads + 1'th concurrent worker.)
      wait(1),
      run_threads(Tot_threads, Busy_threads - 1, [Arg | Rest], Fun);
    true ->
      {_Pid, _Ref} = erlang:spawn_monitor(fun() -> Fun(Arg) end),
      run_threads(Tot_threads, Busy_threads + 1, Rest, Fun)
  end.

%% Waits for N 'DOWN' messages from the monitored workers; gives up
%% after 10s of silence.  NOTE(review): slow workers are then silently
%% abandoned -- confirm this best-effort semantics is intended.
wait(0) ->
  ok;
wait(N) ->
  receive
    {'DOWN', _, _, _, _} ->
      wait(N - 1)
  after
    10000 -> ok
  end.
|
9d0f386aac670078faadac8995aa997a1bd2c6338cc0d481e3f33858922a15fd | aeternity/aesim | aesim_config.erl | -module(aesim_config).
%=== INCLUDES ==================================================================
-include_lib("stdlib/include/assert.hrl").
-include("aesim_types.hrl").
%=== EXPORTS ===================================================================
-export([new/0]).
-export([get/2]).
-export([get_option/4]).
-export([parse/3, parse/4]).
-export([print_config/1]).
%=== TYPES =====================================================================
-type opt_name() :: atom().
%% Supported option value types.
-type opt_type() :: string | integer | integer_infinity | atom | time
                  | time_infinity | boolean | number.
%% Option specification: {Name, Type, DefaultValue}.
-type spec() :: {opt_name(), opt_type(), term()}.
-type specs() :: [spec()].
%% Stored option: {Type, IsDefault, Source, ParsedValue}; Source is the
%% raw value converted to a string (see add_config/6, print_config/1).
-type option() :: {opt_type(), boolean(), term(), term()}.
%% Either a direct parser function, or {ConfigKey, FunName} resolved at
%% parse time to Mod:FunName/2 where Mod is the value of ConfigKey.
-type parser_def() :: {atom(), atom()} | fun((state(), map()) -> state()).
-type parser_defs() :: [parser_def()].
%% Config store: option name -> stored option.
-type state() :: #{
  opt_name() => option()
}.

-export_type([state/0]).
%=== API FUNCTIONS =============================================================
%% Creates an empty configuration store.
-spec new() -> state().
new() ->
  #{}.

%% Returns the parsed value of option Key from the simulation state;
%% fails with {unknown_option, Key} if the option was never parsed.
-spec get(sim(), opt_name()) -> term().
get(Sim, Key) ->
  #{config := State} = Sim,
  case maps:find(Key, State) of
    error -> error({unknown_option, Key});
    {ok, {_, _, _, Value}} -> Value
  end.
%% Parses options Opts against Specs with no extra parser functions.
-spec parse(sim(), map(), specs()) -> sim().
parse(Sim, Opts, Specs) ->
  parse(Sim, Opts, Specs, []).

%% Parses Opts against Specs, storing each option value (or its
%% default) in the simulation's config map, then runs the extra parser
%% definitions.  A {Key, FunName} definition is resolved to
%% Mod:FunName(Opts, Sim) where Mod is the already-parsed value of
%% option Key.
-spec parse(sim(), map(), specs(), parser_defs()) -> sim().
parse(Sim, Opts, Specs, ParserFuns) ->
  #{config := State} = Sim,
  State2 = lists:foldl(fun
    ({Key, Type, Default}, St) ->
      {IsDefault, Source, Value} = get_option(Opts, Key, Type, Default),
      add_config(St, Key, Type, IsDefault, Source, Value)
  end, State, Specs),
  Sim2 = Sim#{config := State2},
  lists:foldl(fun
    (F, S) when is_function(F) -> F(Opts, S);
    ({Key, FunName}, S) ->
      Mod = get(S, Key),
      Mod:FunName(Opts, S)
  end, Sim2, ParserFuns).
%% Prints every stored option, sorted by name, as
%% "name: source type (default)"; "(default)" marks defaulted options.
-spec print_config(sim()) -> ok.
print_config(Sim) ->
  #{config := State} = Sim,
  lists:foreach(fun({N, {T, D, S, _}}) ->
    DefStr = if D -> "(default)"; true -> "" end,
    aesim_simulator:print("~-22s: ~27s ~-17w ~9s~n", [N, S, T, DefStr], Sim)
  end, lists:keysort(1, maps:to_list(State))).
%=== INTERNAL FUNCTIONS ========================================================
%% Stores option Name.  An existing entry is overwritten only when it
%% was a default; an explicitly-set value is never replaced.
add_config(State, Name, Type, IsDefault, Option, Value) ->
  %% Keep the raw option as a string for print_config/1.
  Source = convert(string, Name, Option),
  case maps:find(Name, State) of
    error ->
      State#{Name => {Type, IsDefault, Source, Value}};
    {ok, {_, true, _, _}} ->
      State#{Name => {Type, IsDefault, Source, Value}};
    {ok, _} ->
      State
  end.

%% Looks up Key in Opts; returns {IsDefault, RawValue, ParsedValue},
%% falling back to (and converting) Default when Key is absent.
get_option(Opts, Key, Type, Default) ->
  case maps:find(Key, Opts) of
    {ok, Value} -> {false, Value, convert(Type, Key, Value)};
    error -> {true, Default, convert(Type, Key, Default)}
  end.
convert(string, _Key, Value) when is_list(Value) -> Value;
convert(_Type, Key, "") -> error({bad_option, {Key, ""}});
convert(atom, _Key, Value) when is_atom(Value) -> Value;
convert(integer, _Key, Value) when is_integer(Value) -> Value;
convert(number, _Key, Value) when is_number(Value) -> Value;
convert(boolean, _Key, "true") -> true;
convert(boolean, _Key, "false") -> false;
convert(boolean, _Key, "1") -> true;
convert(boolean, _Key, "0") -> false;
convert(boolean, _Key, true) -> true;
convert(boolean, _Key, false) -> false;
convert(boolean, _Key, 1) -> true;
convert(boolean, _Key, 0) -> false;
convert(integer_infinity, _Key, Value) when is_integer(Value) -> Value;
convert(integer_infinity, _Key, infinity) -> infinity;
convert(integer_infinity, _Key, "infinity") -> infinity;
convert(time, _Key, Value) when is_integer(Value), Value >= 0 -> Value;
convert(time_infinity, _Key, Value) when is_integer(Value), Value >= 0 -> Value;
convert(time_infinity, _Key, infinity) -> infinity;
convert(time_infinity, _Key, "infinity") -> infinity;
convert(string, _Key, Value) when is_atom(Value) ->
atom_to_list(Value);
convert(string, _Key, Value) when is_integer(Value) ->
integer_to_list(Value);
convert(string, _Key, Value) when is_number(Value) ->
aesim_utils:format("~w", [Value]);
convert(atom, _Key, Value) when is_list(Value) ->
list_to_atom(Value);
convert(time, Key, Value) when is_list(Value) ->
parse_time(Key, Value);
convert(time_infinity, Key, Value) when is_list(Value) ->
parse_time(Key, Value);
convert(Type, Key, Value)
when is_list(Value), Type =:= integer orelse Type =:= integer_infinity ->
try
list_to_integer(Value)
catch
_:badarg -> error({bad_option, Key, Value})
end;
convert(number, Key, Value) ->
try
list_to_integer(Value)
catch
_:badarg ->
try
list_to_float(Value)
catch
_:badarg ->
error({bad_option, Key, Value})
end
end;
convert(_Type, Key, Value) ->
error({bad_option, Key, Value}).
parse_time(Key, Value) -> parse_days({Key, Value}, Value, 0).
parse_days(Original, "", _Acc) -> error({bad_option, Original});
parse_days(Original, Value, Acc) ->
case re:run(Value, "^([0-9]*)d(.*)$", [{capture, all_but_first, list}]) of
nomatch -> parse_hours(Original, Value, Acc);
{match, [Days, Rest]} ->
Acc2 = Acc + list_to_integer(Days) * 24 * 60 * 60 * 1000,
parse_hours(Original, Rest, Acc2)
end.
%% Parses the optional hours component of a time specification.
%% An empty remainder at this stage is valid and yields the accumulated
%% total in milliseconds.
parse_hours(_Original, "", Acc) -> Acc;
parse_hours(Original, Value, Acc) ->
  %% Require at least one digit before 'h': with "[0-9]*" an input such as
  %% "h5" matched with an empty capture and list_to_integer("") crashed
  %% with an unhandled badarg instead of a descriptive bad_option error.
  case re:run(Value, "^([0-9]+)h(.*)$", [{capture, all_but_first, list}]) of
    nomatch -> parse_minutes(Original, Value, Acc);
    {match, [Hours, Rest]} ->
      Acc2 = Acc + list_to_integer(Hours) * 60 * 60 * 1000,
      parse_minutes(Original, Rest, Acc2)
  end.
%% Parses the optional minutes component of a time specification.
%% An empty remainder at this stage is valid and yields the accumulated
%% total in milliseconds.
parse_minutes(_Original, "", Acc) -> Acc;
parse_minutes(Original, Value, Acc) ->
  %% Require at least one digit before 'm': with "[0-9]*" an input such as
  %% "m5" matched with an empty capture and list_to_integer("") crashed
  %% with an unhandled badarg instead of a descriptive bad_option error.
  case re:run(Value, "^([0-9]+)m(.*)$", [{capture, all_but_first, list}]) of
    nomatch -> parse_seconds(Original, Value, Acc);
    {match, [Minutes, Rest]} ->
      Acc2 = Acc + list_to_integer(Minutes) * 60 * 1000,
      parse_seconds(Original, Rest, Acc2)
  end.
%% Parses the optional seconds component of a time specification.
%% An empty remainder at this stage is valid and yields the accumulated
%% total in milliseconds.
parse_seconds(_Original, "", Acc) -> Acc;
parse_seconds(Original, Value, Acc) ->
  %% Require at least one digit before 's': with "[0-9]*" an input such as
  %% "s5" matched with an empty capture and list_to_integer("") crashed
  %% with an unhandled badarg instead of a descriptive bad_option error.
  case re:run(Value, "^([0-9]+)s(.*)$", [{capture, all_but_first, list}]) of
    nomatch -> parse_milliseconds(Original, Value, Acc);
    {match, [Seconds, Rest]} ->
      Acc2 = Acc + list_to_integer(Seconds) * 1000,
      parse_milliseconds(Original, Rest, Acc2)
  end.
%% Parses the trailing milliseconds component of a time specification.
%% An empty remainder is valid and yields the accumulated total; anything
%% other than a plain run of digits is rejected with a bad_option error.
%% (The first clause guarantees Value is non-empty when the regex runs.)
parse_milliseconds(_Original, "", Acc) -> Acc;
parse_milliseconds(Original, Value, Acc) ->
  Captured = re:run(Value, "^([0-9]*)$", [{capture, all_but_first, list}]),
  case Captured of
    {match, [Digits]} -> Acc + list_to_integer(Digits);
    nomatch -> error({bad_option, Original})
  end.
=== EXPORTS ===================================================================
=== TYPES =====================================================================
=== API FUNCTIONS =============================================================
=== INTERNAL FUNCTIONS ======================================================== | -module(aesim_config).
-include_lib("stdlib/include/assert.hrl").
-include("aesim_types.hrl").
-export([new/0]).
-export([get/2]).
-export([get_option/4]).
-export([parse/3, parse/4]).
-export([print_config/1]).
-type opt_name() :: atom().
-type opt_type() :: string | integer | integer_infinity | atom | time
| time_infinity | boolean | number.
-type spec() :: {opt_name(), opt_type(), term()}.
-type specs() :: [spec()].
-type option() :: {opt_type(), boolean(), term(), term()}.
-type parser_def() :: {atom(), atom()} | fun((state(), map()) -> state()).
-type parser_defs() :: [parser_def()].
-type state() :: #{
opt_name() => option()
}.
-export_type([state/0]).
-spec new() -> state().
new() ->
#{}.
-spec get(sim(), opt_name()) -> term().
get(Sim, Key) ->
#{config := State} = Sim,
case maps:find(Key, State) of
error -> error({unknown_option, Key});
{ok, {_, _, _, Value}} -> Value
end.
-spec parse(sim(), map(), specs()) -> sim().
parse(Sim, Opts, Specs) ->
parse(Sim, Opts, Specs, []).
-spec parse(sim(), map(), specs(), parser_defs()) -> sim().
parse(Sim, Opts, Specs, ParserFuns) ->
#{config := State} = Sim,
State2 = lists:foldl(fun
({Key, Type, Default}, St) ->
{IsDefault, Source, Value} = get_option(Opts, Key, Type, Default),
add_config(St, Key, Type, IsDefault, Source, Value)
end, State, Specs),
Sim2 = Sim#{config := State2},
lists:foldl(fun
(F, S) when is_function(F) -> F(Opts, S);
({Key, FunName}, S) ->
Mod = get(S, Key),
Mod:FunName(Opts, S)
end, Sim2, ParserFuns).
-spec print_config(sim()) -> ok.
print_config(Sim) ->
#{config := State} = Sim,
lists:foreach(fun({N, {T, D, S, _}}) ->
DefStr = if D -> "(default)"; true -> "" end,
aesim_simulator:print("~-22s: ~27s ~-17w ~9s~n", [N, S, T, DefStr], Sim)
end, lists:keysort(1, maps:to_list(State))).
add_config(State, Name, Type, IsDefault, Option, Value) ->
Source = convert(string, Name, Option),
case maps:find(Name, State) of
error ->
State#{Name => {Type, IsDefault, Source, Value}};
{ok, {_, true, _, _}} ->
State#{Name => {Type, IsDefault, Source, Value}};
{ok, _} ->
State
end.
get_option(Opts, Key, Type, Default) ->
case maps:find(Key, Opts) of
{ok, Value} -> {false, Value, convert(Type, Key, Value)};
error -> {true, Default, convert(Type, Key, Default)}
end.
convert(string, _Key, Value) when is_list(Value) -> Value;
convert(_Type, Key, "") -> error({bad_option, {Key, ""}});
convert(atom, _Key, Value) when is_atom(Value) -> Value;
convert(integer, _Key, Value) when is_integer(Value) -> Value;
convert(number, _Key, Value) when is_number(Value) -> Value;
convert(boolean, _Key, "true") -> true;
convert(boolean, _Key, "false") -> false;
convert(boolean, _Key, "1") -> true;
convert(boolean, _Key, "0") -> false;
convert(boolean, _Key, true) -> true;
convert(boolean, _Key, false) -> false;
convert(boolean, _Key, 1) -> true;
convert(boolean, _Key, 0) -> false;
convert(integer_infinity, _Key, Value) when is_integer(Value) -> Value;
convert(integer_infinity, _Key, infinity) -> infinity;
convert(integer_infinity, _Key, "infinity") -> infinity;
convert(time, _Key, Value) when is_integer(Value), Value >= 0 -> Value;
convert(time_infinity, _Key, Value) when is_integer(Value), Value >= 0 -> Value;
convert(time_infinity, _Key, infinity) -> infinity;
convert(time_infinity, _Key, "infinity") -> infinity;
convert(string, _Key, Value) when is_atom(Value) ->
atom_to_list(Value);
convert(string, _Key, Value) when is_integer(Value) ->
integer_to_list(Value);
convert(string, _Key, Value) when is_number(Value) ->
aesim_utils:format("~w", [Value]);
convert(atom, _Key, Value) when is_list(Value) ->
list_to_atom(Value);
convert(time, Key, Value) when is_list(Value) ->
parse_time(Key, Value);
convert(time_infinity, Key, Value) when is_list(Value) ->
parse_time(Key, Value);
convert(Type, Key, Value)
when is_list(Value), Type =:= integer orelse Type =:= integer_infinity ->
try
list_to_integer(Value)
catch
_:badarg -> error({bad_option, Key, Value})
end;
convert(number, Key, Value) ->
try
list_to_integer(Value)
catch
_:badarg ->
try
list_to_float(Value)
catch
_:badarg ->
error({bad_option, Key, Value})
end
end;
convert(_Type, Key, Value) ->
error({bad_option, Key, Value}).
parse_time(Key, Value) -> parse_days({Key, Value}, Value, 0).
parse_days(Original, "", _Acc) -> error({bad_option, Original});
parse_days(Original, Value, Acc) ->
case re:run(Value, "^([0-9]*)d(.*)$", [{capture, all_but_first, list}]) of
nomatch -> parse_hours(Original, Value, Acc);
{match, [Days, Rest]} ->
Acc2 = Acc + list_to_integer(Days) * 24 * 60 * 60 * 1000,
parse_hours(Original, Rest, Acc2)
end.
parse_hours(_Original, "", Acc) -> Acc;
parse_hours(Original, Value, Acc) ->
case re:run(Value, "^([0-9]*)h(.*)$", [{capture, all_but_first, list}]) of
nomatch -> parse_minutes(Original, Value, Acc);
{match, [Hours, Rest]} ->
Acc2 = Acc + list_to_integer(Hours) * 60 * 60 * 1000,
parse_minutes(Original, Rest, Acc2)
end.
parse_minutes(_Original, "", Acc) -> Acc;
parse_minutes(Original, Value, Acc) ->
case re:run(Value, "^([0-9]*)m(.*)$", [{capture, all_but_first, list}]) of
nomatch -> parse_seconds(Original, Value, Acc);
{match, [Minutes, Rest]} ->
Acc2 = Acc + list_to_integer(Minutes) * 60 * 1000,
parse_seconds(Original, Rest, Acc2)
end.
parse_seconds(_Original, "", Acc) -> Acc;
parse_seconds(Original, Value, Acc) ->
case re:run(Value, "^([0-9]*)s(.*)$", [{capture, all_but_first, list}]) of
nomatch -> parse_milliseconds(Original, Value, Acc);
{match, [Seconds, Rest]} ->
Acc2 = Acc + list_to_integer(Seconds) * 1000,
parse_milliseconds(Original, Rest, Acc2)
end.
parse_milliseconds(_Original, "", Acc) -> Acc;
parse_milliseconds(Original, Value, Acc) ->
case re:run(Value, "^([0-9]*)$", [{capture, all_but_first, list}]) of
nomatch -> error({bad_option, Original});
{match, [Milliseconds]} ->
Acc + list_to_integer(Milliseconds)
end. |
778d56aa9d6bb43291aecdf4141be3c4e81eebf77e94b6134500ca1c28c974bc | lispm/FRL | declare.lisp | ;;;***********************************************************************
Compiler Initilization for FRL files ( execpt util and lisp ) .
;;;***********************************************************************
(include ldeclar)
(eval-when (compile)
(setq interpret-mode nil)
(frl-basic-macro-load))
| null | https://raw.githubusercontent.com/lispm/FRL/1a2aadf71062a89474b1164e4539911011b3b63e/declare.lisp | lisp | ***********************************************************************
*********************************************************************** | Compiler Initilization for FRL files ( execpt util and lisp ) .
(include ldeclar)
(eval-when (compile)
(setq interpret-mode nil)
(frl-basic-macro-load))
|
1ab16b553e9aae667ffd14b201701cb88d132c7b6b8e317e4a6f2a4f95caaeb6 | mrkkrp/megaparsec | Main.hs | module Main (main) where
import Control.DeepSeq
import Criterion.Main
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified ParsersBench.CSV.Attoparsec as A
import qualified ParsersBench.CSV.Megaparsec as M
import qualified ParsersBench.Json.Attoparsec as A
import qualified ParsersBench.Json.Megaparsec as M
import qualified ParsersBench.Log.Attoparsec as A
import qualified ParsersBench.Log.Megaparsec as M
main :: IO ()
main =
defaultMain
[ bgroup
"CSV (Attoparsec)"
[bparser file A.parseCSV | file <- csvFiles],
bgroup
"CSV (Megaparsec)"
[bparser file M.parseCSV | file <- csvFiles],
bgroup
"Log (Attoparsec)"
[bparser file A.parseLog | file <- logFiles],
bgroup
"Log (Megaparsec)"
[bparser file M.parseLog | file <- logFiles],
bgroup
"JSON (Attoparsec)"
[bparser file A.parseJson | file <- jsonFiles],
bgroup
"JSON (Megapasec)"
[bparser file M.parseJson | file <- jsonFiles]
]
bparser :: (NFData a) => FilePath -> (ByteString -> a) -> Benchmark
bparser desc f = env (B.readFile path) (bench desc . nf f)
where
path = "data/" ++ desc
csvFiles :: [FilePath]
csvFiles =
[ "csv-5.csv",
"csv-10.csv",
"csv-20.csv",
"csv-40.csv"
]
logFiles :: [FilePath]
logFiles =
[ "log-5.log",
"log-10.log",
"log-20.log",
"log-40.log"
]
jsonFiles :: [FilePath]
jsonFiles =
[ "json-5.json",
"json-10.json",
"json-20.json",
"json-40.json"
]
| null | https://raw.githubusercontent.com/mrkkrp/megaparsec/69d4c46e36732df5072782903610e0bee5df34e8/parsers-bench/bench/speed/Main.hs | haskell | module Main (main) where
import Control.DeepSeq
import Criterion.Main
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified ParsersBench.CSV.Attoparsec as A
import qualified ParsersBench.CSV.Megaparsec as M
import qualified ParsersBench.Json.Attoparsec as A
import qualified ParsersBench.Json.Megaparsec as M
import qualified ParsersBench.Log.Attoparsec as A
import qualified ParsersBench.Log.Megaparsec as M
main :: IO ()
main =
defaultMain
[ bgroup
"CSV (Attoparsec)"
[bparser file A.parseCSV | file <- csvFiles],
bgroup
"CSV (Megaparsec)"
[bparser file M.parseCSV | file <- csvFiles],
bgroup
"Log (Attoparsec)"
[bparser file A.parseLog | file <- logFiles],
bgroup
"Log (Megaparsec)"
[bparser file M.parseLog | file <- logFiles],
bgroup
"JSON (Attoparsec)"
[bparser file A.parseJson | file <- jsonFiles],
bgroup
"JSON (Megapasec)"
[bparser file M.parseJson | file <- jsonFiles]
]
bparser :: (NFData a) => FilePath -> (ByteString -> a) -> Benchmark
bparser desc f = env (B.readFile path) (bench desc . nf f)
where
path = "data/" ++ desc
csvFiles :: [FilePath]
csvFiles =
[ "csv-5.csv",
"csv-10.csv",
"csv-20.csv",
"csv-40.csv"
]
logFiles :: [FilePath]
logFiles =
[ "log-5.log",
"log-10.log",
"log-20.log",
"log-40.log"
]
jsonFiles :: [FilePath]
jsonFiles =
[ "json-5.json",
"json-10.json",
"json-20.json",
"json-40.json"
]
| |
36e2658367a4afee2c0add55c2a52b72703d0a698228d23a33898a9d1dc85c52 | karimarttila/clojure | dynamodb_utils.clj | (ns simpleserver.test-utils.dynamodb-utils
(:require [simpleserver.test-utils.test-data :as test-data]
[simpleserver.test-utils.test-service :as test-service]
[simpleserver.service.user.user-common :as ss-user-common]
[cognitect.aws.client.api :as aws]
[clojure.tools.logging :as log]))
;; ******************************************************
;; Domain
(defn- init-product-groups-table [client product-groups-table product-groups]
(log/debug "ENTER init-product-groups-table")
(doseq [pg product-groups]
(let [ret (aws/invoke client {:op :PutItem
:request {:TableName product-groups-table
:Item {"pgid" {:S (first pg)}
"pgname" {:S (second pg)}}}})]
(if (seq ret)
(throw (ex-info "Failed to put product-groups" ret))))))
(defn- init-product-table [client product-table products]
(log/debug "ENTER init-product-table")
(doseq [product products]
(let [ret (aws/invoke client {:op :PutItem
:request {:TableName product-table
:Item {"pid" {:S (nth product 0)}
"pgid" {:S (nth product 1)}
"title" {:S (nth product 2)}
"price" {:S (nth product 3)}
"a_or_d" {:S (nth product 4)}
"year" {:S (nth product 5)}
"country" {:S (nth product 6)}
"g_or_l" {:S (nth product 7)}}}})]
(if (seq ret)
(throw (ex-info "Failed to put products" ret))))))
(defmethod test-service/init-domain :ddb [env]
(log/debug "ENTER init-domain")
(let [client (get-in env [:service :domain :db :client])
product-groups-table (get-in env [:service :domain :db :tables :product-group])
product-table (get-in env [:service :domain :db :tables :product])]
(init-product-groups-table client product-groups-table (test-data/product-groups))
(doseq [pg-id (keys (test-data/product-groups))]
(let [products (test-data/raw-products pg-id)]
(init-product-table client product-table products)))))
;; ******************************************************
;; Session
(defmethod test-service/get-sessions :ddb [env]
(log/debug "ENTER get-sessions")
(let [items (aws/invoke (get-in env [:service :session :db :client]) {:op :Scan
:request {:TableName (get-in env [:service :session :db :tables :session])}})]
(reduce (fn [sessions session]
(conj sessions (get-in session [:token :S])))
#{}
(items :Items))))
(defn- remove-token! [db token]
(log/debug "ENTER remove-token:")
(let [result (aws/invoke (:client db) {:op :DeleteItem
:request {:TableName (get-in db [:tables :session])
:Key {"token" {:S token}}}})]
(if (:__type result)
(throw (ex-info "Failed to get token" result))
result)))
(defmethod test-service/reset-sessions! :ddb [env]
(log/debug "ENTER reset-sessions!")
(if (= (:profile env) :test)
(let [db (get-in env [:service :session :db])
sessions (test-service/get-sessions env)]
(dorun (map (partial remove-token! db) sessions)))
(throw (java.lang.UnsupportedOperationException. "You can reset sessions only in test environment!"))))
;; ******************************************************
;; User
(defn- get-converted-users
[raw-users]
(map (fn [item]
item
(let [user-id (get-in item [:userid :S])
email (get-in item [:email :S])
first-name (get-in item [:firstname :S])
last-name (get-in item [:lastname :S])
hashed-password (get-in item [:hpwd :S])]
{:userid user-id
:email email
:first-name first-name
:last-name last-name
:hashed-password hashed-password}))
(:Items raw-users)))
(defn- add-new-user-without-hashing-password!
[_ my-ddb my-table email first-name last-name password]
(log/debug (str "ENTER add-new-user"))
(let [new-id (ss-user-common/uuid)
request {:TableName my-table
:Item {"userid" {:S new-id}
"email" {:S email}
"firstname" {:S first-name}
"lastname" {:S last-name}
"hpwd" {:S password}}}
ret (aws/invoke my-ddb {:op :PutItem
:request request})]
(if (:__type ret)
(throw (ex-info "Failed to add new user without hashing password" ret))
{:email email, :ret :ok})))
(defmethod test-service/get-users :ddb [env]
(log/debug (str "ENTER -get-users"))
(let [db (get-in env [:service :user :db])
raw-users (aws/invoke (:client db) {:op :Scan
:request {:TableName (get-in db [:tables :users])}})
converted-users (get-converted-users raw-users)]
(if (:__type raw-users)
(throw (ex-info "Failed to get raw users" raw-users))
(reduce (fn [users user]
(assoc users (:userid user) user))
{}
converted-users))))
(defmethod test-service/reset-users! :ddb [env]
(log/debug (str "ENTER -reset-users!"))
(if (= (:profile env) :test)
(let [db (get-in env [:service :user :db])
users-to-delete (test-service/get-users env)
emails-to-delete (map (fn [item]
(:email (second item)))
users-to-delete)
initial-users (test-data/users)]
(dorun (map (fn [email]
(let [ret (aws/invoke (:client db) {:op :DeleteItem
:request {
:TableName (get-in db [:tables :users])
:Key {"email" {:S email}}}})]
(if (:__type ret)
(throw (ex-info "Failed to delete user" ret))
ret)))
emails-to-delete))
(dorun (map (fn [user]
(let [user-map (second user)]
(add-new-user-without-hashing-password!
env
(:client db)
(get-in db [:tables :users])
(:email user-map)
(:first-name user-map)
(:last-name user-map)
(:hashed-password user-map))))
initial-users)))
(throw (java.lang.UnsupportedOperationException. "You can reset users only in test environment!"))))
(comment
(simpleserver.test-config/go)
(reset-users! (simpleserver.test-config/test-env))
(get-users (simpleserver.test-config/test-env))
(reset-sessions! (simpleserver.test-config/test-env))
(simpleserver.test-config/test-env)
(get-sessions (simpleserver.test-config/test-env))
(init-domain (simpleserver.test-config/test-env))
) | null | https://raw.githubusercontent.com/karimarttila/clojure/ee1261b9a8e6be92cb47aeb325f82a278f2c1ed3/webstore-demo/re-frame-demo/test/clj/simpleserver/test_utils/dynamodb_utils.clj | clojure | ******************************************************
Domain
******************************************************
Session
******************************************************
User | (ns simpleserver.test-utils.dynamodb-utils
(:require [simpleserver.test-utils.test-data :as test-data]
[simpleserver.test-utils.test-service :as test-service]
[simpleserver.service.user.user-common :as ss-user-common]
[cognitect.aws.client.api :as aws]
[clojure.tools.logging :as log]))
(defn- init-product-groups-table [client product-groups-table product-groups]
(log/debug "ENTER init-product-groups-table")
(doseq [pg product-groups]
(let [ret (aws/invoke client {:op :PutItem
:request {:TableName product-groups-table
:Item {"pgid" {:S (first pg)}
"pgname" {:S (second pg)}}}})]
(if (seq ret)
(throw (ex-info "Failed to put product-groups" ret))))))
(defn- init-product-table [client product-table products]
(log/debug "ENTER init-product-table")
(doseq [product products]
(let [ret (aws/invoke client {:op :PutItem
:request {:TableName product-table
:Item {"pid" {:S (nth product 0)}
"pgid" {:S (nth product 1)}
"title" {:S (nth product 2)}
"price" {:S (nth product 3)}
"a_or_d" {:S (nth product 4)}
"year" {:S (nth product 5)}
"country" {:S (nth product 6)}
"g_or_l" {:S (nth product 7)}}}})]
(if (seq ret)
(throw (ex-info "Failed to put products" ret))))))
(defmethod test-service/init-domain :ddb [env]
(log/debug "ENTER init-domain")
(let [client (get-in env [:service :domain :db :client])
product-groups-table (get-in env [:service :domain :db :tables :product-group])
product-table (get-in env [:service :domain :db :tables :product])]
(init-product-groups-table client product-groups-table (test-data/product-groups))
(doseq [pg-id (keys (test-data/product-groups))]
(let [products (test-data/raw-products pg-id)]
(init-product-table client product-table products)))))
(defmethod test-service/get-sessions :ddb [env]
(log/debug "ENTER get-sessions")
(let [items (aws/invoke (get-in env [:service :session :db :client]) {:op :Scan
:request {:TableName (get-in env [:service :session :db :tables :session])}})]
(reduce (fn [sessions session]
(conj sessions (get-in session [:token :S])))
#{}
(items :Items))))
(defn- remove-token! [db token]
(log/debug "ENTER remove-token:")
(let [result (aws/invoke (:client db) {:op :DeleteItem
:request {:TableName (get-in db [:tables :session])
:Key {"token" {:S token}}}})]
(if (:__type result)
(throw (ex-info "Failed to get token" result))
result)))
(defmethod test-service/reset-sessions! :ddb [env]
(log/debug "ENTER reset-sessions!")
(if (= (:profile env) :test)
(let [db (get-in env [:service :session :db])
sessions (test-service/get-sessions env)]
(dorun (map (partial remove-token! db) sessions)))
(throw (java.lang.UnsupportedOperationException. "You can reset sessions only in test environment!"))))
(defn- get-converted-users
[raw-users]
(map (fn [item]
item
(let [user-id (get-in item [:userid :S])
email (get-in item [:email :S])
first-name (get-in item [:firstname :S])
last-name (get-in item [:lastname :S])
hashed-password (get-in item [:hpwd :S])]
{:userid user-id
:email email
:first-name first-name
:last-name last-name
:hashed-password hashed-password}))
(:Items raw-users)))
(defn- add-new-user-without-hashing-password!
[_ my-ddb my-table email first-name last-name password]
(log/debug (str "ENTER add-new-user"))
(let [new-id (ss-user-common/uuid)
request {:TableName my-table
:Item {"userid" {:S new-id}
"email" {:S email}
"firstname" {:S first-name}
"lastname" {:S last-name}
"hpwd" {:S password}}}
ret (aws/invoke my-ddb {:op :PutItem
:request request})]
(if (:__type ret)
(throw (ex-info "Failed to add new user without hashing password" ret))
{:email email, :ret :ok})))
(defmethod test-service/get-users :ddb [env]
(log/debug (str "ENTER -get-users"))
(let [db (get-in env [:service :user :db])
raw-users (aws/invoke (:client db) {:op :Scan
:request {:TableName (get-in db [:tables :users])}})
converted-users (get-converted-users raw-users)]
(if (:__type raw-users)
(throw (ex-info "Failed to get raw users" raw-users))
(reduce (fn [users user]
(assoc users (:userid user) user))
{}
converted-users))))
(defmethod test-service/reset-users! :ddb [env]
(log/debug (str "ENTER -reset-users!"))
(if (= (:profile env) :test)
(let [db (get-in env [:service :user :db])
users-to-delete (test-service/get-users env)
emails-to-delete (map (fn [item]
(:email (second item)))
users-to-delete)
initial-users (test-data/users)]
(dorun (map (fn [email]
(let [ret (aws/invoke (:client db) {:op :DeleteItem
:request {
:TableName (get-in db [:tables :users])
:Key {"email" {:S email}}}})]
(if (:__type ret)
(throw (ex-info "Failed to delete user" ret))
ret)))
emails-to-delete))
(dorun (map (fn [user]
(let [user-map (second user)]
(add-new-user-without-hashing-password!
env
(:client db)
(get-in db [:tables :users])
(:email user-map)
(:first-name user-map)
(:last-name user-map)
(:hashed-password user-map))))
initial-users)))
(throw (java.lang.UnsupportedOperationException. "You can reset users only in test environment!"))))
(comment
(simpleserver.test-config/go)
(reset-users! (simpleserver.test-config/test-env))
(get-users (simpleserver.test-config/test-env))
(reset-sessions! (simpleserver.test-config/test-env))
(simpleserver.test-config/test-env)
(get-sessions (simpleserver.test-config/test-env))
(init-domain (simpleserver.test-config/test-env))
) |
3e9c573e6ad8ca8b67b1ffcae569e47b600cd9bf188eca85dc1b867a241cf4d5 | bintracker/bintracker | mml-impl.scm | (module mml
(mml::read mml::dialog)
(import scheme (chicken base) (chicken string) (chicken condition)
srfi-1 srfi-13 srfi-14 pstk comparse coops
bt-state bt-types bt-gui)
;; Define PEG parser rules to parse MML strings using comparse
(define (digits+dots)
(sequence* ((digits (as-string (sequence (maybe (in char-set:digit))
(maybe (in char-set:digit)))))
(dots (sequence (maybe (is #\.))
(maybe (is #\.))
(maybe (is #\.)))))
(result (list (string->number digits)
(length (remove boolean? dots))))))
(define note
(sequence* ((note (as-string (in (string->char-set "abcdefg"))))
(note-mod (as-string (maybe (in (string->char-set "+-#")))))
(length-mod (digits+dots)))
(result (cons* 'note (string-append note note-mod)
length-mod))))
(define rest
(sequence* ((_ (in (string->char-set "rp")))
(modifiers (digits+dots)))
(result (cons* 'note "rest" modifiers))))
(define articulation
(sequence* ((_ (is #\m))
(arg (maybe (in (string->char-set "nls")))))
(result (list 'articulation arg))))
(define time
(sequence* ((_ (is #\l))
(len1 (as-string (in char-set:digit)))
(len2 (as-string (maybe (in char-set:digit)))))
(result (list 'time (string->number (string-append len1 len2))))))
(define octave-up
(bind (is #\>) (lambda (r) (result '(octave-up)))))
(define octave-down
(bind (is #\<) (lambda (r) (result '(octave-down)))))
(define octave
(sequence* ((_ (is #\o))
(num (as-string (in char-set:digit))))
(result (list 'octave (string->number num)))))
(define mml-token
(any-of note rest articulation time octave-up octave-down octave))
;; The main tokenizer procedure
(define (tokenize-mml str)
;; (print "tokenize-mml " str)
(handle-exceptions
exn
(begin (print-call-chain)
(abort exn))
(parse (followed-by (zero-or-more mml-token)
end-of-input)
(string-downcase (string-delete char-set:whitespace str)))))
Translate note names from MML to MDAL .
(define (normalize-note-name mml-name octave)
(if (string= mml-name "rest")
'rest
(if (string-contains mml-name "-")
(let ((note-names '("b" "a#" "a" "g#" "g" "f#"
"f" "e" "d#" "d" "c#" "c")))
(string->symbol
(string-append
(list-ref (append (cdr note-names)
(list "b"))
(list-index
(cute string=? <> (string-take mml-name 1))
note-names))
(number->string (if (string-prefix? "c" mml-name)
(sub1 octave)
octave)))))
(string->symbol (string-append (string-translate mml-name #\+ #\#)
(number->string octave))))))
Convert MML tokens into ticks , where each tick represents a 1/2048th note .
(define (tokens->ticks mml-tokens)
(letrec*
((octave 4)
(time 128)
(articulation 7/8)
(evaluate-tokens
(lambda (tokens ticks)
(if (null? tokens)
ticks
(case (caar tokens)
((note)
(let* ((token
(cdar tokens))
(undotted-time
(if (cadr token)
(quotient 512 (cadr token))
time))
(actual-time
(if (caddr token)
(* undotted-time (/ (expt 3 (caddr token))
(expt 2 (caddr token))))
undotted-time))
(on-time
(round (* actual-time articulation)))
(off-time (round (- actual-time on-time))))
(evaluate-tokens
(cdr tokens)
(append ticks
(cons (normalize-note-name (cadar tokens)
octave)
(if (zero? off-time)
(make-list (sub1 on-time) '())
(append (make-list (sub1 on-time) '())
'(rest)
(make-list (sub1 off-time)
'()))))))))
((articulation)
(set! articulation
(case (cadar tokens)
((#\s) 3/4)
((#\l) 1)
(else 7/8)))
(evaluate-tokens (cdr tokens) ticks))
((octave)
(set! octave (cadar tokens))
(evaluate-tokens (cdr tokens) ticks))
((octave-up)
(set! octave (+ 1 octave))
(evaluate-tokens (cdr tokens)
ticks))
((octave-down)
(set! octave (sub1 octave))
(evaluate-tokens (cdr tokens) ticks))
((time)
(set! length (quotient 512 (cadar tokens)))
(evaluate-tokens (cdr tokens) ticks)))))))
(evaluate-tokens mml-tokens '())))
Requantize ticks to a different whole note sub - division , taking EACH nth
;;; value. Rests are moved
(define (requantize ticks each)
(letrec
((run-ticks (lambda (ticks counter had-rest?)
(if (null? ticks)
'()
(if (zero? (sub1 counter))
(cons (car ticks)
(run-ticks (cdr ticks)
each
(or (eqv? 'rest (car ticks))
(and had-rest?
(null? (car ticks))))))
(run-ticks (if (and (not (null? (cdr ticks)))
(null? (cadr ticks)))
(cons (if (eqv? 'rest (car ticks))
(if had-rest? '()'rest)
(car ticks))
(cddr ticks))
(cdr ticks))
(sub1 counter)
had-rest?))))))
(run-ticks ticks 1 #f)))
Read the MML command string STR and transform it into a list of
;;; field node values. If QUANTIZE-TO is specified and denotes how many steps
should make up a quarter note ( max . 512 ) , then the output will be
requantized accordingly . QUANTIZE - TO defaults to 512 , which means the
command will return 1/2048th notes .
(define (mml::read str #!optional (quantize-to 8))
(let ((ticks (tokens->ticks (tokenize-mml str))))
(if (and (positive? quantize-to) (< quantize-to 128))
(requantize ticks (round (/ 512 (* 4 quantize-to))))
(error 'mml::read "Invalid quatization unit"))))
(define dialog-widget
(make <ui-dialog>
'title "MML"
'children
`((header ,<ui-wrapper> setup
((lbl1 label text: "Quantization unit (1-127):")
(qnt entry bg: ,(colors 'row-highlight-minor)
fg: ,(colors 'text)
bd: 0 highlightthickness: 0 insertborderwidth: 1
font: ,(list family: (settings 'font-mono)
size: (settings 'font-size)
weight: 'bold))))
(tb ,<ui-wrapper> setup
((tbox text bd: 1 highlightthickness: 0 blockcursor: yes
bg: ,(colors 'background) fg: ,(colors 'text)
insertbackground: ,(colors 'text)
font: ,(list family: (settings 'font-mono)
size: (settings 'font-size))
height: 10))
yscroll #t))
'traverse '(tbox qnt)
'initializers
(make-hooks
`(ix . ,(lambda a ((ui-ref dialog-widget 'qnt) 'insert 'end "8"))))
'finalizers
(make-hooks
`(f . ,(lambda a
(handle-exceptions
exn
(report-exception
exn "MML Error" "An error occured in MML")
(ui-paste (current 'blockview)
(mml::read ((ui-ref dialog-widget 'tbox)
'get "0.0" 'end)
(string->number ((ui-ref dialog-widget 'qnt)
'get))))))))))
(define (mml::dialog)
(and (current 'blockview)
(ui-show dialog-widget)
(tk/focus (ui-ref dialog-widget 'tbox))))
)
| null | https://raw.githubusercontent.com/bintracker/bintracker/402d727a7edb14ce1ca59e39473fe8b91bde9bdc/plugins/mml/mml-impl.scm | scheme | Define PEG parser rules to parse MML strings using comparse
The main tokenizer procedure
(print "tokenize-mml " str)
value. Rests are moved
field node values. If QUANTIZE-TO is specified and denotes how many steps | (module mml
(mml::read mml::dialog)
(import scheme (chicken base) (chicken string) (chicken condition)
srfi-1 srfi-13 srfi-14 pstk comparse coops
bt-state bt-types bt-gui)
(define (digits+dots)
(sequence* ((digits (as-string (sequence (maybe (in char-set:digit))
(maybe (in char-set:digit)))))
(dots (sequence (maybe (is #\.))
(maybe (is #\.))
(maybe (is #\.)))))
(result (list (string->number digits)
(length (remove boolean? dots))))))
(define note
(sequence* ((note (as-string (in (string->char-set "abcdefg"))))
(note-mod (as-string (maybe (in (string->char-set "+-#")))))
(length-mod (digits+dots)))
(result (cons* 'note (string-append note note-mod)
length-mod))))
(define rest
(sequence* ((_ (in (string->char-set "rp")))
(modifiers (digits+dots)))
(result (cons* 'note "rest" modifiers))))
(define articulation
(sequence* ((_ (is #\m))
(arg (maybe (in (string->char-set "nls")))))
(result (list 'articulation arg))))
(define time
(sequence* ((_ (is #\l))
(len1 (as-string (in char-set:digit)))
(len2 (as-string (maybe (in char-set:digit)))))
(result (list 'time (string->number (string-append len1 len2))))))
(define octave-up
(bind (is #\>) (lambda (r) (result '(octave-up)))))
(define octave-down
(bind (is #\<) (lambda (r) (result '(octave-down)))))
(define octave
(sequence* ((_ (is #\o))
(num (as-string (in char-set:digit))))
(result (list 'octave (string->number num)))))
(define mml-token
(any-of note rest articulation time octave-up octave-down octave))
(define (tokenize-mml str)
(handle-exceptions
exn
(begin (print-call-chain)
(abort exn))
(parse (followed-by (zero-or-more mml-token)
end-of-input)
(string-downcase (string-delete char-set:whitespace str)))))
Translate note names from MML to MDAL .
(define (normalize-note-name mml-name octave)
(if (string= mml-name "rest")
'rest
(if (string-contains mml-name "-")
(let ((note-names '("b" "a#" "a" "g#" "g" "f#"
"f" "e" "d#" "d" "c#" "c")))
(string->symbol
(string-append
(list-ref (append (cdr note-names)
(list "b"))
(list-index
(cute string=? <> (string-take mml-name 1))
note-names))
(number->string (if (string-prefix? "c" mml-name)
(sub1 octave)
octave)))))
(string->symbol (string-append (string-translate mml-name #\+ #\#)
(number->string octave))))))
Convert MML tokens into ticks , where each tick represents a 1/2048th note .
(define (tokens->ticks mml-tokens)
(letrec*
((octave 4)
(time 128)
(articulation 7/8)
(evaluate-tokens
(lambda (tokens ticks)
(if (null? tokens)
ticks
(case (caar tokens)
((note)
(let* ((token
(cdar tokens))
(undotted-time
(if (cadr token)
(quotient 512 (cadr token))
time))
(actual-time
(if (caddr token)
(* undotted-time (/ (expt 3 (caddr token))
(expt 2 (caddr token))))
undotted-time))
(on-time
(round (* actual-time articulation)))
(off-time (round (- actual-time on-time))))
(evaluate-tokens
(cdr tokens)
(append ticks
(cons (normalize-note-name (cadar tokens)
octave)
(if (zero? off-time)
(make-list (sub1 on-time) '())
(append (make-list (sub1 on-time) '())
'(rest)
(make-list (sub1 off-time)
'()))))))))
((articulation)
(set! articulation
(case (cadar tokens)
((#\s) 3/4)
((#\l) 1)
(else 7/8)))
(evaluate-tokens (cdr tokens) ticks))
((octave)
(set! octave (cadar tokens))
(evaluate-tokens (cdr tokens) ticks))
((octave-up)
(set! octave (+ 1 octave))
(evaluate-tokens (cdr tokens)
ticks))
((octave-down)
(set! octave (sub1 octave))
(evaluate-tokens (cdr tokens) ticks))
((time)
(set! length (quotient 512 (cadar tokens)))
(evaluate-tokens (cdr tokens) ticks)))))))
(evaluate-tokens mml-tokens '())))
Requantize ticks to a different whole note sub - division , taking EACH nth
(define (requantize ticks each)
(letrec
((run-ticks (lambda (ticks counter had-rest?)
(if (null? ticks)
'()
(if (zero? (sub1 counter))
(cons (car ticks)
(run-ticks (cdr ticks)
each
(or (eqv? 'rest (car ticks))
(and had-rest?
(null? (car ticks))))))
(run-ticks (if (and (not (null? (cdr ticks)))
(null? (cadr ticks)))
(cons (if (eqv? 'rest (car ticks))
(if had-rest? '()'rest)
(car ticks))
(cddr ticks))
(cdr ticks))
(sub1 counter)
had-rest?))))))
(run-ticks ticks 1 #f)))
Read the MML command string STR and transform it into a list of
should make up a quarter note ( max . 512 ) , then the output will be
requantized accordingly . QUANTIZE - TO defaults to 512 , which means the
command will return 1/2048th notes .
(define (mml::read str #!optional (quantize-to 8))
(let ((ticks (tokens->ticks (tokenize-mml str))))
(if (and (positive? quantize-to) (< quantize-to 128))
(requantize ticks (round (/ 512 (* 4 quantize-to))))
(error 'mml::read "Invalid quatization unit"))))
(define dialog-widget
(make <ui-dialog>
'title "MML"
'children
`((header ,<ui-wrapper> setup
((lbl1 label text: "Quantization unit (1-127):")
(qnt entry bg: ,(colors 'row-highlight-minor)
fg: ,(colors 'text)
bd: 0 highlightthickness: 0 insertborderwidth: 1
font: ,(list family: (settings 'font-mono)
size: (settings 'font-size)
weight: 'bold))))
(tb ,<ui-wrapper> setup
((tbox text bd: 1 highlightthickness: 0 blockcursor: yes
bg: ,(colors 'background) fg: ,(colors 'text)
insertbackground: ,(colors 'text)
font: ,(list family: (settings 'font-mono)
size: (settings 'font-size))
height: 10))
yscroll #t))
'traverse '(tbox qnt)
'initializers
(make-hooks
`(ix . ,(lambda a ((ui-ref dialog-widget 'qnt) 'insert 'end "8"))))
'finalizers
(make-hooks
`(f . ,(lambda a
(handle-exceptions
exn
(report-exception
exn "MML Error" "An error occured in MML")
(ui-paste (current 'blockview)
(mml::read ((ui-ref dialog-widget 'tbox)
'get "0.0" 'end)
(string->number ((ui-ref dialog-widget 'qnt)
'get))))))))))
(define (mml::dialog)
(and (current 'blockview)
(ui-show dialog-widget)
(tk/focus (ui-ref dialog-widget 'tbox))))
)
|
d533300037137023cf9af18e91aa1840deccdb70b2ef598405f1a9d9acfc3acb | TrustInSoft/tis-kernel | printer_builder.ml | (**************************************************************************)
(* *)
This file is part of .
(* *)
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
(* *)
is released under GPLv2
(* *)
(**************************************************************************)
(**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
module Make
(P: sig class printer: unit -> Printer_api.extensible_printer_type end) =
struct
module type PrinterClass = sig
class printer : Printer_api.extensible_printer_type
end
let printer_class_ref =
ref (module struct class printer = P.printer () end: PrinterClass)
let printer_ref = ref None
module type PrinterExtension = functor (X: PrinterClass) -> PrinterClass
let set_printer p =
printer_class_ref := p;
printer_ref := None
let update_printer x =
let module X = (val x: PrinterExtension) in
let module Cur = (val !printer_class_ref: PrinterClass) in
let module Updated = X(Cur) in
set_printer (module Updated: PrinterClass)
let printer () = match !printer_ref with
| None ->
let module Printer = (val !printer_class_ref: PrinterClass) in
let p = new Printer.printer in
printer_ref := Some p;
p#reset ();
p
| Some p ->
p#reset ();
p
let current_printer () = !printer_class_ref
class extensible_printer = P.printer
let without_annot f fmt x = (printer ())#without_annot f fmt x
let force_brace f fmt x = (printer ())#force_brace f fmt x
let pp_varname fmt x = (printer())#varname fmt x
(* eta-expansion required for applying side-effect of [printer ()] at the
right time *)
let pp_location fmt x = (printer ())#location fmt x
let pp_constant fmt x = (printer ())#constant fmt x
let pp_ikind fmt x = (printer ())#ikind fmt x
let pp_fkind fmt x = (printer ())#fkind fmt x
let pp_storage fmt x = (printer ())#storage fmt x
let pp_typ fmt x = (printer ())#typ None fmt x
let pp_exp fmt x = (printer ())#exp fmt x
let pp_varinfo fmt x = (printer ())#varinfo fmt x
let pp_lval fmt x = (printer ())#lval fmt x
let pp_field fmt x = (printer())#field fmt x
let pp_offset fmt x = (printer ())#offset fmt x
let pp_init fmt x = (printer ())#init fmt x
let pp_binop fmt x = (printer ())#binop fmt x
let pp_unop fmt x = (printer ())#unop fmt x
let pp_attribute fmt x = ignore ((printer ())#attribute fmt x)
let pp_attrparam fmt x = (printer ())#attrparam fmt x
let pp_attributes fmt x = (printer ())#attributes fmt x
let pp_instr fmt x = (printer ())#instr fmt x
let pp_label fmt x = (printer ())#label fmt x
let pp_logic_label fmt x = (printer ())#logic_label fmt x
let pp_stmt fmt x = (printer ())#stmt fmt x
let pp_block fmt x = (printer ())#block fmt x
let pp_global fmt x = (printer ())#global fmt x
let pp_file fmt x = (printer ())#file fmt x
let pp_relation fmt x = (printer ())#relation fmt x
let pp_model_info fmt x = (printer ())#model_info fmt x
let pp_term_lval fmt x = (printer ())#term_lval fmt x
let pp_logic_var fmt x = (printer ())#logic_var fmt x
let pp_logic_type fmt x = (printer ())#logic_type None fmt x
let pp_identified_term fmt x = (printer ())#identified_term fmt x
let pp_term fmt x = (printer ())#term fmt x
let pp_model_field fmt x = (printer())#model_field fmt x
let pp_term_offset fmt x = (printer ())#term_offset fmt x
let pp_predicate fmt x = (printer ())#predicate fmt x
let pp_predicate_named fmt x = (printer ())#predicate_named fmt x
let pp_identified_predicate fmt x = (printer ())#identified_predicate fmt x
let pp_code_annotation fmt x = (printer ())#code_annotation fmt x
let pp_funspec fmt x = (printer ())#funspec fmt x
let pp_behavior fmt x = (printer ())#behavior fmt x
let pp_global_annotation fmt x = (printer ())#global_annotation fmt x
let pp_decreases fmt x = (printer ())#decreases fmt x
let pp_variant fmt x = (printer ())#variant fmt x
let pp_from fmt x = (printer ())#from "assigns" fmt x
let pp_full_assigns fmt x = (printer ())#assigns fmt x
let pp_assigns = pp_full_assigns "assigns"
let pp_allocation fmt x = (printer ())#allocation ~isloop:false fmt x
let pp_loop_from fmt x = (printer ())#from "loop assigns" fmt x
let pp_loop_assigns fmt x = (printer ())#assigns "loop assigns" fmt x
let pp_loop_allocation fmt x = (printer ())#allocation ~isloop:true fmt x
let pp_post_cond fmt x = (printer ())#post_cond fmt x
end
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/TrustInSoft/tis-kernel/748d28baba90c03c0f5f4654d2e7bb47dfbe4e7d/src/kernel_services/ast_printing/printer_builder.ml | ocaml | ************************************************************************
************************************************************************
************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
eta-expansion required for applying side-effect of [printer ()] at the
right time
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of .
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
is released under GPLv2
This file is part of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
module Make
(P: sig class printer: unit -> Printer_api.extensible_printer_type end) =
struct
module type PrinterClass = sig
class printer : Printer_api.extensible_printer_type
end
let printer_class_ref =
ref (module struct class printer = P.printer () end: PrinterClass)
let printer_ref = ref None
module type PrinterExtension = functor (X: PrinterClass) -> PrinterClass
let set_printer p =
printer_class_ref := p;
printer_ref := None
let update_printer x =
let module X = (val x: PrinterExtension) in
let module Cur = (val !printer_class_ref: PrinterClass) in
let module Updated = X(Cur) in
set_printer (module Updated: PrinterClass)
let printer () = match !printer_ref with
| None ->
let module Printer = (val !printer_class_ref: PrinterClass) in
let p = new Printer.printer in
printer_ref := Some p;
p#reset ();
p
| Some p ->
p#reset ();
p
let current_printer () = !printer_class_ref
class extensible_printer = P.printer
let without_annot f fmt x = (printer ())#without_annot f fmt x
let force_brace f fmt x = (printer ())#force_brace f fmt x
let pp_varname fmt x = (printer())#varname fmt x
let pp_location fmt x = (printer ())#location fmt x
let pp_constant fmt x = (printer ())#constant fmt x
let pp_ikind fmt x = (printer ())#ikind fmt x
let pp_fkind fmt x = (printer ())#fkind fmt x
let pp_storage fmt x = (printer ())#storage fmt x
let pp_typ fmt x = (printer ())#typ None fmt x
let pp_exp fmt x = (printer ())#exp fmt x
let pp_varinfo fmt x = (printer ())#varinfo fmt x
let pp_lval fmt x = (printer ())#lval fmt x
let pp_field fmt x = (printer())#field fmt x
let pp_offset fmt x = (printer ())#offset fmt x
let pp_init fmt x = (printer ())#init fmt x
let pp_binop fmt x = (printer ())#binop fmt x
let pp_unop fmt x = (printer ())#unop fmt x
let pp_attribute fmt x = ignore ((printer ())#attribute fmt x)
let pp_attrparam fmt x = (printer ())#attrparam fmt x
let pp_attributes fmt x = (printer ())#attributes fmt x
let pp_instr fmt x = (printer ())#instr fmt x
let pp_label fmt x = (printer ())#label fmt x
let pp_logic_label fmt x = (printer ())#logic_label fmt x
let pp_stmt fmt x = (printer ())#stmt fmt x
let pp_block fmt x = (printer ())#block fmt x
let pp_global fmt x = (printer ())#global fmt x
let pp_file fmt x = (printer ())#file fmt x
let pp_relation fmt x = (printer ())#relation fmt x
let pp_model_info fmt x = (printer ())#model_info fmt x
let pp_term_lval fmt x = (printer ())#term_lval fmt x
let pp_logic_var fmt x = (printer ())#logic_var fmt x
let pp_logic_type fmt x = (printer ())#logic_type None fmt x
let pp_identified_term fmt x = (printer ())#identified_term fmt x
let pp_term fmt x = (printer ())#term fmt x
let pp_model_field fmt x = (printer())#model_field fmt x
let pp_term_offset fmt x = (printer ())#term_offset fmt x
let pp_predicate fmt x = (printer ())#predicate fmt x
let pp_predicate_named fmt x = (printer ())#predicate_named fmt x
let pp_identified_predicate fmt x = (printer ())#identified_predicate fmt x
let pp_code_annotation fmt x = (printer ())#code_annotation fmt x
let pp_funspec fmt x = (printer ())#funspec fmt x
let pp_behavior fmt x = (printer ())#behavior fmt x
let pp_global_annotation fmt x = (printer ())#global_annotation fmt x
let pp_decreases fmt x = (printer ())#decreases fmt x
let pp_variant fmt x = (printer ())#variant fmt x
let pp_from fmt x = (printer ())#from "assigns" fmt x
let pp_full_assigns fmt x = (printer ())#assigns fmt x
let pp_assigns = pp_full_assigns "assigns"
let pp_allocation fmt x = (printer ())#allocation ~isloop:false fmt x
let pp_loop_from fmt x = (printer ())#from "loop assigns" fmt x
let pp_loop_assigns fmt x = (printer ())#assigns "loop assigns" fmt x
let pp_loop_allocation fmt x = (printer ())#allocation ~isloop:true fmt x
let pp_post_cond fmt x = (printer ())#post_cond fmt x
end
|
b600956606763e5155502343d67c4248d34ab7960f0052e7e8499ab7ff56699e | buildsome/buildsome | ClangCommands.hs | module Buildsome.ClangCommands
( make
) where
import Prelude.Compat hiding (FilePath)
import Buildsome.BuildMaps (TargetRep)
import Buildsome.Stats (Stats)
import Data.Aeson ((.=))
import Data.Aeson.Encode.Pretty (encodePretty)
import Data.Functor.Identity (Identity(..))
import Data.Maybe (fromMaybe)
import Lib.FilePath (FilePath, (</>))
import Lib.Makefile (TargetType(..), Target, targetInterpolatedCmds)
import qualified Buildsome.BuildMaps as BuildMaps
import qualified Buildsome.Stats as Stats
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString.Lazy.Char8 as BS8L
import qualified Data.Map as Map
import qualified Lib.Revisit as Revisit
type M = Revisit.M TargetRep Identity
buildCommands :: FilePath -> Stats -> Target -> M [Aeson.Value]
buildCommands cwd stats target =
fmap (fromMaybe []) $
Revisit.avoid (BuildMaps.computeTargetRep target) $ do
deps <- depBuildCommands
pure $ myBuildCommands ++ deps
where
myBuildCommands =
case targetInputs target of
[file]
| not (BS8.null (targetInterpolatedCmds target)) ->
[ Aeson.object
[ "directory" .= BS8.unpack cwd
, "command" .= BS8.unpack (targetInterpolatedCmds target)
, "file" .= BS8.unpack file
]
]
_ -> []
depBuildCommands =
case Map.lookup (BuildMaps.computeTargetRep target) (Stats.ofTarget stats) of
Nothing -> pure [] -- ok because some dependencies have no rule?
Just targetStats -> buildCommandsTargets cwd stats $ Stats.tsDirectDeps targetStats
buildCommandsTargets :: FilePath -> Stats -> [Target] -> M [Aeson.Value]
buildCommandsTargets cwd stats = fmap concat . traverse (buildCommands cwd stats)
make :: FilePath -> Stats -> [Target] -> FilePath -> IO ()
make cwd stats rootTargets filePath = do
putStrLn $ "Writing clang commands to: " ++ show (cwd </> filePath)
BS8L.writeFile (BS8.unpack filePath) $
encodePretty $ reverse $
runIdentity $ Revisit.run (buildCommandsTargets cwd stats rootTargets)
| null | https://raw.githubusercontent.com/buildsome/buildsome/479b92bb74a474a5f0c3292b79202cc850bd8653/src/Buildsome/ClangCommands.hs | haskell | ok because some dependencies have no rule? | module Buildsome.ClangCommands
( make
) where
import Prelude.Compat hiding (FilePath)
import Buildsome.BuildMaps (TargetRep)
import Buildsome.Stats (Stats)
import Data.Aeson ((.=))
import Data.Aeson.Encode.Pretty (encodePretty)
import Data.Functor.Identity (Identity(..))
import Data.Maybe (fromMaybe)
import Lib.FilePath (FilePath, (</>))
import Lib.Makefile (TargetType(..), Target, targetInterpolatedCmds)
import qualified Buildsome.BuildMaps as BuildMaps
import qualified Buildsome.Stats as Stats
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString.Lazy.Char8 as BS8L
import qualified Data.Map as Map
import qualified Lib.Revisit as Revisit
type M = Revisit.M TargetRep Identity
buildCommands :: FilePath -> Stats -> Target -> M [Aeson.Value]
buildCommands cwd stats target =
fmap (fromMaybe []) $
Revisit.avoid (BuildMaps.computeTargetRep target) $ do
deps <- depBuildCommands
pure $ myBuildCommands ++ deps
where
myBuildCommands =
case targetInputs target of
[file]
| not (BS8.null (targetInterpolatedCmds target)) ->
[ Aeson.object
[ "directory" .= BS8.unpack cwd
, "command" .= BS8.unpack (targetInterpolatedCmds target)
, "file" .= BS8.unpack file
]
]
_ -> []
depBuildCommands =
case Map.lookup (BuildMaps.computeTargetRep target) (Stats.ofTarget stats) of
Just targetStats -> buildCommandsTargets cwd stats $ Stats.tsDirectDeps targetStats
buildCommandsTargets :: FilePath -> Stats -> [Target] -> M [Aeson.Value]
buildCommandsTargets cwd stats = fmap concat . traverse (buildCommands cwd stats)
make :: FilePath -> Stats -> [Target] -> FilePath -> IO ()
make cwd stats rootTargets filePath = do
putStrLn $ "Writing clang commands to: " ++ show (cwd </> filePath)
BS8L.writeFile (BS8.unpack filePath) $
encodePretty $ reverse $
runIdentity $ Revisit.run (buildCommandsTargets cwd stats rootTargets)
|
31eb2f79ebc904356e1d7dbd59804a037ffd95398a8563ba129f1541ac677e27 | ucsd-progsys/nate | widget.ml | (***********************************************************************)
(* *)
MLTk , Tcl / Tk interface of Objective Caml
(* *)
, , and
projet Cristal , INRIA Rocquencourt
, Kyoto University RIMS
(* *)
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
General Public License , with the special exception on linking
(* described in file LICENSE found in the Objective Caml source tree. *)
(* *)
(***********************************************************************)
$ I d : widget.ml , v 1.16 2002/04/26 12:16:22 furuse Exp $
(* Hack to permit having the different data type with the same name
[widget] for CamlTk and LablTk. *)
include Rawwidget
type 'a widget = 'a raw_widget
type any = raw_any
| null | https://raw.githubusercontent.com/ucsd-progsys/nate/8b1267cd8b10283d8bc239d16a28c654a4cb8942/eval/sherrloc/easyocaml%2B%2B/otherlibs/labltk/support/widget.ml | ocaml | *********************************************************************
described in file LICENSE found in the Objective Caml source tree.
*********************************************************************
Hack to permit having the different data type with the same name
[widget] for CamlTk and LablTk. | MLTk , Tcl / Tk interface of Objective Caml
, , and
projet Cristal , INRIA Rocquencourt
, Kyoto University RIMS
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
General Public License , with the special exception on linking
$ I d : widget.ml , v 1.16 2002/04/26 12:16:22 furuse Exp $
include Rawwidget
type 'a widget = 'a raw_widget
type any = raw_any
|
abe676652ce71b52b47c2fc038b470858e22f0496dfdfe0dac60bc4b0c64a141 | georgewsinger/cljs-callback-heaven | project.clj | " SNAPSHOT " is a maven term ; it means " development version " , and is contrasted with " release "
searchable fields
:description "A small clojurescript library that helps you escape callback hell when interoping with javascript and core.async."
:url "N/A"
:license {:name "MIT"
:url ""}
Maven , Clojars dependencies
:dependencies [[org.clojure/clojure "1.8.0" ]
[org.clojure/clojurescript "1.7.228"]
[org.clojure/core.async "0.2.374"] ; cljs.core.async lives here
[org.clojure/tools.nrepl "0.2.10" ] ; non-teletype repl; needed for vim-fireplace
nrepl piggieback middleware & also needed for vim - fireplace
;; lein specific plugins
:plugins [[org.bodil/lein-noderepl "0.1.11"] ; required to launch the node repl
[lein-cljsbuild "1.1.2"] ; required to compile cljs to js
[lein-npm "0.6.1"] ; lein interface with npm (see below)
[lein-doo "0.1.6"]] ; the current "standard" testing framework
Needed for nREPL / piggieback / vim - fireplace
:profiles { :dev {:dependencies [[com.cemerick/piggieback "0.2.1"]
[org.clojure/tools.nrepl "0.2.10"]]
:repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]}}}
:npm { ;; package.json npm dependencies
:dependencies [[minimist "1.2.0"]] ; minimist parses CLI arguments
every time you run a " lein npm < cmd > " , uses the fields below to assemble a temporary package.json
:package { :name "cljs-callback-heaven"
:version "0.0.1" ; you must update this manually; `lein npm version` doesn't seem to do it
we target the : main profile from below
:description "N/A"
we target the : main profile from below
:repository {:type "git" :url "git+-callback-heaven.git"}
:keywords ["clojure" "clojurescript"]
:author "George Singer"
:license "MIT"
:private false
:homepage "-callback-heaven#readme"}}
:cljsbuild { :builds { ;; main is the default cljsbuild profile, marked by its use of the :advanced compilation mode
:main {
:source-paths ["src"]
:compiler { :optimizations :advanced
:target :nodejs
:output-dir "out-advanced"
:output-to "target/cljs-callback-heaven.js"
:externs ["externs.js"]
:verbose true
:pretty-print true }}
;; on the other end of the spectrum is "none", named for its compilation mode
:none {
:source-paths ["src"]
:compiler { :optimizations :none
:target :nodejs
:output-dir "out-none"
:output-to "target/cljs-callback-heaven-none.js"
:externs ["externs.js"]
:verbose true
:pretty-print true }}
;; lein doo uses this profile
:test-none {
:source-paths ["src" "test"] ; note the added "test" directory
:compiler { :optimizations :none
:target :nodejs
:output-dir "out-test-none"
:output-to "target/cljs-callback-heaven-test-none.js"
:externs ["externs.js"]
:verbose true
:main cljs-callback-heaven.runner
:pretty-print true }}
;; lein doo uses this profile
:test-advanced {
:source-paths ["src" "test"] ; note the added "test" directory
:compiler { :optimizations :advanced
:target :nodejs
:output-dir "out-test-advanced"
:output-to "target/cljs-callback-heaven-test-advanced.js"
:externs ["externs.js"]
:verbose true
:main cljs-callback-heaven.runner
:pretty-print true }}}})
| null | https://raw.githubusercontent.com/georgewsinger/cljs-callback-heaven/664bc44a7c4cf62853430f08da6cc28e7ed59c80/project.clj | clojure | it means " development version " , and is contrasted with " release "
cljs.core.async lives here
non-teletype repl; needed for vim-fireplace
lein specific plugins
required to launch the node repl
required to compile cljs to js
lein interface with npm (see below)
the current "standard" testing framework
package.json npm dependencies
minimist parses CLI arguments
you must update this manually; `lein npm version` doesn't seem to do it
main is the default cljsbuild profile, marked by its use of the :advanced compilation mode
on the other end of the spectrum is "none", named for its compilation mode
lein doo uses this profile
note the added "test" directory
lein doo uses this profile
note the added "test" directory |
searchable fields
:description "A small clojurescript library that helps you escape callback hell when interoping with javascript and core.async."
:url "N/A"
:license {:name "MIT"
:url ""}
Maven , Clojars dependencies
:dependencies [[org.clojure/clojure "1.8.0" ]
[org.clojure/clojurescript "1.7.228"]
nrepl piggieback middleware & also needed for vim - fireplace
Needed for nREPL / piggieback / vim - fireplace
:profiles { :dev {:dependencies [[com.cemerick/piggieback "0.2.1"]
[org.clojure/tools.nrepl "0.2.10"]]
:repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]}}}
every time you run a " lein npm < cmd > " , uses the fields below to assemble a temporary package.json
:package { :name "cljs-callback-heaven"
we target the : main profile from below
:description "N/A"
we target the : main profile from below
:repository {:type "git" :url "git+-callback-heaven.git"}
:keywords ["clojure" "clojurescript"]
:author "George Singer"
:license "MIT"
:private false
:homepage "-callback-heaven#readme"}}
:main {
:source-paths ["src"]
:compiler { :optimizations :advanced
:target :nodejs
:output-dir "out-advanced"
:output-to "target/cljs-callback-heaven.js"
:externs ["externs.js"]
:verbose true
:pretty-print true }}
:none {
:source-paths ["src"]
:compiler { :optimizations :none
:target :nodejs
:output-dir "out-none"
:output-to "target/cljs-callback-heaven-none.js"
:externs ["externs.js"]
:verbose true
:pretty-print true }}
:test-none {
:compiler { :optimizations :none
:target :nodejs
:output-dir "out-test-none"
:output-to "target/cljs-callback-heaven-test-none.js"
:externs ["externs.js"]
:verbose true
:main cljs-callback-heaven.runner
:pretty-print true }}
:test-advanced {
:compiler { :optimizations :advanced
:target :nodejs
:output-dir "out-test-advanced"
:output-to "target/cljs-callback-heaven-test-advanced.js"
:externs ["externs.js"]
:verbose true
:main cljs-callback-heaven.runner
:pretty-print true }}}})
|
6a15acbeb7be82f085d0cbb1fdc7513f2f7b702311c80ebe693d909befbd8815 | bennn/dissertation | for-each.rkt | #lang typed/racket
(require racket/performance-hint
"../unsafe.rkt"
"utils.rkt")
(provide (all-defined-out))
(define-syntax-rule (for-each-array+data-index ds-expr f-expr)
(let*: ([ds : Indexes ds-expr]
[dims : Index (vector-length ds)])
(define-syntax-rule (f js j)
((ann f-expr (Indexes Nonnegative-Fixnum -> Void)) js j))
(cond
[(= dims 0) (f ds 0)]
[else
(define: js : Indexes (make-vector dims 0))
(case dims
[(1) (define: d0 : Index (unsafe-vector-ref ds 0))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(unsafe-vector-set! js 0 j0)
(f js j0)
(j0-loop (+ j0 1))))]
[(2) (define: d0 : Index (unsafe-vector-ref ds 0))
(define: d1 : Index (unsafe-vector-ref ds 1))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(unsafe-vector-set! js 0 j0)
(let: j1-loop : Void ([j1 : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum j])
(cond [(j1 . < . d1)
(unsafe-vector-set! js 1 j1)
(f js j)
(j1-loop (+ j1 1) (unsafe-fx+ j 1))]
[else
(j0-loop (+ j0 1) j)]))))]
[else (let: i-loop : Nonnegative-Fixnum ([i : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum 0])
(cond [(i . < . dims)
(define: di : Index (unsafe-vector-ref ds i))
(let: ji-loop : Nonnegative-Fixnum ([ji : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum j])
(cond [(ji . < . di)
(unsafe-vector-set! js i ji)
(ji-loop (+ ji 1) (i-loop (+ i 1) j))]
[else j]))]
[else (f js j)
(unsafe-fx+ j 1)]))
(void)])])))
(define-syntax-rule (for-each-array-index ds-expr f-expr)
(let*: ([ds : Indexes ds-expr]
[dims : Index (vector-length ds)])
(define-syntax-rule (f js)
((ann f-expr (Indexes -> Void)) js))
(cond
[(= dims 0) (f ds)]
[else
(define: js : Indexes (make-vector dims 0))
(case dims
[(1) (define: d0 : Index (unsafe-vector-ref ds 0))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(unsafe-vector-set! js 0 j0)
(f js)
(j0-loop (+ j0 1))))]
[(2) (define: d0 : Index (unsafe-vector-ref ds 0))
(define: d1 : Index (unsafe-vector-ref ds 1))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(unsafe-vector-set! js 0 j0)
(let: j1-loop : Void ([j1 : Nonnegative-Fixnum 0])
(cond [(j1 . < . d1)
(unsafe-vector-set! js 1 j1)
(f js)
(j1-loop (+ j1 1))]
[else
(j0-loop (+ j0 1))]))))]
[else (let: i-loop : Void ([i : Nonnegative-Fixnum 0])
(cond [(i . < . dims)
(define: di : Index (unsafe-vector-ref ds i))
(let: ji-loop : Void ([ji : Nonnegative-Fixnum 0])
(when (ji . < . di)
(unsafe-vector-set! js i ji)
(i-loop (+ i 1))
(ji-loop (+ ji 1))))]
[else (f js)]))])])))
(define-syntax-rule (for-each-data-index ds-expr f-expr)
(let*: ([ds : Indexes ds-expr]
[dims : Index (vector-length ds)])
(define-syntax-rule (f j)
((ann f-expr (Nonnegative-Fixnum -> Void)) j))
(cond
[(= dims 0) (f 0)]
[else
(case dims
[(1) (define: d0 : Index (unsafe-vector-ref ds 0))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(f j0)
(j0-loop (+ j0 1))))]
[(2) (define: d0 : Index (unsafe-vector-ref ds 0))
(define: d1 : Index (unsafe-vector-ref ds 1))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(let: j1-loop : Void ([j1 : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum j])
(cond [(j1 . < . d1)
(f j)
(j1-loop (+ j1 1) (unsafe-fx+ j 1))]
[else
(j0-loop (+ j0 1) j)]))))]
[else (let: i-loop : Nonnegative-Fixnum ([i : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum 0])
(cond [(i . < . dims)
(define: di : Index (unsafe-vector-ref ds i))
(let: ji-loop : Nonnegative-Fixnum ([ji : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum j])
(cond [(ji . < . di)
(ji-loop (+ ji 1) (i-loop (+ i 1) j))]
[else j]))]
[else (f j)
(unsafe-fx+ j 1)]))
(void)])])))
(define-syntax-rule (inline-build-array-data ds-expr g-expr A)
(let*: ([ds : Indexes ds-expr]
[dims : Index (vector-length ds)])
(define-syntax-rule (g js j)
((ann g-expr (Indexes Nonnegative-Fixnum -> A)) js j))
(define: size : Nonnegative-Fixnum
(let: loop : Nonnegative-Fixnum ([k : Nonnegative-Fixnum 0] [size : Nonnegative-Fixnum 1])
(cond [(k . < . dims) (loop (+ k 1) (unsafe-fx* size (unsafe-vector-ref ds k)))]
[else size])))
(cond [(= size 0) (ann (vector) (Vectorof A))]
[else
(define: js0 : Indexes (make-vector dims 0))
(define: vs : (Vectorof A) (make-vector size (g js0 0)))
(for-each-array+data-index ds (λ (js j) (unsafe-vector-set! vs j (g js j))))
vs])))
| null | https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/QA/math-test/array-map/natural/math/private/array/for-each.rkt | racket | #lang typed/racket
(require racket/performance-hint
"../unsafe.rkt"
"utils.rkt")
(provide (all-defined-out))
(define-syntax-rule (for-each-array+data-index ds-expr f-expr)
(let*: ([ds : Indexes ds-expr]
[dims : Index (vector-length ds)])
(define-syntax-rule (f js j)
((ann f-expr (Indexes Nonnegative-Fixnum -> Void)) js j))
(cond
[(= dims 0) (f ds 0)]
[else
(define: js : Indexes (make-vector dims 0))
(case dims
[(1) (define: d0 : Index (unsafe-vector-ref ds 0))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(unsafe-vector-set! js 0 j0)
(f js j0)
(j0-loop (+ j0 1))))]
[(2) (define: d0 : Index (unsafe-vector-ref ds 0))
(define: d1 : Index (unsafe-vector-ref ds 1))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(unsafe-vector-set! js 0 j0)
(let: j1-loop : Void ([j1 : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum j])
(cond [(j1 . < . d1)
(unsafe-vector-set! js 1 j1)
(f js j)
(j1-loop (+ j1 1) (unsafe-fx+ j 1))]
[else
(j0-loop (+ j0 1) j)]))))]
[else (let: i-loop : Nonnegative-Fixnum ([i : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum 0])
(cond [(i . < . dims)
(define: di : Index (unsafe-vector-ref ds i))
(let: ji-loop : Nonnegative-Fixnum ([ji : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum j])
(cond [(ji . < . di)
(unsafe-vector-set! js i ji)
(ji-loop (+ ji 1) (i-loop (+ i 1) j))]
[else j]))]
[else (f js j)
(unsafe-fx+ j 1)]))
(void)])])))
(define-syntax-rule (for-each-array-index ds-expr f-expr)
(let*: ([ds : Indexes ds-expr]
[dims : Index (vector-length ds)])
(define-syntax-rule (f js)
((ann f-expr (Indexes -> Void)) js))
(cond
[(= dims 0) (f ds)]
[else
(define: js : Indexes (make-vector dims 0))
(case dims
[(1) (define: d0 : Index (unsafe-vector-ref ds 0))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(unsafe-vector-set! js 0 j0)
(f js)
(j0-loop (+ j0 1))))]
[(2) (define: d0 : Index (unsafe-vector-ref ds 0))
(define: d1 : Index (unsafe-vector-ref ds 1))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(unsafe-vector-set! js 0 j0)
(let: j1-loop : Void ([j1 : Nonnegative-Fixnum 0])
(cond [(j1 . < . d1)
(unsafe-vector-set! js 1 j1)
(f js)
(j1-loop (+ j1 1))]
[else
(j0-loop (+ j0 1))]))))]
[else (let: i-loop : Void ([i : Nonnegative-Fixnum 0])
(cond [(i . < . dims)
(define: di : Index (unsafe-vector-ref ds i))
(let: ji-loop : Void ([ji : Nonnegative-Fixnum 0])
(when (ji . < . di)
(unsafe-vector-set! js i ji)
(i-loop (+ i 1))
(ji-loop (+ ji 1))))]
[else (f js)]))])])))
(define-syntax-rule (for-each-data-index ds-expr f-expr)
(let*: ([ds : Indexes ds-expr]
[dims : Index (vector-length ds)])
(define-syntax-rule (f j)
((ann f-expr (Nonnegative-Fixnum -> Void)) j))
(cond
[(= dims 0) (f 0)]
[else
(case dims
[(1) (define: d0 : Index (unsafe-vector-ref ds 0))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(f j0)
(j0-loop (+ j0 1))))]
[(2) (define: d0 : Index (unsafe-vector-ref ds 0))
(define: d1 : Index (unsafe-vector-ref ds 1))
(let: j0-loop : Void ([j0 : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum 0])
(when (j0 . < . d0)
(let: j1-loop : Void ([j1 : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum j])
(cond [(j1 . < . d1)
(f j)
(j1-loop (+ j1 1) (unsafe-fx+ j 1))]
[else
(j0-loop (+ j0 1) j)]))))]
[else (let: i-loop : Nonnegative-Fixnum ([i : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum 0])
(cond [(i . < . dims)
(define: di : Index (unsafe-vector-ref ds i))
(let: ji-loop : Nonnegative-Fixnum ([ji : Nonnegative-Fixnum 0]
[j : Nonnegative-Fixnum j])
(cond [(ji . < . di)
(ji-loop (+ ji 1) (i-loop (+ i 1) j))]
[else j]))]
[else (f j)
(unsafe-fx+ j 1)]))
(void)])])))
(define-syntax-rule (inline-build-array-data ds-expr g-expr A)
(let*: ([ds : Indexes ds-expr]
[dims : Index (vector-length ds)])
(define-syntax-rule (g js j)
((ann g-expr (Indexes Nonnegative-Fixnum -> A)) js j))
(define: size : Nonnegative-Fixnum
(let: loop : Nonnegative-Fixnum ([k : Nonnegative-Fixnum 0] [size : Nonnegative-Fixnum 1])
(cond [(k . < . dims) (loop (+ k 1) (unsafe-fx* size (unsafe-vector-ref ds k)))]
[else size])))
(cond [(= size 0) (ann (vector) (Vectorof A))]
[else
(define: js0 : Indexes (make-vector dims 0))
(define: vs : (Vectorof A) (make-vector size (g js0 0)))
(for-each-array+data-index ds (λ (js j) (unsafe-vector-set! vs j (g js j))))
vs])))
| |
507fdfc8efeaed6a939ffebacf622400f79511924a7cc6a559baa19a93333538 | cedlemo/OCaml-GI-ctypes-bindings-generator | Font_button.ml | open Ctypes
open Foreign
type t = unit ptr
let t_typ : t typ = ptr void
let create =
foreign "gtk_font_button_new" (void @-> returning (ptr Widget.t_typ))
let create_with_font =
foreign "gtk_font_button_new_with_font" (string @-> returning (ptr Widget.t_typ))
let get_font_name =
foreign "gtk_font_button_get_font_name" (t_typ @-> returning (string_opt))
let get_show_size =
foreign "gtk_font_button_get_show_size" (t_typ @-> returning (bool))
let get_show_style =
foreign "gtk_font_button_get_show_style" (t_typ @-> returning (bool))
let get_title =
foreign "gtk_font_button_get_title" (t_typ @-> returning (string_opt))
let get_use_font =
foreign "gtk_font_button_get_use_font" (t_typ @-> returning (bool))
let get_use_size =
foreign "gtk_font_button_get_use_size" (t_typ @-> returning (bool))
let set_font_name =
foreign "gtk_font_button_set_font_name" (t_typ @-> string @-> returning (bool))
let set_show_size =
foreign "gtk_font_button_set_show_size" (t_typ @-> bool @-> returning (void))
let set_show_style =
foreign "gtk_font_button_set_show_style" (t_typ @-> bool @-> returning (void))
let set_title =
foreign "gtk_font_button_set_title" (t_typ @-> string @-> returning (void))
let set_use_font =
foreign "gtk_font_button_set_use_font" (t_typ @-> bool @-> returning (void))
let set_use_size =
foreign "gtk_font_button_set_use_size" (t_typ @-> bool @-> returning (void))
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Font_button.ml | ocaml | open Ctypes
open Foreign
type t = unit ptr
let t_typ : t typ = ptr void
let create =
foreign "gtk_font_button_new" (void @-> returning (ptr Widget.t_typ))
let create_with_font =
foreign "gtk_font_button_new_with_font" (string @-> returning (ptr Widget.t_typ))
let get_font_name =
foreign "gtk_font_button_get_font_name" (t_typ @-> returning (string_opt))
let get_show_size =
foreign "gtk_font_button_get_show_size" (t_typ @-> returning (bool))
let get_show_style =
foreign "gtk_font_button_get_show_style" (t_typ @-> returning (bool))
let get_title =
foreign "gtk_font_button_get_title" (t_typ @-> returning (string_opt))
let get_use_font =
foreign "gtk_font_button_get_use_font" (t_typ @-> returning (bool))
let get_use_size =
foreign "gtk_font_button_get_use_size" (t_typ @-> returning (bool))
let set_font_name =
foreign "gtk_font_button_set_font_name" (t_typ @-> string @-> returning (bool))
let set_show_size =
foreign "gtk_font_button_set_show_size" (t_typ @-> bool @-> returning (void))
let set_show_style =
foreign "gtk_font_button_set_show_style" (t_typ @-> bool @-> returning (void))
let set_title =
foreign "gtk_font_button_set_title" (t_typ @-> string @-> returning (void))
let set_use_font =
foreign "gtk_font_button_set_use_font" (t_typ @-> bool @-> returning (void))
let set_use_size =
foreign "gtk_font_button_set_use_size" (t_typ @-> bool @-> returning (void))
| |
12a59009881396344774537f13dca8a22dc3cee03faac891e0d3596678a00e31 | Nazar65/guix-phps-channel | php.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2016 - 2020 < >
Copyright © 2016 < >
Copyright © 2018 , 2020 , 2021 < >
Copyright © 2018 < >
Copyright © 2019 < >
Copyright © 2020 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (packages php)
#:use-module (gnu packages)
#:use-module (gnu packages algebra)
#:use-module (gnu packages aspell)
#:use-module (gnu packages base)
#:use-module (gnu packages bison)
#:use-module (gnu packages compression)
#:use-module (gnu packages curl)
#:use-module (gnu packages cyrus-sasl)
#:use-module (gnu packages crypto)
#:use-module (gnu packages databases)
#:use-module (gnu packages dbm)
#:use-module (gnu packages fontutils)
#:use-module (gnu packages gd)
#:use-module (gnu packages gettext)
#:use-module (gnu packages glib)
#:use-module (gnu packages gnupg)
#:use-module (gnu packages icu4c)
#:use-module (gnu packages image)
#:use-module (gnu packages linux)
#:use-module (gnu packages multiprecision)
#:use-module (gnu packages openldap)
#:use-module (gnu packages pcre)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages readline)
#:use-module (gnu packages sqlite)
#:use-module (gnu packages textutils)
#:use-module (gnu packages tls)
#:use-module (gnu packages web)
#:use-module (gnu packages xml)
#:use-module (gnu packages xorg)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu)
#:use-module (guix utils)
#:use-module ((guix licenses) #:prefix license:))
(define-public php74
(package
(name "php")
(version "7.4.3")
(home-page "/")
(source (origin
(method url-fetch)
(uri (string-append home-page "distributions/"
"php-" version ".tar.xz"))
(sha256
(base32
"0alqqs5hn4jmz1adrbysbw92n55nkw6f9vfivqj829kwhxnqa7yg"))
(modules '((guix build utils)))
(patches
(search-patches "patches/php74.patch"))
(snippet
'(with-directory-excursion "ext"
(for-each delete-file-recursively
;; Some of the bundled libraries have no proper upstream.
;; Ideally we'd extract these out as separate packages:
;;"mbstring/libmbfl"
;;"date/lib"
;;"bcmath/libbcmath"
" fileinfo / libmagic " ; a patched version of
'("gd/libgd"
"pcre/pcre2lib"
"xmlrpc/libxmlrpc"))
#t))))
(build-system gnu-build-system)
(arguments
`(#:configure-flags
(let-syntax ((with (syntax-rules ()
((_ option input)
(string-append option "="
(assoc-ref %build-inputs input))))))
(list (with "--with-bz2" "bzip2")
(with "--with-curl" "curl")
(with "--with-gdbm" "gdbm")
libintl.h
(with "--with-gmp" "gmp")
(with "--with-ldap" "openldap")
(with "--with-ldap-sasl" "cyrus-sasl")
(with "--with-pdo-pgsql" "postgresql")
(with "--with-pdo-sqlite" "sqlite")
(with "--with-pgsql" "postgresql")
PHP ’s Pspell extension , while retaining its current name ,
now uses the Aspell library .
(with "--with-pspell" "aspell")
(with "--with-readline" "readline")
(with "--with-sqlite3" "sqlite")
(with "--with-tidy" "tidy")
(with "--with-xsl" "libxslt")
(with "--with-zlib-dir" "zlib")
;; We could add "--with-snmp", but it requires netsnmp that
;; we don't have a package for. It is used to build the snmp
;; extension of php.
"--with-external-pcre"
"--with-external-gd"
"--with-iconv"
"--with-openssl"
"--with-mysqli" ; Required for, e.g. wordpress
"--with-pdo-mysql"
"--with-zip"
"--with-zlib"
"--with-sodium"
Required for , e.g. Zabbix frontend
"--enable-calendar"
"--enable-iconv"
"--enable-ctype"
"--enable-dom"
"--enable-json"
"--enable-hash"
"--enable-libxml"
"--enable-mbstring"
"--enable-openssl"
"--enable-prce"
"--enable-pdo_mysql"
"--enable-simplexml"
"--enable-sodium"
"--enable-xmlwriter"
"--enable-xsl"
"--enable-zip"
"--enable-libxml"
"--enable-lib-openssl"
"--enable-fileinfo"
"--enable-dba=shared"
"--enable-exif"
"--enable-flatfile"
"--enable-fpm"
"--enable-ftp"
"--enable-soap"
"--enable-gd"
"--enable-inifile"
"--enable-intl"
"--enable-mbstring"
"--enable-pcntl"
"--enable-sockets"))
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'do-not-record-build-flags
(lambda _
;; Prevent configure flags from being stored and causing
;; unnecessary runtime dependencies.
(substitute* "scripts/php-config.in"
(("@CONFIGURE_OPTIONS@") "")
(("@PHP_LDFLAGS@") ""))
This file has ISO-8859 - 1 encoding .
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* "main/build-defs.h.in"
(("@CONFIGURE_COMMAND@") "(omitted)")))
#t))
(add-before 'build 'patch-/bin/sh
(lambda _
(substitute* '("run-tests.php" "ext/standard/proc_open.c")
(("/bin/sh") (which "sh")))
#t))
(add-before 'check 'prepare-tests
(lambda _
;; Some of these files have ISO-8859-1 encoding, whereas others
;; use ASCII, so we can't use a "catch-all" find-files here.
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* '("ext/mbstring/tests/mb_send_mail02.phpt"
"ext/mbstring/tests/mb_send_mail04.phpt"
"ext/mbstring/tests/mb_send_mail05.phpt"
"ext/mbstring/tests/mb_send_mail06.phpt")
(("/bin/cat") (which "cat"))))
(substitute* '("ext/mbstring/tests/mb_send_mail01.phpt"
"ext/mbstring/tests/mb_send_mail03.phpt"
"ext/mbstring/tests/bug52681.phpt"
"ext/standard/tests/general_functions/bug34794.phpt"
"ext/standard/tests/general_functions/bug44667.phpt"
"ext/standard/tests/general_functions/proc_open.phpt")
(("/bin/cat") (which "cat")))
;; The encoding of this file is not recognized, so we simply drop it.
(delete-file "ext/mbstring/tests/mb_send_mail07.phpt")
(substitute* "ext/standard/tests/streams/bug60602.phpt"
(("'ls'") (string-append "'" (which "ls") "'")))
,@(if (string-prefix? "arm" (or (%current-system)
(%current-target-system)))
Drop tests known to fail on armhf .
'((for-each delete-file
(list
"ext/calendar/tests/unixtojd_error1.phpt"
;; arm can be a lot slower, so a time-related test fails
"ext/fileinfo/tests/cve-2014-3538-nojit.phpt"
"ext/pcntl/tests/pcntl_unshare_01.phpt"
"ext/pcre/tests/bug76514.phpt"
"ext/pcre/tests/preg_match_error3.phpt"
"ext/pcre/tests/cache_limit.phpt"
"ext/sockets/tests/socket_getopt.phpt"
"ext/sockets/tests/socket_sendrecvmsg_error.phpt"
"ext/standard/tests/general_functions/var_export-locale.phpt"
"ext/standard/tests/general_functions/var_export_basic1.phpt"
"ext/intl/tests/timezone_getErrorCodeMessage_basic.phpt"
"ext/intl/tests/timezone_getOffset_error.phpt"
"sapi/cli/tests/cli_process_title_unix.phpt"
"sapi/fpm/tests/socket-ipv4-fallback.phpt"
"sapi/cli/tests/upload_2G.phpt"
"Zend/tests/concat_003.phpt")))
'())
;; Drop tests that are known to fail.
(for-each delete-file
'("ext/posix/tests/posix_getgrgid.phpt" ; Requires /etc/group.
"ext/posix/tests/posix_getgrnam_basic.phpt" ; Requires /etc/group.
"ext/sockets/tests/bug63000.phpt" ; Fails to detect OS.
"ext/sockets/tests/socket_shutdown.phpt" ; Requires DNS.
"ext/sockets/tests/socket_send.phpt" ; Likewise.
"ext/sockets/tests/mcast_ipv4_recv.phpt" ; Requires multicast.
;; These needs /etc/services.
"ext/standard/tests/general_functions/getservbyname_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_variation1.phpt"
;; And /etc/protocols.
"ext/standard/tests/network/getprotobyname_basic.phpt"
"ext/standard/tests/network/getprotobynumber_basic.phpt"
;; And exotic locales.
"ext/standard/tests/strings/setlocale_basic1.phpt"
"sapi/fpm/tests/socket-ipv4-fallback.phpt"
"ext/ftp/tests/ftp_site_basic.phpt"
"ext/intl/tests/locale_filter_matches3.phpt"
"ext/intl/tests/locale_get_display_name7.phpt"
"ext/intl/tests/rbbiter_getBinaryRules_basic2.phpt"
"ext/intl/tests/rbbiter_getRules_basic2.phpt"
"ext/intl/tests/locale_lookup_variant2.phpt"
"ext/intl/tests/locale_get_display_language.phpt"
"ext/standard/tests/strings/setlocale_basic2.phpt"
"ext/standard/tests/strings/setlocale_basic3.phpt"
"ext/standard/tests/strings/setlocale_variation1.phpt"
"ext/dom/tests/DOMDocument_loadXML_error1.phpt"
"ext/dom/tests/DOMDocument_load_error1.phpt"
"ext/dom/tests/bug43364.phpt"
"ext/libxml/tests/bug61367-read.phpt"
"ext/libxml/tests/libxml_disable_entity_loader.phpt"
"ext/openssl/tests/openssl_x509_checkpurpose_basic.phpt"
This failing test is skipped on PHP 's as it is
;; supposedly inaccurate.
"ext/standard/tests/file/disk_free_space_basic.phpt"
;; The following test erroneously expect the link
;; count of a sub-directory to increase compared to
;; its parent.
"ext/standard/tests/file/lstat_stat_variation8.phpt"
;; This tests whether microseconds ‘differ enough’ and
;; fails inconsistently on ‘fast’ machines.
"ext/date/tests/bug73837.phpt"
;; XXX: These gd tests fails. Likely because our version
;; is different from the (patched) bundled one.
;; Here, gd quits immediately after "fatal libpng error"; while the
;; test expects it to additionally return a "setjmp" error and warning.
"ext/gd/tests/bug39780_extern.phpt"
"ext/gd/tests/libgd00086_extern.phpt"
;; Extra newline in gd-png output.
"ext/gd/tests/bug45799.phpt"
;; Test expects generic "gd warning" but gets the actual function name.
"ext/gd/tests/createfromwbmp2_extern.phpt"
This bug should have been fixed in gd 2.2.2 .
;; Is it a regression?
"ext/gd/tests/bug65148.phpt"
This bug should have been fixed in the gd 2.2
;; series. Perhaps a regression introduced by gd
;; 2.3.0?
"ext/gd/tests/bug66590.phpt"
;; This bug should have been fixed in the php-5.5
;; series. Perhaps a regression introduced by gd
;; 2.3.0?
"ext/gd/tests/bug70102.phpt"
;; This bug should have been fixed in the php-5.6
;; series. Perhaps a regression introduced by gd
;; 2.3.0?
"ext/gd/tests/bug73869.phpt"
;; Some WebP related tests fail.
"ext/gd/tests/webp_basic.phpt"
"ext/gd/tests/imagecreatefromstring_webp.phpt"
;; Expected error message, but from the wrong function
"ext/gd/tests/bug77269.phpt"
TODO : Enable these when libgd is built with xpm support .
"ext/gd/tests/xpm2gd.phpt"
"ext/gd/tests/xpm2jpg.phpt"
"ext/gd/tests/xpm2png.phpt"
Whitespace difference , probably caused by a very
;; long store path
"ext/gd/tests/bug77479.phpt"
Expected invalid XBM but got EOF before image was
;; complete. It's a warning in both cases and test
;; result is the same.
"ext/gd/tests/bug77973.phpt"
;; Test expects uninitialized value to be false, but
;; instead gets "resource(5) of type (gd)".
"ext/gd/tests/bug79067.phpt"
;; The following test fails with "The image size
;; differs: expected 114x115, got 117x117".
"ext/gd/tests/bug79068.phpt"
;; XXX: These iconv tests have the expected outcome,
;; but with different error messages.
Expects " illegal character " , instead gets " unknown error ( 84 ) " .
"ext/iconv/tests/bug52211.phpt"
"ext/iconv/tests/bug60494.phpt"
Expects " wrong charset " , gets unknown error ( 22 ) .
"ext/iconv/tests/iconv_strlen_error2.phpt"
"ext/iconv/tests/iconv_substr_error2.phpt"
;; Expects conversion error, gets "error condition Termsig=11".
"ext/iconv/tests/iconv_strpos_error2.phpt"
"ext/iconv/tests/iconv_strrpos_error2.phpt"
Expects " invalid sequence " but got
;; "unknown error".
"ext/iconv/tests/bug76249.phpt"
;; XXX: These test failures appear legitimate, needs investigation.
;; open_basedir() restriction failure.
"ext/curl/tests/bug61948-unix.phpt"
;; Expects a false boolean, gets empty array from glob().
"ext/standard/tests/file/bug41655_1.phpt"
"ext/standard/tests/file/glob_variation5.phpt"
;; The test expects an Array, but instead get the contents(?).
"ext/gd/tests/bug43073.phpt"
;; imagettftext() returns wrong coordinates.
"ext/gd/tests/bug48732-mb.phpt"
"ext/gd/tests/bug48732.phpt"
Similarly for ( ) .
"ext/gd/tests/bug48801-mb.phpt"
"ext/gd/tests/bug48801.phpt"
;; Different expected output from imagecolorallocate().
"ext/gd/tests/bug53504.phpt"
;; Wrong image size after scaling an image.
"ext/gd/tests/bug73272.phpt"
;; Expects iconv to detect illegal characters, instead gets
" unknown error ( 84 ) " and heap corruption ( ! ) .
"ext/iconv/tests/bug48147.phpt"
;; Expects illegal character ".", gets "=?utf-8?Q?."
"ext/iconv/tests/bug51250.phpt"
;; iconv throws "buffer length exceeded" on some string checks.
"ext/iconv/tests/iconv_mime_encode.phpt"
;; file_get_contents(): iconv stream filter
;; ("ISO-8859-1"=>"UTF-8") unknown error.
"ext/standard/tests/file/bug43008.phpt"
;; Table data not created in sqlite(?).
"ext/pdo_sqlite/tests/bug_42589.phpt"
;; Renicing a process fails in the build environment.
"ext/standard/tests/general_functions/proc_nice_basic.phpt"))
;; Skip tests requiring network access.
(setenv "SKIP_ONLINE_TESTS" "1")
;; Without this variable, 'make test' passes regardless of failures.
(setenv "REPORT_EXIT_STATUS" "1")
;; Skip tests requiring I/O facilities that are unavailable in the
;; build environment
(setenv "SKIP_IO_CAPTURE_TESTS" "1")
#t)))
#:test-target "test"))
(inputs
`(("aspell" ,aspell)
("bzip2" ,bzip2)
("curl" ,curl)
("cyrus-sasl" ,cyrus-sasl)
("gd" ,gd)
("gdbm" ,gdbm)
("glibc" ,glibc)
("gmp" ,gmp)
("gnutls" ,gnutls)
("icu4c" ,icu4c)
("libgcrypt" ,libgcrypt)
("libpng" ,libpng)
("libsodium" ,libsodium)
("libxml2" ,libxml2)
("libxslt" ,libxslt)
("libx11" ,libx11)
("libzip" ,libzip)
("oniguruma" ,oniguruma)
("openldap" ,openldap)
("openssl" ,openssl)
("pcre" ,pcre2)
("postgresql" ,postgresql)
("readline" ,readline)
("sqlite" ,sqlite)
("tidy" ,tidy)
("zlib" ,zlib)))
(native-inputs
`(("pkg-config" ,pkg-config)
("bison" ,bison)
("intltool" ,intltool)
("procps" ,procps))) ; for tests
(synopsis "PHP programming language")
(description
"PHP (PHP Hypertext Processor) is a server-side (CGI) scripting
language designed primarily for web development but is also used as
a general-purpose programming language. PHP code may be embedded into
HTML code, or it can be used in combination with various web template
systems, web content management systems and web frameworks." )
(license (list
(license:non-copyleft "file") ; The PHP license.
The Zend license .
license:lgpl2.1 ; ext/mbstring/libmbfl
license:lgpl2.1+ ; ext/bcmath/libbcmath
license:bsd-2 ; ext/fileinfo/libmagic
license:expat)))) ; ext/date/lib
(define-public php81
(package
(name "php")
(version "8.1.14")
(home-page "/")
(source (origin
(method url-fetch)
(uri (string-append home-page "distributions/"
"php-" version ".tar.xz"))
(sha256
(base32
"06jaf845l9mnbz39h30i50c2rx29xj9cwj687a8qb1nmfal4fvp1"))
(modules '((guix build utils)))
(snippet
'(with-directory-excursion "ext"
(for-each delete-file-recursively
;; Some of the bundled libraries have no proper upstream.
;; Ideally we'd extract these out as separate packages:
;;"mbstring/libmbfl"
;;"date/lib"
;;"bcmath/libbcmath"
" fileinfo / libmagic " ; a patched version of
'("gd/libgd"
"pcre/pcre2lib"))
#t))))
(build-system gnu-build-system)
(arguments
`(#:configure-flags
(let-syntax ((with (syntax-rules ()
((_ option input)
(string-append option "="
(assoc-ref %build-inputs input))))))
(list (with "--with-bz2" "bzip2")
(with "--with-curl" "curl")
(with "--with-gdbm" "gdbm")
libintl.h
(with "--with-gmp" "gmp")
(with "--with-ldap" "openldap")
(with "--with-ldap-sasl" "cyrus-sasl")
(with "--with-pdo-pgsql" "postgresql")
(with "--with-pdo-sqlite" "sqlite")
(with "--with-pgsql" "postgresql")
PHP ’s Pspell extension , while retaining its current name ,
now uses the Aspell library .
(with "--with-pspell" "aspell")
(with "--with-readline" "readline")
(with "--with-sqlite3" "sqlite")
(with "--with-tidy" "tidy")
(with "--with-xsl" "libxslt")
(with "--with-zlib-dir" "zlib")
;; We could add "--with-snmp", but it requires netsnmp that
;; we don't have a package for. It is used to build the snmp
;; extension of php.
"--with-external-pcre"
"--with-external-gd"
"--with-iconv"
"--with-openssl"
"--with-mysqli" ; Required for, e.g. wordpress
"--with-pdo-mysql"
"--with-zip"
"--with-zlib"
"--with-sodium"
Required for , e.g. Zabbix frontend
"--enable-calendar"
"--enable-iconv"
"--enable-ctype"
"--enable-dom"
"--enable-json"
"--enable-hash"
"--enable-libxml"
"--enable-mbstring"
"--enable-openssl"
"--enable-prce"
"--enable-pdo_mysql"
"--enable-simplexml"
"--enable-sodium"
"--enable-xmlwriter"
"--enable-xsl"
"--enable-zip"
"--enable-libxml"
"--enable-lib-openssl"
"--enable-fileinfo"
"--enable-dba=shared"
"--enable-exif"
"--enable-flatfile"
"--enable-fpm"
"--enable-ftp"
"--enable-soap"
"--enable-gd"
"--enable-inifile"
"--enable-intl"
"--enable-mbstring"
"--enable-pcntl"
"--enable-sockets"))
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'do-not-record-build-flags
(lambda _
;; Prevent configure flags from being stored and causing
;; unnecessary runtime dependencies.
(substitute* "scripts/php-config.in"
(("@CONFIGURE_OPTIONS@") "")
(("@PHP_LDFLAGS@") ""))
This file has ISO-8859 - 1 encoding .
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* "main/build-defs.h.in"
(("@CONFIGURE_COMMAND@") "(omitted)")))
#t))
(add-before 'build 'patch-/bin/sh
(lambda _
(substitute* '("run-tests.php" "ext/standard/proc_open.c")
(("/bin/sh") (which "sh")))
#t))
(add-before 'check 'prepare-tests
(lambda _
;; Some of these files have ISO-8859-1 encoding, whereas others
;; use ASCII, so we can't use a "catch-all" find-files here.
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* '("ext/mbstring/tests/mb_send_mail02.phpt"
"ext/mbstring/tests/mb_send_mail04.phpt"
"ext/mbstring/tests/mb_send_mail05.phpt"
"ext/mbstring/tests/mb_send_mail06.phpt")
(("/bin/cat") (which "cat"))))
(substitute* '("ext/mbstring/tests/mb_send_mail01.phpt"
"ext/mbstring/tests/mb_send_mail03.phpt"
"ext/mbstring/tests/bug52681.phpt"
"ext/standard/tests/general_functions/bug34794.phpt"
"ext/standard/tests/general_functions/bug44667.phpt"
"ext/standard/tests/general_functions/proc_open.phpt")
(("/bin/cat") (which "cat")))
;; The encoding of this file is not recognized, so we simply drop it.
(delete-file "ext/mbstring/tests/mb_send_mail07.phpt")
(substitute* "ext/standard/tests/streams/bug60602.phpt"
(("'ls'") (string-append "'" (which "ls") "'")))
,@(if (string-prefix? "arm" (or (%current-system)
(%current-target-system)))
Drop tests known to fail on armhf .
'((for-each delete-file
(list
"ext/calendar/tests/unixtojd_error1.phpt"
;; arm can be a lot slower, so a time-related test fails
"ext/fileinfo/tests/cve-2014-3538-nojit.phpt"
"ext/pcntl/tests/pcntl_unshare_01.phpt"
"ext/pcre/tests/bug76514.phpt"
"ext/pcre/tests/preg_match_error3.phpt"
"ext/pcre/tests/cache_limit.phpt"
"ext/sockets/tests/socket_getopt.phpt"
"ext/sockets/tests/socket_sendrecvmsg_error.phpt"
"ext/standard/tests/general_functions/var_export-locale.phpt"
"ext/standard/tests/general_functions/var_export_basic1.phpt"
"ext/intl/tests/timezone_getErrorCodeMessage_basic.phpt"
"ext/intl/tests/timezone_getOffset_error.phpt"
"sapi/cli/tests/cli_process_title_unix.phpt"
"sapi/cli/tests/upload_2G.phpt"
"Zend/tests/concat_003.phpt")))
'())
;; Drop tests that are known to fail.
(for-each delete-file
'("ext/posix/tests/posix_getgrgid.phpt" ; Requires /etc/group.
"ext/posix/tests/posix_getgrnam_basic.phpt" ; Requires /etc/group.
"ext/sockets/tests/bug63000.phpt" ; Fails to detect OS.
"ext/sockets/tests/socket_shutdown.phpt" ; Requires DNS.
"ext/sockets/tests/socket_send.phpt" ; Likewise.
"ext/sockets/tests/mcast_ipv4_recv.phpt" ; Requires multicast.
;; These needs /etc/services.
"ext/standard/tests/general_functions/getservbyname_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_variation1.phpt"
;; And /etc/protocols.
"ext/standard/tests/network/getprotobyname_basic.phpt"
"ext/standard/tests/network/getprotobynumber_basic.phpt"
;; And exotic locales.
"ext/standard/tests/strings/setlocale_basic1.phpt"
"ext/intl/tests/locale_filter_matches3.phpt"
"ext/intl/tests/locale_get_display_name7.phpt"
"ext/intl/tests/rbbiter_getBinaryRules_basic2.phpt"
"ext/intl/tests/rbbiter_getRules_basic2.phpt"
"ext/intl/tests/locale_lookup_variant2.phpt"
"ext/intl/tests/locale_get_display_language.phpt"
"ext/standard/tests/strings/setlocale_basic2.phpt"
"ext/standard/tests/strings/setlocale_basic3.phpt"
"ext/standard/tests/strings/setlocale_variation1.phpt"
"ext/dom/tests/DOMDocument_loadXML_error1.phpt"
"ext/dom/tests/DOMDocument_load_error1.phpt"
"ext/dom/tests/bug43364.phpt"
"ext/libxml/tests/bug61367-read.phpt"
"ext/libxml/tests/libxml_disable_entity_loader.phpt"
"ext/openssl/tests/openssl_x509_checkpurpose_basic.phpt"
This failing test is skipped on PHP 's as it is
;; supposedly inaccurate.
"ext/standard/tests/file/disk_free_space_basic.phpt"
;; The following test erroneously expect the link
;; count of a sub-directory to increase compared to
;; its parent.
"ext/standard/tests/file/lstat_stat_variation8.phpt"
;; This tests whether microseconds ‘differ enough’ and
;; fails inconsistently on ‘fast’ machines.
"ext/date/tests/bug73837.phpt"
;; XXX: These gd tests fails. Likely because our version
;; is different from the (patched) bundled one.
;; Here, gd quits immediately after "fatal libpng error"; while the
;; test expects it to additionally return a "setjmp" error and warning.
"ext/gd/tests/bug39780_extern.phpt"
"ext/gd/tests/libgd00086_extern.phpt"
"ext/gd/tests/imagecreatefromstring_avif.phpt"
"ext/gd/tests/bug77272.phpt"
"ext/gd/tests/bug72339.phpt"
"ext/gd/tests/bug66356.phpt"
"ext/gd/tests/avif_decode_encode.phpt"
;; Extra newline in gd-png output.
"ext/gd/tests/bug45799.phpt"
;; Test expects generic "gd warning" but gets the actual function name.
"ext/gd/tests/createfromwbmp2_extern.phpt"
This bug should have been fixed in gd 2.2.2 .
;; Is it a regression?
"ext/gd/tests/bug65148.phpt"
This bug should have been fixed in the gd 2.2
;; series. Perhaps a regression introduced by gd
;; 2.3.0?
"ext/gd/tests/bug66590.phpt"
;; This bug should have been fixed in the php-5.5
;; series. Perhaps a regression introduced by gd
;; 2.3.0?
"ext/gd/tests/bug70102.phpt"
;; This bug should have been fixed in the php-5.6
;; series. Perhaps a regression introduced by gd
;; 2.3.0?
"ext/gd/tests/bug73869.phpt"
;; Some WebP related tests fail.
"ext/gd/tests/webp_basic.phpt"
"ext/gd/tests/imagecreatefromstring_webp.phpt"
;; Expected error message, but from the wrong function
"ext/gd/tests/bug77269.phpt"
TODO : Enable these when libgd is built with xpm support .
"ext/gd/tests/xpm2gd.phpt"
"ext/gd/tests/xpm2jpg.phpt"
"ext/gd/tests/xpm2png.phpt"
Whitespace difference , probably caused by a very
;; long store path
"ext/gd/tests/bug77479.phpt"
Expected invalid XBM but got EOF before image was
;; complete. It's a warning in both cases and test
;; result is the same.
"ext/gd/tests/bug77973.phpt"
;; Test expects uninitialized value to be false, but
;; instead gets "resource(5) of type (gd)".
"ext/gd/tests/bug79067.phpt"
;; The following test fails with "The image size
;; differs: expected 114x115, got 117x117".
"ext/gd/tests/bug79068.phpt"
;; XXX: These iconv tests have the expected outcome,
;; but with different error messages.
Expects " illegal character " , instead gets " unknown error ( 84 ) " .
"ext/iconv/tests/bug52211.phpt"
"ext/iconv/tests/bug60494.phpt"
Expects " wrong charset " , gets unknown error ( 22 ) .
"ext/iconv/tests/iconv_strlen_error2.phpt"
"ext/iconv/tests/iconv_substr_error2.phpt"
;; Expects conversion error, gets "error condition Termsig=11".
"ext/iconv/tests/iconv_strpos_error2.phpt"
"ext/iconv/tests/iconv_strrpos_error2.phpt"
Expects " invalid sequence " but got
;; "unknown error".
"ext/iconv/tests/bug76249.phpt"
;; XXX: These test failures appear legitimate, needs investigation.
;; open_basedir() restriction failure.
"ext/curl/tests/bug61948-unix.phpt"
"ext/curl/tests/curl_setopt_ssl.phpt"
;; Expects a false boolean, gets empty array from glob().
"ext/standard/tests/file/bug41655_1.phpt"
"ext/standard/tests/file/glob_variation5.phpt"
;; The test expects an Array, but instead get the contents(?).
"ext/gd/tests/bug43073.phpt"
;; imagettftext() returns wrong coordinates.
"ext/gd/tests/bug48732-mb.phpt"
"ext/gd/tests/bug48732.phpt"
Similarly for ( ) .
"ext/gd/tests/bug48801-mb.phpt"
"ext/gd/tests/bug48801.phpt"
;; Different expected output from imagecolorallocate().
"ext/gd/tests/bug53504.phpt"
;; Wrong image size after scaling an image.
"ext/gd/tests/bug73272.phpt"
;; Expects iconv to detect illegal characters, instead gets
" unknown error ( 84 ) " and heap corruption ( ! ) .
"ext/iconv/tests/bug48147.phpt"
;; Expects illegal character ".", gets "=?utf-8?Q?."
"ext/iconv/tests/bug51250.phpt"
;; iconv throws "buffer length exceeded" on some string checks.
"ext/iconv/tests/iconv_mime_encode.phpt"
;; file_get_contents(): iconv stream filter
;; ("ISO-8859-1"=>"UTF-8") unknown error.
"ext/standard/tests/file/bug43008.phpt"
;; Table data not created in sqlite(?).
"ext/pdo_sqlite/tests/bug_42589.phpt"
;; Renicing a process fails in the build environment.
"ext/standard/tests/general_functions/proc_nice_basic.phpt"))
;; Skip tests requiring network access.
(setenv "SKIP_ONLINE_TESTS" "1")
;; Without this variable, 'make test' passes regardless of failures.
(setenv "REPORT_EXIT_STATUS" "1")
;; Skip tests requiring I/O facilities that are unavailable in the
;; build environment
(setenv "SKIP_IO_CAPTURE_TESTS" "1")
#t)))
#:test-target "test"))
(inputs
`(("aspell" ,aspell)
("bzip2" ,bzip2)
("curl" ,curl)
("cyrus-sasl" ,cyrus-sasl)
("gd" ,gd)
("gdbm" ,gdbm)
("glibc" ,glibc)
("gmp" ,gmp)
("gnutls" ,gnutls)
("icu4c" ,icu4c)
("libgcrypt" ,libgcrypt)
("libpng" ,libpng)
("libsodium" ,libsodium)
("libxml2" ,libxml2)
("libxslt" ,libxslt)
("libx11" ,libx11)
("libzip" ,libzip)
("oniguruma" ,oniguruma)
("openldap" ,openldap)
("openssl" ,openssl)
("pcre" ,pcre2)
("postgresql" ,postgresql)
("readline" ,readline)
("sqlite" ,sqlite)
("tidy" ,tidy)
("zlib" ,zlib)))
(native-inputs
`(("pkg-config" ,pkg-config)
("bison" ,bison)
("intltool" ,intltool)
("procps" ,procps))) ; for tests
(synopsis "PHP programming language")
(description
"PHP (PHP Hypertext Processor) is a server-side (CGI) scripting
language designed primarily for web development but is also used as
a general-purpose programming language. PHP code may be embedded into
HTML code, or it can be used in combination with various web template
systems, web content management systems and web frameworks." )
(license (list
(license:non-copyleft "file") ; The PHP license.
The Zend license .
license:lgpl2.1 ; ext/mbstring/libmbfl
license:lgpl2.1+ ; ext/bcmath/libbcmath
license:bsd-2 ; ext/fileinfo/libmagic
license:expat)))) ; ext/date/lib
(define-public php82
(package/inherit
php81
(name "php")
(version "8.2.1")
(home-page "/")
(source (origin
(method url-fetch)
(uri (string-append home-page "distributions/"
"php-" version ".tar.xz"))
(sha256
(base32
"1bqp5hhww7kxvqvamgjbaxlx6p54igfz3xm0yq3vzjjnl3bkn3b5"))
(modules '((guix build utils)))
(snippet
'(with-directory-excursion "ext"
(for-each delete-file-recursively
;; Some of the bundled libraries have no proper upstream.
;; Ideally we'd extract these out as separate packages:
;;"mbstring/libmbfl"
;;"date/lib"
;;"bcmath/libbcmath"
" fileinfo / libmagic " ; a patched version of
'("gd/libgd"
"pcre/pcre2lib"))
#t))))
(arguments
`(#:configure-flags
(let-syntax ((with (syntax-rules ()
((_ option input)
(string-append option "="
(assoc-ref %build-inputs input))))))
(list (with "--with-bz2" "bzip2")
(with "--with-curl" "curl")
(with "--with-gdbm" "gdbm")
libintl.h
(with "--with-gmp" "gmp")
(with "--with-ldap" "openldap")
(with "--with-ldap-sasl" "cyrus-sasl")
(with "--with-pdo-pgsql" "postgresql")
(with "--with-pdo-sqlite" "sqlite")
(with "--with-pgsql" "postgresql")
PHP ’s Pspell extension , while retaining its current name ,
now uses the Aspell library .
(with "--with-pspell" "aspell")
(with "--with-readline" "readline")
(with "--with-sqlite3" "sqlite")
(with "--with-tidy" "tidy")
(with "--with-xsl" "libxslt")
(with "--with-zlib-dir" "zlib")
;; We could add "--with-snmp", but it requires netsnmp that
;; we don't have a package for. It is used to build the snmp
;; extension of php.
"--with-external-pcre"
"--with-external-gd"
"--with-iconv"
"--with-openssl"
"--with-mysqli" ; Required for, e.g. wordpress
"--with-pdo-mysql"
"--with-zip"
"--with-zlib"
"--with-sodium"
Required for , e.g. Zabbix frontend
"--enable-calendar"
"--enable-iconv"
"--enable-ctype"
"--enable-dom"
"--enable-json"
"--enable-hash"
"--enable-libxml"
"--enable-mbstring"
"--enable-openssl"
"--enable-prce"
"--enable-pdo_mysql"
"--enable-simplexml"
"--enable-sodium"
"--enable-xmlwriter"
"--enable-xsl"
"--enable-zip"
"--enable-libxml"
"--enable-lib-openssl"
"--enable-fileinfo"
"--enable-dba=shared"
"--enable-exif"
"--enable-flatfile"
"--enable-fpm"
"--enable-ftp"
"--enable-soap"
"--enable-gd"
"--enable-inifile"
"--enable-intl"
"--enable-mbstring"
"--enable-pcntl"
"--enable-sockets"))
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'do-not-record-build-flags
(lambda _
;; Prevent configure flags from being stored and causing
;; unnecessary runtime dependencies.
(substitute* "scripts/php-config.in"
(("@CONFIGURE_OPTIONS@") "")
(("@PHP_LDFLAGS@") ""))
This file has ISO-8859 - 1 encoding .
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* "main/build-defs.h.in"
(("@CONFIGURE_COMMAND@") "(omitted)")))
#t))
(add-before 'build 'patch-/bin/sh
(lambda _
(substitute* '("run-tests.php" "ext/standard/proc_open.c")
(("/bin/sh") (which "sh")))
#t))
(add-before 'check 'prepare-tests
(lambda _
;; Some of these files have ISO-8859-1 encoding, whereas others
;; use ASCII, so we can't use a "catch-all" find-files here.
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* '("ext/mbstring/tests/mb_send_mail02.phpt"
"ext/mbstring/tests/mb_send_mail04.phpt"
"ext/mbstring/tests/mb_send_mail05.phpt"
"ext/mbstring/tests/mb_send_mail06.phpt")
(("/bin/cat") (which "cat"))))
(substitute* '("ext/mbstring/tests/mb_send_mail01.phpt"
"ext/mbstring/tests/mb_send_mail03.phpt"
"ext/mbstring/tests/bug52681.phpt"
"ext/standard/tests/general_functions/bug34794.phpt"
"ext/standard/tests/general_functions/bug44667.phpt"
"ext/standard/tests/general_functions/proc_open.phpt")
(("/bin/cat") (which "cat")))
;; The encoding of this file is not recognized, so we simply drop it.
(delete-file "ext/mbstring/tests/mb_send_mail07.phpt")
(substitute* "ext/standard/tests/streams/bug60602.phpt"
(("'ls'") (string-append "'" (which "ls") "'")))
,@(if (string-prefix? "arm" (or (%current-system)
(%current-target-system)))
;; Drop tests known to fail on armhf.
'((for-each delete-file
(list
"ext/calendar/tests/unixtojd_error1.phpt"
;; arm can be a lot slower, so a time-related test fails
"ext/fileinfo/tests/cve-2014-3538-nojit.phpt"
"ext/pcntl/tests/pcntl_unshare_01.phpt"
"ext/pcre/tests/bug76514.phpt"
"ext/pcre/tests/preg_match_error3.phpt"
"ext/pcre/tests/cache_limit.phpt"
"ext/sockets/tests/socket_getopt.phpt"
"ext/sockets/tests/socket_sendrecvmsg_error.phpt"
"ext/standard/tests/general_functions/var_export-locale.phpt"
"ext/standard/tests/general_functions/var_export_basic1.phpt"
"ext/intl/tests/timezone_getErrorCodeMessage_basic.phpt"
"ext/intl/tests/timezone_getOffset_error.phpt"
"sapi/cli/tests/cli_process_title_unix.phpt"
"sapi/cli/tests/upload_2G.phpt"
"Zend/tests/concat_003.phpt")))
'())
;; Drop tests that are known to fail.
(for-each delete-file
'("ext/posix/tests/posix_getgrgid.phpt" ; Requires /etc/group.
"ext/posix/tests/posix_getgrnam_basic.phpt" ; Requires /etc/group.
"ext/sockets/tests/bug63000.phpt" ; Fails to detect OS.
"ext/sockets/tests/socket_shutdown.phpt" ; Requires DNS.
"ext/sockets/tests/socket_send.phpt" ; Likewise.
"ext/sockets/tests/mcast_ipv4_recv.phpt" ; Requires multicast.
;; These needs /etc/services.
"ext/standard/tests/general_functions/getservbyname_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_variation1.phpt"
;; And /etc/protocols.
"ext/standard/tests/network/getprotobyname_basic.phpt"
"ext/dba/tests/dba_gdbm_creation_matrix.phpt"
"ext/standard/tests/network/getprotobynumber_basic.phpt"
;; And exotic locales.
"ext/standard/tests/strings/setlocale_basic1.phpt"
"ext/intl/tests/locale_filter_matches3.phpt"
"ext/intl/tests/locale_get_display_name7.phpt"
"ext/intl/tests/rbbiter_getBinaryRules_basic2.phpt"
"ext/intl/tests/rbbiter_getRules_basic2.phpt"
"ext/intl/tests/locale_lookup_variant2.phpt"
"ext/intl/tests/locale_get_display_language.phpt"
"ext/standard/tests/strings/setlocale_basic2.phpt"
"ext/standard/tests/strings/setlocale_basic3.phpt"
"ext/standard/tests/strings/setlocale_variation1.phpt"
"ext/dom/tests/DOMDocument_loadXML_error1.phpt"
"ext/dom/tests/DOMDocument_load_error1.phpt"
"ext/dom/tests/bug43364.phpt"
"ext/libxml/tests/bug61367-read.phpt"
"ext/libxml/tests/libxml_disable_entity_loader.phpt"
"ext/openssl/tests/openssl_x509_checkpurpose_basic.phpt"
;; This failing test is skipped on PHP's CI as it is
;; supposedly inaccurate.
"ext/standard/tests/file/disk_free_space_basic.phpt"
;; The following test erroneously expect the link
;; count of a sub-directory to increase compared to
;; its parent.
"ext/standard/tests/file/lstat_stat_variation8.phpt"
;; This tests whether microseconds ‘differ enough’ and
;; fails inconsistently on ‘fast’ machines.
"ext/date/tests/bug73837.phpt"
;; XXX: These gd tests fails. Likely because our version
;; is different from the (patched) bundled one.
;; Here, gd quits immediately after "fatal libpng error"; while the
;; test expects it to additionally return a "setjmp" error and warning.
"ext/gd/tests/bug39780_extern.phpt"
"ext/gd/tests/libgd00086_extern.phpt"
"ext/gd/tests/imagecreatefromstring_avif.phpt"
"ext/gd/tests/bug77272.phpt"
"ext/gd/tests/bug72339.phpt"
"ext/gd/tests/bug66356.phpt"
"ext/gd/tests/avif_decode_encode.phpt"
;; Extra newline in gd-png output.
"ext/gd/tests/bug45799.phpt"
;; Test expects generic "gd warning" but gets the actual function name.
"ext/gd/tests/createfromwbmp2_extern.phpt"
;; This bug should have been fixed in gd 2.2.2.
;; Is it a regression?
"ext/gd/tests/bug65148.phpt"
;; This bug should have been fixed in the gd 2.2
;; series. Perhaps a regression introduced by gd
;; 2.3.0?
"ext/gd/tests/bug66590.phpt"
;; This bug should have been fixed in the php-5.5
;; series. Perhaps a regression introduced by gd
;; 2.3.0?
"ext/gd/tests/bug70102.phpt"
;; This bug should have been fixed in the php-5.6
;; series. Perhaps a regression introduced by gd
;; 2.3.0?
"ext/gd/tests/bug73869.phpt"
;; Some WebP related tests fail.
"ext/gd/tests/webp_basic.phpt"
"ext/gd/tests/imagecreatefromstring_webp.phpt"
;; TODO: Enable these when libgd is built with xpm support.
"ext/gd/tests/xpm2gd.phpt"
"ext/gd/tests/xpm2jpg.phpt"
"ext/gd/tests/xpm2png.phpt"
;; Whitespace difference, probably caused by a very
;; long store path
"ext/gd/tests/bug77479.phpt"
;; Expected invalid XBM but got EOF before image was
;; complete. It's a warning in both cases and test
;; result is the same.
"ext/gd/tests/bug77973.phpt"
;; Test expects uninitialized value to be false, but
;; instead gets "resource(5) of type (gd)".
"ext/gd/tests/bug79067.phpt"
;; The following test fails with "The image size
;; differs: expected 114x115, got 117x117".
"ext/gd/tests/bug79068.phpt"
;; XXX: These iconv tests have the expected outcome,
;; but with different error messages.
Expects " illegal character " , instead gets " unknown error ( 84 ) " .
"ext/iconv/tests/bug52211.phpt"
"ext/iconv/tests/bug60494.phpt"
Expects " wrong charset " , gets unknown error ( 22 ) .
"ext/iconv/tests/iconv_strlen_error2.phpt"
"ext/iconv/tests/iconv_substr_error2.phpt"
;; Expects conversion error, gets "error condition Termsig=11".
"ext/iconv/tests/iconv_strpos_error2.phpt"
"ext/iconv/tests/iconv_strrpos_error2.phpt"
Expects " invalid sequence " but got
;; "unknown error".
"ext/iconv/tests/bug76249.phpt"
;; XXX: These test failures appear legitimate, needs investigation.
;; open_basedir() restriction failure.
"ext/curl/tests/bug61948-unix.phpt"
"ext/curl/tests/curl_setopt_ssl.phpt"
;; Expects a false boolean, gets empty array from glob().
"ext/standard/tests/file/bug41655_1.phpt"
"ext/standard/tests/file/glob_variation5.phpt"
;; The test expects an Array, but instead get the contents(?).
"ext/gd/tests/bug43073.phpt"
;; imagettftext() returns wrong coordinates.
"ext/gd/tests/bug48732-mb.phpt"
"ext/gd/tests/bug48732.phpt"
;; Similarly for imageftbbox().
"ext/gd/tests/bug48801-mb.phpt"
"ext/gd/tests/bug48801.phpt"
;; Different expected output from imagecolorallocate().
"ext/gd/tests/bug53504.phpt"
;; Wrong image size after scaling an image.
"ext/gd/tests/bug73272.phpt"
;; Expects iconv to detect illegal characters, instead gets
" unknown error ( 84 ) " and heap corruption ( ! ) .
"ext/iconv/tests/bug48147.phpt"
;; Expects illegal character ".", gets "=?utf-8?Q?."
"ext/iconv/tests/bug51250.phpt"
;; iconv throws "buffer length exceeded" on some string checks.
"ext/iconv/tests/iconv_mime_encode.phpt"
;; file_get_contents(): iconv stream filter
;; ("ISO-8859-1"=>"UTF-8") unknown error.
"ext/standard/tests/file/bug43008.phpt"
;; Table data not created in sqlite(?).
"ext/pdo_sqlite/tests/bug_42589.phpt"
;; Renicing a process fails in the build environment.
"ext/standard/tests/general_functions/proc_nice_basic.phpt"))
;; Skip tests requiring network access.
(setenv "SKIP_ONLINE_TESTS" "1")
;; Without this variable, 'make test' passes regardless of failures.
(setenv "REPORT_EXIT_STATUS" "1")
;; Skip tests requiring I/O facilities that are unavailable in the
;; build environment
(setenv "SKIP_IO_CAPTURE_TESTS" "1")
#t)))
#:test-target "test"))))
| null | https://raw.githubusercontent.com/Nazar65/guix-phps-channel/04c182b5c254c3f89d680af1d6cd44e1a4f5d9ce/packages/php.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Some of the bundled libraries have no proper upstream.
Ideally we'd extract these out as separate packages:
"mbstring/libmbfl"
"date/lib"
"bcmath/libbcmath"
a patched version of
We could add "--with-snmp", but it requires netsnmp that
we don't have a package for. It is used to build the snmp
extension of php.
Required for, e.g. wordpress
Prevent configure flags from being stored and causing
unnecessary runtime dependencies.
Some of these files have ISO-8859-1 encoding, whereas others
use ASCII, so we can't use a "catch-all" find-files here.
The encoding of this file is not recognized, so we simply drop it.
arm can be a lot slower, so a time-related test fails
Drop tests that are known to fail.
Requires /etc/group.
Requires /etc/group.
Fails to detect OS.
Requires DNS.
Likewise.
Requires multicast.
These needs /etc/services.
And /etc/protocols.
And exotic locales.
supposedly inaccurate.
The following test erroneously expect the link
count of a sub-directory to increase compared to
its parent.
This tests whether microseconds ‘differ enough’ and
fails inconsistently on ‘fast’ machines.
XXX: These gd tests fails. Likely because our version
is different from the (patched) bundled one.
Here, gd quits immediately after "fatal libpng error"; while the
test expects it to additionally return a "setjmp" error and warning.
Extra newline in gd-png output.
Test expects generic "gd warning" but gets the actual function name.
Is it a regression?
series. Perhaps a regression introduced by gd
2.3.0?
This bug should have been fixed in the php-5.5
series. Perhaps a regression introduced by gd
2.3.0?
This bug should have been fixed in the php-5.6
series. Perhaps a regression introduced by gd
2.3.0?
Some WebP related tests fail.
Expected error message, but from the wrong function
long store path
complete. It's a warning in both cases and test
result is the same.
Test expects uninitialized value to be false, but
instead gets "resource(5) of type (gd)".
The following test fails with "The image size
differs: expected 114x115, got 117x117".
XXX: These iconv tests have the expected outcome,
but with different error messages.
Expects conversion error, gets "error condition Termsig=11".
"unknown error".
XXX: These test failures appear legitimate, needs investigation.
open_basedir() restriction failure.
Expects a false boolean, gets empty array from glob().
The test expects an Array, but instead get the contents(?).
imagettftext() returns wrong coordinates.
Different expected output from imagecolorallocate().
Wrong image size after scaling an image.
Expects iconv to detect illegal characters, instead gets
Expects illegal character ".", gets "=?utf-8?Q?."
iconv throws "buffer length exceeded" on some string checks.
file_get_contents(): iconv stream filter
("ISO-8859-1"=>"UTF-8") unknown error.
Table data not created in sqlite(?).
Renicing a process fails in the build environment.
Skip tests requiring network access.
Without this variable, 'make test' passes regardless of failures.
Skip tests requiring I/O facilities that are unavailable in the
build environment
for tests
The PHP license.
ext/mbstring/libmbfl
ext/bcmath/libbcmath
ext/fileinfo/libmagic
ext/date/lib
Some of the bundled libraries have no proper upstream.
Ideally we'd extract these out as separate packages:
"mbstring/libmbfl"
"date/lib"
"bcmath/libbcmath"
a patched version of
We could add "--with-snmp", but it requires netsnmp that
we don't have a package for. It is used to build the snmp
extension of php.
Required for, e.g. wordpress
Prevent configure flags from being stored and causing
unnecessary runtime dependencies.
Some of these files have ISO-8859-1 encoding, whereas others
use ASCII, so we can't use a "catch-all" find-files here.
The encoding of this file is not recognized, so we simply drop it.
arm can be a lot slower, so a time-related test fails
Drop tests that are known to fail.
Requires /etc/group.
Requires /etc/group.
Fails to detect OS.
Requires DNS.
Likewise.
Requires multicast.
These needs /etc/services.
And /etc/protocols.
And exotic locales.
supposedly inaccurate.
The following test erroneously expect the link
count of a sub-directory to increase compared to
its parent.
This tests whether microseconds ‘differ enough’ and
fails inconsistently on ‘fast’ machines.
XXX: These gd tests fails. Likely because our version
is different from the (patched) bundled one.
Here, gd quits immediately after "fatal libpng error"; while the
test expects it to additionally return a "setjmp" error and warning.
Extra newline in gd-png output.
Test expects generic "gd warning" but gets the actual function name.
Is it a regression?
series. Perhaps a regression introduced by gd
2.3.0?
This bug should have been fixed in the php-5.5
series. Perhaps a regression introduced by gd
2.3.0?
This bug should have been fixed in the php-5.6
series. Perhaps a regression introduced by gd
2.3.0?
Some WebP related tests fail.
Expected error message, but from the wrong function
long store path
complete. It's a warning in both cases and test
result is the same.
Test expects uninitialized value to be false, but
instead gets "resource(5) of type (gd)".
The following test fails with "The image size
differs: expected 114x115, got 117x117".
XXX: These iconv tests have the expected outcome,
but with different error messages.
Expects conversion error, gets "error condition Termsig=11".
"unknown error".
XXX: These test failures appear legitimate, needs investigation.
open_basedir() restriction failure.
Expects a false boolean, gets empty array from glob().
The test expects an Array, but instead get the contents(?).
imagettftext() returns wrong coordinates.
Different expected output from imagecolorallocate().
Wrong image size after scaling an image.
Expects iconv to detect illegal characters, instead gets
Expects illegal character ".", gets "=?utf-8?Q?."
iconv throws "buffer length exceeded" on some string checks.
file_get_contents(): iconv stream filter
("ISO-8859-1"=>"UTF-8") unknown error.
Table data not created in sqlite(?).
Renicing a process fails in the build environment.
Skip tests requiring network access.
Without this variable, 'make test' passes regardless of failures.
Skip tests requiring I/O facilities that are unavailable in the
build environment
for tests
The PHP license.
ext/mbstring/libmbfl
ext/bcmath/libbcmath
ext/fileinfo/libmagic
ext/date/lib
Some of the bundled libraries have no proper upstream.
Ideally we'd extract these out as separate packages:
"mbstring/libmbfl"
"date/lib"
"bcmath/libbcmath"
a patched version of
We could add "--with-snmp", but it requires netsnmp that
we don't have a package for. It is used to build the snmp
extension of php.
Required for, e.g. wordpress
Prevent configure flags from being stored and causing
unnecessary runtime dependencies.
Some of these files have ISO-8859-1 encoding, whereas others
use ASCII, so we can't use a "catch-all" find-files here.
The encoding of this file is not recognized, so we simply drop it.
arm can be a lot slower, so a time-related test fails
Drop tests that are known to fail.
Requires /etc/group.
Requires /etc/group.
Fails to detect OS.
Requires DNS.
Likewise.
Requires multicast.
These needs /etc/services.
And /etc/protocols.
And exotic locales.
supposedly inaccurate.
The following test erroneously expect the link
count of a sub-directory to increase compared to
its parent.
This tests whether microseconds ‘differ enough’ and
fails inconsistently on ‘fast’ machines.
XXX: These gd tests fails. Likely because our version
is different from the (patched) bundled one.
Here, gd quits immediately after "fatal libpng error"; while the
test expects it to additionally return a "setjmp" error and warning.
Extra newline in gd-png output.
Test expects generic "gd warning" but gets the actual function name.
Is it a regression?
series. Perhaps a regression introduced by gd
2.3.0?
This bug should have been fixed in the php-5.5
series. Perhaps a regression introduced by gd
2.3.0?
This bug should have been fixed in the php-5.6
series. Perhaps a regression introduced by gd
2.3.0?
Some WebP related tests fail.
long store path
complete. It's a warning in both cases and test
result is the same.
Test expects uninitialized value to be false, but
instead gets "resource(5) of type (gd)".
The following test fails with "The image size
differs: expected 114x115, got 117x117".
XXX: These iconv tests have the expected outcome,
but with different error messages.
Expects conversion error, gets "error condition Termsig=11".
"unknown error".
XXX: These test failures appear legitimate, needs investigation.
open_basedir() restriction failure.
Expects a false boolean, gets empty array from glob().
The test expects an Array, but instead get the contents(?).
imagettftext() returns wrong coordinates.
Different expected output from imagecolorallocate().
Wrong image size after scaling an image.
Expects iconv to detect illegal characters, instead gets
Expects illegal character ".", gets "=?utf-8?Q?."
iconv throws "buffer length exceeded" on some string checks.
file_get_contents(): iconv stream filter
("ISO-8859-1"=>"UTF-8") unknown error.
Table data not created in sqlite(?).
Renicing a process fails in the build environment.
Skip tests requiring network access.
Without this variable, 'make test' passes regardless of failures.
Skip tests requiring I/O facilities that are unavailable in the
build environment | Copyright © 2016 - 2020 < >
Copyright © 2016 < >
Copyright © 2018 , 2020 , 2021 < >
Copyright © 2018 < >
Copyright © 2019 < >
Copyright © 2020 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
;; Module declaration for this channel's PHP packages.  It imports the
;; Guix package collections that supply PHP's build- and run-time
;; dependencies (compression, databases, TLS, XML, gd, ...), plus the
;; (guix ...) machinery for defining and downloading packages.
(define-module (packages php)
#:use-module (gnu packages)
#:use-module (gnu packages algebra)
#:use-module (gnu packages aspell)
#:use-module (gnu packages base)
#:use-module (gnu packages bison)
#:use-module (gnu packages compression)
#:use-module (gnu packages curl)
#:use-module (gnu packages cyrus-sasl)
#:use-module (gnu packages crypto)
#:use-module (gnu packages databases)
#:use-module (gnu packages dbm)
#:use-module (gnu packages fontutils)
#:use-module (gnu packages gd)
#:use-module (gnu packages gettext)
#:use-module (gnu packages glib)
#:use-module (gnu packages gnupg)
#:use-module (gnu packages icu4c)
#:use-module (gnu packages image)
#:use-module (gnu packages linux)
#:use-module (gnu packages multiprecision)
#:use-module (gnu packages openldap)
#:use-module (gnu packages pcre)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages readline)
#:use-module (gnu packages sqlite)
#:use-module (gnu packages textutils)
#:use-module (gnu packages tls)
#:use-module (gnu packages web)
#:use-module (gnu packages xml)
#:use-module (gnu packages xorg)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu)
#:use-module (guix utils)
;; License values are referenced as license:php-3.01 etc. to avoid
;; clashing with package names.
#:use-module ((guix licenses) #:prefix license:))
(define-public php74
(package
(name "php")
(version "7.4.3")
(home-page "/")
(source (origin
(method url-fetch)
(uri (string-append home-page "distributions/"
"php-" version ".tar.xz"))
(sha256
(base32
"0alqqs5hn4jmz1adrbysbw92n55nkw6f9vfivqj829kwhxnqa7yg"))
(modules '((guix build utils)))
(patches
(search-patches "patches/php74.patch"))
(snippet
'(with-directory-excursion "ext"
(for-each delete-file-recursively
'("gd/libgd"
"pcre/pcre2lib"
"xmlrpc/libxmlrpc"))
#t))))
(build-system gnu-build-system)
(arguments
`(#:configure-flags
(let-syntax ((with (syntax-rules ()
((_ option input)
(string-append option "="
(assoc-ref %build-inputs input))))))
(list (with "--with-bz2" "bzip2")
(with "--with-curl" "curl")
(with "--with-gdbm" "gdbm")
libintl.h
(with "--with-gmp" "gmp")
(with "--with-ldap" "openldap")
(with "--with-ldap-sasl" "cyrus-sasl")
(with "--with-pdo-pgsql" "postgresql")
(with "--with-pdo-sqlite" "sqlite")
(with "--with-pgsql" "postgresql")
;; PHP's Pspell extension, while retaining its current name,
;; now uses the Aspell library.
(with "--with-pspell" "aspell")
(with "--with-readline" "readline")
(with "--with-sqlite3" "sqlite")
(with "--with-tidy" "tidy")
(with "--with-xsl" "libxslt")
(with "--with-zlib-dir" "zlib")
"--with-external-pcre"
"--with-external-gd"
"--with-iconv"
"--with-openssl"
"--with-pdo-mysql"
"--with-zip"
"--with-zlib"
"--with-sodium"
;; Required for, e.g., the Zabbix frontend
"--enable-calendar"
"--enable-iconv"
"--enable-ctype"
"--enable-dom"
"--enable-json"
"--enable-hash"
"--enable-libxml"
"--enable-mbstring"
"--enable-openssl"
"--enable-prce"
"--enable-pdo_mysql"
"--enable-simplexml"
"--enable-sodium"
"--enable-xmlwriter"
"--enable-xsl"
"--enable-zip"
"--enable-libxml"
"--enable-lib-openssl"
"--enable-fileinfo"
"--enable-dba=shared"
"--enable-exif"
"--enable-flatfile"
"--enable-fpm"
"--enable-ftp"
"--enable-soap"
"--enable-gd"
"--enable-inifile"
"--enable-intl"
"--enable-mbstring"
"--enable-pcntl"
"--enable-sockets"))
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'do-not-record-build-flags
(lambda _
(substitute* "scripts/php-config.in"
(("@CONFIGURE_OPTIONS@") "")
(("@PHP_LDFLAGS@") ""))
;; This file has ISO-8859-1 encoding.
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* "main/build-defs.h.in"
(("@CONFIGURE_COMMAND@") "(omitted)")))
#t))
(add-before 'build 'patch-/bin/sh
(lambda _
(substitute* '("run-tests.php" "ext/standard/proc_open.c")
(("/bin/sh") (which "sh")))
#t))
(add-before 'check 'prepare-tests
(lambda _
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* '("ext/mbstring/tests/mb_send_mail02.phpt"
"ext/mbstring/tests/mb_send_mail04.phpt"
"ext/mbstring/tests/mb_send_mail05.phpt"
"ext/mbstring/tests/mb_send_mail06.phpt")
(("/bin/cat") (which "cat"))))
(substitute* '("ext/mbstring/tests/mb_send_mail01.phpt"
"ext/mbstring/tests/mb_send_mail03.phpt"
"ext/mbstring/tests/bug52681.phpt"
"ext/standard/tests/general_functions/bug34794.phpt"
"ext/standard/tests/general_functions/bug44667.phpt"
"ext/standard/tests/general_functions/proc_open.phpt")
(("/bin/cat") (which "cat")))
(delete-file "ext/mbstring/tests/mb_send_mail07.phpt")
(substitute* "ext/standard/tests/streams/bug60602.phpt"
(("'ls'") (string-append "'" (which "ls") "'")))
,@(if (string-prefix? "arm" (or (%current-system)
(%current-target-system)))
;; Drop tests known to fail on armhf.
'((for-each delete-file
(list
"ext/calendar/tests/unixtojd_error1.phpt"
"ext/fileinfo/tests/cve-2014-3538-nojit.phpt"
"ext/pcntl/tests/pcntl_unshare_01.phpt"
"ext/pcre/tests/bug76514.phpt"
"ext/pcre/tests/preg_match_error3.phpt"
"ext/pcre/tests/cache_limit.phpt"
"ext/sockets/tests/socket_getopt.phpt"
"ext/sockets/tests/socket_sendrecvmsg_error.phpt"
"ext/standard/tests/general_functions/var_export-locale.phpt"
"ext/standard/tests/general_functions/var_export_basic1.phpt"
"ext/intl/tests/timezone_getErrorCodeMessage_basic.phpt"
"ext/intl/tests/timezone_getOffset_error.phpt"
"sapi/cli/tests/cli_process_title_unix.phpt"
"sapi/fpm/tests/socket-ipv4-fallback.phpt"
"sapi/cli/tests/upload_2G.phpt"
"Zend/tests/concat_003.phpt")))
'())
(for-each delete-file
"ext/standard/tests/general_functions/getservbyname_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_variation1.phpt"
"ext/standard/tests/network/getprotobyname_basic.phpt"
"ext/standard/tests/network/getprotobynumber_basic.phpt"
"ext/standard/tests/strings/setlocale_basic1.phpt"
"sapi/fpm/tests/socket-ipv4-fallback.phpt"
"ext/ftp/tests/ftp_site_basic.phpt"
"ext/intl/tests/locale_filter_matches3.phpt"
"ext/intl/tests/locale_get_display_name7.phpt"
"ext/intl/tests/rbbiter_getBinaryRules_basic2.phpt"
"ext/intl/tests/rbbiter_getRules_basic2.phpt"
"ext/intl/tests/locale_lookup_variant2.phpt"
"ext/intl/tests/locale_get_display_language.phpt"
"ext/standard/tests/strings/setlocale_basic2.phpt"
"ext/standard/tests/strings/setlocale_basic3.phpt"
"ext/standard/tests/strings/setlocale_variation1.phpt"
"ext/dom/tests/DOMDocument_loadXML_error1.phpt"
"ext/dom/tests/DOMDocument_load_error1.phpt"
"ext/dom/tests/bug43364.phpt"
"ext/libxml/tests/bug61367-read.phpt"
"ext/libxml/tests/libxml_disable_entity_loader.phpt"
"ext/openssl/tests/openssl_x509_checkpurpose_basic.phpt"
;; This failing test is skipped on PHP's CI as it is
"ext/standard/tests/file/disk_free_space_basic.phpt"
"ext/standard/tests/file/lstat_stat_variation8.phpt"
"ext/date/tests/bug73837.phpt"
"ext/gd/tests/bug39780_extern.phpt"
"ext/gd/tests/libgd00086_extern.phpt"
"ext/gd/tests/bug45799.phpt"
"ext/gd/tests/createfromwbmp2_extern.phpt"
;; This bug should have been fixed in gd 2.2.2.
"ext/gd/tests/bug65148.phpt"
;; This bug should have been fixed in the gd 2.2
"ext/gd/tests/bug66590.phpt"
"ext/gd/tests/bug70102.phpt"
"ext/gd/tests/bug73869.phpt"
"ext/gd/tests/webp_basic.phpt"
"ext/gd/tests/imagecreatefromstring_webp.phpt"
"ext/gd/tests/bug77269.phpt"
;; TODO: Enable these when libgd is built with xpm support.
"ext/gd/tests/xpm2gd.phpt"
"ext/gd/tests/xpm2jpg.phpt"
"ext/gd/tests/xpm2png.phpt"
;; Whitespace difference, probably caused by a very
"ext/gd/tests/bug77479.phpt"
;; Expected invalid XBM but got EOF before image was
"ext/gd/tests/bug77973.phpt"
"ext/gd/tests/bug79067.phpt"
"ext/gd/tests/bug79068.phpt"
Expects " illegal character " , instead gets " unknown error ( 84 ) " .
"ext/iconv/tests/bug52211.phpt"
"ext/iconv/tests/bug60494.phpt"
Expects " wrong charset " , gets unknown error ( 22 ) .
"ext/iconv/tests/iconv_strlen_error2.phpt"
"ext/iconv/tests/iconv_substr_error2.phpt"
"ext/iconv/tests/iconv_strpos_error2.phpt"
"ext/iconv/tests/iconv_strrpos_error2.phpt"
Expects " invalid sequence " but got
"ext/iconv/tests/bug76249.phpt"
"ext/curl/tests/bug61948-unix.phpt"
"ext/standard/tests/file/bug41655_1.phpt"
"ext/standard/tests/file/glob_variation5.phpt"
"ext/gd/tests/bug43073.phpt"
"ext/gd/tests/bug48732-mb.phpt"
"ext/gd/tests/bug48732.phpt"
;; Similarly for imageftbbox().
"ext/gd/tests/bug48801-mb.phpt"
"ext/gd/tests/bug48801.phpt"
"ext/gd/tests/bug53504.phpt"
"ext/gd/tests/bug73272.phpt"
" unknown error ( 84 ) " and heap corruption ( ! ) .
"ext/iconv/tests/bug48147.phpt"
"ext/iconv/tests/bug51250.phpt"
"ext/iconv/tests/iconv_mime_encode.phpt"
"ext/standard/tests/file/bug43008.phpt"
"ext/pdo_sqlite/tests/bug_42589.phpt"
"ext/standard/tests/general_functions/proc_nice_basic.phpt"))
(setenv "SKIP_ONLINE_TESTS" "1")
(setenv "REPORT_EXIT_STATUS" "1")
(setenv "SKIP_IO_CAPTURE_TESTS" "1")
#t)))
#:test-target "test"))
(inputs
`(("aspell" ,aspell)
("bzip2" ,bzip2)
("curl" ,curl)
("cyrus-sasl" ,cyrus-sasl)
("gd" ,gd)
("gdbm" ,gdbm)
("glibc" ,glibc)
("gmp" ,gmp)
("gnutls" ,gnutls)
("icu4c" ,icu4c)
("libgcrypt" ,libgcrypt)
("libpng" ,libpng)
("libsodium" ,libsodium)
("libxml2" ,libxml2)
("libxslt" ,libxslt)
("libx11" ,libx11)
("libzip" ,libzip)
("oniguruma" ,oniguruma)
("openldap" ,openldap)
("openssl" ,openssl)
("pcre" ,pcre2)
("postgresql" ,postgresql)
("readline" ,readline)
("sqlite" ,sqlite)
("tidy" ,tidy)
("zlib" ,zlib)))
(native-inputs
`(("pkg-config" ,pkg-config)
("bison" ,bison)
("intltool" ,intltool)
(synopsis "PHP programming language")
(description
"PHP (PHP Hypertext Processor) is a server-side (CGI) scripting
language designed primarily for web development but is also used as
a general-purpose programming language. PHP code may be embedded into
HTML code, or it can be used in combination with various web template
systems, web content management systems and web frameworks." )
(license (list
;; The Zend license.
(define-public php81
(package
(name "php")
(version "8.1.14")
(home-page "/")
(source (origin
(method url-fetch)
(uri (string-append home-page "distributions/"
"php-" version ".tar.xz"))
(sha256
(base32
"06jaf845l9mnbz39h30i50c2rx29xj9cwj687a8qb1nmfal4fvp1"))
(modules '((guix build utils)))
(snippet
'(with-directory-excursion "ext"
(for-each delete-file-recursively
'("gd/libgd"
"pcre/pcre2lib"))
#t))))
(build-system gnu-build-system)
(arguments
`(#:configure-flags
(let-syntax ((with (syntax-rules ()
((_ option input)
(string-append option "="
(assoc-ref %build-inputs input))))))
(list (with "--with-bz2" "bzip2")
(with "--with-curl" "curl")
(with "--with-gdbm" "gdbm")
libintl.h
(with "--with-gmp" "gmp")
(with "--with-ldap" "openldap")
(with "--with-ldap-sasl" "cyrus-sasl")
(with "--with-pdo-pgsql" "postgresql")
(with "--with-pdo-sqlite" "sqlite")
(with "--with-pgsql" "postgresql")
;; PHP's Pspell extension, while retaining its current name,
;; now uses the Aspell library.
(with "--with-pspell" "aspell")
(with "--with-readline" "readline")
(with "--with-sqlite3" "sqlite")
(with "--with-tidy" "tidy")
(with "--with-xsl" "libxslt")
(with "--with-zlib-dir" "zlib")
"--with-external-pcre"
"--with-external-gd"
"--with-iconv"
"--with-openssl"
"--with-pdo-mysql"
"--with-zip"
"--with-zlib"
"--with-sodium"
;; Required for, e.g., the Zabbix frontend
"--enable-calendar"
"--enable-iconv"
"--enable-ctype"
"--enable-dom"
"--enable-json"
"--enable-hash"
"--enable-libxml"
"--enable-mbstring"
"--enable-openssl"
"--enable-prce"
"--enable-pdo_mysql"
"--enable-simplexml"
"--enable-sodium"
"--enable-xmlwriter"
"--enable-xsl"
"--enable-zip"
"--enable-libxml"
"--enable-lib-openssl"
"--enable-fileinfo"
"--enable-dba=shared"
"--enable-exif"
"--enable-flatfile"
"--enable-fpm"
"--enable-ftp"
"--enable-soap"
"--enable-gd"
"--enable-inifile"
"--enable-intl"
"--enable-mbstring"
"--enable-pcntl"
"--enable-sockets"))
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'do-not-record-build-flags
(lambda _
(substitute* "scripts/php-config.in"
(("@CONFIGURE_OPTIONS@") "")
(("@PHP_LDFLAGS@") ""))
;; This file has ISO-8859-1 encoding.
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* "main/build-defs.h.in"
(("@CONFIGURE_COMMAND@") "(omitted)")))
#t))
(add-before 'build 'patch-/bin/sh
(lambda _
(substitute* '("run-tests.php" "ext/standard/proc_open.c")
(("/bin/sh") (which "sh")))
#t))
(add-before 'check 'prepare-tests
(lambda _
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* '("ext/mbstring/tests/mb_send_mail02.phpt"
"ext/mbstring/tests/mb_send_mail04.phpt"
"ext/mbstring/tests/mb_send_mail05.phpt"
"ext/mbstring/tests/mb_send_mail06.phpt")
(("/bin/cat") (which "cat"))))
(substitute* '("ext/mbstring/tests/mb_send_mail01.phpt"
"ext/mbstring/tests/mb_send_mail03.phpt"
"ext/mbstring/tests/bug52681.phpt"
"ext/standard/tests/general_functions/bug34794.phpt"
"ext/standard/tests/general_functions/bug44667.phpt"
"ext/standard/tests/general_functions/proc_open.phpt")
(("/bin/cat") (which "cat")))
(delete-file "ext/mbstring/tests/mb_send_mail07.phpt")
(substitute* "ext/standard/tests/streams/bug60602.phpt"
(("'ls'") (string-append "'" (which "ls") "'")))
,@(if (string-prefix? "arm" (or (%current-system)
(%current-target-system)))
Drop tests known to fail on armhf .
'((for-each delete-file
(list
"ext/calendar/tests/unixtojd_error1.phpt"
"ext/fileinfo/tests/cve-2014-3538-nojit.phpt"
"ext/pcntl/tests/pcntl_unshare_01.phpt"
"ext/pcre/tests/bug76514.phpt"
"ext/pcre/tests/preg_match_error3.phpt"
"ext/pcre/tests/cache_limit.phpt"
"ext/sockets/tests/socket_getopt.phpt"
"ext/sockets/tests/socket_sendrecvmsg_error.phpt"
"ext/standard/tests/general_functions/var_export-locale.phpt"
"ext/standard/tests/general_functions/var_export_basic1.phpt"
"ext/intl/tests/timezone_getErrorCodeMessage_basic.phpt"
"ext/intl/tests/timezone_getOffset_error.phpt"
"sapi/cli/tests/cli_process_title_unix.phpt"
"sapi/cli/tests/upload_2G.phpt"
"Zend/tests/concat_003.phpt")))
'())
(for-each delete-file
"ext/standard/tests/general_functions/getservbyname_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_variation1.phpt"
"ext/standard/tests/network/getprotobyname_basic.phpt"
"ext/standard/tests/network/getprotobynumber_basic.phpt"
"ext/standard/tests/strings/setlocale_basic1.phpt"
"ext/intl/tests/locale_filter_matches3.phpt"
"ext/intl/tests/locale_get_display_name7.phpt"
"ext/intl/tests/rbbiter_getBinaryRules_basic2.phpt"
"ext/intl/tests/rbbiter_getRules_basic2.phpt"
"ext/intl/tests/locale_lookup_variant2.phpt"
"ext/intl/tests/locale_get_display_language.phpt"
"ext/standard/tests/strings/setlocale_basic2.phpt"
"ext/standard/tests/strings/setlocale_basic3.phpt"
"ext/standard/tests/strings/setlocale_variation1.phpt"
"ext/dom/tests/DOMDocument_loadXML_error1.phpt"
"ext/dom/tests/DOMDocument_load_error1.phpt"
"ext/dom/tests/bug43364.phpt"
"ext/libxml/tests/bug61367-read.phpt"
"ext/libxml/tests/libxml_disable_entity_loader.phpt"
"ext/openssl/tests/openssl_x509_checkpurpose_basic.phpt"
This failing test is skipped on PHP 's as it is
"ext/standard/tests/file/disk_free_space_basic.phpt"
"ext/standard/tests/file/lstat_stat_variation8.phpt"
"ext/date/tests/bug73837.phpt"
"ext/gd/tests/bug39780_extern.phpt"
"ext/gd/tests/libgd00086_extern.phpt"
"ext/gd/tests/imagecreatefromstring_avif.phpt"
"ext/gd/tests/bug77272.phpt"
"ext/gd/tests/bug72339.phpt"
"ext/gd/tests/bug66356.phpt"
"ext/gd/tests/avif_decode_encode.phpt"
"ext/gd/tests/bug45799.phpt"
"ext/gd/tests/createfromwbmp2_extern.phpt"
This bug should have been fixed in gd 2.2.2 .
"ext/gd/tests/bug65148.phpt"
This bug should have been fixed in the gd 2.2
"ext/gd/tests/bug66590.phpt"
"ext/gd/tests/bug70102.phpt"
"ext/gd/tests/bug73869.phpt"
"ext/gd/tests/webp_basic.phpt"
"ext/gd/tests/imagecreatefromstring_webp.phpt"
"ext/gd/tests/bug77269.phpt"
TODO : Enable these when libgd is built with xpm support .
"ext/gd/tests/xpm2gd.phpt"
"ext/gd/tests/xpm2jpg.phpt"
"ext/gd/tests/xpm2png.phpt"
Whitespace difference , probably caused by a very
"ext/gd/tests/bug77479.phpt"
Expected invalid XBM but got EOF before image was
"ext/gd/tests/bug77973.phpt"
"ext/gd/tests/bug79067.phpt"
"ext/gd/tests/bug79068.phpt"
;; Expects "illegal character", instead gets "unknown error (84)".
"ext/iconv/tests/bug52211.phpt"
"ext/iconv/tests/bug60494.phpt"
Expects " wrong charset " , gets unknown error ( 22 ) .
"ext/iconv/tests/iconv_strlen_error2.phpt"
"ext/iconv/tests/iconv_substr_error2.phpt"
"ext/iconv/tests/iconv_strpos_error2.phpt"
"ext/iconv/tests/iconv_strrpos_error2.phpt"
Expects " invalid sequence " but got
"ext/iconv/tests/bug76249.phpt"
"ext/curl/tests/bug61948-unix.phpt"
"ext/curl/tests/curl_setopt_ssl.phpt"
"ext/standard/tests/file/bug41655_1.phpt"
"ext/standard/tests/file/glob_variation5.phpt"
"ext/gd/tests/bug43073.phpt"
"ext/gd/tests/bug48732-mb.phpt"
"ext/gd/tests/bug48732.phpt"
Similarly for ( ) .
"ext/gd/tests/bug48801-mb.phpt"
"ext/gd/tests/bug48801.phpt"
"ext/gd/tests/bug53504.phpt"
"ext/gd/tests/bug73272.phpt"
" unknown error ( 84 ) " and heap corruption ( ! ) .
"ext/iconv/tests/bug48147.phpt"
"ext/iconv/tests/bug51250.phpt"
"ext/iconv/tests/iconv_mime_encode.phpt"
"ext/standard/tests/file/bug43008.phpt"
"ext/pdo_sqlite/tests/bug_42589.phpt"
"ext/standard/tests/general_functions/proc_nice_basic.phpt"))
(setenv "SKIP_ONLINE_TESTS" "1")
(setenv "REPORT_EXIT_STATUS" "1")
(setenv "SKIP_IO_CAPTURE_TESTS" "1")
#t)))
#:test-target "test"))
(inputs
`(("aspell" ,aspell)
("bzip2" ,bzip2)
("curl" ,curl)
("cyrus-sasl" ,cyrus-sasl)
("gd" ,gd)
("gdbm" ,gdbm)
("glibc" ,glibc)
("gmp" ,gmp)
("gnutls" ,gnutls)
("icu4c" ,icu4c)
("libgcrypt" ,libgcrypt)
("libpng" ,libpng)
("libsodium" ,libsodium)
("libxml2" ,libxml2)
("libxslt" ,libxslt)
("libx11" ,libx11)
("libzip" ,libzip)
("oniguruma" ,oniguruma)
("openldap" ,openldap)
("openssl" ,openssl)
("pcre" ,pcre2)
("postgresql" ,postgresql)
("readline" ,readline)
("sqlite" ,sqlite)
("tidy" ,tidy)
("zlib" ,zlib)))
(native-inputs
`(("pkg-config" ,pkg-config)
("bison" ,bison)
("intltool" ,intltool)
(synopsis "PHP programming language")
(description
"PHP (PHP Hypertext Processor) is a server-side (CGI) scripting
language designed primarily for web development but is also used as
a general-purpose programming language. PHP code may be embedded into
HTML code, or it can be used in combination with various web template
systems, web content management systems and web frameworks." )
(license (list
The Zend license .
(define-public php82
(package/inherit
php81
(name "php")
(version "8.2.1")
(home-page "/")
(source (origin
(method url-fetch)
(uri (string-append home-page "distributions/"
"php-" version ".tar.xz"))
(sha256
(base32
"1bqp5hhww7kxvqvamgjbaxlx6p54igfz3xm0yq3vzjjnl3bkn3b5"))
(modules '((guix build utils)))
(snippet
'(with-directory-excursion "ext"
(for-each delete-file-recursively
'("gd/libgd"
"pcre/pcre2lib"))
#t))))
(arguments
`(#:configure-flags
(let-syntax ((with (syntax-rules ()
((_ option input)
(string-append option "="
(assoc-ref %build-inputs input))))))
(list (with "--with-bz2" "bzip2")
(with "--with-curl" "curl")
(with "--with-gdbm" "gdbm")
libintl.h
(with "--with-gmp" "gmp")
(with "--with-ldap" "openldap")
(with "--with-ldap-sasl" "cyrus-sasl")
(with "--with-pdo-pgsql" "postgresql")
(with "--with-pdo-sqlite" "sqlite")
(with "--with-pgsql" "postgresql")
PHP ’s Pspell extension , while retaining its current name ,
now uses the Aspell library .
(with "--with-pspell" "aspell")
(with "--with-readline" "readline")
(with "--with-sqlite3" "sqlite")
(with "--with-tidy" "tidy")
(with "--with-xsl" "libxslt")
(with "--with-zlib-dir" "zlib")
"--with-external-pcre"
"--with-external-gd"
"--with-iconv"
"--with-openssl"
"--with-pdo-mysql"
"--with-zip"
"--with-zlib"
"--with-sodium"
Required for , e.g. Zabbix frontend
"--enable-calendar"
"--enable-iconv"
"--enable-ctype"
"--enable-dom"
"--enable-json"
"--enable-hash"
"--enable-libxml"
"--enable-mbstring"
"--enable-openssl"
"--enable-prce"
"--enable-pdo_mysql"
"--enable-simplexml"
"--enable-sodium"
"--enable-xmlwriter"
"--enable-xsl"
"--enable-zip"
"--enable-libxml"
"--enable-lib-openssl"
"--enable-fileinfo"
"--enable-dba=shared"
"--enable-exif"
"--enable-flatfile"
"--enable-fpm"
"--enable-ftp"
"--enable-soap"
"--enable-gd"
"--enable-inifile"
"--enable-intl"
"--enable-mbstring"
"--enable-pcntl"
"--enable-sockets"))
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'do-not-record-build-flags
(lambda _
(substitute* "scripts/php-config.in"
(("@CONFIGURE_OPTIONS@") "")
(("@PHP_LDFLAGS@") ""))
This file has ISO-8859 - 1 encoding .
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* "main/build-defs.h.in"
(("@CONFIGURE_COMMAND@") "(omitted)")))
#t))
(add-before 'build 'patch-/bin/sh
(lambda _
(substitute* '("run-tests.php" "ext/standard/proc_open.c")
(("/bin/sh") (which "sh")))
#t))
(add-before 'check 'prepare-tests
(lambda _
(with-fluids ((%default-port-encoding "ISO-8859-1"))
(substitute* '("ext/mbstring/tests/mb_send_mail02.phpt"
"ext/mbstring/tests/mb_send_mail04.phpt"
"ext/mbstring/tests/mb_send_mail05.phpt"
"ext/mbstring/tests/mb_send_mail06.phpt")
(("/bin/cat") (which "cat"))))
(substitute* '("ext/mbstring/tests/mb_send_mail01.phpt"
"ext/mbstring/tests/mb_send_mail03.phpt"
"ext/mbstring/tests/bug52681.phpt"
"ext/standard/tests/general_functions/bug34794.phpt"
"ext/standard/tests/general_functions/bug44667.phpt"
"ext/standard/tests/general_functions/proc_open.phpt")
(("/bin/cat") (which "cat")))
(delete-file "ext/mbstring/tests/mb_send_mail07.phpt")
(substitute* "ext/standard/tests/streams/bug60602.phpt"
(("'ls'") (string-append "'" (which "ls") "'")))
,@(if (string-prefix? "arm" (or (%current-system)
(%current-target-system)))
Drop tests known to fail on armhf .
'((for-each delete-file
(list
"ext/calendar/tests/unixtojd_error1.phpt"
"ext/fileinfo/tests/cve-2014-3538-nojit.phpt"
"ext/pcntl/tests/pcntl_unshare_01.phpt"
"ext/pcre/tests/bug76514.phpt"
"ext/pcre/tests/preg_match_error3.phpt"
"ext/pcre/tests/cache_limit.phpt"
"ext/sockets/tests/socket_getopt.phpt"
"ext/sockets/tests/socket_sendrecvmsg_error.phpt"
"ext/standard/tests/general_functions/var_export-locale.phpt"
"ext/standard/tests/general_functions/var_export_basic1.phpt"
"ext/intl/tests/timezone_getErrorCodeMessage_basic.phpt"
"ext/intl/tests/timezone_getOffset_error.phpt"
"sapi/cli/tests/cli_process_title_unix.phpt"
"sapi/cli/tests/upload_2G.phpt"
"Zend/tests/concat_003.phpt")))
'())
(for-each delete-file
"ext/standard/tests/general_functions/getservbyname_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_basic.phpt"
"ext/standard/tests/general_functions/getservbyport_variation1.phpt"
"ext/standard/tests/network/getprotobyname_basic.phpt"
"ext/dba/tests/dba_gdbm_creation_matrix.phpt"
"ext/standard/tests/network/getprotobynumber_basic.phpt"
"ext/standard/tests/strings/setlocale_basic1.phpt"
"ext/intl/tests/locale_filter_matches3.phpt"
"ext/intl/tests/locale_get_display_name7.phpt"
"ext/intl/tests/rbbiter_getBinaryRules_basic2.phpt"
"ext/intl/tests/rbbiter_getRules_basic2.phpt"
"ext/intl/tests/locale_lookup_variant2.phpt"
"ext/intl/tests/locale_get_display_language.phpt"
"ext/standard/tests/strings/setlocale_basic2.phpt"
"ext/standard/tests/strings/setlocale_basic3.phpt"
"ext/standard/tests/strings/setlocale_variation1.phpt"
"ext/dom/tests/DOMDocument_loadXML_error1.phpt"
"ext/dom/tests/DOMDocument_load_error1.phpt"
"ext/dom/tests/bug43364.phpt"
"ext/libxml/tests/bug61367-read.phpt"
"ext/libxml/tests/libxml_disable_entity_loader.phpt"
"ext/openssl/tests/openssl_x509_checkpurpose_basic.phpt"
This failing test is skipped on PHP 's as it is
"ext/standard/tests/file/disk_free_space_basic.phpt"
"ext/standard/tests/file/lstat_stat_variation8.phpt"
"ext/date/tests/bug73837.phpt"
"ext/gd/tests/bug39780_extern.phpt"
"ext/gd/tests/libgd00086_extern.phpt"
"ext/gd/tests/imagecreatefromstring_avif.phpt"
"ext/gd/tests/bug77272.phpt"
"ext/gd/tests/bug72339.phpt"
"ext/gd/tests/bug66356.phpt"
"ext/gd/tests/avif_decode_encode.phpt"
"ext/gd/tests/bug45799.phpt"
"ext/gd/tests/createfromwbmp2_extern.phpt"
This bug should have been fixed in gd 2.2.2 .
"ext/gd/tests/bug65148.phpt"
This bug should have been fixed in the gd 2.2
"ext/gd/tests/bug66590.phpt"
"ext/gd/tests/bug70102.phpt"
"ext/gd/tests/bug73869.phpt"
"ext/gd/tests/webp_basic.phpt"
"ext/gd/tests/imagecreatefromstring_webp.phpt"
TODO : Enable these when libgd is built with xpm support .
"ext/gd/tests/xpm2gd.phpt"
"ext/gd/tests/xpm2jpg.phpt"
"ext/gd/tests/xpm2png.phpt"
Whitespace difference , probably caused by a very
"ext/gd/tests/bug77479.phpt"
Expected invalid XBM but got EOF before image was
"ext/gd/tests/bug77973.phpt"
"ext/gd/tests/bug79067.phpt"
"ext/gd/tests/bug79068.phpt"
Expects " illegal character " , instead gets " unknown error ( 84 ) " .
"ext/iconv/tests/bug52211.phpt"
"ext/iconv/tests/bug60494.phpt"
Expects " wrong charset " , gets unknown error ( 22 ) .
"ext/iconv/tests/iconv_strlen_error2.phpt"
"ext/iconv/tests/iconv_substr_error2.phpt"
"ext/iconv/tests/iconv_strpos_error2.phpt"
"ext/iconv/tests/iconv_strrpos_error2.phpt"
Expects " invalid sequence " but got
"ext/iconv/tests/bug76249.phpt"
"ext/curl/tests/bug61948-unix.phpt"
"ext/curl/tests/curl_setopt_ssl.phpt"
"ext/standard/tests/file/bug41655_1.phpt"
"ext/standard/tests/file/glob_variation5.phpt"
"ext/gd/tests/bug43073.phpt"
"ext/gd/tests/bug48732-mb.phpt"
"ext/gd/tests/bug48732.phpt"
Similarly for ( ) .
"ext/gd/tests/bug48801-mb.phpt"
"ext/gd/tests/bug48801.phpt"
"ext/gd/tests/bug53504.phpt"
"ext/gd/tests/bug73272.phpt"
" unknown error ( 84 ) " and heap corruption ( ! ) .
"ext/iconv/tests/bug48147.phpt"
"ext/iconv/tests/bug51250.phpt"
"ext/iconv/tests/iconv_mime_encode.phpt"
"ext/standard/tests/file/bug43008.phpt"
"ext/pdo_sqlite/tests/bug_42589.phpt"
"ext/standard/tests/general_functions/proc_nice_basic.phpt"))
(setenv "SKIP_ONLINE_TESTS" "1")
(setenv "REPORT_EXIT_STATUS" "1")
(setenv "SKIP_IO_CAPTURE_TESTS" "1")
#t)))
#:test-target "test"))))
|
a28bf60661644b0ecd2860ab919958599631f1d467ca452b93fabaebfdc0134b | avsm/platform | root.ml |
* Copyright ( c ) 2014 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2014 Leo White <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
(* [contains_double_underscore s] is [true] iff [s] contains two
   consecutive underscore characters anywhere.  Used to decide whether a
   module name is "hidden" by odoc's double-underscore convention. *)
let contains_double_underscore s =
  let limit = String.length s - 1 in
  let rec scan i =
    i < limit
    && ((Char.equal s.[i] '_' && Char.equal s.[i + 1] '_') || scan (i + 1))
  in
  scan 0
(** A package is identified purely by its name. *)
module Package =
struct
  (* Package name string. *)
  type t = string

  (** Hash tables keyed by package name.  Structural equality and the
      polymorphic hash are adequate here because [t] is just a string. *)
  module Table = Hashtbl.Make(struct
    type nonrec t = t
    let equal : t -> t -> bool = (=)
    let hash : t -> int = Hashtbl.hash
  end)
end
(** The kind of file a root refers to: either a documentation page or a
    compilation unit (which may be hidden from generated output). *)
module Odoc_file =
struct
  type compilation_unit = {name : string; hidden : bool}

  type t =
    | Page of string
    | Compilation_unit of compilation_unit

  (** [create_unit ~force_hidden name] builds a compilation-unit file;
      it is hidden when forced or when [name] contains ["__"]. *)
  let create_unit ~force_hidden name =
    Compilation_unit
      {name; hidden = force_hidden || contains_double_underscore name}

  let create_page name = Page name

  (** The underlying page or unit name. *)
  let name = function
    | Page n -> n
    | Compilation_unit u -> u.name
end
(** A root identifies one odoc file: the package it belongs to, the kind
    of file (page or compilation unit), and a digest. *)
type t = {
  package : Package.t;
  file : Odoc_file.t;
  digest : Digest.t;
}

(* Structural equality/hashing over the whole record — this includes the
   digest, so roots from different builds of the same unit compare
   unequal. *)
let equal : t -> t -> bool = (=)
let hash : t -> int = Hashtbl.hash

(** [to_string t] renders a root as ["package::name"], e.g. for
    diagnostic messages. *)
let to_string t = Printf.sprintf "%s::%s" t.package (Odoc_file.name t.file)
(** Hash tables keyed by whole roots, reusing [equal] and [hash] above. *)
module Hash_table =
  Hashtbl.Make
    (struct
      type nonrec t = t
      let equal = equal
      let hash = hash
    end)
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/odoc.1.4.2/src/model/root.ml | ocaml |
* Copyright ( c ) 2014 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2014 Leo White <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
let contains_double_underscore s =
let len = String.length s in
let rec aux i =
if i > len - 2 then false else
if s.[i] = '_' && s.[i + 1] = '_' then true
else aux (i + 1)
in
aux 0
module Package =
struct
type t = string
module Table = Hashtbl.Make(struct
type nonrec t = t
let equal : t -> t -> bool = (=)
let hash : t -> int = Hashtbl.hash
end)
end
module Odoc_file =
struct
type compilation_unit = {name : string; hidden : bool}
type t =
| Page of string
| Compilation_unit of compilation_unit
let create_unit ~force_hidden name =
let hidden = force_hidden || contains_double_underscore name in
Compilation_unit {name; hidden}
let create_page name = Page name
let name = function
| Page name
| Compilation_unit {name; _} -> name
end
type t = {
package : Package.t;
file : Odoc_file.t;
digest : Digest.t;
}
let equal : t -> t -> bool = (=)
let hash : t -> int = Hashtbl.hash
let to_string t = Printf.sprintf "%s::%s" t.package (Odoc_file.name t.file)
module Hash_table =
Hashtbl.Make
(struct
type nonrec t = t
let equal = equal
let hash = hash
end)
| |
db4beeecde09ca554bf6ec93399d60673b7862af0b72ee0c260403c7d6c01fdd | richcarl/eunit | eunit_striptests.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may
%% not use this file except in compliance with the License. You may obtain
%% a copy of the License at <-2.0>
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% Alternatively, you may use this file under the terms of the GNU Lesser
General Public License ( the " LGPL " ) as published by the Free Software
Foundation ; either version 2.1 , or ( at your option ) any later version .
%% If you wish to allow use of your version of this file only under the
%% terms of the LGPL, you should delete the provisions above and replace
%% them with the notice and other provisions required by the LGPL; see
%% </>. If you do not delete the provisions
%% above, a recipient may use your version of this file under the terms of
either the Apache License or the LGPL .
%%
@author < >
@author < >
2006 ,
@private
%% @see eunit
@doc Parse transform for stripping EUnit test functions .
-module(eunit_striptests).
-include("eunit_internal.hrl").
-export([parse_transform/2]).
%% @doc Parse-transform entry point: strips EUnit test functions from
%% the module's abstract forms.  The suffixes marking test, generator
%% and export functions are configurable via the compiler options
%% `eunit_test_suffix', `eunit_generator_suffix' and
%% `eunit_export_suffix'; explicitly exported functions are never
%% stripped.
parse_transform(Forms, Options) ->
    TestSuffix = proplists:get_value(eunit_test_suffix, Options,
                                    ?DEFAULT_TEST_SUFFIX),
    GeneratorSuffix = proplists:get_value(eunit_generator_suffix,
                                         Options,
                                         ?DEFAULT_GENERATOR_SUFFIX),
    ExportSuffix = proplists:get_value(eunit_export_suffix, Options,
                                      ?DEFAULT_EXPORT_SUFFIX),
    %% Collect every {Name, Arity} listed in -export attributes; these
    %% are kept regardless of their suffix.
    Exports = lists:foldl(fun ({attribute,_,export,Es}, S) ->
                                  sets:union(sets:from_list(Es), S);
                              (_F, S) -> S
                          end,
                          sets:new(), Forms),
    F = fun (Form, Acc) ->
                form(Form, Acc, Exports, TestSuffix, GeneratorSuffix,
                     ExportSuffix)
        end,
    %% form/6 accumulates in reverse, so restore the original order.
    lists:reverse(lists:foldl(F, [], Forms)).
%% Decide whether to keep a single form.  A zero-arity function whose
%% name ends in one of the test/generator/export suffixes is dropped
%% unless it appears in an -export attribute; the generated module
%% wrapper (?DEFAULT_MODULE_WRAPPER_NAME/1) is always dropped;
%% everything else is kept.  The accumulator is built in reverse order.
form({function, _L, Name, 0, _Cs}=Form, Acc, Exports, TestSuffix,
     GeneratorSuffix, ExportSuffix) ->
    N = atom_to_list(Name),
    case not sets:is_element({Name, 0}, Exports)
        andalso (lists:suffix(TestSuffix, N)
                 orelse lists:suffix(GeneratorSuffix, N)
                 orelse lists:suffix(ExportSuffix, N))
    of
        true ->
            %% Unexported test helper: strip it.
            Acc;
        false ->
            [Form | Acc]
    end;
form({function, _L, ?DEFAULT_MODULE_WRAPPER_NAME, 1, _Cs}, Acc, _, _, _,
     _) ->
    Acc;
form(Form, Acc, _, _, _, _) ->
    [Form | Acc].
| null | https://raw.githubusercontent.com/richcarl/eunit/cb7eb2bc2cec01e405c717b6f6b551be7d256f06/src/eunit_striptests.erl | erlang | not use this file except in compliance with the License. You may obtain
a copy of the License at <-2.0>
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Alternatively, you may use this file under the terms of the GNU Lesser
If you wish to allow use of your version of this file only under the
terms of the LGPL, you should delete the provisions above and replace
them with the notice and other provisions required by the LGPL; see
</>. If you do not delete the provisions
above, a recipient may use your version of this file under the terms of
@see eunit | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may
distributed under the License is distributed on an " AS IS " BASIS ,
General Public License ( the " LGPL " ) as published by the Free Software
Foundation ; either version 2.1 , or ( at your option ) any later version .
either the Apache License or the LGPL .
@author < >
@author < >
2006 ,
@private
@doc Parse transform for stripping EUnit test functions .
-module(eunit_striptests).
-include("eunit_internal.hrl").
-export([parse_transform/2]).
parse_transform(Forms, Options) ->
TestSuffix = proplists:get_value(eunit_test_suffix, Options,
?DEFAULT_TEST_SUFFIX),
GeneratorSuffix = proplists:get_value(eunit_generator_suffix,
Options,
?DEFAULT_GENERATOR_SUFFIX),
ExportSuffix = proplists:get_value(eunit_export_suffix, Options,
?DEFAULT_EXPORT_SUFFIX),
Exports = lists:foldl(fun ({attribute,_,export,Es}, S) ->
sets:union(sets:from_list(Es), S);
(_F, S) -> S
end,
sets:new(), Forms),
F = fun (Form, Acc) ->
form(Form, Acc, Exports, TestSuffix, GeneratorSuffix,
ExportSuffix)
end,
lists:reverse(lists:foldl(F, [], Forms)).
form({function, _L, Name, 0, _Cs}=Form, Acc, Exports, TestSuffix,
GeneratorSuffix, ExportSuffix) ->
N = atom_to_list(Name),
case not sets:is_element({Name, 0}, Exports)
andalso (lists:suffix(TestSuffix, N)
orelse lists:suffix(GeneratorSuffix, N)
orelse lists:suffix(ExportSuffix, N))
of
true ->
Acc;
false ->
[Form | Acc]
end;
form({function, _L, ?DEFAULT_MODULE_WRAPPER_NAME, 1, _Cs}, Acc, _, _, _,
_) ->
Acc;
form(Form, Acc, _, _, _, _) ->
[Form | Acc].
|
53a63f739adb2c7873aad59183384c01034e022a4a4043cfd1f43ddc986aef9e | facebook/flow | lspCommand.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open CommandUtils
(***********************************************************************)
flow lsp command
(***********************************************************************)
(* Command specification for [flow lsp]: runs a Language Server Protocol
   server over stdin/stdout.  The [--lazy] and [--lazy-mode] flags are
   still accepted for backwards compatibility but have no effect. *)
let spec =
  {
    CommandSpec.name = "lsp";
    doc = "Acts as a server for the Language Server Protocol over stdin/stdout [experimental]";
    usage =
      Printf.sprintf
        "Usage: %s lsp\n\nRuns a server for the Language Server Protocol\n"
        CommandUtils.exe_name;
    args =
      CommandSpec.ArgSpec.(
        empty
        |> base_flags
        |> temp_dir_flag
        |> shm_flags
        |> flag "--lazy" truthy ~doc:"Deprecated, has no effect"
        |> flag "--lazy-mode" string ~doc:"Deprecated, has no effect"
        |> autostop_flag
        |> from_flag
      );
  }
(* Entry point for [flow lsp].  The deprecated [_lazy]/[_lazy_mode]
   arguments are ignored; the remaining flags are packed into the
   connection parameters handed to [FlowLsp.run]. *)
let main
    base_flags
    (temp_dir : string option)
    (shm_flags : CommandUtils.shared_mem_params)
    (_lazy : bool)
    (_lazy_mode : string option)
    (autostop : bool)
    (() : unit) : unit =
  (* Always set [quiet], since the LSP doesn't want any log spew.  This
     only applies to the [start] command and does not imply a quiet
     server, which will still write to its log file. *)
  let quiet = true in
  let connect_params =
    {
      retries = 0;
      timeout = None;
      no_auto_start = false;
      temp_dir;
      autostop;
      lazy_mode = None;
      shm_flags;
      ignore_version = false;
      quiet;
      on_mismatch = Choose_newest;
    }
  in
  let flowconfig_name = base_flags.Base_flags.flowconfig_name in
  FlowLsp.run ~flowconfig_name ~connect_params

(* The command value registered with the CLI dispatcher. *)
let command = CommandSpec.command spec main
| null | https://raw.githubusercontent.com/facebook/flow/b918b06104ac1489b516988707431d98833ce99f/src/commands/lspCommand.ml | ocaml | *********************************************************************
********************************************************************* |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open CommandUtils
flow lsp command
let spec =
{
CommandSpec.name = "lsp";
doc = "Acts as a server for the Language Server Protocol over stdin/stdout [experimental]";
usage =
Printf.sprintf
"Usage: %s lsp\n\nRuns a server for the Language Server Protocol\n"
CommandUtils.exe_name;
args =
CommandSpec.ArgSpec.(
empty
|> base_flags
|> temp_dir_flag
|> shm_flags
|> flag "--lazy" truthy ~doc:"Deprecated, has no effect"
|> flag "--lazy-mode" string ~doc:"Deprecated, has no effect"
|> autostop_flag
|> from_flag
);
}
let main
base_flags
(temp_dir : string option)
(shm_flags : CommandUtils.shared_mem_params)
(_lazy : bool)
(_lazy_mode : string option)
(autostop : bool)
(() : unit) : unit =
always set ` quiet ` , since the LSP does n't want any log spew . this only applies to the
` start ` command and does not imply a quiet server , which will still write to its log
file .
`start` command and does not imply a quiet server, which will still write to its log
file. *)
let quiet = true in
let connect_params =
{
retries = 0;
timeout = None;
no_auto_start = false;
temp_dir;
autostop;
lazy_mode = None;
shm_flags;
ignore_version = false;
quiet;
on_mismatch = Choose_newest;
}
in
let flowconfig_name = base_flags.Base_flags.flowconfig_name in
FlowLsp.run ~flowconfig_name ~connect_params
let command = CommandSpec.command spec main
|
42f9acd83293385f85db5f24bba7115a18aadf56237e13ab0ca688c3cf93db64 | cronburg/antlr-haskell | lang_demo.hs | # LANGUAGE TypeFamilies #
module LangDemo where
-- If you never change the base value and are only adding expressions
data BaseVal = IntV Int
data BaseExp = ValE BaseVal
| AddE BaseExp BaseExp
data ExtExp = BaseE BaseExp
| SubE ExtExp ExtExp
class Lang lang where
eval :: lang -> BaseVal
instance Lang BaseExp where
eval (ValE i) = i
eval (AddE be1 be2) =
let IntV i1 = eval be1
IntV i2 = eval be2
in IntV $ i1 + i2
instance Lang ExtExp where
eval (BaseE b) = eval b
eval (SubE ee1 ee2) =
let IntV i1 = eval ee1
IntV i2 = eval ee2
in IntV $ i1 - i2
-- if you want to be able to change the set of values you can evaluate to when extending the language
data BaseVal2 = IntV2 Int
data BaseExp2 = ValBE2 BaseVal2
| AddE2 BaseExp2 BaseExp2
data ExtVal2 = StringV2 String
data ExtExp2 = BaseE2 BaseExp2
| ValEE2 ExtVal2
| ConcatE2 ExtExp2 ExtExp2
-- can use type families to suspend the value kind until later.
data family Val2
class Lang2 lang where
eval :: lang -> Val2
-- define at some time t1 for the base language
data instance Val2 = BV2 BaseVal2
instance Lang2 BaseExp2 where
eval ValBE2 bv2 = BV2 bv2
eval AddE2 be1 be2 =
let IntV2 i1 = eval be1
IntV2 i2 = eval be2
in BV2 $ IntV2 $ i1 + i2
-- extend at some later time t2 for the extended language
data instance Val2 = EV2 ExtVal2
instance Lang2 ExtExp2 where
eval BaseE2 be2 = eval be2
eval ValEE2 ev2 = ev2
eval ConcatE2 ee1 ee2 =
case (eval ee1, eval ee2) of
(StringV2 s1, StringV2 s2) -> EV2 $ StringV2 $ s1 ++ s2
_ -> error "trying to concat something other than strings!"
| null | https://raw.githubusercontent.com/cronburg/antlr-haskell/7a9367038eaa58f9764f2ff694269245fbebc155/lang_demo.hs | haskell | If you never change the base value and are only adding expressions
if you want to be able to change the set of values you can evaluate to when extending the language
can use type families to suspend the value kind until later.
define at some time t1 for the base language
extend at some later time t2 for the extended language | # LANGUAGE TypeFamilies #
module LangDemo where
data BaseVal = IntV Int
data BaseExp = ValE BaseVal
| AddE BaseExp BaseExp
data ExtExp = BaseE BaseExp
| SubE ExtExp ExtExp
class Lang lang where
eval :: lang -> BaseVal
instance Lang BaseExp where
eval (ValE i) = i
eval (AddE be1 be2) =
let IntV i1 = eval be1
IntV i2 = eval be2
in IntV $ i1 + i2
instance Lang ExtExp where
eval (BaseE b) = eval b
eval (SubE ee1 ee2) =
let IntV i1 = eval ee1
IntV i2 = eval ee2
in IntV $ i1 - i2
data BaseVal2 = IntV2 Int
data BaseExp2 = ValBE2 BaseVal2
| AddE2 BaseExp2 BaseExp2
data ExtVal2 = StringV2 String
data ExtExp2 = BaseE2 BaseExp2
| ValEE2 ExtVal2
| ConcatE2 ExtExp2 ExtExp2
data family Val2
class Lang2 lang where
eval :: lang -> Val2
data instance Val2 = BV2 BaseVal2
instance Lang2 BaseExp2 where
eval ValBE2 bv2 = BV2 bv2
eval AddE2 be1 be2 =
let IntV2 i1 = eval be1
IntV2 i2 = eval be2
in BV2 $ IntV2 $ i1 + i2
data instance Val2 = EV2 ExtVal2
instance Lang2 ExtExp2 where
eval BaseE2 be2 = eval be2
eval ValEE2 ev2 = ev2
eval ConcatE2 ee1 ee2 =
case (eval ee1, eval ee2) of
(StringV2 s1, StringV2 s2) -> EV2 $ StringV2 $ s1 ++ s2
_ -> error "trying to concat something other than strings!"
|
26fdba59c75087298d7e71f6d80f11451e208c4814a72a5908e3926d034a9552 | hedgehogqa/haskell-hedgehog-classes | Equation.hs | # LANGUAGE CPP #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE StandaloneDeriving #
# LANGUAGE QuantifiedConstraints #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
module Hedgehog.Classes.Common.Equation
( LinearEquation(..), runLinearEquation, genLinearEquation
, LinearEquationTwo(..), runLinearEquationTwo, genLinearEquationTwo
, LinearEquationM(..), runLinearEquationM, genLinearEquationM
, QuadraticEquation(..), runQuadraticEquation, genQuadraticEquation
, CubicEquation(..), runCubicEquation, genCubicEquation
#ifdef HAVE_COMONAD
, LinearEquationW(..), runLinearEquationW, genLinearEquationW
#endif
) where
import Hedgehog
import Hedgehog.Classes.Common.Gen
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import qualified Data.List as List
import Data.Monoid (Endo(..))
#ifdef HAVE_COMONAD
import Control.Comonad
#endif
data QuadraticEquation = QuadraticEquation
{ _quadraticEquationQuadratic :: Integer
, _quadraticEquationLinear :: Integer
, _quadraticEquationConstant :: Integer
}
deriving (Eq)
-- This show instance does not actually provide a way
-- to create an equation. Instead, it makes it look
-- like a lambda.
instance Show QuadraticEquation where
show (QuadraticEquation a b c) = "\\x -> " ++ show a ++ " * x ^ 2 + " ++ show b ++ " * x + " ++ show c
genQuadraticEquation :: Gen QuadraticEquation
genQuadraticEquation = do
a <- Gen.integral (Range.linear 0 15)
b <- Gen.integral (Range.linear 0 15)
c <- Gen.integral (Range.linear 0 15)
pure (QuadraticEquation a b c)
runQuadraticEquation :: QuadraticEquation -> Integer -> Integer
runQuadraticEquation (QuadraticEquation a b c) x = a * x ^ (2 :: Integer) + b * x + c
data LinearEquation = LinearEquation
{ _linearEquationLinear :: Integer
, _linearEquationConstant :: Integer
}
deriving (Eq)
instance Show LinearEquation where
showsPrec _ (LinearEquation a b) = shows a . showString " * x + " . shows b
showList xs = appEndo
$ mconcat
$ [Endo (showChar '[')]
++ List.intersperse (Endo (showChar ',')) (map (Endo . showsPrec 0) xs)
++ [Endo (showChar ']')]
runLinearEquation :: LinearEquation -> Integer -> Integer
runLinearEquation (LinearEquation a b) x = a * x + b
genLinearEquation :: Gen LinearEquation
genLinearEquation = LinearEquation <$> genSmallInteger <*> genSmallInteger
#ifdef HAVE_COMONAD
data LinearEquationW w = LinearEquationW (w LinearEquation) (w LinearEquation)
deriving instance (forall x. Eq x => Eq (w x)) => Eq (LinearEquationW w)
instance (forall x. Show x => Show (w x)) => Show (LinearEquationW w) where
show (LinearEquationW a b) = (\f -> f "")
$ showString "\\x -> if odd x then "
. showsPrec 0 a
. showString " else "
. showsPrec 0 b
runLinearEquationW :: Comonad w
=> LinearEquationW w -> w Integer -> Integer
runLinearEquationW (LinearEquationW e1 e2) (extract -> i) = if odd i
then runLinearEquation (extract e1) i
else runLinearEquation (extract e2) i
genLinearEquationW :: Comonad w
=> (forall x. Gen x -> Gen (w x))
-> Gen (LinearEquationW w)
genLinearEquationW fgen = LinearEquationW
<$> fgen genLinearEquation
<*> fgen genLinearEquation
#endif
data LinearEquationM m = LinearEquationM (m LinearEquation) (m LinearEquation)
deriving instance (forall x. Eq x => Eq (m x)) => Eq (LinearEquationM m)
instance (forall x. Show x => Show (m x)) => Show (LinearEquationM m) where
show (LinearEquationM a b) = (\f -> f "")
$ showString "\\x -> if odd x then "
. showsPrec 0 a
. showString " else "
. showsPrec 0 b
runLinearEquationM :: Functor m => LinearEquationM m -> Integer -> m Integer
runLinearEquationM (LinearEquationM e1 e2) i = if odd i
then fmap (flip runLinearEquation i) e1
else fmap (flip runLinearEquation i) e2
genLinearEquationM :: Applicative m => Gen (LinearEquationM m)
genLinearEquationM = LinearEquationM <$> (pure <$> genLinearEquation) <*> (pure <$> genLinearEquation)
data LinearEquationTwo = LinearEquationTwo
{ _linearEquationTwoX :: Integer
, _linearEquationTwoY :: Integer
, _linearEquationTwoConstant :: Integer
}
instance Show LinearEquationTwo where
show (LinearEquationTwo x y c) = "\\x y -> " ++ show x ++ " * x + " ++ show y ++ " * y + " ++ show c
genLinearEquationTwo :: Gen LinearEquationTwo
genLinearEquationTwo = LinearEquationTwo <$> absGenInteger <*> absGenInteger <*> absGenInteger
where
absGenInteger = abs <$> genSmallInteger
runLinearEquationTwo :: LinearEquationTwo -> Integer -> Integer -> Integer
runLinearEquationTwo (LinearEquationTwo a b c) x y = a * x + b * y + c
data CubicEquation = CubicEquation
{ _cubicEquationCubic :: Integer
, _cubicEquationQuadratic :: Integer
, _cubicEquationLinear :: Integer
, _cubicEquationConstant :: Integer
}
instance Show CubicEquation where
show (CubicEquation x y z c) = "\\x -> " ++ show x ++ " * x ^ 3 + " ++ show y ++ " * x ^ 2 + " ++ show z ++ " * x + " ++ show c
genCubicEquation :: Gen CubicEquation
genCubicEquation = CubicEquation <$> genSmallInteger <*> genSmallInteger <*> genSmallInteger <*> genSmallInteger
runCubicEquation :: CubicEquation -> Integer -> Integer -> Integer -> Integer
runCubicEquation (CubicEquation a b c d) x y z = a * x + b * y + c * z + d
| null | https://raw.githubusercontent.com/hedgehogqa/haskell-hedgehog-classes/4d97b000e915de8ba590818f551bce7bd862e7d4/src/Hedgehog/Classes/Common/Equation.hs | haskell | # LANGUAGE RankNTypes #
This show instance does not actually provide a way
to create an equation. Instead, it makes it look
like a lambda. | # LANGUAGE CPP #
# LANGUAGE StandaloneDeriving #
# LANGUAGE QuantifiedConstraints #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
module Hedgehog.Classes.Common.Equation
( LinearEquation(..), runLinearEquation, genLinearEquation
, LinearEquationTwo(..), runLinearEquationTwo, genLinearEquationTwo
, LinearEquationM(..), runLinearEquationM, genLinearEquationM
, QuadraticEquation(..), runQuadraticEquation, genQuadraticEquation
, CubicEquation(..), runCubicEquation, genCubicEquation
#ifdef HAVE_COMONAD
, LinearEquationW(..), runLinearEquationW, genLinearEquationW
#endif
) where
import Hedgehog
import Hedgehog.Classes.Common.Gen
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import qualified Data.List as List
import Data.Monoid (Endo(..))
#ifdef HAVE_COMONAD
import Control.Comonad
#endif
data QuadraticEquation = QuadraticEquation
{ _quadraticEquationQuadratic :: Integer
, _quadraticEquationLinear :: Integer
, _quadraticEquationConstant :: Integer
}
deriving (Eq)
instance Show QuadraticEquation where
show (QuadraticEquation a b c) = "\\x -> " ++ show a ++ " * x ^ 2 + " ++ show b ++ " * x + " ++ show c
genQuadraticEquation :: Gen QuadraticEquation
genQuadraticEquation = do
a <- Gen.integral (Range.linear 0 15)
b <- Gen.integral (Range.linear 0 15)
c <- Gen.integral (Range.linear 0 15)
pure (QuadraticEquation a b c)
runQuadraticEquation :: QuadraticEquation -> Integer -> Integer
runQuadraticEquation (QuadraticEquation a b c) x = a * x ^ (2 :: Integer) + b * x + c
data LinearEquation = LinearEquation
{ _linearEquationLinear :: Integer
, _linearEquationConstant :: Integer
}
deriving (Eq)
instance Show LinearEquation where
showsPrec _ (LinearEquation a b) = shows a . showString " * x + " . shows b
showList xs = appEndo
$ mconcat
$ [Endo (showChar '[')]
++ List.intersperse (Endo (showChar ',')) (map (Endo . showsPrec 0) xs)
++ [Endo (showChar ']')]
runLinearEquation :: LinearEquation -> Integer -> Integer
runLinearEquation (LinearEquation a b) x = a * x + b
genLinearEquation :: Gen LinearEquation
genLinearEquation = LinearEquation <$> genSmallInteger <*> genSmallInteger
#ifdef HAVE_COMONAD
data LinearEquationW w = LinearEquationW (w LinearEquation) (w LinearEquation)
deriving instance (forall x. Eq x => Eq (w x)) => Eq (LinearEquationW w)
instance (forall x. Show x => Show (w x)) => Show (LinearEquationW w) where
show (LinearEquationW a b) = (\f -> f "")
$ showString "\\x -> if odd x then "
. showsPrec 0 a
. showString " else "
. showsPrec 0 b
runLinearEquationW :: Comonad w
=> LinearEquationW w -> w Integer -> Integer
runLinearEquationW (LinearEquationW e1 e2) (extract -> i) = if odd i
then runLinearEquation (extract e1) i
else runLinearEquation (extract e2) i
genLinearEquationW :: Comonad w
=> (forall x. Gen x -> Gen (w x))
-> Gen (LinearEquationW w)
genLinearEquationW fgen = LinearEquationW
<$> fgen genLinearEquation
<*> fgen genLinearEquation
#endif
data LinearEquationM m = LinearEquationM (m LinearEquation) (m LinearEquation)
deriving instance (forall x. Eq x => Eq (m x)) => Eq (LinearEquationM m)
instance (forall x. Show x => Show (m x)) => Show (LinearEquationM m) where
show (LinearEquationM a b) = (\f -> f "")
$ showString "\\x -> if odd x then "
. showsPrec 0 a
. showString " else "
. showsPrec 0 b
runLinearEquationM :: Functor m => LinearEquationM m -> Integer -> m Integer
runLinearEquationM (LinearEquationM e1 e2) i = if odd i
then fmap (flip runLinearEquation i) e1
else fmap (flip runLinearEquation i) e2
genLinearEquationM :: Applicative m => Gen (LinearEquationM m)
genLinearEquationM = LinearEquationM <$> (pure <$> genLinearEquation) <*> (pure <$> genLinearEquation)
data LinearEquationTwo = LinearEquationTwo
{ _linearEquationTwoX :: Integer
, _linearEquationTwoY :: Integer
, _linearEquationTwoConstant :: Integer
}
instance Show LinearEquationTwo where
show (LinearEquationTwo x y c) = "\\x y -> " ++ show x ++ " * x + " ++ show y ++ " * y + " ++ show c
genLinearEquationTwo :: Gen LinearEquationTwo
genLinearEquationTwo = LinearEquationTwo <$> absGenInteger <*> absGenInteger <*> absGenInteger
where
absGenInteger = abs <$> genSmallInteger
runLinearEquationTwo :: LinearEquationTwo -> Integer -> Integer -> Integer
runLinearEquationTwo (LinearEquationTwo a b c) x y = a * x + b * y + c
data CubicEquation = CubicEquation
{ _cubicEquationCubic :: Integer
, _cubicEquationQuadratic :: Integer
, _cubicEquationLinear :: Integer
, _cubicEquationConstant :: Integer
}
instance Show CubicEquation where
show (CubicEquation x y z c) = "\\x -> " ++ show x ++ " * x ^ 3 + " ++ show y ++ " * x ^ 2 + " ++ show z ++ " * x + " ++ show c
genCubicEquation :: Gen CubicEquation
genCubicEquation = CubicEquation <$> genSmallInteger <*> genSmallInteger <*> genSmallInteger <*> genSmallInteger
runCubicEquation :: CubicEquation -> Integer -> Integer -> Integer -> Integer
runCubicEquation (CubicEquation a b c d) x y z = a * x + b * y + c * z + d
|
63ebfa599fd97eeb934addbf2c0ad32eda75bd75d2c9a9f76aa23f8e3c8b2fc0 | input-output-hk/project-icarus-importer | Local.hs | {-# LANGUAGE Rank2Types #-}
module Pos.Ssc.Logic.Local
(
sscGetLocalPayload
, sscNormalize
-- * 'Inv|Req|Data' processing.
, sscIsDataUseful
, sscProcessCommitment
, sscProcessOpening
, sscProcessShares
, sscProcessCertificate
-- * Garbage collection
, sscGarbageCollectLocalData
) where
import Universum
import Control.Lens ((+=), (.=))
import Control.Monad.Except (MonadError (throwError), runExceptT)
import Control.Monad.Morph (hoist)
import qualified Crypto.Random as Rand
import qualified Data.HashMap.Strict as HM
import Formatting (int, sformat, (%))
import Serokell.Util (magnify')
import System.Wlog (WithLogger, launchNamedPureLog, logWarning)
import Pos.Binary.Class (biSize)
import Pos.Binary.Ssc ()
import Pos.Core (BlockVersionData (..), EpochIndex, SlotId (..),
StakeholderId, VssCertificate, epochIndexL, HasProtocolMagic,
mkVssCertificatesMapSingleton, HasGenesisData,
HasProtocolConstants, HasGenesisBlockVersionData)
import Pos.Core.Ssc (InnerSharesMap, Opening, SignedCommitment, SscPayload (..),
mkCommitmentsMap)
import Pos.DB (MonadBlockDBRead, MonadDBRead, MonadGState (gsAdoptedBVData))
import Pos.DB.BlockIndex (getTipHeader)
import Pos.Lrc.Context (HasLrcContext)
import Pos.Lrc.Types (RichmenStakes)
import Pos.Slotting (MonadSlots (getCurrentSlot))
import Pos.Ssc.Base (isCommitmentIdx, isOpeningIdx, isSharesIdx)
import Pos.Ssc.Configuration (HasSscConfiguration)
import Pos.Ssc.Error (SscVerifyError (..))
import Pos.Ssc.Lrc (getSscRichmen, tryGetSscRichmen)
import Pos.Ssc.Mem (MonadSscMem, SscLocalQuery, SscLocalUpdate, askSscMem,
sscRunGlobalQuery, sscRunLocalQuery, sscRunLocalSTM, syncingStateWith)
import Pos.Ssc.Toss (PureToss, SscTag (..), TossT, evalPureTossWithLogger, evalTossT,
execTossT, hasCertificateToss, hasCommitmentToss, hasOpeningToss,
hasSharesToss, isGoodSlotForTag, normalizeToss, refreshToss,
supplyPureTossEnv, tmCertificates, tmCommitments, tmOpenings,
tmShares, verifyAndApplySscPayload)
import Pos.Ssc.Types (SscGlobalState, SscLocalData (..), ldEpoch, ldModifier, ldSize,
sscGlobal, sscLocal)
-- | Get local payload to be put into main block and for given
' SlotId ' . If payload for given ' SlotId ' ca n't be constructed ,
-- empty payload can be returned.
sscGetLocalPayload
:: forall ctx m.
(MonadIO m, MonadSscMem ctx m, WithLogger m, HasProtocolConstants)
=> SlotId -> m SscPayload
sscGetLocalPayload = sscRunLocalQuery . sscGetLocalPayloadQ
sscGetLocalPayloadQ
:: (HasProtocolConstants)
=> SlotId -> SscLocalQuery SscPayload
sscGetLocalPayloadQ SlotId {..} = do
expectedEpoch <- view ldEpoch
let warningMsg = sformat warningFmt siEpoch expectedEpoch
isExpected <-
if expectedEpoch == siEpoch then pure True
else False <$ logWarning warningMsg
magnify' ldModifier $
getPayload isExpected <*> getCertificates isExpected
where
warningFmt = "sscGetLocalPayloadQ: unexpected epoch ("%int%
", stored one is "%int%")"
getPayload True
| isCommitmentIdx siSlot = CommitmentsPayload <$> view tmCommitments
| isOpeningIdx siSlot = OpeningsPayload <$> view tmOpenings
| isSharesIdx siSlot = SharesPayload <$> view tmShares
getPayload _ = pure CertificatesPayload
getCertificates isExpected
| isExpected = view tmCertificates
| otherwise = pure mempty
-- | Make 'SscLocalData' valid for given epoch, richmen and global state. of
-- best known chain. This function is assumed to be called after applying
-- block and before releasing lock on block application.
sscNormalize
:: forall ctx m.
( MonadGState m
, MonadBlockDBRead m
, MonadSscMem ctx m
, MonadReader ctx m
, HasLrcContext ctx
, WithLogger m
, MonadIO m
, Rand.MonadRandom m
, HasSscConfiguration
, HasProtocolConstants
, HasGenesisData
, HasProtocolMagic
, HasGenesisBlockVersionData
)
=> m ()
sscNormalize = do
tipEpoch <- view epochIndexL <$> getTipHeader
richmenData <- getSscRichmen "sscNormalize" tipEpoch
bvd <- gsAdoptedBVData
globalVar <- sscGlobal <$> askSscMem
localVar <- sscLocal <$> askSscMem
gs <- atomically $ readTVar globalVar
seed <- Rand.drgNew
launchNamedPureLog atomically $
syncingStateWith localVar $
executeMonadBaseRandom seed $
sscNormalizeU (tipEpoch, richmenData) bvd gs
where
-- (... MonadPseudoRandom) a -> (... n) a
executeMonadBaseRandom seed = hoist $ hoist (pure . fst . Rand.withDRG seed)
sscNormalizeU
:: (HasSscConfiguration, HasProtocolConstants, HasGenesisData, HasProtocolMagic)
=> (EpochIndex, RichmenStakes)
-> BlockVersionData
-> SscGlobalState
-> SscLocalUpdate ()
sscNormalizeU (epoch, stake) bvd gs = do
oldModifier <- use ldModifier
let multiRichmen = HM.fromList [(epoch, stake)]
newModifier <-
evalPureTossWithLogger gs $ supplyPureTossEnv (multiRichmen, bvd) $
execTossT mempty $ normalizeToss epoch oldModifier
ldModifier .= newModifier
ldEpoch .= epoch
ldSize .= biSize newModifier
----------------------------------------------------------------------------
---- Inv processing
----------------------------------------------------------------------------
| Check whether SSC data with given tag and public key can be added
-- to current local data.
sscIsDataUseful
:: ( WithLogger m
, MonadIO m
, MonadSlots ctx m
, MonadSscMem ctx m
, Rand.MonadRandom m
, HasSscConfiguration
, HasGenesisData
, HasProtocolConstants
)
=> SscTag -> StakeholderId -> m Bool
sscIsDataUseful tag id =
ifM
(maybe False (isGoodSlotForTag tag . siSlot) <$> getCurrentSlot)
(evalTossInMem $ sscIsDataUsefulDo tag)
(pure False)
where
sscIsDataUsefulDo CommitmentMsg = not <$> hasCommitmentToss id
sscIsDataUsefulDo OpeningMsg = not <$> hasOpeningToss id
sscIsDataUsefulDo SharesMsg = not <$> hasSharesToss id
sscIsDataUsefulDo VssCertificateMsg = not <$> hasCertificateToss id
evalTossInMem
:: ( WithLogger m
, MonadIO m
, MonadSscMem ctx m
, Rand.MonadRandom m
)
=> TossT PureToss a -> m a
evalTossInMem action = do
gs <- sscRunGlobalQuery ask
ld <- sscRunLocalQuery ask
let modifier = ld ^. ldModifier
evalPureTossWithLogger gs $ evalTossT modifier action
----------------------------------------------------------------------------
---- Data processing
----------------------------------------------------------------------------
type SscDataProcessingMode ctx m =
( WithLogger m
STM at least
, Rand.MonadRandom m -- for crypto
, MonadDBRead m -- to get richmen
, MonadGState m -- to get block size limit
, MonadSlots ctx m
, MonadSscMem ctx m
, HasSscConfiguration
)
-- | Process 'SignedCommitment' received from network, checking it against
-- current state (global + local) and adding to local state if it's valid.
sscProcessCommitment
:: forall ctx m.
(SscDataProcessingMode ctx m, HasProtocolConstants, HasProtocolMagic, HasGenesisData, HasGenesisBlockVersionData)
=> SignedCommitment -> m (Either SscVerifyError ())
sscProcessCommitment comm =
sscProcessData CommitmentMsg $
CommitmentsPayload (mkCommitmentsMap [comm]) mempty
-- | Process 'Opening' received from network, checking it against
-- current state (global + local) and adding to local state if it's valid.
sscProcessOpening
:: (SscDataProcessingMode ctx m, HasProtocolConstants, HasProtocolMagic, HasGenesisData, HasGenesisBlockVersionData)
=> StakeholderId -> Opening -> m (Either SscVerifyError ())
sscProcessOpening id opening =
sscProcessData OpeningMsg $
OpeningsPayload (HM.fromList [(id, opening)]) mempty
-- | Process 'InnerSharesMap' received from network, checking it against
-- current state (global + local) and adding to local state if it's valid.
sscProcessShares
:: (SscDataProcessingMode ctx m, HasGenesisBlockVersionData, HasGenesisData, HasProtocolMagic, HasProtocolConstants)
=> StakeholderId -> InnerSharesMap -> m (Either SscVerifyError ())
sscProcessShares id shares =
sscProcessData SharesMsg $ SharesPayload (HM.fromList [(id, shares)]) mempty
| Process ' VssCertificate ' received from network , checking it against
-- current state (global + local) and adding to local state if it's valid.
sscProcessCertificate
:: (SscDataProcessingMode ctx m, HasGenesisBlockVersionData, HasGenesisData, HasProtocolMagic, HasProtocolConstants)
=> VssCertificate -> m (Either SscVerifyError ())
sscProcessCertificate cert =
sscProcessData VssCertificateMsg $
CertificatesPayload (mkVssCertificatesMapSingleton cert)
sscProcessData
:: forall ctx m.
(SscDataProcessingMode ctx m, HasProtocolConstants, HasProtocolMagic, HasGenesisData, HasGenesisBlockVersionData)
=> SscTag -> SscPayload -> m (Either SscVerifyError ())
sscProcessData tag payload =
runExceptT $ do
getCurrentSlot >>= checkSlot
ld <- sscRunLocalQuery ask
bvd <- gsAdoptedBVData
let epoch = ld ^. ldEpoch
seed <- Rand.drgNew
lift (tryGetSscRichmen epoch) >>= \case
Nothing -> throwError $ TossUnknownRichmen epoch
Just richmen -> do
gs <- sscRunGlobalQuery ask
ExceptT $
sscRunLocalSTM $
executeMonadBaseRandom seed $
sscProcessDataDo (epoch, richmen) bvd gs payload
where
checkSlot Nothing = throwError CurrentSlotUnknown
checkSlot (Just si@SlotId {..})
| isGoodSlotForTag tag siSlot = pass
| CommitmentMsg <- tag = throwError $ NotCommitmentPhase si
| OpeningMsg <- tag = throwError $ NotOpeningPhase si
| SharesMsg <- tag = throwError $ NotSharesPhase si
| otherwise = pass
-- (... MonadPseudoRandom) a -> (... n) a
executeMonadBaseRandom seed = hoist $ hoist (pure . fst . Rand.withDRG seed)
sscProcessDataDo
:: (HasSscConfiguration, MonadState SscLocalData m, HasGenesisData
, WithLogger m, Rand.MonadRandom m, HasProtocolConstants
, HasProtocolMagic)
=> (EpochIndex, RichmenStakes)
-> BlockVersionData
-> SscGlobalState
-> SscPayload
-> m (Either SscVerifyError ())
sscProcessDataDo richmenData bvd gs payload =
runExceptT $ do
storedEpoch <- use ldEpoch
let givenEpoch = fst richmenData
let multiRichmen = HM.fromList [richmenData]
unless (storedEpoch == givenEpoch) $
throwError $ DifferentEpoches storedEpoch givenEpoch
TODO : This is a rather arbitrary limit , we should revisit it ( see CSL-1664 )
let maxMemPoolSize = bvdMaxBlockSize bvd * 2
curSize <- use ldSize
let exhausted = curSize >= maxMemPoolSize
-- If our mempool is exhausted we drop some data from it.
oldTM <-
if | not exhausted -> use ldModifier
| otherwise ->
evalPureTossWithLogger gs .
supplyPureTossEnv (multiRichmen, bvd) .
execTossT mempty . refreshToss givenEpoch =<<
use ldModifier
newTM <-
ExceptT $
evalPureTossWithLogger gs $
supplyPureTossEnv (multiRichmen, bvd) $
runExceptT $
execTossT oldTM $ verifyAndApplySscPayload (Left storedEpoch) payload
ldModifier .= newTM
If was exhausted , it 's easier to recompute total size .
-- Otherwise (most common case) we don't want to spend time on it and
-- just add size of new data.
-- Note that if data is invalid, all this computation will be
-- discarded.
if | exhausted -> ldSize .= biSize newTM
| otherwise -> ldSize += biSize payload
----------------------------------------------------------------------------
-- Clean-up
----------------------------------------------------------------------------
-- | Clean-up some data when new slot starts.
-- This function is only needed for garbage collection, it doesn't affect
-- validity of local data.
-- Currently it does nothing, but maybe later we'll decide to do clean-up.
sscGarbageCollectLocalData
:: MonadSscMem ctx m
=> SlotId -> m ()
sscGarbageCollectLocalData _ = pass
unless ( isCommitmentIdx slotIdx ) $ sscLocalCommitments .=
unless ( ) $ sscLocalOpenings .= mempty
unless ( isSharesIdx slotIdx ) $ sscLocalShares .= mempty
| null | https://raw.githubusercontent.com/input-output-hk/project-icarus-importer/36342f277bcb7f1902e677a02d1ce93e4cf224f0/ssc/Pos/Ssc/Logic/Local.hs | haskell | # LANGUAGE Rank2Types #
* 'Inv|Req|Data' processing.
* Garbage collection
| Get local payload to be put into main block and for given
empty payload can be returned.
| Make 'SscLocalData' valid for given epoch, richmen and global state. of
best known chain. This function is assumed to be called after applying
block and before releasing lock on block application.
(... MonadPseudoRandom) a -> (... n) a
--------------------------------------------------------------------------
-- Inv processing
--------------------------------------------------------------------------
to current local data.
--------------------------------------------------------------------------
-- Data processing
--------------------------------------------------------------------------
for crypto
to get richmen
to get block size limit
| Process 'SignedCommitment' received from network, checking it against
current state (global + local) and adding to local state if it's valid.
| Process 'Opening' received from network, checking it against
current state (global + local) and adding to local state if it's valid.
| Process 'InnerSharesMap' received from network, checking it against
current state (global + local) and adding to local state if it's valid.
current state (global + local) and adding to local state if it's valid.
(... MonadPseudoRandom) a -> (... n) a
If our mempool is exhausted we drop some data from it.
Otherwise (most common case) we don't want to spend time on it and
just add size of new data.
Note that if data is invalid, all this computation will be
discarded.
--------------------------------------------------------------------------
Clean-up
--------------------------------------------------------------------------
| Clean-up some data when new slot starts.
This function is only needed for garbage collection, it doesn't affect
validity of local data.
Currently it does nothing, but maybe later we'll decide to do clean-up. |
module Pos.Ssc.Logic.Local
(
sscGetLocalPayload
, sscNormalize
, sscIsDataUseful
, sscProcessCommitment
, sscProcessOpening
, sscProcessShares
, sscProcessCertificate
, sscGarbageCollectLocalData
) where
import Universum
import Control.Lens ((+=), (.=))
import Control.Monad.Except (MonadError (throwError), runExceptT)
import Control.Monad.Morph (hoist)
import qualified Crypto.Random as Rand
import qualified Data.HashMap.Strict as HM
import Formatting (int, sformat, (%))
import Serokell.Util (magnify')
import System.Wlog (WithLogger, launchNamedPureLog, logWarning)
import Pos.Binary.Class (biSize)
import Pos.Binary.Ssc ()
import Pos.Core (BlockVersionData (..), EpochIndex, SlotId (..),
StakeholderId, VssCertificate, epochIndexL, HasProtocolMagic,
mkVssCertificatesMapSingleton, HasGenesisData,
HasProtocolConstants, HasGenesisBlockVersionData)
import Pos.Core.Ssc (InnerSharesMap, Opening, SignedCommitment, SscPayload (..),
mkCommitmentsMap)
import Pos.DB (MonadBlockDBRead, MonadDBRead, MonadGState (gsAdoptedBVData))
import Pos.DB.BlockIndex (getTipHeader)
import Pos.Lrc.Context (HasLrcContext)
import Pos.Lrc.Types (RichmenStakes)
import Pos.Slotting (MonadSlots (getCurrentSlot))
import Pos.Ssc.Base (isCommitmentIdx, isOpeningIdx, isSharesIdx)
import Pos.Ssc.Configuration (HasSscConfiguration)
import Pos.Ssc.Error (SscVerifyError (..))
import Pos.Ssc.Lrc (getSscRichmen, tryGetSscRichmen)
import Pos.Ssc.Mem (MonadSscMem, SscLocalQuery, SscLocalUpdate, askSscMem,
sscRunGlobalQuery, sscRunLocalQuery, sscRunLocalSTM, syncingStateWith)
import Pos.Ssc.Toss (PureToss, SscTag (..), TossT, evalPureTossWithLogger, evalTossT,
execTossT, hasCertificateToss, hasCommitmentToss, hasOpeningToss,
hasSharesToss, isGoodSlotForTag, normalizeToss, refreshToss,
supplyPureTossEnv, tmCertificates, tmCommitments, tmOpenings,
tmShares, verifyAndApplySscPayload)
import Pos.Ssc.Types (SscGlobalState, SscLocalData (..), ldEpoch, ldModifier, ldSize,
sscGlobal, sscLocal)
' SlotId ' . If payload for given ' SlotId ' ca n't be constructed ,
sscGetLocalPayload
:: forall ctx m.
(MonadIO m, MonadSscMem ctx m, WithLogger m, HasProtocolConstants)
=> SlotId -> m SscPayload
sscGetLocalPayload = sscRunLocalQuery . sscGetLocalPayloadQ
sscGetLocalPayloadQ
:: (HasProtocolConstants)
=> SlotId -> SscLocalQuery SscPayload
sscGetLocalPayloadQ SlotId {..} = do
expectedEpoch <- view ldEpoch
let warningMsg = sformat warningFmt siEpoch expectedEpoch
isExpected <-
if expectedEpoch == siEpoch then pure True
else False <$ logWarning warningMsg
magnify' ldModifier $
getPayload isExpected <*> getCertificates isExpected
where
warningFmt = "sscGetLocalPayloadQ: unexpected epoch ("%int%
", stored one is "%int%")"
getPayload True
| isCommitmentIdx siSlot = CommitmentsPayload <$> view tmCommitments
| isOpeningIdx siSlot = OpeningsPayload <$> view tmOpenings
| isSharesIdx siSlot = SharesPayload <$> view tmShares
getPayload _ = pure CertificatesPayload
getCertificates isExpected
| isExpected = view tmCertificates
| otherwise = pure mempty
sscNormalize
:: forall ctx m.
( MonadGState m
, MonadBlockDBRead m
, MonadSscMem ctx m
, MonadReader ctx m
, HasLrcContext ctx
, WithLogger m
, MonadIO m
, Rand.MonadRandom m
, HasSscConfiguration
, HasProtocolConstants
, HasGenesisData
, HasProtocolMagic
, HasGenesisBlockVersionData
)
=> m ()
sscNormalize = do
tipEpoch <- view epochIndexL <$> getTipHeader
richmenData <- getSscRichmen "sscNormalize" tipEpoch
bvd <- gsAdoptedBVData
globalVar <- sscGlobal <$> askSscMem
localVar <- sscLocal <$> askSscMem
gs <- atomically $ readTVar globalVar
seed <- Rand.drgNew
launchNamedPureLog atomically $
syncingStateWith localVar $
executeMonadBaseRandom seed $
sscNormalizeU (tipEpoch, richmenData) bvd gs
where
executeMonadBaseRandom seed = hoist $ hoist (pure . fst . Rand.withDRG seed)
sscNormalizeU
:: (HasSscConfiguration, HasProtocolConstants, HasGenesisData, HasProtocolMagic)
=> (EpochIndex, RichmenStakes)
-> BlockVersionData
-> SscGlobalState
-> SscLocalUpdate ()
sscNormalizeU (epoch, stake) bvd gs = do
oldModifier <- use ldModifier
let multiRichmen = HM.fromList [(epoch, stake)]
newModifier <-
evalPureTossWithLogger gs $ supplyPureTossEnv (multiRichmen, bvd) $
execTossT mempty $ normalizeToss epoch oldModifier
ldModifier .= newModifier
ldEpoch .= epoch
ldSize .= biSize newModifier
| Check whether SSC data with given tag and public key can be added
sscIsDataUseful
:: ( WithLogger m
, MonadIO m
, MonadSlots ctx m
, MonadSscMem ctx m
, Rand.MonadRandom m
, HasSscConfiguration
, HasGenesisData
, HasProtocolConstants
)
=> SscTag -> StakeholderId -> m Bool
sscIsDataUseful tag id =
ifM
(maybe False (isGoodSlotForTag tag . siSlot) <$> getCurrentSlot)
(evalTossInMem $ sscIsDataUsefulDo tag)
(pure False)
where
sscIsDataUsefulDo CommitmentMsg = not <$> hasCommitmentToss id
sscIsDataUsefulDo OpeningMsg = not <$> hasOpeningToss id
sscIsDataUsefulDo SharesMsg = not <$> hasSharesToss id
sscIsDataUsefulDo VssCertificateMsg = not <$> hasCertificateToss id
evalTossInMem
:: ( WithLogger m
, MonadIO m
, MonadSscMem ctx m
, Rand.MonadRandom m
)
=> TossT PureToss a -> m a
evalTossInMem action = do
gs <- sscRunGlobalQuery ask
ld <- sscRunLocalQuery ask
let modifier = ld ^. ldModifier
evalPureTossWithLogger gs $ evalTossT modifier action
type SscDataProcessingMode ctx m =
( WithLogger m
STM at least
, MonadSlots ctx m
, MonadSscMem ctx m
, HasSscConfiguration
)
sscProcessCommitment
:: forall ctx m.
(SscDataProcessingMode ctx m, HasProtocolConstants, HasProtocolMagic, HasGenesisData, HasGenesisBlockVersionData)
=> SignedCommitment -> m (Either SscVerifyError ())
sscProcessCommitment comm =
sscProcessData CommitmentMsg $
CommitmentsPayload (mkCommitmentsMap [comm]) mempty
sscProcessOpening
:: (SscDataProcessingMode ctx m, HasProtocolConstants, HasProtocolMagic, HasGenesisData, HasGenesisBlockVersionData)
=> StakeholderId -> Opening -> m (Either SscVerifyError ())
sscProcessOpening id opening =
sscProcessData OpeningMsg $
OpeningsPayload (HM.fromList [(id, opening)]) mempty
sscProcessShares
:: (SscDataProcessingMode ctx m, HasGenesisBlockVersionData, HasGenesisData, HasProtocolMagic, HasProtocolConstants)
=> StakeholderId -> InnerSharesMap -> m (Either SscVerifyError ())
sscProcessShares id shares =
sscProcessData SharesMsg $ SharesPayload (HM.fromList [(id, shares)]) mempty
| Process ' VssCertificate ' received from network , checking it against
sscProcessCertificate
:: (SscDataProcessingMode ctx m, HasGenesisBlockVersionData, HasGenesisData, HasProtocolMagic, HasProtocolConstants)
=> VssCertificate -> m (Either SscVerifyError ())
sscProcessCertificate cert =
sscProcessData VssCertificateMsg $
CertificatesPayload (mkVssCertificatesMapSingleton cert)
sscProcessData
:: forall ctx m.
(SscDataProcessingMode ctx m, HasProtocolConstants, HasProtocolMagic, HasGenesisData, HasGenesisBlockVersionData)
=> SscTag -> SscPayload -> m (Either SscVerifyError ())
sscProcessData tag payload =
runExceptT $ do
getCurrentSlot >>= checkSlot
ld <- sscRunLocalQuery ask
bvd <- gsAdoptedBVData
let epoch = ld ^. ldEpoch
seed <- Rand.drgNew
lift (tryGetSscRichmen epoch) >>= \case
Nothing -> throwError $ TossUnknownRichmen epoch
Just richmen -> do
gs <- sscRunGlobalQuery ask
ExceptT $
sscRunLocalSTM $
executeMonadBaseRandom seed $
sscProcessDataDo (epoch, richmen) bvd gs payload
where
checkSlot Nothing = throwError CurrentSlotUnknown
checkSlot (Just si@SlotId {..})
| isGoodSlotForTag tag siSlot = pass
| CommitmentMsg <- tag = throwError $ NotCommitmentPhase si
| OpeningMsg <- tag = throwError $ NotOpeningPhase si
| SharesMsg <- tag = throwError $ NotSharesPhase si
| otherwise = pass
executeMonadBaseRandom seed = hoist $ hoist (pure . fst . Rand.withDRG seed)
sscProcessDataDo
:: (HasSscConfiguration, MonadState SscLocalData m, HasGenesisData
, WithLogger m, Rand.MonadRandom m, HasProtocolConstants
, HasProtocolMagic)
=> (EpochIndex, RichmenStakes)
-> BlockVersionData
-> SscGlobalState
-> SscPayload
-> m (Either SscVerifyError ())
sscProcessDataDo richmenData bvd gs payload =
runExceptT $ do
storedEpoch <- use ldEpoch
let givenEpoch = fst richmenData
let multiRichmen = HM.fromList [richmenData]
unless (storedEpoch == givenEpoch) $
throwError $ DifferentEpoches storedEpoch givenEpoch
TODO : This is a rather arbitrary limit , we should revisit it ( see CSL-1664 )
let maxMemPoolSize = bvdMaxBlockSize bvd * 2
curSize <- use ldSize
let exhausted = curSize >= maxMemPoolSize
oldTM <-
if | not exhausted -> use ldModifier
| otherwise ->
evalPureTossWithLogger gs .
supplyPureTossEnv (multiRichmen, bvd) .
execTossT mempty . refreshToss givenEpoch =<<
use ldModifier
newTM <-
ExceptT $
evalPureTossWithLogger gs $
supplyPureTossEnv (multiRichmen, bvd) $
runExceptT $
execTossT oldTM $ verifyAndApplySscPayload (Left storedEpoch) payload
ldModifier .= newTM
If was exhausted , it 's easier to recompute total size .
if | exhausted -> ldSize .= biSize newTM
| otherwise -> ldSize += biSize payload
sscGarbageCollectLocalData
:: MonadSscMem ctx m
=> SlotId -> m ()
sscGarbageCollectLocalData _ = pass
unless ( isCommitmentIdx slotIdx ) $ sscLocalCommitments .=
unless ( ) $ sscLocalOpenings .= mempty
unless ( isSharesIdx slotIdx ) $ sscLocalShares .= mempty
|
6a3c1c1f3e3bb760d033878343c8728a7b15892303e9da01d473b9016322f47f | exercism/haskell | Tests.hs | import Test.Hspec (Spec, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import ZebraPuzzle (Resident(..), Solution(..), solve)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = it "solve" $ solve `shouldBe` Solution { waterDrinker = Norwegian
, zebraOwner = Japanese }
| null | https://raw.githubusercontent.com/exercism/haskell/f81ee7dc338294b3dbefb7bd39fc193546fcec26/exercises/practice/zebra-puzzle/test/Tests.hs | haskell | import Test.Hspec (Spec, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import ZebraPuzzle (Resident(..), Solution(..), solve)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = it "solve" $ solve `shouldBe` Solution { waterDrinker = Norwegian
, zebraOwner = Japanese }
| |
b4422e7a55fc326f2befb2129795aa21f4dda73f300053ec8f8961e61e500a17 | reasonml-old/bs-containers | camlinternalFormatBasics.mli | (* No comments, OCaml stdlib internal use only. *)
type padty = Left | Right | Zeros
type int_conv =
| Int_d | Int_pd | Int_sd | Int_i | Int_pi | Int_si
| Int_x | Int_Cx | Int_X | Int_CX | Int_o | Int_Co | Int_u
type float_conv =
| Float_f | Float_pf | Float_sf | Float_e | Float_pe | Float_se
| Float_E | Float_pE | Float_sE | Float_g | Float_pg | Float_sg
| Float_G | Float_pG | Float_sG | Float_F
type char_set = string
type counter = Line_counter | Char_counter | Token_counter
type ('a, 'b) padding =
| No_padding : ('a, 'a) padding
| Lit_padding : padty * int -> ('a, 'a) padding
| Arg_padding : padty -> (int -> 'a, 'a) padding
type pad_option = int option
type ('a, 'b) precision =
| No_precision : ('a, 'a) precision
| Lit_precision : int -> ('a, 'a) precision
| Arg_precision : (int -> 'a, 'a) precision
type prec_option = int option
type ('a, 'b, 'c) custom_arity =
| Custom_zero : ('a, string, 'a) custom_arity
| Custom_succ : ('a, 'b, 'c) custom_arity ->
('a, 'x -> 'b, 'x -> 'c) custom_arity
type block_type = Pp_hbox | Pp_vbox | Pp_hvbox | Pp_hovbox | Pp_box | Pp_fits
type formatting_lit =
| Close_box
| Close_tag
| Break of string * int * int
| FFlush
| Force_newline
| Flush_newline
| Magic_size of string * int
| Escaped_at
| Escaped_percent
| Scan_indic of char
type ('a, 'b, 'c, 'd, 'e, 'f) formatting_gen =
| Open_tag : ('a, 'b, 'c, 'd, 'e, 'f) format6 ->
('a, 'b, 'c, 'd, 'e, 'f) formatting_gen
| Open_box : ('a, 'b, 'c, 'd, 'e, 'f) format6 ->
('a, 'b, 'c, 'd, 'e, 'f) formatting_gen
and ('a, 'b, 'c, 'd, 'e, 'f) fmtty =
('a, 'b, 'c, 'd, 'e, 'f,
'a, 'b, 'c, 'd, 'e, 'f) fmtty_rel
and ('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel =
| Char_ty : (* %c *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(char -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
char -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| String_ty : (* %s *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(string -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
string -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| Int_ty : (* %d *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(int -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
int -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| Int32_ty : (* %ld *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(int32 -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
int32 -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| Nativeint_ty : (* %nd *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(nativeint -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
nativeint -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| Int64_ty : (* %Ld *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(int64 -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
int64 -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| Float_ty : (* %f *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(float -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
float -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| Bool_ty : (* %B *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(bool -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
bool -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| Format_arg_ty : (* %{...%} *)
('g, 'h, 'i, 'j, 'k, 'l) fmtty *
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| Format_subst_ty : (* %(...%) *)
('g, 'h, 'i, 'j, 'k, 'l,
'g1, 'b1, 'c1, 'j1, 'd1, 'a1) fmtty_rel *
('g, 'h, 'i, 'j, 'k, 'l,
'g2, 'b2, 'c2, 'j2, 'd2, 'a2) fmtty_rel *
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'g1, 'b1, 'c1, 'j1, 'e1, 'f1,
('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'g2, 'b2, 'c2, 'j2, 'e2, 'f2) fmtty_rel
Printf and Format specific constructors .
| Alpha_ty : (* %a *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(('b1 -> 'x -> 'c1) -> 'x -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
('b2 -> 'x -> 'c2) -> 'x -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| Theta_ty : (* %t *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(('b1 -> 'c1) -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
('b2 -> 'c2) -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
| Any_ty : (* Used for custom formats *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
('x -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'x -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
(* Scanf specific constructor. *)
| Reader_ty : (* %r *)
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
('x -> 'a1, 'b1, 'c1, ('b1 -> 'x) -> 'd1, 'e1, 'f1,
'x -> 'a2, 'b2, 'c2, ('b2 -> 'x) -> 'd2, 'e2, 'f2) fmtty_rel
% _
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
('a1, 'b1, 'c1, ('b1 -> 'x) -> 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, ('b2 -> 'x) -> 'd2, 'e2, 'f2) fmtty_rel
| End_of_fmtty :
('f1, 'b1, 'c1, 'd1, 'd1, 'f1,
'f2, 'b2, 'c2, 'd2, 'd2, 'f2) fmtty_rel
(**)
(** List of format elements. *)
and ('a, 'b, 'c, 'd, 'e, 'f) fmt =
| Char : (* %c *)
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(char -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
| Caml_char : (* %C *)
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(char -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
| String : (* %s *)
('x, string -> 'a) padding * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
| Caml_string : (* %S *)
('x, string -> 'a) padding * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
| Int : (* %[dixXuo] *)
int_conv * ('x, 'y) padding * ('y, int -> 'a) precision *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
| Int32 : (* %l[dixXuo] *)
int_conv * ('x, 'y) padding * ('y, int32 -> 'a) precision *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
| Nativeint : (* %n[dixXuo] *)
int_conv * ('x, 'y) padding * ('y, nativeint -> 'a) precision *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
| Int64 : (* %L[dixXuo] *)
int_conv * ('x, 'y) padding * ('y, int64 -> 'a) precision *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
| Float : (* %[feEgGF] *)
float_conv * ('x, 'y) padding * ('y, float -> 'a) precision *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
% [ bB ]
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(bool -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
| Flush : (* %! *)
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('a, 'b, 'c, 'd, 'e, 'f) fmt
abc
string * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('a, 'b, 'c, 'd, 'e, 'f) fmt
| Char_literal : (* x *)
char * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('a, 'b, 'c, 'd, 'e, 'f) fmt
| Format_arg : (* %{...%} *)
pad_option * ('g, 'h, 'i, 'j, 'k, 'l) fmtty *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
| Format_subst : (* %(...%) *)
pad_option *
('g, 'h, 'i, 'j, 'k, 'l,
'g2, 'b, 'c, 'j2, 'd, 'a) fmtty_rel *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'g2, 'b, 'c, 'j2, 'e, 'f) fmt
Printf and Format specific constructor .
| Alpha : (* %a *)
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(('b -> 'x -> 'c) -> 'x -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
| Theta : (* %t *)
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(('b -> 'c) -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
(* Format specific constructor: *)
| Formatting_lit : (* @_ *)
formatting_lit * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('a, 'b, 'c, 'd, 'e, 'f) fmt
| Formatting_gen : (* @_ *)
('a1, 'b, 'c, 'd1, 'e1, 'f1) formatting_gen *
('f1, 'b, 'c, 'e1, 'e2, 'f2) fmt -> ('a1, 'b, 'c, 'd1, 'e2, 'f2) fmt
(* Scanf specific constructors: *)
| Reader : (* %r *)
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x -> 'a, 'b, 'c, ('b -> 'x) -> 'd, 'e, 'f) fmt
| Scan_char_set : (* %[...] *)
pad_option * char_set * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(string -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
| Scan_get_counter : (* %[nlNL] *)
counter * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(int -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
% 0c
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(char -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
% 0c behaves as % c for printing , but when scanning it does not
consume the character from the input stream
consume the character from the input stream *)
| Ignored_param : (* %_ *)
('a, 'b, 'c, 'd, 'y, 'x) ignored * ('x, 'b, 'c, 'y, 'e, 'f) fmt ->
('a, 'b, 'c, 'd, 'e, 'f) fmt
(* Custom printing format *)
| Custom :
('a, 'x, 'y) custom_arity * (unit -> 'x) * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('y, 'b, 'c, 'd, 'e, 'f) fmt
| End_of_format :
('f, 'b, 'c, 'e, 'e, 'f) fmt
and ('a, 'b, 'c, 'd, 'e, 'f) ignored =
| Ignored_char :
('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_caml_char :
('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_string :
pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_caml_string :
pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_int :
int_conv * pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_int32 :
int_conv * pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_nativeint :
int_conv * pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_int64 :
int_conv * pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_float :
pad_option * prec_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_bool :
('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_format_arg :
pad_option * ('g, 'h, 'i, 'j, 'k, 'l) fmtty ->
('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_format_subst :
pad_option * ('a, 'b, 'c, 'd, 'e, 'f) fmtty ->
('a, 'b, 'c, 'd, 'e, 'f) ignored
| Ignored_reader :
('a, 'b, 'c, ('b -> 'x) -> 'd, 'd, 'a) ignored
| Ignored_scan_char_set :
pad_option * char_set -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_scan_get_counter :
counter -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_scan_next_char :
('a, 'b, 'c, 'd, 'd, 'a) ignored
and ('a, 'b, 'c, 'd, 'e, 'f) format6 =
Format of ('a, 'b, 'c, 'd, 'e, 'f) fmt * string
val concat_fmtty :
('g1, 'b1, 'c1, 'j1, 'd1, 'a1,
'g2, 'b2, 'c2, 'j2, 'd2, 'a2) fmtty_rel ->
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
('g1, 'b1, 'c1, 'j1, 'e1, 'f1,
'g2, 'b2, 'c2, 'j2, 'e2, 'f2) fmtty_rel
val erase_rel :
('a, 'b, 'c, 'd, 'e, 'f,
'g, 'h, 'i, 'j, 'k, 'l) fmtty_rel -> ('a, 'b, 'c, 'd, 'e, 'f) fmtty
val concat_fmt :
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('f, 'b, 'c, 'e, 'g, 'h) fmt ->
('a, 'b, 'c, 'd, 'g, 'h) fmt
| null | https://raw.githubusercontent.com/reasonml-old/bs-containers/dfd1360dc74ede57c09e9f9d837ce59285af6fd2/doc/stdlib-cmi/camlinternalFormatBasics.mli | ocaml | No comments, OCaml stdlib internal use only.
%c
%s
%d
%ld
%nd
%Ld
%f
%B
%{...%}
%(...%)
%a
%t
Used for custom formats
Scanf specific constructor.
%r
* List of format elements.
%c
%C
%s
%S
%[dixXuo]
%l[dixXuo]
%n[dixXuo]
%L[dixXuo]
%[feEgGF]
%!
x
%{...%}
%(...%)
%a
%t
Format specific constructor:
@_
@_
Scanf specific constructors:
%r
%[...]
%[nlNL]
%_
Custom printing format |
type padty = Left | Right | Zeros
type int_conv =
| Int_d | Int_pd | Int_sd | Int_i | Int_pi | Int_si
| Int_x | Int_Cx | Int_X | Int_CX | Int_o | Int_Co | Int_u
type float_conv =
| Float_f | Float_pf | Float_sf | Float_e | Float_pe | Float_se
| Float_E | Float_pE | Float_sE | Float_g | Float_pg | Float_sg
| Float_G | Float_pG | Float_sG | Float_F
type char_set = string
type counter = Line_counter | Char_counter | Token_counter
type ('a, 'b) padding =
| No_padding : ('a, 'a) padding
| Lit_padding : padty * int -> ('a, 'a) padding
| Arg_padding : padty -> (int -> 'a, 'a) padding
type pad_option = int option
type ('a, 'b) precision =
| No_precision : ('a, 'a) precision
| Lit_precision : int -> ('a, 'a) precision
| Arg_precision : (int -> 'a, 'a) precision
type prec_option = int option
type ('a, 'b, 'c) custom_arity =
| Custom_zero : ('a, string, 'a) custom_arity
| Custom_succ : ('a, 'b, 'c) custom_arity ->
('a, 'x -> 'b, 'x -> 'c) custom_arity
type block_type = Pp_hbox | Pp_vbox | Pp_hvbox | Pp_hovbox | Pp_box | Pp_fits
type formatting_lit =
| Close_box
| Close_tag
| Break of string * int * int
| FFlush
| Force_newline
| Flush_newline
| Magic_size of string * int
| Escaped_at
| Escaped_percent
| Scan_indic of char
type ('a, 'b, 'c, 'd, 'e, 'f) formatting_gen =
| Open_tag : ('a, 'b, 'c, 'd, 'e, 'f) format6 ->
('a, 'b, 'c, 'd, 'e, 'f) formatting_gen
| Open_box : ('a, 'b, 'c, 'd, 'e, 'f) format6 ->
('a, 'b, 'c, 'd, 'e, 'f) formatting_gen
and ('a, 'b, 'c, 'd, 'e, 'f) fmtty =
('a, 'b, 'c, 'd, 'e, 'f,
'a, 'b, 'c, 'd, 'e, 'f) fmtty_rel
and ('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel =
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(char -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
char -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(string -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
string -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(int -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
int -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(int32 -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
int32 -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(nativeint -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
nativeint -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(int64 -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
int64 -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(float -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
float -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(bool -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
bool -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('g, 'h, 'i, 'j, 'k, 'l) fmtty *
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('g, 'h, 'i, 'j, 'k, 'l,
'g1, 'b1, 'c1, 'j1, 'd1, 'a1) fmtty_rel *
('g, 'h, 'i, 'j, 'k, 'l,
'g2, 'b2, 'c2, 'j2, 'd2, 'a2) fmtty_rel *
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'g1, 'b1, 'c1, 'j1, 'e1, 'f1,
('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'g2, 'b2, 'c2, 'j2, 'e2, 'f2) fmtty_rel
Printf and Format specific constructors .
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(('b1 -> 'x -> 'c1) -> 'x -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
('b2 -> 'x -> 'c2) -> 'x -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
(('b1 -> 'c1) -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
('b2 -> 'c2) -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
('x -> 'a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'x -> 'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
('x -> 'a1, 'b1, 'c1, ('b1 -> 'x) -> 'd1, 'e1, 'f1,
'x -> 'a2, 'b2, 'c2, ('b2 -> 'x) -> 'd2, 'e2, 'f2) fmtty_rel
% _
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
('a1, 'b1, 'c1, ('b1 -> 'x) -> 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, ('b2 -> 'x) -> 'd2, 'e2, 'f2) fmtty_rel
| End_of_fmtty :
('f1, 'b1, 'c1, 'd1, 'd1, 'f1,
'f2, 'b2, 'c2, 'd2, 'd2, 'f2) fmtty_rel
and ('a, 'b, 'c, 'd, 'e, 'f) fmt =
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(char -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(char -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
('x, string -> 'a) padding * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
('x, string -> 'a) padding * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
int_conv * ('x, 'y) padding * ('y, int -> 'a) precision *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
int_conv * ('x, 'y) padding * ('y, int32 -> 'a) precision *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
int_conv * ('x, 'y) padding * ('y, nativeint -> 'a) precision *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
int_conv * ('x, 'y) padding * ('y, int64 -> 'a) precision *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
float_conv * ('x, 'y) padding * ('y, float -> 'a) precision *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x, 'b, 'c, 'd, 'e, 'f) fmt
% [ bB ]
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(bool -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('a, 'b, 'c, 'd, 'e, 'f) fmt
abc
string * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('a, 'b, 'c, 'd, 'e, 'f) fmt
char * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('a, 'b, 'c, 'd, 'e, 'f) fmt
pad_option * ('g, 'h, 'i, 'j, 'k, 'l) fmtty *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
pad_option *
('g, 'h, 'i, 'j, 'k, 'l,
'g2, 'b, 'c, 'j2, 'd, 'a) fmtty_rel *
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(('g, 'h, 'i, 'j, 'k, 'l) format6 -> 'g2, 'b, 'c, 'j2, 'e, 'f) fmt
Printf and Format specific constructor .
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(('b -> 'x -> 'c) -> 'x -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(('b -> 'c) -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
formatting_lit * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('a, 'b, 'c, 'd, 'e, 'f) fmt
('a1, 'b, 'c, 'd1, 'e1, 'f1) formatting_gen *
('f1, 'b, 'c, 'e1, 'e2, 'f2) fmt -> ('a1, 'b, 'c, 'd1, 'e2, 'f2) fmt
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('x -> 'a, 'b, 'c, ('b -> 'x) -> 'd, 'e, 'f) fmt
pad_option * char_set * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(string -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
counter * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(int -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
% 0c
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
(char -> 'a, 'b, 'c, 'd, 'e, 'f) fmt
% 0c behaves as % c for printing , but when scanning it does not
consume the character from the input stream
consume the character from the input stream *)
('a, 'b, 'c, 'd, 'y, 'x) ignored * ('x, 'b, 'c, 'y, 'e, 'f) fmt ->
('a, 'b, 'c, 'd, 'e, 'f) fmt
| Custom :
('a, 'x, 'y) custom_arity * (unit -> 'x) * ('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('y, 'b, 'c, 'd, 'e, 'f) fmt
| End_of_format :
('f, 'b, 'c, 'e, 'e, 'f) fmt
and ('a, 'b, 'c, 'd, 'e, 'f) ignored =
| Ignored_char :
('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_caml_char :
('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_string :
pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_caml_string :
pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_int :
int_conv * pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_int32 :
int_conv * pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_nativeint :
int_conv * pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_int64 :
int_conv * pad_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_float :
pad_option * prec_option -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_bool :
('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_format_arg :
pad_option * ('g, 'h, 'i, 'j, 'k, 'l) fmtty ->
('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_format_subst :
pad_option * ('a, 'b, 'c, 'd, 'e, 'f) fmtty ->
('a, 'b, 'c, 'd, 'e, 'f) ignored
| Ignored_reader :
('a, 'b, 'c, ('b -> 'x) -> 'd, 'd, 'a) ignored
| Ignored_scan_char_set :
pad_option * char_set -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_scan_get_counter :
counter -> ('a, 'b, 'c, 'd, 'd, 'a) ignored
| Ignored_scan_next_char :
('a, 'b, 'c, 'd, 'd, 'a) ignored
and ('a, 'b, 'c, 'd, 'e, 'f) format6 =
Format of ('a, 'b, 'c, 'd, 'e, 'f) fmt * string
val concat_fmtty :
('g1, 'b1, 'c1, 'j1, 'd1, 'a1,
'g2, 'b2, 'c2, 'j2, 'd2, 'a2) fmtty_rel ->
('a1, 'b1, 'c1, 'd1, 'e1, 'f1,
'a2, 'b2, 'c2, 'd2, 'e2, 'f2) fmtty_rel ->
('g1, 'b1, 'c1, 'j1, 'e1, 'f1,
'g2, 'b2, 'c2, 'j2, 'e2, 'f2) fmtty_rel
val erase_rel :
('a, 'b, 'c, 'd, 'e, 'f,
'g, 'h, 'i, 'j, 'k, 'l) fmtty_rel -> ('a, 'b, 'c, 'd, 'e, 'f) fmtty
val concat_fmt :
('a, 'b, 'c, 'd, 'e, 'f) fmt ->
('f, 'b, 'c, 'e, 'g, 'h) fmt ->
('a, 'b, 'c, 'd, 'g, 'h) fmt
|
18473b71520b2a368f2831baa7f861ae02e110f1f49cceaf88baa00bcbedb8c1 | huffyhenry/sync.soccer | Tracab.hs | module Tracab where
import qualified Data.IntMap as Map
import qualified Data.List.Split as Split
import Data.Maybe (maybe, Maybe, listToMaybe)
import System.IO (openFile, hGetContents, hClose, IOMode(ReadMode))
import System.Environment (getArgs)
import Text.XML.Light.Types (Element)
import Text.Printf (printf)
import XmlUtils (attrLookupStrict, attrLookup)
import qualified XmlUtils as Xml
Complete Tracab data
type Tracab positions = (Metadata, Frames positions)
metadata :: Tracab positions -> Metadata
metadata = fst
frames :: Tracab positions -> Frames positions
frames = snd
parseTracab :: String -> String -> IO (Tracab Positions)
parseTracab metafile datafile = do
tracabMeta <- parseMetaFile metafile
tracabData <- parseDataFile tracabMeta datafile
return (tracabMeta, tracabData)
data Positions = Positions {
agents :: [ Position ],
ball :: Position
}
data Coordinates = Coordinates {
x :: Int,
y :: Int
}
-- The position information of a single player/ball in a single snapshot
data Position = Position {
participantId :: Int,
shirtNumber :: Maybe Int,
coordinates :: Coordinates,
mTeam :: Maybe TeamKind,
speed :: Float,
mBallStatus :: Maybe BallStatus
}
data BallStatus = Alive | Dead
deriving Eq
-- A single complete snapshot of tracking data
data Frame positions = Frame {
frameId :: Int,
positions :: positions,
clock :: Maybe Double
}
type Frames positions = [Frame positions]
instance Show (Frame pos) where
show f =
let formatClock :: Double -> String
formatClock c = printf "%02.d:%02d.%03d" mins secs msec where
mins = floor (c / 60.0) :: Int
secs = floor (c - 60.0 * fromIntegral mins) :: Int
msec = round (1000.0 * (c - 60.0 * fromIntegral mins - fromIntegral secs)) :: Int
base = "Frame " ++ show (frameId f)
extra = case clock f of
Nothing -> ""
Just c -> printf " (implied clock: %s)" (formatClock c)
in base ++ extra
The key method parsing a line of the Tracab data file into a Frame object
parseFrame :: Metadata -> String -> Frame Positions
parseFrame meta inputLine =
Frame {
frameId = frameId,
positions = positions,
clock = clock
} where
-- Split input data into chunks
[dataLineIdStr, positionsString, ballString, _] = splitOn ':' inputLine
positionsStrings = splitOn ';' positionsString
-- Assemble parsed data
frameId = read dataLineIdStr
positions =
Positions {
agents = map parsePosition positionsStrings,
ball = parseBallPosition ballString
}
Compute the implied timestamp of the frame in seconds from game start
inPeriodClock p = let offset = frameId - startFrame p
fps = frameRateFps meta
clockStart = if periodId p == 2 then 45.0*60.0 else 0.0
in clockStart + fromIntegral offset / fromIntegral fps
candidatePeriods = [p | p <- periods meta,
startFrame p <= frameId,
endFrame p >= frameId]
clock = fmap inPeriodClock (listToMaybe candidatePeriods)
Parse individual chunks
splitOn c = Split.wordsBy (==c)
parsePosition inputStr =
Position
{ participantId = read idStr
, shirtNumber = if jerseyStr == "-1" then Nothing else Just (read jerseyStr)
, coordinates = Coordinates { x = read xStr , y = read yStr }
, mTeam = team
, speed = read speedStr
, mBallStatus = Nothing
}
where
[teamStr,idStr,jerseyStr,xStr,yStr,speedStr] = splitOn ',' inputStr
team = case teamStr of
"1" -> Just Home
"0" -> Just Away
_ -> Nothing
parseBallPosition inputStr =
Position
{ participantId = 0
, shirtNumber = Nothing
, coordinates = Coordinates { x = read xStr , y = read yStr }
, mTeam = team
, mBallStatus = ballStatus
, speed = read speedStr
}
where
xStr:yStr:zStr:speedStr:rest = splitOn ',' inputStr
(team, otherFields) = case rest of
"H" : remainder -> (Just Home, remainder)
"A" : remainder -> (Just Away, remainder)
_ -> (Nothing, rest)
ballStatus = case otherFields of
"Alive" : _ -> Just Alive
"Dead" : _ -> Just Dead
_ -> Nothing
Parse the entire Tracab data file into a list of frames
parseDataFile :: Metadata -> String -> IO (Frames Positions)
parseDataFile meta filename = do
handle <- openFile filename ReadMode
contents <- hGetContents handle
let frames = map (parseFrame meta) $ lines contents
return frames
An example meta file :
< TracabMetaData sVersion="1.0 " >
< match iId="803174 " dtDate="2015 - 08 - 16 17:00:00 " iFrameRateFps="25 "
fPitchXSizeMeters="105.00 " fPitchYSizeMeters="68.00 "
fTrackingAreaXSizeMeters="111.00 " fTrackingAreaYSizeMeters="88.00 " >
< period iId="1 " iStartFrame="1349935 " iEndFrame="1424747"/ >
< period iId="2 " iStartFrame="1449116 " iEndFrame="1521187"/ >
< period iId="3 " iStartFrame="0 " iEndFrame="0"/ >
< period iId="4 " iStartFrame="0 " iEndFrame="0"/ >
< /match >
< /TracabMetaData >
<TracabMetaData sVersion="1.0">
<match iId="803174" dtDate="2015-08-16 17:00:00" iFrameRateFps="25"
fPitchXSizeMeters="105.00" fPitchYSizeMeters="68.00"
fTrackingAreaXSizeMeters="111.00" fTrackingAreaYSizeMeters="88.00">
<period iId="1" iStartFrame="1349935" iEndFrame="1424747"/>
<period iId="2" iStartFrame="1449116" iEndFrame="1521187"/>
<period iId="3" iStartFrame="0" iEndFrame="0"/>
<period iId="4" iStartFrame="0" iEndFrame="0"/>
</match>
</TracabMetaData>
-}
The type of Tracab metadata
data Metadata = Metadata{
matchId :: String,
frameRateFps :: Int,
pitchSizeX :: Float,
pitchSizeY :: Float,
trackingX :: Float,
trackingY :: Float,
periods :: Periods
}
data Period = Period {
periodId :: Int,
startFrame :: Int,
endFrame :: Int
}
type Periods = [Period]
indentLines :: [String] -> String
indentLines inputLines =
unlines $ map (" " ++) inputLines
indent :: String -> String
indent input =
indentLines $ lines input
instance Show Metadata where
show match =
unlines
[ "matchId: " ++ matchId match
, "frameRateFps: " ++ show (frameRateFps match)
, "periods: " ++ indentLines (map show (periods match))
]
instance Show Period where
show period =
unwords
[ "["
, show (periodId period)
, "]"
, "start:"
, show $ startFrame period
, show $ endFrame period
]
parseMetaFile :: String -> IO Metadata
parseMetaFile filename = do
root <- Xml.loadXmlFromFile filename
return $ makeMetadata (head $ Xml.getAllChildren root)
makeMetadata :: Element -> Metadata
makeMetadata element =
Metadata
{ matchId = attrLookupStrict element id "iId"
, frameRateFps = attrLookupStrict element read "iFrameRateFps"
, pitchSizeX = attrLookupStrict element read "fPitchXSizeMeters"
, pitchSizeY = attrLookupStrict element read "fPitchYSizeMeters"
, trackingX = attrLookupStrict element read "fTrackingAreaXSizeMeters"
, trackingY = attrLookupStrict element read "fTrackingAreaYSizeMeters"
, periods = map makePeriod $ Xml.getChildrenWithQName "period" element
}
makePeriod :: Element -> Period
makePeriod element =
Period{
periodId = attrLookupStrict element read "iId",
startFrame = attrLookupStrict element read "iStartFrame",
endFrame = attrLookupStrict element read "iEndFrame"
}
data TeamKind = Home | Away deriving (Eq, Show)
oppositionKind :: TeamKind -> TeamKind
oppositionKind Home = Away
oppositionKind Away = Home
rightToLeftKickOff :: Frame Positions -> TeamKind
rightToLeftKickOff kickOffFrame = if homeX > awayX then Home else Away
where
-- Might be able to do better than this.
kickOffPositions = agents $ positions kickOffFrame
homePositions = filter (\p -> mTeam p == Just Home) kickOffPositions
awayPositions = filter (\p -> mTeam p == Just Away) kickOffPositions
sumX positions = sum $ map (x . coordinates) positions
homeX = sumX homePositions
awayX = sumX awayPositions | null | https://raw.githubusercontent.com/huffyhenry/sync.soccer/c740446a65a8ff50d67a9110af7d0fcffb01b900/src/Tracab.hs | haskell | The position information of a single player/ball in a single snapshot
A single complete snapshot of tracking data
Split input data into chunks
Assemble parsed data
Might be able to do better than this. | module Tracab where
import qualified Data.IntMap as Map
import qualified Data.List.Split as Split
import Data.Maybe (maybe, Maybe, listToMaybe)
import System.IO (openFile, hGetContents, hClose, IOMode(ReadMode))
import System.Environment (getArgs)
import Text.XML.Light.Types (Element)
import Text.Printf (printf)
import XmlUtils (attrLookupStrict, attrLookup)
import qualified XmlUtils as Xml
Complete Tracab data
type Tracab positions = (Metadata, Frames positions)
metadata :: Tracab positions -> Metadata
metadata = fst
frames :: Tracab positions -> Frames positions
frames = snd
parseTracab :: String -> String -> IO (Tracab Positions)
parseTracab metafile datafile = do
tracabMeta <- parseMetaFile metafile
tracabData <- parseDataFile tracabMeta datafile
return (tracabMeta, tracabData)
data Positions = Positions {
agents :: [ Position ],
ball :: Position
}
data Coordinates = Coordinates {
x :: Int,
y :: Int
}
data Position = Position {
participantId :: Int,
shirtNumber :: Maybe Int,
coordinates :: Coordinates,
mTeam :: Maybe TeamKind,
speed :: Float,
mBallStatus :: Maybe BallStatus
}
data BallStatus = Alive | Dead
deriving Eq
data Frame positions = Frame {
frameId :: Int,
positions :: positions,
clock :: Maybe Double
}
type Frames positions = [Frame positions]
instance Show (Frame pos) where
show f =
let formatClock :: Double -> String
formatClock c = printf "%02.d:%02d.%03d" mins secs msec where
mins = floor (c / 60.0) :: Int
secs = floor (c - 60.0 * fromIntegral mins) :: Int
msec = round (1000.0 * (c - 60.0 * fromIntegral mins - fromIntegral secs)) :: Int
base = "Frame " ++ show (frameId f)
extra = case clock f of
Nothing -> ""
Just c -> printf " (implied clock: %s)" (formatClock c)
in base ++ extra
The key method parsing a line of the Tracab data file into a Frame object
parseFrame :: Metadata -> String -> Frame Positions
parseFrame meta inputLine =
Frame {
frameId = frameId,
positions = positions,
clock = clock
} where
[dataLineIdStr, positionsString, ballString, _] = splitOn ':' inputLine
positionsStrings = splitOn ';' positionsString
frameId = read dataLineIdStr
positions =
Positions {
agents = map parsePosition positionsStrings,
ball = parseBallPosition ballString
}
Compute the implied timestamp of the frame in seconds from game start
inPeriodClock p = let offset = frameId - startFrame p
fps = frameRateFps meta
clockStart = if periodId p == 2 then 45.0*60.0 else 0.0
in clockStart + fromIntegral offset / fromIntegral fps
candidatePeriods = [p | p <- periods meta,
startFrame p <= frameId,
endFrame p >= frameId]
clock = fmap inPeriodClock (listToMaybe candidatePeriods)
Parse individual chunks
splitOn c = Split.wordsBy (==c)
parsePosition inputStr =
Position
{ participantId = read idStr
, shirtNumber = if jerseyStr == "-1" then Nothing else Just (read jerseyStr)
, coordinates = Coordinates { x = read xStr , y = read yStr }
, mTeam = team
, speed = read speedStr
, mBallStatus = Nothing
}
where
[teamStr,idStr,jerseyStr,xStr,yStr,speedStr] = splitOn ',' inputStr
team = case teamStr of
"1" -> Just Home
"0" -> Just Away
_ -> Nothing
parseBallPosition inputStr =
Position
{ participantId = 0
, shirtNumber = Nothing
, coordinates = Coordinates { x = read xStr , y = read yStr }
, mTeam = team
, mBallStatus = ballStatus
, speed = read speedStr
}
where
xStr:yStr:zStr:speedStr:rest = splitOn ',' inputStr
(team, otherFields) = case rest of
"H" : remainder -> (Just Home, remainder)
"A" : remainder -> (Just Away, remainder)
_ -> (Nothing, rest)
ballStatus = case otherFields of
"Alive" : _ -> Just Alive
"Dead" : _ -> Just Dead
_ -> Nothing
-- | Parse the entire Tracab data file into a list of frames.
--
-- Uses lazy IO ('hGetContents'): the handle is left open on purpose so
-- frames can be consumed incrementally; it is closed once the contents
-- are fully forced (or at GC).  Callers must force the frames before
-- the program exits to avoid a silently truncated read.
parseDataFile :: Metadata -> String -> IO (Frames Positions)
parseDataFile meta filename = do
  handle <- openFile filename ReadMode
  contents <- hGetContents handle
  let frames = map (parseFrame meta) $ lines contents
  return frames
{- An example meta file:

<TracabMetaData sVersion="1.0">
    <match iId="803174" dtDate="2015-08-16 17:00:00" iFrameRateFps="25"
        fPitchXSizeMeters="105.00" fPitchYSizeMeters="68.00"
        fTrackingAreaXSizeMeters="111.00" fTrackingAreaYSizeMeters="88.00">
        <period iId="1" iStartFrame="1349935" iEndFrame="1424747"/>
        <period iId="2" iStartFrame="1449116" iEndFrame="1521187"/>
        <period iId="3" iStartFrame="0" iEndFrame="0"/>
        <period iId="4" iStartFrame="0" iEndFrame="0"/>
    </match>
</TracabMetaData>
-}
-- | The type of Tracab metadata: match identity, frame rate, pitch and
-- tracking-area dimensions (metres), and the frame ranges of each period.
data Metadata = Metadata
  { matchId :: String
  , frameRateFps :: Int
  , pitchSizeX :: Float
  , pitchSizeY :: Float
  , trackingX :: Float
  , trackingY :: Float
  , periods :: Periods
  }
-- | One playing period (half), delimited by Tracab frame numbers.
-- Unused periods in the metadata have start and end frames of 0.
data Period = Period {
periodId :: Int,
startFrame :: Int,
endFrame :: Int
}

-- | All periods declared in the metadata file.
type Periods = [Period]
-- | Prefix each line with a space and join them, newline-terminated.
indentLines :: [String] -> String
indentLines inputLines = unlines [ " " ++ line | line <- inputLines ]
-- | Indent every line of a multi-line string (see 'indentLines').
indent :: String -> String
indent = indentLines . lines
-- | Human-readable multi-line rendering of the match metadata.
instance Show Metadata where
  show match = unlines (scalarLines ++ [periodsLine])
    where
      scalarLines =
        [ "matchId: " ++ matchId match
        , "frameRateFps: " ++ show (frameRateFps match)
        ]
      periodsLine = "periods: " ++ indentLines (map show (periods match))
-- | Render a period as @[ id ] start: startFrame endFrame@.
instance Show Period where
  show period = unwords pieces
    where
      pieces =
        [ "["
        , show (periodId period)
        , "]"
        , "start:"
        , show (startFrame period)
        , show (endFrame period)
        ]
-- | Load a Tracab metadata XML file and build a 'Metadata' value from
-- the first child of the document root (the <match> element).
-- NOTE(review): 'head' is partial -- this crashes if the root has no
-- children; confirm behaviour on malformed files.
parseMetaFile :: String -> IO Metadata
parseMetaFile filename = do
root <- Xml.loadXmlFromFile filename
return $ makeMetadata (head $ Xml.getAllChildren root)
-- | Build 'Metadata' from the <match> XML element by reading its
-- attributes.  'matchId' is kept as the raw string ('id'); the numeric
-- fields are converted with 'read'.
-- NOTE(review): attrLookupStrict is a project helper -- presumably it
-- errors out on a missing attribute; confirm its failure mode.
makeMetadata :: Element -> Metadata
makeMetadata element =
Metadata
{ matchId = attrLookupStrict element id "iId"
, frameRateFps = attrLookupStrict element read "iFrameRateFps"
, pitchSizeX = attrLookupStrict element read "fPitchXSizeMeters"
, pitchSizeY = attrLookupStrict element read "fPitchYSizeMeters"
, trackingX = attrLookupStrict element read "fTrackingAreaXSizeMeters"
, trackingY = attrLookupStrict element read "fTrackingAreaYSizeMeters"
, periods = map makePeriod $ Xml.getChildrenWithQName "period" element
}
-- | Build a 'Period' from a <period> XML element's integer attributes.
makePeriod :: Element -> Period
makePeriod element =
  Period
    { periodId = grab "iId"
    , startFrame = grab "iStartFrame"
    , endFrame = grab "iEndFrame"
    }
  where
    -- All three attributes are read as Int.
    grab = attrLookupStrict element read
-- | Which side a participant belongs to.
data TeamKind = Home | Away deriving (Eq, Show)

-- | The opposing side of the given team.
oppositionKind :: TeamKind -> TeamKind
oppositionKind side =
  case side of
    Home -> Away
    Away -> Home
-- | Decide which team kicks off playing right-to-left, by comparing the
-- summed x-coordinates of each side's agents in the kick-off frame: the
-- side whose positions sum further right is attacking right-to-left.
rightToLeftKickOff :: Frame Positions -> TeamKind
rightToLeftKickOff kickOffFrame
  | homeX > awayX = Home
  | otherwise = Away
  where
    kickOffPositions = agents (positions kickOffFrame)
    onSide side p = mTeam p == Just side
    sumX ps = sum [ x (coordinates p) | p <- ps ]
    homeX = sumX (filter (onSide Home) kickOffPositions)
    awayX = sumX (filter (onSide Away) kickOffPositions)