_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
fe0e9d302588762c6ec314be2ed66b1c8454c11cdfad92fdff0b157e84a2317a | deadpendency/deadpendency | MavenLatestReleaseTime.hs | module DF.Effect.FetchRegistryRepoInfo.Backend.LanguageRegistryFiles.Maven.MavenLatestReleaseTime
( MavenLatestReleaseTime (..),
fetchLatestReleaseTime,
)
where
import Common.Parsing.Megaparsec
import DF.Effect.FetchRegistryRepoInfo.Backend.LanguageRegistryFiles.Internal
import DF.Effect.FetchRegistryRepoInfo.Backend.LanguageRegistryFiles.Maven.Version.MavenLatestVersion
import DF.Effect.FetchRegistryRepoInfo.Backend.Model.FetchDependencyRegistryError
import Network.HTTP.Req
import Text.HTML.TagSoup qualified as T
import Text.Megaparsec qualified as M
import Text.Megaparsec.Char qualified as M
fetchLatestReleaseTime :: Text -> Text -> MavenLatestVersion -> ExceptT FetchDependencyRegistryError IO MavenLatestReleaseTime
fetchLatestReleaseTime namespace name latestVersion = do
let versionText = latestVersion ^. #_version
maybeScalaVersion = latestVersion ^. #_maybeScalaVersion
-- -core/5.4.9.Final/
-- seems the trailing slash is actually important ie. -databind/2.12.2
url = getUrlBaseWithVersion namespace name maybeScalaVersion /: versionText /: mempty
maven sometimes will 404 for no apparent reason , so we retry to avoid that problem
maybeResult <- ExceptT $ fetchUrlRetry404 2 url
result <- hoistEither $ maybeToRight (FDRRegistryDataInconsistent $ "Missing files pages for fetching latest release time: " <> show url) maybeResult
releaseTime <- hoistEither $ first FDRFailureToParseResult $ getLatestReleaseTime name latestVersion result
pure $ MavenLatestReleaseTime releaseTime
newtype MavenLatestReleaseTime = MavenLatestReleaseTime
{ _time :: UTCTime
}
deriving stock (Show, Generic)
< ! DOCTYPE html >
< html >
< head >
< /head >
< body >
< header >
< h1 > org / hibernate / hibernate - core/5.4.9.Final</h1 >
< /header >
< hr/ >
< main >
< pre id="contents " >
< a href="hibernate - core-5.4.9.Final.jar.md5 " title="hibernate - core-5.4.9.Final.jar.md5">hibernate - core-5.4.9.Final.jar.md5</a > 2019 - 11 - 14 16:19 32
< a href="hibernate - core-5.4.9.Final.jar.sha1 " title="hibernate - core-5.4.9.Final.jar.sha1">hibernate - core-5.4.9.Final.jar.sha1</a > 2019 - 11 - 14 16:19 40
< a href="hibernate - core-5.4.9.Final.pom " title="hibernate - core-5.4.9.Final.pom">hibernate - core-5.4.9.Final.pom</a > 2019 - 11 - 14 16:19 6135
< /pre >
< /main >
< hr/ >
< /body >
< /html >
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<header>
<h1>org/hibernate/hibernate-core/5.4.9.Final</h1>
</header>
<hr/>
<main>
<pre id="contents">
<a href="hibernate-core-5.4.9.Final.jar.md5" title="hibernate-core-5.4.9.Final.jar.md5">hibernate-core-5.4.9.Final.jar.md5</a> 2019-11-14 16:19 32
<a href="hibernate-core-5.4.9.Final.jar.sha1" title="hibernate-core-5.4.9.Final.jar.sha1">hibernate-core-5.4.9.Final.jar.sha1</a> 2019-11-14 16:19 40
<a href="hibernate-core-5.4.9.Final.pom" title="hibernate-core-5.4.9.Final.pom">hibernate-core-5.4.9.Final.pom</a> 2019-11-14 16:19 6135
</pre>
</main>
<hr/>
</body>
</html>
-}
getLatestReleaseTime :: Text -> MavenLatestVersion -> ByteString -> Either Text UTCTime
getLatestReleaseTime name latestVersion htmlBS = do
let versionText = latestVersion ^. #_version
maybeScalaVersion = latestVersion ^. #_maybeScalaVersion
gross hack because they renamed the old pom file not the new one
-- -connector-java/8.0.31/
nameWithMysqlHack =
if name == "mysql-connector-java"
then "mysql-connector-j"
else name
depNameWithScala = maybe nameWithMysqlHack (\scalaVersion -> nameWithMysqlHack <> "_" <> scalaVersion) maybeScalaVersion
jarFileName = depNameWithScala <> "-" <> versionText <> ".jar"
pomFileName = depNameWithScala <> "-" <> versionText <> ".pom"
jarLink = T.TagOpen "a" [("href", jarFileName)]
pomLink = T.TagOpen "a" [("href", pomFileName)]
jarAscLink = T.TagOpen "a" [("href", jarFileName <> ".asc")]
pomAscLink = T.TagOpen "a" [("href", pomFileName <> ".asc")]
tags = T.parseTags htmlBS
statsTag <- maybeToRight "Unexpected missing version time as text" $ do
let jarOrPomAsText = (!!? 3) $ take 4 $ dropWhile (\a -> (a T.~/= jarLink) && (a T.~/= pomLink)) tags
case jarOrPomAsText of
Just tag -> pure tag
-- fallback to .asc files as in some cases the latest release will not have a pom or jar..
Nothing -> (!!? 3) $ take 4 $ dropWhile (\a -> (a T.~/= jarAscLink) && (a T.~/= pomAscLink)) tags
let statsText = decodeUtf8 $ T.innerText [statsTag]
first (const $ "Failure to parse stats text: " <> statsText) $ M.parse parseTimeSegment "Maven Release Time" statsText
parseTimeSegment :: MParser UTCTime
parseTimeSegment = do
M.hspace
timeAsText <- M.someTill timeChars (M.string " ")
parseTimeM False defaultTimeLocale "%Y-%-m-%-d %H:%M" timeAsText
timeChars :: MParser Char
timeChars = M.numberChar <|> M.char ':' <|> M.char '-' <|> M.char ' '
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/dependency-fetcher/src/DF/Effect/FetchRegistryRepoInfo/Backend/LanguageRegistryFiles/Maven/MavenLatestReleaseTime.hs | haskell | -core/5.4.9.Final/
seems the trailing slash is actually important ie. -databind/2.12.2
-connector-java/8.0.31/
fallback to .asc files as in some cases the latest release will not have a pom or jar.. | module DF.Effect.FetchRegistryRepoInfo.Backend.LanguageRegistryFiles.Maven.MavenLatestReleaseTime
( MavenLatestReleaseTime (..),
fetchLatestReleaseTime,
)
where
import Common.Parsing.Megaparsec
import DF.Effect.FetchRegistryRepoInfo.Backend.LanguageRegistryFiles.Internal
import DF.Effect.FetchRegistryRepoInfo.Backend.LanguageRegistryFiles.Maven.Version.MavenLatestVersion
import DF.Effect.FetchRegistryRepoInfo.Backend.Model.FetchDependencyRegistryError
import Network.HTTP.Req
import Text.HTML.TagSoup qualified as T
import Text.Megaparsec qualified as M
import Text.Megaparsec.Char qualified as M
fetchLatestReleaseTime :: Text -> Text -> MavenLatestVersion -> ExceptT FetchDependencyRegistryError IO MavenLatestReleaseTime
fetchLatestReleaseTime namespace name latestVersion = do
let versionText = latestVersion ^. #_version
maybeScalaVersion = latestVersion ^. #_maybeScalaVersion
url = getUrlBaseWithVersion namespace name maybeScalaVersion /: versionText /: mempty
maven sometimes will 404 for no apparent reason , so we retry to avoid that problem
maybeResult <- ExceptT $ fetchUrlRetry404 2 url
result <- hoistEither $ maybeToRight (FDRRegistryDataInconsistent $ "Missing files pages for fetching latest release time: " <> show url) maybeResult
releaseTime <- hoistEither $ first FDRFailureToParseResult $ getLatestReleaseTime name latestVersion result
pure $ MavenLatestReleaseTime releaseTime
newtype MavenLatestReleaseTime = MavenLatestReleaseTime
{ _time :: UTCTime
}
deriving stock (Show, Generic)
< ! DOCTYPE html >
< html >
< head >
< /head >
< body >
< header >
< h1 > org / hibernate / hibernate - core/5.4.9.Final</h1 >
< /header >
< hr/ >
< main >
< pre id="contents " >
< a href="hibernate - core-5.4.9.Final.jar.md5 " title="hibernate - core-5.4.9.Final.jar.md5">hibernate - core-5.4.9.Final.jar.md5</a > 2019 - 11 - 14 16:19 32
< a href="hibernate - core-5.4.9.Final.jar.sha1 " title="hibernate - core-5.4.9.Final.jar.sha1">hibernate - core-5.4.9.Final.jar.sha1</a > 2019 - 11 - 14 16:19 40
< a href="hibernate - core-5.4.9.Final.pom " title="hibernate - core-5.4.9.Final.pom">hibernate - core-5.4.9.Final.pom</a > 2019 - 11 - 14 16:19 6135
< /pre >
< /main >
< hr/ >
< /body >
< /html >
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<header>
<h1>org/hibernate/hibernate-core/5.4.9.Final</h1>
</header>
<hr/>
<main>
<pre id="contents">
<a href="hibernate-core-5.4.9.Final.jar.md5" title="hibernate-core-5.4.9.Final.jar.md5">hibernate-core-5.4.9.Final.jar.md5</a> 2019-11-14 16:19 32
<a href="hibernate-core-5.4.9.Final.jar.sha1" title="hibernate-core-5.4.9.Final.jar.sha1">hibernate-core-5.4.9.Final.jar.sha1</a> 2019-11-14 16:19 40
<a href="hibernate-core-5.4.9.Final.pom" title="hibernate-core-5.4.9.Final.pom">hibernate-core-5.4.9.Final.pom</a> 2019-11-14 16:19 6135
</pre>
</main>
<hr/>
</body>
</html>
-}
getLatestReleaseTime :: Text -> MavenLatestVersion -> ByteString -> Either Text UTCTime
getLatestReleaseTime name latestVersion htmlBS = do
let versionText = latestVersion ^. #_version
maybeScalaVersion = latestVersion ^. #_maybeScalaVersion
gross hack because they renamed the old pom file not the new one
nameWithMysqlHack =
if name == "mysql-connector-java"
then "mysql-connector-j"
else name
depNameWithScala = maybe nameWithMysqlHack (\scalaVersion -> nameWithMysqlHack <> "_" <> scalaVersion) maybeScalaVersion
jarFileName = depNameWithScala <> "-" <> versionText <> ".jar"
pomFileName = depNameWithScala <> "-" <> versionText <> ".pom"
jarLink = T.TagOpen "a" [("href", jarFileName)]
pomLink = T.TagOpen "a" [("href", pomFileName)]
jarAscLink = T.TagOpen "a" [("href", jarFileName <> ".asc")]
pomAscLink = T.TagOpen "a" [("href", pomFileName <> ".asc")]
tags = T.parseTags htmlBS
statsTag <- maybeToRight "Unexpected missing version time as text" $ do
let jarOrPomAsText = (!!? 3) $ take 4 $ dropWhile (\a -> (a T.~/= jarLink) && (a T.~/= pomLink)) tags
case jarOrPomAsText of
Just tag -> pure tag
Nothing -> (!!? 3) $ take 4 $ dropWhile (\a -> (a T.~/= jarAscLink) && (a T.~/= pomAscLink)) tags
let statsText = decodeUtf8 $ T.innerText [statsTag]
first (const $ "Failure to parse stats text: " <> statsText) $ M.parse parseTimeSegment "Maven Release Time" statsText
parseTimeSegment :: MParser UTCTime
parseTimeSegment = do
M.hspace
timeAsText <- M.someTill timeChars (M.string " ")
parseTimeM False defaultTimeLocale "%Y-%-m-%-d %H:%M" timeAsText
timeChars :: MParser Char
timeChars = M.numberChar <|> M.char ':' <|> M.char '-' <|> M.char ' '
|
3e18881bb37b132a89c2138bbe2595f0d327a0c495a58a23a68f6bbd68f49084 | flavioc/cl-hurd | io-revoke.lisp |
(in-package :hurd-translator)
(def-io-interface :io-revoke ((file port))
(with-lookup protid file
(let ((this-node (get-node protid))
(bucket (port-bucket *translator*)))
(cond
((is-owner-p this-node (get-user protid))
(bucket-iterate bucket
(lambda (port)
(when (and (typep port 'protid)
(eq this-node (get-node port)))
(unless (eq port protid)
(bucket-remove-port bucket port)))))
t)
(t :not-permitted)))))
| null | https://raw.githubusercontent.com/flavioc/cl-hurd/982232f47d1a0ff4df5fde2edad03b9df871470a/translator/interfaces/io-revoke.lisp | lisp |
(in-package :hurd-translator)
(def-io-interface :io-revoke ((file port))
(with-lookup protid file
(let ((this-node (get-node protid))
(bucket (port-bucket *translator*)))
(cond
((is-owner-p this-node (get-user protid))
(bucket-iterate bucket
(lambda (port)
(when (and (typep port 'protid)
(eq this-node (get-node port)))
(unless (eq port protid)
(bucket-remove-port bucket port)))))
t)
(t :not-permitted)))))
| |
3920b133e416e5dcb0b95af99ed407ce533ba6cb4a3dec4e690b6f62c8d13ceb | thiago-negri/DuDuHoX | Animation.hs | module DuDuHoX.OpenGL.Animation where
import qualified Graphics.Rendering.OpenGL as GL
newtype Animation = Animation [Frame]
type Frame = (Sprite, Duration)
type Duration = Time
^ Seconds
type Sprite = (GL.TextureObject, Dimension)
type Dimension = (Width, Height)
type Width = Int
type Height = Int
animate :: Animation -> Time -> Animation
animate (Animation []) _ = error "animate: empty list"
animate (Animation ((s, d) : ss)) t | t < d = Animation ((s, d - t) : ss)
| otherwise = animate (Animation ss) (t - d)
createAnimation :: [Frame] -> Animation
createAnimation [] = error "createAnimation: empty list"
createAnimation a = Animation $ cycle a
currentSprite :: Animation -> Sprite
currentSprite (Animation []) = error "currentSprite: empty list"
currentSprite (Animation ((s, _):_)) = s
| null | https://raw.githubusercontent.com/thiago-negri/DuDuHoX/34942751c807ce69cd20d1a1a6fde4d6520460a4/src/DuDuHoX/OpenGL/Animation.hs | haskell | module DuDuHoX.OpenGL.Animation where
import qualified Graphics.Rendering.OpenGL as GL
newtype Animation = Animation [Frame]
type Frame = (Sprite, Duration)
type Duration = Time
^ Seconds
type Sprite = (GL.TextureObject, Dimension)
type Dimension = (Width, Height)
type Width = Int
type Height = Int
animate :: Animation -> Time -> Animation
animate (Animation []) _ = error "animate: empty list"
animate (Animation ((s, d) : ss)) t | t < d = Animation ((s, d - t) : ss)
| otherwise = animate (Animation ss) (t - d)
createAnimation :: [Frame] -> Animation
createAnimation [] = error "createAnimation: empty list"
createAnimation a = Animation $ cycle a
currentSprite :: Animation -> Sprite
currentSprite (Animation []) = error "currentSprite: empty list"
currentSprite (Animation ((s, _):_)) = s
| |
cff07190da447ffb26cd7731d65bf0a45ae1293f286635e5197dc3b2850b5139 | craff/simple_httpd | gentags.ml |
adapted from -tags ( MIT licensed )
let pf = Printf.printf
let spf = Printf.sprintf
let void = [
"area";
"base";
"br";
"col";
"embed";
"hr";
"img";
"input";
"link";
"menuitem";
"meta";
"param";
"source";
"track";
"wbr";
]
let normal = [
"a";
"abbr";
"address";
"area";
"article";
"aside";
"audio";
"b";
"base";
"bdi";
"bdo";
"blockquote";
"body";
"br";
"button";
"canvas";
"caption";
"cite";
"code";
"col";
"colgroup";
"data";
"datalist";
"dd";
"del";
"details";
"dfn";
"dialog";
"div";
"dl";
"dt";
"em";
"embed";
"fieldset";
"figcaption";
"figure";
"footer";
"form";
"h1";
"h2";
"h3";
"h4";
"h5";
"h6";
"head";
"header";
"hgroup";
"hr";
"html";
"i";
"iframe";
"img";
"input";
"ins";
"kbd";
"label";
"legend";
"li";
"link";
"main";
"map";
"mark";
"math";
"menu";
"menuitem";
"meta";
"meter";
"nav";
"noscript";
"object";
"ol";
"optgroup";
"option";
"output";
"p";
"param";
"picture";
"pre";
"progress";
"q";
"rb";
"rp";
"rt";
"rtc";
"ruby";
"s";
"samp";
"script";
"section";
"select";
"slot";
"small";
"source";
"span";
"strong";
"style";
"sub";
"summary";
"sup";
"svg";
"table";
"tbody";
"td";
"template";
"textarea";
"tfoot";
"th";
"thead";
"time";
"title";
"tr";
"track";
"u";
"ul";
"var";
"video";
"wbr";
] |> List.filter (fun s -> not (List.mem s void))
obtained via :
{ [
l = Array( ... document.querySelectorAll('div tbody td code a')).map (
x = > x.firstChild.textContent ) ;
JSON.stringify(l )
] }
on -US/docs/Web/HTML/Attributes
{[
l = Array(...document.querySelectorAll('div tbody td code a')).map(
x => x.firstChild.textContent);
JSON.stringify(l)
]}
on -US/docs/Web/HTML/Attributes
*)
let attrs = [
"accept";
"accept-charset";
"accesskey";
"action";
"align";
"allow";
"alt";
"async";
"autocapitalize";
"autocomplete";
"autofocus";
"autoplay";
"buffered";
"capture";
"challenge";
"charset";
"checked";
"cite";
"class";
"code";
"codebase";
"cols";
"colspan";
"content";
"contenteditable";
"contextmenu";
"controls";
"coords";
"crossorigin";
"csp";
"data";
"data-*";
"datetime";
"decoding";
"default";
"defer";
"dir";
"dirname";
"disabled";
"download";
"draggable";
"enctype";
"enterkeyhint";
"for";
"form";
"formaction";
"formenctype";
"formmethod";
"formnovalidate";
"formtarget";
"headers";
"hidden";
"high";
"href";
"hreflang";
"http-equiv";
"icon";
"id";
"importance";
"integrity";
"ismap";
"itemprop";
"keytype";
"kind";
"label";
"lang";
"language";
"list";
"loop";
"low";
"manifest";
"max";
"maxlength";
"minlength";
"media";
"method";
"min";
"multiple";
"muted";
"name";
"novalidate";
"open";
"optimum";
"pattern";
"ping";
"placeholder";
"poster";
"preload";
"radiogroup";
"readonly";
"referrerpolicy";
"rel";
"required";
"reversed";
"rows";
"rowspan";
"sandbox";
"scope";
"scoped";
"selected";
"shape";
"size";
"sizes";
"slot";
"span";
"spellcheck";
"src";
"srcdoc";
"srclang";
"srcset";
"start";
"step";
"style";
"summary";
"tabindex";
"target";
"title";
"translate";
"Text";
"type";
"usemap";
"value";
"width";
"wrap";
]
let prelude = {|
(** Output for HTML combinators.
This output type is used to produce a string reasonably efficiently from
a tree of combinators. *)
module Out : sig
type t
val create : unit -> t
val clear : t -> unit
val add_char : t -> char -> unit
val add_string : t -> string -> unit
val add_format_nl : t -> unit
val with_no_format_nl : t -> (unit -> 'a) -> 'a
val to_string : t -> string
end = struct
type t = {
buf: Buffer.t;
if true , we print \b around to format the html
}
let create () = {buf=Buffer.create 256; fmt_nl=true}
let clear self = Buffer.clear self.buf; self.fmt_nl <- true
let[@inline] add_char self c = Buffer.add_char self.buf c
let[@inline] add_string self s = Buffer.add_string self.buf s
let add_format_nl self = if self.fmt_nl then add_char self '\n'
let to_string self = add_format_nl self; Buffer.contents self.buf
let with_no_format_nl self f =
if self.fmt_nl then (
self.fmt_nl <- false;
try let x=f() in self.fmt_nl <- true; x with e -> self.fmt_nl <- true; raise e
) else f()
end
type attribute = string * string
(** An attribute, i.e. a key/value pair *)
type elt = Out.t -> unit
(** A html element. It is represented by its output function, so we
can directly print it. *)
type void = ?if_:bool -> attribute list -> elt
(** Element without children. *)
type nary = ?if_:bool -> attribute list -> elt list -> elt
(** Element with children, represented as a list.
@param if_ if false, do not print anything (default true) *)
(** A chunk of sub-elements, possibly empty. *)
type sub_elt = [ `E of elt | `L of elt list | `S of elt Seq.t | `Nil]
type nary' = ?if_:bool -> attribute list -> sub_elt list -> elt
(** Element with children, represented as a list of {!sub_elt} to be flattened
@param if_ if false, do not print anything (default true) *)
(**/**)
module Helpers_ = struct
(** Escape string so it can be safely embedded in HTML text. *)
let _str_escape (out:Out.t) (s:string) : unit =
String.iter (function
| '<' -> Out.add_string out "<"
| '>' -> Out.add_string out ">"
| '&' -> Out.add_string out "&"
| '"' -> Out.add_string out """
| '\'' -> Out.add_string out "'"
| c -> Out.add_char out c)
s
(** Print the value part of an attribute *)
let _attr_escape (out:Out.t) (s:string) =
Out.add_char out '"';
_str_escape out s;
Out.add_char out '"'
(** Output a list of attributes. *)
let _write_attrs (out:Out.t) (l:attribute list) : unit =
List.iter
(fun (k,v) ->
Out.add_char out ' ';
Out.add_string out k;
Out.add_char out '=';
_attr_escape out v)
l
(** Write sub-elements of a {!nary'} element, returns [true] iff
at least one sub-element was written. *)
let _write_subs (out:Out.t) (l:sub_elt list) : bool =
let has_sub = ref false in
let prepend_white () = has_sub := true; Out.add_format_nl out; in
let emit1 = function
| `E x -> prepend_white(); x out
| `L l -> List.iter (fun e -> prepend_white(); e out) l
| `S l -> Seq.iter (fun e -> prepend_white(); e out) l
| `Nil -> ()
in
List.iter emit1 l;
!has_sub
(** Write a tag, with its attributes.
@param void if true, end with "/>", otherwise end with ">" *)
let _write_tag_attrs ~void (out:Out.t) (tag:string) (attrs:attribute list) : unit =
Out.add_string out "<";
Out.add_string out tag;
_write_attrs out attrs;
if void then Out.add_string out "/>" else Out.add_string out ">"
end
open Helpers_
(**/**)
(** Sub-element with a single element inside. *)
let[@inline] sub_e (elt:elt) : sub_elt = `E elt
(** Sub-element with a list of items inside. *)
let[@inline] sub_l (l:elt list) : sub_elt = `L l
(** Sub-element with a sequence ([!Seq.t]) of items inside. *)
let[@inline] sub_seq (l:elt Seq.t) : sub_elt = `S l
(** Helper to build a [Seq.t] from an array. *)
let seq_of_array (a:_ array) : _ Seq.t =
let rec loop i () =
if i=Array.length a then Seq.Nil
else Seq.Cons (a.(i), loop (i+1))
in loop 0
* Sub - element with nothing inside . Useful in conditionals , when one
decides not to emit a sub - element at all .
decides not to emit a sub-element at all. *)
let sub_empty : sub_elt = `Nil
(** Emit a string value, which will be escaped. *)
let txt (txt:string) : elt = fun out -> _str_escape out txt
(** Formatted version of {!txt} *)
let txtf fmt = Format.kasprintf (fun s -> fun out -> _str_escape out s) fmt
(** Emit raw HTML. Caution, this can lead to injection vulnerabilities,
never use with text that comes from untrusted users. *)
let raw_html (s:string) : elt = fun out -> Out.add_string out s
|}
let oname = function
| "object" -> "object_"
| "class" -> "class_"
| "method" -> "method_"
| "data-*" -> "data_star"
| "for" -> "for_"
| "open" -> "open_"
| "Text" -> "text"
| "type" -> "type_"
| name ->
String.map (function '-' -> '_' | c -> c) name
let emit_void name =
let oname = oname name in
pf "(** tag %S, see {{:-US/docs/Web/HTML/Element/%s} mdn} *)\n"
name name;
pf "let %s : void = fun ?(if_=true) attrs out ->\n" oname;
pf " if if_ then (\n";
pf " _write_tag_attrs ~void:true out %S attrs;\n" name;
pf " )";
pf "\n\n";
()
let emit_normal name =
let oname = oname name in
pf "(** tag %S, see {{:-US/docs/Web/HTML/Element/%s} mdn} *)\n"
name name;
pf "let %s : nary = fun ?(if_=true) attrs sub out ->\n" oname;
pf " if if_ then (\n";
(* for <pre>, newlines actually matter *)
if name="pre" then pf " Out.with_no_format_nl out @@ fun () ->\n";
pf " _write_tag_attrs ~void:false out %S attrs;\n" name;
pf " List.iter (fun sub -> Out.add_format_nl out; sub out) sub;\n";
pf " if sub <> [] then Out.add_format_nl out;\n";
pf " Out.add_string out \"</%s>\")" name;
pf "\n\n";
(* block version *)
let oname = oname ^ "'" in
pf "(** tag %S, see {{:-US/docs/Web/HTML/Element/%s} mdn} *)\n"
name name;
pf "let %s : nary' = fun ?(if_=true) attrs l out ->\n" oname;
pf " if if_ then (\n";
if name="pre" then pf " Out.with_no_format_nl out @@ fun () ->\n";
pf " _write_tag_attrs ~void:false out %S attrs;\n" name;
pf " let has_sub = _write_subs out l in\n";
pf " if has_sub then Out.add_format_nl out;\n";
pf " Out.add_string out \"</%s>\")" name;
pf "\n\n";
()
let doc_attrs = {|Attributes.
This module contains combinator for the standard attributes.
One can also just use a pair of strings. |}
let emit_attr name =
let oname = oname name in
pf " (** Attribute %S. *)\n" name;
pf " let %s : t = fun v -> %S, v\n" oname name;
pf "\n"
let () =
pf "%s\n" prelude;
List.iter emit_void void;
List.iter emit_normal normal;
pf "(** %s *)\n" doc_attrs;
pf "module A = struct\n";
pf " type t = string -> attribute\n";
pf " (** Attribute builder *)\n";
pf "\n";
List.iter emit_attr attrs;
pf "end\n";
()
| null | https://raw.githubusercontent.com/craff/simple_httpd/2683094688b4e0acac5590c50a72f2d6a49e0cca/src/gen/gentags.ml | ocaml | * Output for HTML combinators.
This output type is used to produce a string reasonably efficiently from
a tree of combinators.
* An attribute, i.e. a key/value pair
* A html element. It is represented by its output function, so we
can directly print it.
* Element without children.
* Element with children, represented as a list.
@param if_ if false, do not print anything (default true)
* A chunk of sub-elements, possibly empty.
* Element with children, represented as a list of {!sub_elt} to be flattened
@param if_ if false, do not print anything (default true)
*/*
* Escape string so it can be safely embedded in HTML text.
* Print the value part of an attribute
* Output a list of attributes.
* Write sub-elements of a {!nary'} element, returns [true] iff
at least one sub-element was written.
* Write a tag, with its attributes.
@param void if true, end with "/>", otherwise end with ">"
*/*
* Sub-element with a single element inside.
* Sub-element with a list of items inside.
* Sub-element with a sequence ([!Seq.t]) of items inside.
* Helper to build a [Seq.t] from an array.
* Emit a string value, which will be escaped.
* Formatted version of {!txt}
* Emit raw HTML. Caution, this can lead to injection vulnerabilities,
never use with text that comes from untrusted users.
for <pre>, newlines actually matter
block version |
adapted from -tags ( MIT licensed )
let pf = Printf.printf
let spf = Printf.sprintf
let void = [
"area";
"base";
"br";
"col";
"embed";
"hr";
"img";
"input";
"link";
"menuitem";
"meta";
"param";
"source";
"track";
"wbr";
]
let normal = [
"a";
"abbr";
"address";
"area";
"article";
"aside";
"audio";
"b";
"base";
"bdi";
"bdo";
"blockquote";
"body";
"br";
"button";
"canvas";
"caption";
"cite";
"code";
"col";
"colgroup";
"data";
"datalist";
"dd";
"del";
"details";
"dfn";
"dialog";
"div";
"dl";
"dt";
"em";
"embed";
"fieldset";
"figcaption";
"figure";
"footer";
"form";
"h1";
"h2";
"h3";
"h4";
"h5";
"h6";
"head";
"header";
"hgroup";
"hr";
"html";
"i";
"iframe";
"img";
"input";
"ins";
"kbd";
"label";
"legend";
"li";
"link";
"main";
"map";
"mark";
"math";
"menu";
"menuitem";
"meta";
"meter";
"nav";
"noscript";
"object";
"ol";
"optgroup";
"option";
"output";
"p";
"param";
"picture";
"pre";
"progress";
"q";
"rb";
"rp";
"rt";
"rtc";
"ruby";
"s";
"samp";
"script";
"section";
"select";
"slot";
"small";
"source";
"span";
"strong";
"style";
"sub";
"summary";
"sup";
"svg";
"table";
"tbody";
"td";
"template";
"textarea";
"tfoot";
"th";
"thead";
"time";
"title";
"tr";
"track";
"u";
"ul";
"var";
"video";
"wbr";
] |> List.filter (fun s -> not (List.mem s void))
obtained via :
{ [
l = Array( ... document.querySelectorAll('div tbody td code a')).map (
x = > x.firstChild.textContent ) ;
JSON.stringify(l )
] }
on -US/docs/Web/HTML/Attributes
{[
l = Array(...document.querySelectorAll('div tbody td code a')).map(
x => x.firstChild.textContent);
JSON.stringify(l)
]}
on -US/docs/Web/HTML/Attributes
*)
let attrs = [
"accept";
"accept-charset";
"accesskey";
"action";
"align";
"allow";
"alt";
"async";
"autocapitalize";
"autocomplete";
"autofocus";
"autoplay";
"buffered";
"capture";
"challenge";
"charset";
"checked";
"cite";
"class";
"code";
"codebase";
"cols";
"colspan";
"content";
"contenteditable";
"contextmenu";
"controls";
"coords";
"crossorigin";
"csp";
"data";
"data-*";
"datetime";
"decoding";
"default";
"defer";
"dir";
"dirname";
"disabled";
"download";
"draggable";
"enctype";
"enterkeyhint";
"for";
"form";
"formaction";
"formenctype";
"formmethod";
"formnovalidate";
"formtarget";
"headers";
"hidden";
"high";
"href";
"hreflang";
"http-equiv";
"icon";
"id";
"importance";
"integrity";
"ismap";
"itemprop";
"keytype";
"kind";
"label";
"lang";
"language";
"list";
"loop";
"low";
"manifest";
"max";
"maxlength";
"minlength";
"media";
"method";
"min";
"multiple";
"muted";
"name";
"novalidate";
"open";
"optimum";
"pattern";
"ping";
"placeholder";
"poster";
"preload";
"radiogroup";
"readonly";
"referrerpolicy";
"rel";
"required";
"reversed";
"rows";
"rowspan";
"sandbox";
"scope";
"scoped";
"selected";
"shape";
"size";
"sizes";
"slot";
"span";
"spellcheck";
"src";
"srcdoc";
"srclang";
"srcset";
"start";
"step";
"style";
"summary";
"tabindex";
"target";
"title";
"translate";
"Text";
"type";
"usemap";
"value";
"width";
"wrap";
]
let prelude = {|
module Out : sig
type t
val create : unit -> t
val clear : t -> unit
val add_char : t -> char -> unit
val add_string : t -> string -> unit
val add_format_nl : t -> unit
val with_no_format_nl : t -> (unit -> 'a) -> 'a
val to_string : t -> string
end = struct
type t = {
buf: Buffer.t;
if true , we print \b around to format the html
}
let create () = {buf=Buffer.create 256; fmt_nl=true}
let clear self = Buffer.clear self.buf; self.fmt_nl <- true
let[@inline] add_char self c = Buffer.add_char self.buf c
let[@inline] add_string self s = Buffer.add_string self.buf s
let add_format_nl self = if self.fmt_nl then add_char self '\n'
let to_string self = add_format_nl self; Buffer.contents self.buf
let with_no_format_nl self f =
if self.fmt_nl then (
self.fmt_nl <- false;
try let x=f() in self.fmt_nl <- true; x with e -> self.fmt_nl <- true; raise e
) else f()
end
type attribute = string * string
type elt = Out.t -> unit
type void = ?if_:bool -> attribute list -> elt
type nary = ?if_:bool -> attribute list -> elt list -> elt
type sub_elt = [ `E of elt | `L of elt list | `S of elt Seq.t | `Nil]
type nary' = ?if_:bool -> attribute list -> sub_elt list -> elt
module Helpers_ = struct
let _str_escape (out:Out.t) (s:string) : unit =
String.iter (function
| '<' -> Out.add_string out "<"
| '>' -> Out.add_string out ">"
| '&' -> Out.add_string out "&"
| '"' -> Out.add_string out """
| '\'' -> Out.add_string out "'"
| c -> Out.add_char out c)
s
let _attr_escape (out:Out.t) (s:string) =
Out.add_char out '"';
_str_escape out s;
Out.add_char out '"'
let _write_attrs (out:Out.t) (l:attribute list) : unit =
List.iter
(fun (k,v) ->
Out.add_char out ' ';
Out.add_string out k;
Out.add_char out '=';
_attr_escape out v)
l
let _write_subs (out:Out.t) (l:sub_elt list) : bool =
let has_sub = ref false in
let prepend_white () = has_sub := true; Out.add_format_nl out; in
let emit1 = function
| `E x -> prepend_white(); x out
| `L l -> List.iter (fun e -> prepend_white(); e out) l
| `S l -> Seq.iter (fun e -> prepend_white(); e out) l
| `Nil -> ()
in
List.iter emit1 l;
!has_sub
let _write_tag_attrs ~void (out:Out.t) (tag:string) (attrs:attribute list) : unit =
Out.add_string out "<";
Out.add_string out tag;
_write_attrs out attrs;
if void then Out.add_string out "/>" else Out.add_string out ">"
end
open Helpers_
let[@inline] sub_e (elt:elt) : sub_elt = `E elt
let[@inline] sub_l (l:elt list) : sub_elt = `L l
let[@inline] sub_seq (l:elt Seq.t) : sub_elt = `S l
let seq_of_array (a:_ array) : _ Seq.t =
let rec loop i () =
if i=Array.length a then Seq.Nil
else Seq.Cons (a.(i), loop (i+1))
in loop 0
* Sub - element with nothing inside . Useful in conditionals , when one
decides not to emit a sub - element at all .
decides not to emit a sub-element at all. *)
let sub_empty : sub_elt = `Nil
let txt (txt:string) : elt = fun out -> _str_escape out txt
let txtf fmt = Format.kasprintf (fun s -> fun out -> _str_escape out s) fmt
let raw_html (s:string) : elt = fun out -> Out.add_string out s
|}
let oname = function
| "object" -> "object_"
| "class" -> "class_"
| "method" -> "method_"
| "data-*" -> "data_star"
| "for" -> "for_"
| "open" -> "open_"
| "Text" -> "text"
| "type" -> "type_"
| name ->
String.map (function '-' -> '_' | c -> c) name
let emit_void name =
let oname = oname name in
pf "(** tag %S, see {{:-US/docs/Web/HTML/Element/%s} mdn} *)\n"
name name;
pf "let %s : void = fun ?(if_=true) attrs out ->\n" oname;
pf " if if_ then (\n";
pf " _write_tag_attrs ~void:true out %S attrs;\n" name;
pf " )";
pf "\n\n";
()
let emit_normal name =
let oname = oname name in
pf "(** tag %S, see {{:-US/docs/Web/HTML/Element/%s} mdn} *)\n"
name name;
pf "let %s : nary = fun ?(if_=true) attrs sub out ->\n" oname;
pf " if if_ then (\n";
if name="pre" then pf " Out.with_no_format_nl out @@ fun () ->\n";
pf " _write_tag_attrs ~void:false out %S attrs;\n" name;
pf " List.iter (fun sub -> Out.add_format_nl out; sub out) sub;\n";
pf " if sub <> [] then Out.add_format_nl out;\n";
pf " Out.add_string out \"</%s>\")" name;
pf "\n\n";
let oname = oname ^ "'" in
pf "(** tag %S, see {{:-US/docs/Web/HTML/Element/%s} mdn} *)\n"
name name;
pf "let %s : nary' = fun ?(if_=true) attrs l out ->\n" oname;
pf " if if_ then (\n";
if name="pre" then pf " Out.with_no_format_nl out @@ fun () ->\n";
pf " _write_tag_attrs ~void:false out %S attrs;\n" name;
pf " let has_sub = _write_subs out l in\n";
pf " if has_sub then Out.add_format_nl out;\n";
pf " Out.add_string out \"</%s>\")" name;
pf "\n\n";
()
let doc_attrs = {|Attributes.
This module contains combinator for the standard attributes.
One can also just use a pair of strings. |}
let emit_attr name =
let oname = oname name in
pf " (** Attribute %S. *)\n" name;
pf " let %s : t = fun v -> %S, v\n" oname name;
pf "\n"
let () =
pf "%s\n" prelude;
List.iter emit_void void;
List.iter emit_normal normal;
pf "(** %s *)\n" doc_attrs;
pf "module A = struct\n";
pf " type t = string -> attribute\n";
pf " (** Attribute builder *)\n";
pf "\n";
List.iter emit_attr attrs;
pf "end\n";
()
|
a9a40478a6c62bb0da0a177f6dd4d18bc25eb9579b2747e0ca37f5a2dc876bed | biocaml/biocaml | iset.ml | Copyright 2003 . distributed with LGPL
Modified by < >
Modified by < >
Copyright 2003 . distributed with LGPL
Modified by < >
module Int = Int
open CFStream
module BatAvlTree = struct
type 'a tree = Empty | Node of 'a tree * 'a * 'a tree * int (* height *)
let empty = Empty
let is_empty = function Empty -> true | Node _ -> false
let singleton_tree x = Node (Empty, x, Empty, 1)
let left_branch = function
| Empty -> raise Caml.Not_found
| Node (l, _, _, _) -> l
let right_branch = function
| Empty -> raise Caml.Not_found
| Node (_, _, r, _) -> r
let root = function Empty -> raise Caml.Not_found | Node (_, v, _, _) -> v
let height = function Empty -> 0 | Node (_, _, _, h) -> h
let create l v r =
let h' = 1 + Int.max (height l) (height r) in
assert (abs (height l - height r) < 2);
Node (l, v, r, h')
Assume |hl - hr| < 3
let bal l v r =
let hl = height l in
let hr = height r in
if hl >= hr + 2 then
match l with
| Empty -> assert false
| Node (ll, lv, lr, _) -> (
if height ll >= height lr then create ll lv (create lr v r)
else
match lr with
| Empty -> assert false
| Node (lrl, lrv, lrr, _) ->
create (create ll lv lrl) lrv (create lrr v r))
else if hr >= hl + 2 then
match r with
| Empty -> assert false
| Node (rl, rv, rr, _) -> (
if height rr >= height rl then create (create l v rl) rv rr
else
match rl with
| Empty -> assert false
| Node (rll, rlv, rlr, _) ->
create (create l v rll) rlv (create rlr rv rr))
else create l v r
let rec add_left v = function
| Empty -> Node (Empty, v, Empty, 1)
| Node (l, v', r, _) -> bal (add_left v l) v' r
let rec add_right v = function
| Empty -> Node (Empty, v, Empty, 1)
| Node (l, v', r, _) -> bal l v' (add_right v r)
No assumption of height of l and r.
let rec make_tree l v r =
match (l, r) with
| Empty, _ -> add_left v r
| _, Empty -> add_right v l
| Node (ll, lv, lr, lh), Node (rl, rv, rr, rh) ->
if lh > rh + 1 then bal ll lv (make_tree lr v r)
else if rh > lh + 1 then bal (make_tree l v rl) rv rr
else create l v r
Utilities
let rec split_leftmost = function
| Empty -> raise Caml.Not_found
| Node (Empty, v, r, _) -> (v, r)
| Node (l, v, r, _) ->
let v0, l' = split_leftmost l in
(v0, make_tree l' v r)
let rec split_rightmost = function
| Empty -> raise Caml.Not_found
| Node (l, v, Empty, _) -> (v, l)
| Node (l, v, r, _) ->
let v0, r' = split_rightmost r in
(v0, make_tree l v r')
let rec concat t1 t2 =
match (t1, t2) with
| Empty, _ -> t2
| _, Empty -> t1
| Node (l1, v1, r1, h1), Node (l2, v2, r2, h2) ->
if h1 < h2 then make_tree (concat t1 l2) v2 r2
else make_tree l1 v1 (concat r1 t2)
let rec iter proc = function
| Empty -> ()
| Node (l, v, r, _) ->
iter proc l;
proc v;
iter proc r
let rec fold f t init =
match t with
| Empty -> init
| Node (l, v, r, _) ->
let x = fold f l init in
let x = f v x in
fold f r x
(* FIXME: this is nlog n because of the left nesting of appends *)
let rec to_stream = function
| Empty -> Stream.empty ()
| Node (l, v, r, _) ->
Stream.append
(Stream.append
(Stream.of_lazy (lazy (to_stream l)))
(Stream.singleton v))
(Stream.of_lazy (lazy (to_stream r)))
end
include BatAvlTree
type t = (int * int) tree
let rec mem s (n : int) =
if is_empty s then false
else
let v1, v2 = root s in
if n < v1 then mem (left_branch s) n
else if v1 <= n && n <= v2 then true
else mem (right_branch s) n
let rec intersects_range s i j =
if i > j then raise (Invalid_argument "iset_intersects_range");
if is_empty s then false
else
let v1, v2 = root s in
if j < v1 then intersects_range (left_branch s) i j
else if v2 < i then intersects_range (right_branch s) i j
else true
let rec add s n =
if is_empty s then make_tree empty (n, n) empty
else
let ((v1, v2) as v) = root s in
let s0 = left_branch s in
let s1 = right_branch s in
if v1 <> Int.min_value && n < v1 - 1 then make_tree (add s0 n) v s1
else if v2 <> Int.max_value && n > v2 + 1 then make_tree s0 v (add s1 n)
else if n + 1 = v1 then
if not (is_empty s0) then
let (u1, u2), s0' = split_rightmost s0 in
if u2 <> Int.max_value && u2 + 1 = n then make_tree s0' (u1, v2) s1
else make_tree s0 (n, v2) s1
else make_tree s0 (n, v2) s1
else if v2 + 1 = n then
if not (is_empty s1) then
let (u1, u2), s1' = split_leftmost s1 in
if n <> Int.max_value && n + 1 = u1 then make_tree s0 (v1, u2) s1'
else make_tree s0 (v1, n) s1
else make_tree s0 (v1, n) s1
else s
let rec from s ~n =
if is_empty s then empty
else
let ((v1, v2) as v) = root s in
let s0 = left_branch s in
let s1 = right_branch s in
if n < v1 then make_tree (from s0 ~n) v s1
else if n > v2 then from s1 ~n
else make_tree empty (n, v2) s1
let after s ~n = if n = Int.max_value then empty else from s ~n:(n + 1)
let rec until s ~n =
if is_empty s then empty
else
let ((v1, v2) as v) = root s in
let s0 = left_branch s in
let s1 = right_branch s in
if n > v2 then make_tree s0 v (until s1 ~n)
else if n < v1 then until s0 ~n
else make_tree s0 (v1, n) empty
let before s ~n = if n = Int.min_value then empty else until s ~n:(n - 1)
let add_range s n1 n2 =
if n1 > n2 then invalid_arg (Printf.sprintf "ISet.add_range - %d > %d" n1 n2)
else
let n1, l =
if n1 = Int.min_value then (n1, empty)
else
let l = until s ~n:(n1 - 1) in
if is_empty l then (n1, empty)
else
let (v1, v2), l' = split_rightmost l in
if v2 + 1 = n1 then (v1, l') else (n1, l)
in
let n2, r =
if n2 = Int.max_value then (n2, empty)
else
let r = from s ~n:(n2 + 1) in
if is_empty r then (n2, empty)
else
let (v1, v2), r' = split_leftmost r in
if n2 + 1 = v1 then (v2, r') else (n2, r)
in
make_tree l (n1, n2) r
let singleton n = singleton_tree (n, n)
let rec remove s n =
if is_empty s then empty
else
let ((v1, v2) as v) = root s in
let s1 = left_branch s in
let s2 = right_branch s in
if n < v1 then make_tree (remove s1 n) v s2
else if n = v1 then
if v1 = v2 then concat s1 s2 else make_tree s1 (v1 + 1, v2) s2
else if n > v1 && n < v2 then
let s = make_tree s1 (v1, n - 1) empty in
make_tree s (n + 1, v2) s2
else if n = v2 then make_tree s1 (v1, v2 - 1) s2
else make_tree s1 v (remove s2 n)
let remove_range s n1 n2 =
if n1 > n2 then invalid_arg "ISet.remove_range"
else concat (before s ~n:n1) (after s ~n:n2)
let rec union s1 s2 =
if is_empty s1 then s2
else if is_empty s2 then s1
else
let s1, s2 = if height s1 > height s2 then (s1, s2) else (s2, s1) in
let n1, n2 = root s1 in
let l1 = left_branch s1 in
let r1 = right_branch s1 in
let l2 = before s2 ~n:n1 in
let r2 = after s2 ~n:n2 in
let n1, l =
if n1 = Int.min_value then (n1, empty)
else
let l = union l1 l2 in
if is_empty l then (n1, l)
else
let (v1, v2), l' = split_rightmost l in
(* merge left *)
if v2 + 1 = n1 then (v1, l') else (n1, l)
in
let n2, r =
if n1 = Int.max_value then (n2, empty)
else
let r = union r1 r2 in
if is_empty r then (n2, r)
else
let (v1, v2), r' = split_leftmost r in
(* merge right *)
if n2 + 1 = v1 then (v2, r') else (n2, r)
in
make_tree l (n1, n2) r
$ = union & ~cmp : equal ~printer:(IO.to_string print )
( union ( of_list [ 3,5 ] ) ( of_list [ 1,3 ] ) ) ( of_list [ 1,5 ] )
( union ( of_list [ 3,5 ] ) ( of_list [ 1,2 ] ) ) ( of_list [ 1,5 ] )
( union ( of_list [ 3,5 ] ) ( of_list [ 1,5 ] ) ) ( of_list [ 1,5 ] )
( union ( of_list [ 1,5 ] ) ( of_list [ 3,5 ] ) ) ( of_list [ 1,5 ] )
( union ( of_list [ 1,2 ] ) ( of_list [ 4,5 ] ) ) ( of_list [ 1,2;4,5 ] )
(union (of_list [3,5]) (of_list [1,3])) (of_list [1,5])
(union (of_list [3,5]) (of_list [1,2])) (of_list [1,5])
(union (of_list [3,5]) (of_list [1,5])) (of_list [1,5])
(union (of_list [1,5]) (of_list [3,5])) (of_list [1,5])
(union (of_list [1,2]) (of_list [4,5])) (of_list [1,2;4,5])
*)
let rec inter s1 s2 =
if is_empty s1 then empty
else if is_empty s2 then empty
else
let s1, s2 = if height s1 > height s2 then (s1, s2) else (s2, s1) in
let n1, n2 = root s1 in
let l1 = left_branch s1 in
let r1 = right_branch s1 in
let l2 = before s2 ~n:n1 in
let r2 = after s2 ~n:n2 in
let m = until (from s2 ~n:n1) ~n:n2 in
concat (concat (inter l1 l2) m) (inter r1 r2)
$ = inter & ~cmp : equal ~printer:(IO.to_string print )
( inter ( of_list [ 1,5 ] ) ( of_list [ 2,3 ] ) ) ( of_list [ 2,3 ] )
( inter ( of_list [ 1,4 ] ) ( of_list [ 2,6 ] ) ) ( of_list [ 2,4 ] )
(inter (of_list [1,5]) (of_list [2,3])) (of_list [2,3])
(inter (of_list [1,4]) (of_list [2,6])) (of_list [2,4])
*)
let rec compl_aux n1 n2 s =
if is_empty s then add_range empty n1 n2
else
let v1, v2 = root s in
let l = left_branch s in
let r = right_branch s in
let l = if v1 = Int.min_value then empty else compl_aux n1 (v1 - 1) l in
let r = if v2 = Int.max_value then empty else compl_aux (v2 + 1) n2 r in
concat l r
let compl s = compl_aux Int.min_value Int.max_value s
let diff s1 s2 = inter s1 (compl s2)
let rec compare_aux x1 x2 =
match (x1, x2) with
| [], [] -> 0
| `Set s :: rest, x ->
if is_empty s then compare_aux rest x2
else
let l = left_branch s in
let v = root s in
let r = right_branch s in
compare_aux (`Set l :: `Range v :: `Set r :: rest) x
| _x, `Set s :: rest ->
if is_empty s then compare_aux x1 rest
else
let l = left_branch s in
let v = root s in
let r = right_branch s in
compare_aux x1 (`Set l :: `Range v :: `Set r :: rest)
| `Range (v1, v2) :: rest1, `Range (v3, v4) :: rest2 ->
let sgn = Int.compare v1 v3 in
if sgn <> 0 then sgn
else
let sgn = Int.compare v2 v4 in
if sgn <> 0 then sgn else compare_aux rest1 rest2
| [], _ -> ~-1
| _, [] -> 1
let compare s1 s2 = compare_aux [ `Set s1 ] [ `Set s2 ]
let equal s1 s2 = compare s1 s2 = 0
let rec subset s1 s2 =
if is_empty s1 then true
else if is_empty s2 then false
else
let v1, v2 = root s2 in
let l2 = left_branch s2 in
let r2 = right_branch s2 in
let l1 = before s1 ~n:v1 in
let r1 = after s1 ~n:v2 in
subset l1 l2 && subset r1 r2
let fold_range s ~init ~f = BatAvlTree.fold (fun (n1, n2) x -> f n1 n2 x) s init
let fold s ~init ~f =
let rec g n1 n2 a = if n1 = n2 then f n1 a else g (n1 + 1) n2 (f n1 a) in
fold_range ~f:g s ~init
let iter s ~f = fold s ~init:() ~f:(fun n () -> f n)
let iter_range s ~f = BatAvlTree.iter (fun (n1, n2) -> f n1 n2) s
let for_all s ~f =
let rec test_range n1 n2 =
if n1 = n2 then f n1 else f n1 && test_range (n1 + 1) n2
in
let rec test_set s =
if is_empty s then true
else
let n1, n2 = root s in
test_range n1 n2 && test_set (left_branch s) && test_set (right_branch s)
in
test_set s
$ T for_all
for_all ( fun x - > x < 10 ) ( of_list [ 1,3;2,7 ] )
not ( for_all ( fun x - > x = 5 ) ( of_list [ 4,5 ] ) )
for_all (fun x -> x < 10) (of_list [1,3;2,7])
not (for_all (fun x -> x = 5) (of_list [4,5]))
*)
let exists s ~f =
let rec test_range n1 n2 =
if n1 = n2 then f n1 else f n1 || test_range (n1 + 1) n2
in
let rec test_set s =
if is_empty s then false
else
let n1, n2 = root s in
test_range n1 n2 || test_set (left_branch s) || test_set (right_branch s)
in
test_set s
$ T exists
exists ( fun x - > x = 5 ) ( of_list [ 1,10 ] )
not ( exists ( fun x - > x = 5 ) ( of_list [ 1,3;7,10 ] ) )
exists (fun x -> x = 5) (of_list [1,10])
not (exists (fun x -> x = 5) (of_list [1,3;7,10]))
*)
let filter_range p n1 n2 a =
let rec loop n1 n2 a = function
| None ->
if n1 = n2 then make_tree a (n1, n1) empty
else loop (n1 + 1) n2 a (if p n1 then Some n1 else None)
| Some v1 as x ->
if n1 = n2 then make_tree a (v1, n1) empty
else if p n1 then loop (n1 + 1) n2 a x
else loop (n1 + 1) n2 (make_tree a (v1, n1 - 1) empty) None
in
loop n1 n2 a None
let filter s ~f = fold_range s ~f:(filter_range f) ~init:empty
let partition_range p n1 n2 (a, b) =
let rec loop n1 n2 acc =
let acc =
let a, b, (v, n) = acc in
if Bool.(p n1 = v) then acc
else if v then (make_tree a (n, n1) empty, b, (not v, n1))
else (a, make_tree b (n, n1) empty, (not v, n1))
in
if n1 = n2 then
let a, b, (v, n) = acc in
if v then (make_tree a (n, n1) empty, b)
else (a, make_tree b (n, n1) empty)
else loop (n1 + 1) n2 acc
in
loop n1 n2 (a, b, (p n1, n1))
let partition s ~f = fold_range ~f:(partition_range f) s ~init:(empty, empty)
let cardinal s = fold_range ~f:(fun n1 n2 c -> c + n2 - n1 + 1) s ~init:0
$ T cardinal
cardinal ( of_list [ 1,3;5,9 ] ) = 8
cardinal (of_list [1,3;5,9]) = 8
*)
let rev_ranges s = fold_range ~f:(fun n1 n2 a -> (n1, n2) :: a) s ~init:[]
let rec burst_range n1 n2 a =
if n1 = n2 then n1 :: a else burst_range n1 (n2 - 1) (n2 :: a)
let elements s =
let f a (n1, n2) = burst_range n1 n2 a in
List.fold_left ~f ~init:[] (rev_ranges s)
$ Q ranges;of_list
( Q.list ( Q.pair Q.int Q.int ) ) ( fun l - > \
let norml = List.map ( fun ( x , y ) - > if x < y then ( x , y ) else ( y , x ) ) l in \
let set = of_list norml in \
equal set ( ranges set | > of_list ) \
)
(Q.list (Q.pair Q.int Q.int)) (fun l -> \
let norml = List.map (fun (x,y) -> if x < y then (x,y) else (y,x)) l in \
let set = of_list norml in \
equal set (ranges set |> of_list) \
)
*)
let ranges s = List.rev (rev_ranges s)
let min_elt s =
let (n, _), _ = split_leftmost s in
n
let max_elt s =
let (_, n), _ = split_rightmost s in
n
let choose s = fst (root s)
let of_list l =
List.fold_left ~f:(fun s (lo, hi) -> add_range s lo hi) ~init:empty l
let of_stream e =
Stream.fold ~f:(fun s (lo, hi) -> add_range s lo hi) ~init:empty e
| null | https://raw.githubusercontent.com/biocaml/biocaml/ac619539fed348747d686b8f628e80c1bb8bfc59/lib/unix/iset.ml | ocaml | height
FIXME: this is nlog n because of the left nesting of appends
merge left
merge right | Copyright 2003 . distributed with LGPL
Modified by < >
Modified by < >
Copyright 2003 . distributed with LGPL
Modified by < >
module Int = Int
open CFStream
module BatAvlTree = struct
let empty = Empty
let is_empty = function Empty -> true | Node _ -> false
let singleton_tree x = Node (Empty, x, Empty, 1)
let left_branch = function
| Empty -> raise Caml.Not_found
| Node (l, _, _, _) -> l
let right_branch = function
| Empty -> raise Caml.Not_found
| Node (_, _, r, _) -> r
let root = function Empty -> raise Caml.Not_found | Node (_, v, _, _) -> v
let height = function Empty -> 0 | Node (_, _, _, h) -> h
let create l v r =
let h' = 1 + Int.max (height l) (height r) in
assert (abs (height l - height r) < 2);
Node (l, v, r, h')
Assume |hl - hr| < 3
let bal l v r =
let hl = height l in
let hr = height r in
if hl >= hr + 2 then
match l with
| Empty -> assert false
| Node (ll, lv, lr, _) -> (
if height ll >= height lr then create ll lv (create lr v r)
else
match lr with
| Empty -> assert false
| Node (lrl, lrv, lrr, _) ->
create (create ll lv lrl) lrv (create lrr v r))
else if hr >= hl + 2 then
match r with
| Empty -> assert false
| Node (rl, rv, rr, _) -> (
if height rr >= height rl then create (create l v rl) rv rr
else
match rl with
| Empty -> assert false
| Node (rll, rlv, rlr, _) ->
create (create l v rll) rlv (create rlr rv rr))
else create l v r
let rec add_left v = function
| Empty -> Node (Empty, v, Empty, 1)
| Node (l, v', r, _) -> bal (add_left v l) v' r
let rec add_right v = function
| Empty -> Node (Empty, v, Empty, 1)
| Node (l, v', r, _) -> bal l v' (add_right v r)
No assumption of height of l and r.
let rec make_tree l v r =
match (l, r) with
| Empty, _ -> add_left v r
| _, Empty -> add_right v l
| Node (ll, lv, lr, lh), Node (rl, rv, rr, rh) ->
if lh > rh + 1 then bal ll lv (make_tree lr v r)
else if rh > lh + 1 then bal (make_tree l v rl) rv rr
else create l v r
Utilities
let rec split_leftmost = function
| Empty -> raise Caml.Not_found
| Node (Empty, v, r, _) -> (v, r)
| Node (l, v, r, _) ->
let v0, l' = split_leftmost l in
(v0, make_tree l' v r)
let rec split_rightmost = function
| Empty -> raise Caml.Not_found
| Node (l, v, Empty, _) -> (v, l)
| Node (l, v, r, _) ->
let v0, r' = split_rightmost r in
(v0, make_tree l v r')
let rec concat t1 t2 =
match (t1, t2) with
| Empty, _ -> t2
| _, Empty -> t1
| Node (l1, v1, r1, h1), Node (l2, v2, r2, h2) ->
if h1 < h2 then make_tree (concat t1 l2) v2 r2
else make_tree l1 v1 (concat r1 t2)
let rec iter proc = function
| Empty -> ()
| Node (l, v, r, _) ->
iter proc l;
proc v;
iter proc r
let rec fold f t init =
match t with
| Empty -> init
| Node (l, v, r, _) ->
let x = fold f l init in
let x = f v x in
fold f r x
let rec to_stream = function
| Empty -> Stream.empty ()
| Node (l, v, r, _) ->
Stream.append
(Stream.append
(Stream.of_lazy (lazy (to_stream l)))
(Stream.singleton v))
(Stream.of_lazy (lazy (to_stream r)))
end
include BatAvlTree
type t = (int * int) tree
let rec mem s (n : int) =
if is_empty s then false
else
let v1, v2 = root s in
if n < v1 then mem (left_branch s) n
else if v1 <= n && n <= v2 then true
else mem (right_branch s) n
let rec intersects_range s i j =
if i > j then raise (Invalid_argument "iset_intersects_range");
if is_empty s then false
else
let v1, v2 = root s in
if j < v1 then intersects_range (left_branch s) i j
else if v2 < i then intersects_range (right_branch s) i j
else true
let rec add s n =
if is_empty s then make_tree empty (n, n) empty
else
let ((v1, v2) as v) = root s in
let s0 = left_branch s in
let s1 = right_branch s in
if v1 <> Int.min_value && n < v1 - 1 then make_tree (add s0 n) v s1
else if v2 <> Int.max_value && n > v2 + 1 then make_tree s0 v (add s1 n)
else if n + 1 = v1 then
if not (is_empty s0) then
let (u1, u2), s0' = split_rightmost s0 in
if u2 <> Int.max_value && u2 + 1 = n then make_tree s0' (u1, v2) s1
else make_tree s0 (n, v2) s1
else make_tree s0 (n, v2) s1
else if v2 + 1 = n then
if not (is_empty s1) then
let (u1, u2), s1' = split_leftmost s1 in
if n <> Int.max_value && n + 1 = u1 then make_tree s0 (v1, u2) s1'
else make_tree s0 (v1, n) s1
else make_tree s0 (v1, n) s1
else s
let rec from s ~n =
if is_empty s then empty
else
let ((v1, v2) as v) = root s in
let s0 = left_branch s in
let s1 = right_branch s in
if n < v1 then make_tree (from s0 ~n) v s1
else if n > v2 then from s1 ~n
else make_tree empty (n, v2) s1
let after s ~n = if n = Int.max_value then empty else from s ~n:(n + 1)
let rec until s ~n =
if is_empty s then empty
else
let ((v1, v2) as v) = root s in
let s0 = left_branch s in
let s1 = right_branch s in
if n > v2 then make_tree s0 v (until s1 ~n)
else if n < v1 then until s0 ~n
else make_tree s0 (v1, n) empty
let before s ~n = if n = Int.min_value then empty else until s ~n:(n - 1)
let add_range s n1 n2 =
if n1 > n2 then invalid_arg (Printf.sprintf "ISet.add_range - %d > %d" n1 n2)
else
let n1, l =
if n1 = Int.min_value then (n1, empty)
else
let l = until s ~n:(n1 - 1) in
if is_empty l then (n1, empty)
else
let (v1, v2), l' = split_rightmost l in
if v2 + 1 = n1 then (v1, l') else (n1, l)
in
let n2, r =
if n2 = Int.max_value then (n2, empty)
else
let r = from s ~n:(n2 + 1) in
if is_empty r then (n2, empty)
else
let (v1, v2), r' = split_leftmost r in
if n2 + 1 = v1 then (v2, r') else (n2, r)
in
make_tree l (n1, n2) r
let singleton n = singleton_tree (n, n)
let rec remove s n =
if is_empty s then empty
else
let ((v1, v2) as v) = root s in
let s1 = left_branch s in
let s2 = right_branch s in
if n < v1 then make_tree (remove s1 n) v s2
else if n = v1 then
if v1 = v2 then concat s1 s2 else make_tree s1 (v1 + 1, v2) s2
else if n > v1 && n < v2 then
let s = make_tree s1 (v1, n - 1) empty in
make_tree s (n + 1, v2) s2
else if n = v2 then make_tree s1 (v1, v2 - 1) s2
else make_tree s1 v (remove s2 n)
let remove_range s n1 n2 =
if n1 > n2 then invalid_arg "ISet.remove_range"
else concat (before s ~n:n1) (after s ~n:n2)
let rec union s1 s2 =
if is_empty s1 then s2
else if is_empty s2 then s1
else
let s1, s2 = if height s1 > height s2 then (s1, s2) else (s2, s1) in
let n1, n2 = root s1 in
let l1 = left_branch s1 in
let r1 = right_branch s1 in
let l2 = before s2 ~n:n1 in
let r2 = after s2 ~n:n2 in
let n1, l =
if n1 = Int.min_value then (n1, empty)
else
let l = union l1 l2 in
if is_empty l then (n1, l)
else
let (v1, v2), l' = split_rightmost l in
if v2 + 1 = n1 then (v1, l') else (n1, l)
in
let n2, r =
if n1 = Int.max_value then (n2, empty)
else
let r = union r1 r2 in
if is_empty r then (n2, r)
else
let (v1, v2), r' = split_leftmost r in
if n2 + 1 = v1 then (v2, r') else (n2, r)
in
make_tree l (n1, n2) r
$ = union & ~cmp : equal ~printer:(IO.to_string print )
( union ( of_list [ 3,5 ] ) ( of_list [ 1,3 ] ) ) ( of_list [ 1,5 ] )
( union ( of_list [ 3,5 ] ) ( of_list [ 1,2 ] ) ) ( of_list [ 1,5 ] )
( union ( of_list [ 3,5 ] ) ( of_list [ 1,5 ] ) ) ( of_list [ 1,5 ] )
( union ( of_list [ 1,5 ] ) ( of_list [ 3,5 ] ) ) ( of_list [ 1,5 ] )
( union ( of_list [ 1,2 ] ) ( of_list [ 4,5 ] ) ) ( of_list [ 1,2;4,5 ] )
(union (of_list [3,5]) (of_list [1,3])) (of_list [1,5])
(union (of_list [3,5]) (of_list [1,2])) (of_list [1,5])
(union (of_list [3,5]) (of_list [1,5])) (of_list [1,5])
(union (of_list [1,5]) (of_list [3,5])) (of_list [1,5])
(union (of_list [1,2]) (of_list [4,5])) (of_list [1,2;4,5])
*)
let rec inter s1 s2 =
if is_empty s1 then empty
else if is_empty s2 then empty
else
let s1, s2 = if height s1 > height s2 then (s1, s2) else (s2, s1) in
let n1, n2 = root s1 in
let l1 = left_branch s1 in
let r1 = right_branch s1 in
let l2 = before s2 ~n:n1 in
let r2 = after s2 ~n:n2 in
let m = until (from s2 ~n:n1) ~n:n2 in
concat (concat (inter l1 l2) m) (inter r1 r2)
$ = inter & ~cmp : equal ~printer:(IO.to_string print )
( inter ( of_list [ 1,5 ] ) ( of_list [ 2,3 ] ) ) ( of_list [ 2,3 ] )
( inter ( of_list [ 1,4 ] ) ( of_list [ 2,6 ] ) ) ( of_list [ 2,4 ] )
(inter (of_list [1,5]) (of_list [2,3])) (of_list [2,3])
(inter (of_list [1,4]) (of_list [2,6])) (of_list [2,4])
*)
let rec compl_aux n1 n2 s =
if is_empty s then add_range empty n1 n2
else
let v1, v2 = root s in
let l = left_branch s in
let r = right_branch s in
let l = if v1 = Int.min_value then empty else compl_aux n1 (v1 - 1) l in
let r = if v2 = Int.max_value then empty else compl_aux (v2 + 1) n2 r in
concat l r
let compl s = compl_aux Int.min_value Int.max_value s
let diff s1 s2 = inter s1 (compl s2)
let rec compare_aux x1 x2 =
match (x1, x2) with
| [], [] -> 0
| `Set s :: rest, x ->
if is_empty s then compare_aux rest x2
else
let l = left_branch s in
let v = root s in
let r = right_branch s in
compare_aux (`Set l :: `Range v :: `Set r :: rest) x
| _x, `Set s :: rest ->
if is_empty s then compare_aux x1 rest
else
let l = left_branch s in
let v = root s in
let r = right_branch s in
compare_aux x1 (`Set l :: `Range v :: `Set r :: rest)
| `Range (v1, v2) :: rest1, `Range (v3, v4) :: rest2 ->
let sgn = Int.compare v1 v3 in
if sgn <> 0 then sgn
else
let sgn = Int.compare v2 v4 in
if sgn <> 0 then sgn else compare_aux rest1 rest2
| [], _ -> ~-1
| _, [] -> 1
let compare s1 s2 = compare_aux [ `Set s1 ] [ `Set s2 ]
let equal s1 s2 = compare s1 s2 = 0
let rec subset s1 s2 =
if is_empty s1 then true
else if is_empty s2 then false
else
let v1, v2 = root s2 in
let l2 = left_branch s2 in
let r2 = right_branch s2 in
let l1 = before s1 ~n:v1 in
let r1 = after s1 ~n:v2 in
subset l1 l2 && subset r1 r2
let fold_range s ~init ~f = BatAvlTree.fold (fun (n1, n2) x -> f n1 n2 x) s init
let fold s ~init ~f =
let rec g n1 n2 a = if n1 = n2 then f n1 a else g (n1 + 1) n2 (f n1 a) in
fold_range ~f:g s ~init
let iter s ~f = fold s ~init:() ~f:(fun n () -> f n)
let iter_range s ~f = BatAvlTree.iter (fun (n1, n2) -> f n1 n2) s
let for_all s ~f =
let rec test_range n1 n2 =
if n1 = n2 then f n1 else f n1 && test_range (n1 + 1) n2
in
let rec test_set s =
if is_empty s then true
else
let n1, n2 = root s in
test_range n1 n2 && test_set (left_branch s) && test_set (right_branch s)
in
test_set s
$ T for_all
for_all ( fun x - > x < 10 ) ( of_list [ 1,3;2,7 ] )
not ( for_all ( fun x - > x = 5 ) ( of_list [ 4,5 ] ) )
for_all (fun x -> x < 10) (of_list [1,3;2,7])
not (for_all (fun x -> x = 5) (of_list [4,5]))
*)
let exists s ~f =
let rec test_range n1 n2 =
if n1 = n2 then f n1 else f n1 || test_range (n1 + 1) n2
in
let rec test_set s =
if is_empty s then false
else
let n1, n2 = root s in
test_range n1 n2 || test_set (left_branch s) || test_set (right_branch s)
in
test_set s
$ T exists
exists ( fun x - > x = 5 ) ( of_list [ 1,10 ] )
not ( exists ( fun x - > x = 5 ) ( of_list [ 1,3;7,10 ] ) )
exists (fun x -> x = 5) (of_list [1,10])
not (exists (fun x -> x = 5) (of_list [1,3;7,10]))
*)
let filter_range p n1 n2 a =
let rec loop n1 n2 a = function
| None ->
if n1 = n2 then make_tree a (n1, n1) empty
else loop (n1 + 1) n2 a (if p n1 then Some n1 else None)
| Some v1 as x ->
if n1 = n2 then make_tree a (v1, n1) empty
else if p n1 then loop (n1 + 1) n2 a x
else loop (n1 + 1) n2 (make_tree a (v1, n1 - 1) empty) None
in
loop n1 n2 a None
let filter s ~f = fold_range s ~f:(filter_range f) ~init:empty
let partition_range p n1 n2 (a, b) =
let rec loop n1 n2 acc =
let acc =
let a, b, (v, n) = acc in
if Bool.(p n1 = v) then acc
else if v then (make_tree a (n, n1) empty, b, (not v, n1))
else (a, make_tree b (n, n1) empty, (not v, n1))
in
if n1 = n2 then
let a, b, (v, n) = acc in
if v then (make_tree a (n, n1) empty, b)
else (a, make_tree b (n, n1) empty)
else loop (n1 + 1) n2 acc
in
loop n1 n2 (a, b, (p n1, n1))
let partition s ~f = fold_range ~f:(partition_range f) s ~init:(empty, empty)
let cardinal s = fold_range ~f:(fun n1 n2 c -> c + n2 - n1 + 1) s ~init:0
$ T cardinal
cardinal ( of_list [ 1,3;5,9 ] ) = 8
cardinal (of_list [1,3;5,9]) = 8
*)
let rev_ranges s = fold_range ~f:(fun n1 n2 a -> (n1, n2) :: a) s ~init:[]
let rec burst_range n1 n2 a =
if n1 = n2 then n1 :: a else burst_range n1 (n2 - 1) (n2 :: a)
let elements s =
let f a (n1, n2) = burst_range n1 n2 a in
List.fold_left ~f ~init:[] (rev_ranges s)
$ Q ranges;of_list
( Q.list ( Q.pair Q.int Q.int ) ) ( fun l - > \
let norml = List.map ( fun ( x , y ) - > if x < y then ( x , y ) else ( y , x ) ) l in \
let set = of_list norml in \
equal set ( ranges set | > of_list ) \
)
(Q.list (Q.pair Q.int Q.int)) (fun l -> \
let norml = List.map (fun (x,y) -> if x < y then (x,y) else (y,x)) l in \
let set = of_list norml in \
equal set (ranges set |> of_list) \
)
*)
let ranges s = List.rev (rev_ranges s)
let min_elt s =
let (n, _), _ = split_leftmost s in
n
let max_elt s =
let (_, n), _ = split_rightmost s in
n
let choose s = fst (root s)
let of_list l =
List.fold_left ~f:(fun s (lo, hi) -> add_range s lo hi) ~init:empty l
let of_stream e =
Stream.fold ~f:(fun s (lo, hi) -> add_range s lo hi) ~init:empty e
|
227e3c7ad2f8738cbf95fe2dd22510a84cf95a966c09dd0adbd284eb7143d387 | rzezeski/try-try-try | rts_vnode.erl | -module(rts_vnode).
-behaviour(riak_core_vnode).
-include("rts.hrl").
-export([start_vnode/1,
init/1,
terminate/2,
handle_command/3,
is_empty/1,
delete/1,
handle_handoff_command/3,
handoff_starting/2,
handoff_cancelled/1,
handoff_finished/2,
handle_handoff_data/2,
encode_handoff_item/2,
handle_coverage/4,
handle_exit/3]).
-record(state, {partition}).
%% API
start_vnode(I) ->
riak_core_vnode_master:get_vnode_pid(I, ?MODULE).
init([Partition]) ->
{ok, #state { partition=Partition }}.
% Sample command: respond to a ping
handle_command(ping, _Sender, State) ->
{reply, {pong, State#state.partition}, State};
handle_command(Message, _Sender, State) ->
?PRINT({unhandled_command, Message}),
{noreply, State}.
handle_handoff_command(_Message, _Sender, State) ->
{noreply, State}.
handoff_starting(_TargetNode, State) ->
{true, State}.
handoff_cancelled(State) ->
{ok, State}.
handoff_finished(_TargetNode, State) ->
{ok, State}.
handle_handoff_data(_Data, State) ->
{reply, ok, State}.
encode_handoff_item(_ObjectName, _ObjectValue) ->
<<>>.
is_empty(State) ->
{true, State}.
delete(State) ->
{ok, State}.
handle_coverage(_Req, _KeySpaces, _Sender, State) ->
{stop, not_implemented, State}.
handle_exit(_Pid, _Reason, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
| null | https://raw.githubusercontent.com/rzezeski/try-try-try/c5d99f29fb3380f8653efdd1aa6a8f52143a9717/2011/riak-core-the-vnode/rts/src/rts_vnode.erl | erlang | API
Sample command: respond to a ping | -module(rts_vnode).
-behaviour(riak_core_vnode).
-include("rts.hrl").
-export([start_vnode/1,
init/1,
terminate/2,
handle_command/3,
is_empty/1,
delete/1,
handle_handoff_command/3,
handoff_starting/2,
handoff_cancelled/1,
handoff_finished/2,
handle_handoff_data/2,
encode_handoff_item/2,
handle_coverage/4,
handle_exit/3]).
-record(state, {partition}).
start_vnode(I) ->
riak_core_vnode_master:get_vnode_pid(I, ?MODULE).
init([Partition]) ->
{ok, #state { partition=Partition }}.
handle_command(ping, _Sender, State) ->
{reply, {pong, State#state.partition}, State};
handle_command(Message, _Sender, State) ->
?PRINT({unhandled_command, Message}),
{noreply, State}.
handle_handoff_command(_Message, _Sender, State) ->
{noreply, State}.
handoff_starting(_TargetNode, State) ->
{true, State}.
handoff_cancelled(State) ->
{ok, State}.
handoff_finished(_TargetNode, State) ->
{ok, State}.
handle_handoff_data(_Data, State) ->
{reply, ok, State}.
encode_handoff_item(_ObjectName, _ObjectValue) ->
<<>>.
is_empty(State) ->
{true, State}.
delete(State) ->
{ok, State}.
handle_coverage(_Req, _KeySpaces, _Sender, State) ->
{stop, not_implemented, State}.
handle_exit(_Pid, _Reason, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
|
11adde98fea8c4118b677d785302b23060e3720507ea25a307c4b64c6b96134e | SimulaVR/godot-haskell | RegExMatch.hs | # LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving ,
TypeFamilies , TypeOperators , FlexibleContexts , DataKinds ,
MultiParamTypeClasses #
TypeFamilies, TypeOperators, FlexibleContexts, DataKinds,
MultiParamTypeClasses #-}
module Godot.Core.RegExMatch
(Godot.Core.RegExMatch.get_end,
Godot.Core.RegExMatch.get_group_count,
Godot.Core.RegExMatch.get_names, Godot.Core.RegExMatch.get_start,
Godot.Core.RegExMatch.get_string,
Godot.Core.RegExMatch.get_strings,
Godot.Core.RegExMatch.get_subject)
where
import Data.Coerce
import Foreign.C
import Godot.Internal.Dispatch
import qualified Data.Vector as V
import Linear(V2(..),V3(..),M22)
import Data.Colour(withOpacity)
import Data.Colour.SRGB(sRGB)
import System.IO.Unsafe
import Godot.Gdnative.Internal
import Godot.Api.Types
import Godot.Core.Reference()
instance NodeProperty RegExMatch "names" Dictionary 'True where
nodeProperty = (get_names, (), Nothing)
instance NodeProperty RegExMatch "strings" Array 'True where
nodeProperty = (get_strings, (), Nothing)
instance NodeProperty RegExMatch "subject" GodotString 'True where
nodeProperty = (get_subject, (), Nothing)
# NOINLINE bindRegExMatch_get_end #
bindRegExMatch_get_end :: MethodBind
bindRegExMatch_get_end
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_end" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_end ::
(RegExMatch :< cls, Object :< cls) =>
cls -> Maybe GodotVariant -> IO Int
get_end cls arg1
= withVariantArray [maybe (VariantInt 0) toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_end (upcast cls) arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_end" '[Maybe GodotVariant]
(IO Int)
where
nodeMethod = Godot.Core.RegExMatch.get_end
# NOINLINE bindRegExMatch_get_group_count #
bindRegExMatch_get_group_count :: MethodBind
bindRegExMatch_get_group_count
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_group_count" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_group_count ::
(RegExMatch :< cls, Object :< cls) => cls -> IO Int
get_group_count cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_group_count (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_group_count" '[] (IO Int) where
nodeMethod = Godot.Core.RegExMatch.get_group_count
# NOINLINE bindRegExMatch_get_names #
bindRegExMatch_get_names :: MethodBind
bindRegExMatch_get_names
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_names" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_names ::
(RegExMatch :< cls, Object :< cls) => cls -> IO Dictionary
get_names cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_names (upcast cls) arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_names" '[] (IO Dictionary)
where
nodeMethod = Godot.Core.RegExMatch.get_names
# NOINLINE bindRegExMatch_get_start #
bindRegExMatch_get_start :: MethodBind
bindRegExMatch_get_start
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_start" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_start ::
(RegExMatch :< cls, Object :< cls) =>
cls -> Maybe GodotVariant -> IO Int
get_start cls arg1
= withVariantArray [maybe (VariantInt 0) toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_start (upcast cls) arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_start" '[Maybe GodotVariant]
(IO Int)
where
nodeMethod = Godot.Core.RegExMatch.get_start
# NOINLINE bindRegExMatch_get_string #
bindRegExMatch_get_string :: MethodBind
bindRegExMatch_get_string
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_string" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_string ::
(RegExMatch :< cls, Object :< cls) =>
cls -> Maybe GodotVariant -> IO GodotString
get_string cls arg1
= withVariantArray [maybe (VariantInt 0) toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_string (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_string" '[Maybe GodotVariant]
(IO GodotString)
where
nodeMethod = Godot.Core.RegExMatch.get_string
# NOINLINE bindRegExMatch_get_strings #
bindRegExMatch_get_strings :: MethodBind
bindRegExMatch_get_strings
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_strings" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_strings ::
(RegExMatch :< cls, Object :< cls) => cls -> IO Array
get_strings cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_strings (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_strings" '[] (IO Array) where
nodeMethod = Godot.Core.RegExMatch.get_strings
# NOINLINE bindRegExMatch_get_subject #
bindRegExMatch_get_subject :: MethodBind
bindRegExMatch_get_subject
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_subject" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_subject ::
(RegExMatch :< cls, Object :< cls) => cls -> IO GodotString
get_subject cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_subject (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_subject" '[] (IO GodotString)
where
nodeMethod = Godot.Core.RegExMatch.get_subject | null | https://raw.githubusercontent.com/SimulaVR/godot-haskell/e8f2c45f1b9cc2f0586ebdc9ec6002c8c2d384ae/src/Godot/Core/RegExMatch.hs | haskell | # LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving ,
TypeFamilies , TypeOperators , FlexibleContexts , DataKinds ,
MultiParamTypeClasses #
TypeFamilies, TypeOperators, FlexibleContexts, DataKinds,
MultiParamTypeClasses #-}
module Godot.Core.RegExMatch
(Godot.Core.RegExMatch.get_end,
Godot.Core.RegExMatch.get_group_count,
Godot.Core.RegExMatch.get_names, Godot.Core.RegExMatch.get_start,
Godot.Core.RegExMatch.get_string,
Godot.Core.RegExMatch.get_strings,
Godot.Core.RegExMatch.get_subject)
where
import Data.Coerce
import Foreign.C
import Godot.Internal.Dispatch
import qualified Data.Vector as V
import Linear(V2(..),V3(..),M22)
import Data.Colour(withOpacity)
import Data.Colour.SRGB(sRGB)
import System.IO.Unsafe
import Godot.Gdnative.Internal
import Godot.Api.Types
import Godot.Core.Reference()
instance NodeProperty RegExMatch "names" Dictionary 'True where
nodeProperty = (get_names, (), Nothing)
instance NodeProperty RegExMatch "strings" Array 'True where
nodeProperty = (get_strings, (), Nothing)
instance NodeProperty RegExMatch "subject" GodotString 'True where
nodeProperty = (get_subject, (), Nothing)
# NOINLINE bindRegExMatch_get_end #
bindRegExMatch_get_end :: MethodBind
bindRegExMatch_get_end
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_end" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_end ::
(RegExMatch :< cls, Object :< cls) =>
cls -> Maybe GodotVariant -> IO Int
get_end cls arg1
= withVariantArray [maybe (VariantInt 0) toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_end (upcast cls) arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_end" '[Maybe GodotVariant]
(IO Int)
where
nodeMethod = Godot.Core.RegExMatch.get_end
# NOINLINE bindRegExMatch_get_group_count #
bindRegExMatch_get_group_count :: MethodBind
bindRegExMatch_get_group_count
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_group_count" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_group_count ::
(RegExMatch :< cls, Object :< cls) => cls -> IO Int
get_group_count cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_group_count (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_group_count" '[] (IO Int) where
nodeMethod = Godot.Core.RegExMatch.get_group_count
# NOINLINE bindRegExMatch_get_names #
bindRegExMatch_get_names :: MethodBind
bindRegExMatch_get_names
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_names" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_names ::
(RegExMatch :< cls, Object :< cls) => cls -> IO Dictionary
get_names cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_names (upcast cls) arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_names" '[] (IO Dictionary)
where
nodeMethod = Godot.Core.RegExMatch.get_names
# NOINLINE bindRegExMatch_get_start #
bindRegExMatch_get_start :: MethodBind
bindRegExMatch_get_start
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_start" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_start ::
(RegExMatch :< cls, Object :< cls) =>
cls -> Maybe GodotVariant -> IO Int
get_start cls arg1
= withVariantArray [maybe (VariantInt 0) toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_start (upcast cls) arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_start" '[Maybe GodotVariant]
(IO Int)
where
nodeMethod = Godot.Core.RegExMatch.get_start
# NOINLINE bindRegExMatch_get_string #
bindRegExMatch_get_string :: MethodBind
bindRegExMatch_get_string
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_string" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_string ::
(RegExMatch :< cls, Object :< cls) =>
cls -> Maybe GodotVariant -> IO GodotString
get_string cls arg1
= withVariantArray [maybe (VariantInt 0) toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_string (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_string" '[Maybe GodotVariant]
(IO GodotString)
where
nodeMethod = Godot.Core.RegExMatch.get_string
# NOINLINE bindRegExMatch_get_strings #
bindRegExMatch_get_strings :: MethodBind
bindRegExMatch_get_strings
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_strings" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_strings ::
(RegExMatch :< cls, Object :< cls) => cls -> IO Array
get_strings cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_strings (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_strings" '[] (IO Array) where
nodeMethod = Godot.Core.RegExMatch.get_strings
# NOINLINE bindRegExMatch_get_subject #
bindRegExMatch_get_subject :: MethodBind
bindRegExMatch_get_subject
= unsafePerformIO $
withCString "RegExMatch" $
\ clsNamePtr ->
withCString "get_subject" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
get_subject ::
(RegExMatch :< cls, Object :< cls) => cls -> IO GodotString
get_subject cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindRegExMatch_get_subject (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod RegExMatch "get_subject" '[] (IO GodotString)
where
nodeMethod = Godot.Core.RegExMatch.get_subject | |
6fd221769de879f70065cef25cdf6a601fcf5f7cf8263e01e00fd9c72c93df88 | tcsprojects/ocaml-sat-solvers | pseudosatwrapper.mli | open Satwrapper;;
class pseudoSolverFactory: object inherit solverFactory
method description: string
method identifier: string
method short_identifier: string
method copyright: string
method url: string
method new_timed_instance: Timing.timetable -> abstractSolver
method new_instance: abstractSolver
end
val get_pseudo_factory: pseudoSolverFactory
| null | https://raw.githubusercontent.com/tcsprojects/ocaml-sat-solvers/2c36605fb3e38a1bee41e079031ab5b173794910/src/pseudosat/pseudosatwrapper.mli | ocaml | open Satwrapper;;
class pseudoSolverFactory: object inherit solverFactory
method description: string
method identifier: string
method short_identifier: string
method copyright: string
method url: string
method new_timed_instance: Timing.timetable -> abstractSolver
method new_instance: abstractSolver
end
val get_pseudo_factory: pseudoSolverFactory
| |
9645a41f98948b73033c15d3067244366a8b2dc09530355a5cfed09b5ec3d986 | ariesteam/aries | repl_utils.clj | Copyright 2010
;;;
This file is part of clj - span .
;;;
;;; clj-span is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation , either version 3 of the License ,
;;; or (at your option) any later version.
;;;
;;; clj-span is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;; General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with clj - span . If not , see < / > .
;;;
;;;-------------------------------------------------------------------
;;;
This namespace defines functions for testing the various SPAN
;;; simulations at the REPL.
(ns clj-span.repl-utils
(:use (clj-span core commandline aries-span-bridge analyzer gui)
[clj-span.worldgen :only [read-layer-from-file]]
(clj-misc utils matrix-ops stats)
clojure.pprint)
(:require (clj-misc [numbers :as nb] [varprop :as vp] [randvars :as rv])))
(defn load-layers
[filename]
(let [[s k u f cell-w cell-h] (read-span-layers filename)]
(def source-layer s)
(def sink-layer k)
(def use-layer u)
(def flow-layers f)
(def cell-width cell-w)
(def cell-height cell-h)
(def rows (get-rows s))
(def cols (get-cols s))))
(defn load-layer
[layer-name filename]
(let [data (read-layer-from-file filename)]
(case layer-name
:source (def source-layer data)
:sink (def sink-layer data)
:use (def use-layer data)
:flow (def flow-layers data))))
(defn extract-results
[value-type result-map]
(let [_0_ (case value-type
:numbers nb/_0_
:varprop vp/_0_
:randvars rv/_0_)]
(def tsrc (let [rmap ((:theoretical-source result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def isrc (let [rmap ((:inaccessible-source result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def psrc (let [rmap ((:possible-source result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def bsrc (let [rmap ((:blocked-source result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def asrc (let [rmap ((:actual-source result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def tsnk (let [rmap ((:theoretical-sink result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def isnk (let [rmap ((:inaccessible-sink result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def asnk (let [rmap ((:actual-sink result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def tuse (let [rmap ((:theoretical-use result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def iuse (let [rmap ((:inaccessible-use result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def puse (let [rmap ((:possible-use result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def buse (let [rmap ((:blocked-use result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def ause (let [rmap ((:actual-use result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def pflow (let [rmap ((:possible-flow result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def bflow (let [rmap ((:blocked-flow result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def aflow (let [rmap ((:actual-flow result-map))] (make-matrix rows cols #(get rmap % _0_))))))
(defn test-run-sediment
[value-type]
(run-span {:flow-model "SedimentTransport"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 0.0
:sink-threshold 0.0
:use-threshold 0.0
:trans-threshold 1.0
:source-type :finite
:sink-type :finite
:use-type :infinite
:benefit-type :rival ;; or :non-rival for turbidity
:value-type value-type
:downscaling-factor 4
:rv-max-states 10
:animation? true
:result-type :closure-map}))
(defn test-run-flood
[value-type]
(run-span {:flow-model "FloodWaterMovement"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 50.0
:sink-threshold 3000.0
:use-threshold 0.0
:trans-threshold 5.0
:source-type :finite
:sink-type :finite
:use-type :infinite
:benefit-type :non-rival
:value-type value-type
:downscaling-factor 3
:rv-max-states 10
:animation? false
:result-type :closure-map}))
(defn test-run-storm
[value-type]
(run-span {:flow-model "CoastalStormMovement"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 0.0
:sink-threshold 0.0
:use-threshold 0.0
:trans-threshold 0.1
:source-type :finite
:sink-type :infinite
:use-type :infinite
:benefit-type :non-rival
:value-type value-type
:downscaling-factor 1
:rv-max-states 10
:animation? true
:result-type :closure-map}))
(defn test-run-fishing
[value-type]
(run-span {:flow-model "SubsistenceFishAccessibility"
:source-layer source-layer
:sink-layer nil
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 0.0
:sink-threshold nil
:use-threshold 0.0
:trans-threshold 0.1
:source-type :finite
:sink-type nil
:use-type :finite
:benefit-type :rival
:value-type value-type
:downscaling-factor 1
:rv-max-states 10
:animation? false
:result-type :closure-map}))
(defn test-run-water
[value-type]
(run-span {:flow-model "SurfaceWaterMovement"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 0.0
:sink-threshold 0.0
:use-threshold 1.0
:trans-threshold 1.0
:source-type :finite
:sink-type :finite
:use-type :finite
:benefit-type :rival
:value-type value-type
:downscaling-factor 8
:rv-max-states 10
:animation? false
:result-type :closure-map}))
(defn test-run-carbon
[value-type]
(run-span {:flow-model "CO2Removed"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 0.0
:sink-threshold 0.0
:use-threshold 0.0
:trans-threshold 0.1
:source-type :finite
:sink-type :finite
:use-type :finite
:benefit-type :rival
:value-type value-type
:downscaling-factor 20
:rv-max-states 10
:animation? false
:result-type :closure-map}))
(defn test-run-view
[value-type]
(run-span {:flow-model "LineOfSight"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 25.0
:sink-threshold 25.0
:use-threshold 0.2
:trans-threshold 1.0
:source-type :infinite
:sink-type :infinite
:use-type :infinite
:benefit-type :non-rival
:value-type value-type
:downscaling-factor 2
:rv-max-states 10
:animation? false
:result-type :closure-map}))
(defn test-run-proximity
[value-type]
(run-span {:flow-model "Proximity"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 40.0
:sink-threshold 0.0
:use-threshold 0.2
:trans-threshold 1.0
:source-type :infinite
:sink-type :infinite
:use-type :infinite
:benefit-type :non-rival
:value-type value-type
:downscaling-factor 1
:rv-max-states 10
:animation? false
:result-type :closure-map}))
| null | https://raw.githubusercontent.com/ariesteam/aries/b3fafd4640f4e7950fff3791bc4ea4c06ee4dcdf/plugins/org.integratedmodelling.aries.core/bindings/clojure/clj_span/repl_utils.clj | clojure |
clj-span is free software: you can redistribute it and/or modify
or (at your option) any later version.
clj-span is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
-------------------------------------------------------------------
simulations at the REPL.
or :non-rival for turbidity | Copyright 2010
This file is part of clj - span .
it under the terms of the GNU General Public License as published
by the Free Software Foundation , either version 3 of the License ,
You should have received a copy of the GNU General Public License
along with clj - span . If not , see < / > .
This namespace defines functions for testing the various SPAN
(ns clj-span.repl-utils
(:use (clj-span core commandline aries-span-bridge analyzer gui)
[clj-span.worldgen :only [read-layer-from-file]]
(clj-misc utils matrix-ops stats)
clojure.pprint)
(:require (clj-misc [numbers :as nb] [varprop :as vp] [randvars :as rv])))
(defn load-layers
[filename]
(let [[s k u f cell-w cell-h] (read-span-layers filename)]
(def source-layer s)
(def sink-layer k)
(def use-layer u)
(def flow-layers f)
(def cell-width cell-w)
(def cell-height cell-h)
(def rows (get-rows s))
(def cols (get-cols s))))
(defn load-layer
[layer-name filename]
(let [data (read-layer-from-file filename)]
(case layer-name
:source (def source-layer data)
:sink (def sink-layer data)
:use (def use-layer data)
:flow (def flow-layers data))))
(defn extract-results
[value-type result-map]
(let [_0_ (case value-type
:numbers nb/_0_
:varprop vp/_0_
:randvars rv/_0_)]
(def tsrc (let [rmap ((:theoretical-source result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def isrc (let [rmap ((:inaccessible-source result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def psrc (let [rmap ((:possible-source result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def bsrc (let [rmap ((:blocked-source result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def asrc (let [rmap ((:actual-source result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def tsnk (let [rmap ((:theoretical-sink result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def isnk (let [rmap ((:inaccessible-sink result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def asnk (let [rmap ((:actual-sink result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def tuse (let [rmap ((:theoretical-use result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def iuse (let [rmap ((:inaccessible-use result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def puse (let [rmap ((:possible-use result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def buse (let [rmap ((:blocked-use result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def ause (let [rmap ((:actual-use result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def pflow (let [rmap ((:possible-flow result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def bflow (let [rmap ((:blocked-flow result-map))] (make-matrix rows cols #(get rmap % _0_))))
(def aflow (let [rmap ((:actual-flow result-map))] (make-matrix rows cols #(get rmap % _0_))))))
(defn test-run-sediment
[value-type]
(run-span {:flow-model "SedimentTransport"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 0.0
:sink-threshold 0.0
:use-threshold 0.0
:trans-threshold 1.0
:source-type :finite
:sink-type :finite
:use-type :infinite
:value-type value-type
:downscaling-factor 4
:rv-max-states 10
:animation? true
:result-type :closure-map}))
(defn test-run-flood
[value-type]
(run-span {:flow-model "FloodWaterMovement"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 50.0
:sink-threshold 3000.0
:use-threshold 0.0
:trans-threshold 5.0
:source-type :finite
:sink-type :finite
:use-type :infinite
:benefit-type :non-rival
:value-type value-type
:downscaling-factor 3
:rv-max-states 10
:animation? false
:result-type :closure-map}))
(defn test-run-storm
[value-type]
(run-span {:flow-model "CoastalStormMovement"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 0.0
:sink-threshold 0.0
:use-threshold 0.0
:trans-threshold 0.1
:source-type :finite
:sink-type :infinite
:use-type :infinite
:benefit-type :non-rival
:value-type value-type
:downscaling-factor 1
:rv-max-states 10
:animation? true
:result-type :closure-map}))
(defn test-run-fishing
[value-type]
(run-span {:flow-model "SubsistenceFishAccessibility"
:source-layer source-layer
:sink-layer nil
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 0.0
:sink-threshold nil
:use-threshold 0.0
:trans-threshold 0.1
:source-type :finite
:sink-type nil
:use-type :finite
:benefit-type :rival
:value-type value-type
:downscaling-factor 1
:rv-max-states 10
:animation? false
:result-type :closure-map}))
(defn test-run-water
[value-type]
(run-span {:flow-model "SurfaceWaterMovement"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 0.0
:sink-threshold 0.0
:use-threshold 1.0
:trans-threshold 1.0
:source-type :finite
:sink-type :finite
:use-type :finite
:benefit-type :rival
:value-type value-type
:downscaling-factor 8
:rv-max-states 10
:animation? false
:result-type :closure-map}))
(defn test-run-carbon
[value-type]
(run-span {:flow-model "CO2Removed"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 0.0
:sink-threshold 0.0
:use-threshold 0.0
:trans-threshold 0.1
:source-type :finite
:sink-type :finite
:use-type :finite
:benefit-type :rival
:value-type value-type
:downscaling-factor 20
:rv-max-states 10
:animation? false
:result-type :closure-map}))
(defn test-run-view
[value-type]
(run-span {:flow-model "LineOfSight"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 25.0
:sink-threshold 25.0
:use-threshold 0.2
:trans-threshold 1.0
:source-type :infinite
:sink-type :infinite
:use-type :infinite
:benefit-type :non-rival
:value-type value-type
:downscaling-factor 2
:rv-max-states 10
:animation? false
:result-type :closure-map}))
(defn test-run-proximity
[value-type]
(run-span {:flow-model "Proximity"
:source-layer source-layer
:sink-layer sink-layer
:use-layer use-layer
:flow-layers flow-layers
:cell-width cell-width
:cell-height cell-height
:source-threshold 40.0
:sink-threshold 0.0
:use-threshold 0.2
:trans-threshold 1.0
:source-type :infinite
:sink-type :infinite
:use-type :infinite
:benefit-type :non-rival
:value-type value-type
:downscaling-factor 1
:rv-max-states 10
:animation? false
:result-type :closure-map}))
|
2f9b6ebbb6d87ee172715a78e783d24a52f16d4473c12a4e4d154de6cd27b635 | simplegeo/erlang | lib.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
-module(lib).
-export([flush_receive/0, error_message/2, progname/0, nonl/1, send/2,
sendw/2, eval_str/1]).
-export([format_exception/6, format_stacktrace/4,
format_call/4, format_fun/1]).
-spec flush_receive() -> 'ok'.
flush_receive() ->
receive
_Any ->
flush_receive()
after
0 ->
ok
end.
%%
Functions for doing standard system format i / o.
%%
-spec error_message(atom() | string() | binary(), [term()]) -> 'ok'.
error_message(Format, Args) ->
io:format(<<"** ~s **\n">>, [io_lib:format(Format, Args)]).
%% Return the name of the script that starts (this) erlang
%%
-spec progname() -> atom().
progname() ->
case init:get_argument(progname) of
{ok, [[Prog]]} ->
list_to_atom(Prog);
_Other ->
no_prog_name
end.
-spec nonl(string()) -> string().
nonl([10]) -> [];
nonl([]) -> [];
nonl([H|T]) -> [H|nonl(T)].
-spec send(pid() | atom() | {atom(), node()}, term()) -> term().
send(To, Msg) -> To ! Msg.
-spec sendw(pid() | atom() | {atom(), node()}, term()) -> term().
sendw(To, Msg) ->
To ! {self(), Msg},
receive
Reply -> Reply
end.
eval_str(InStr ) - > { ok , OutStr } | { error , ErrStr ' }
InStr must represent a body
-define(result(F,D), lists:flatten(io_lib:format(F, D))).
-spec eval_str(string() | binary()) -> {'ok', string()} | {'error', string()}.
eval_str(Str) when is_list(Str) ->
case erl_scan:tokens([], Str, 0) of
{more, _} ->
{error, "Incomplete form (missing .<cr>)??"};
{done, {ok, Toks, _}, Rest} ->
case all_white(Rest) of
true ->
case erl_parse:parse_exprs(Toks) of
{ok, Exprs} ->
case catch erl_eval:exprs(Exprs, []) of
{value, Val, _} ->
{ok, Val};
Other ->
{error, ?result("*** eval: ~p", [Other])}
end;
{error, {_Line, Mod, Args}} ->
Msg = ?result("*** ~s",[Mod:format_error(Args)]),
{error, Msg}
end;
false ->
{error, ?result("Non-white space found after "
"end-of-form :~s", [Rest])}
end
end;
eval_str(Bin) when is_binary(Bin) ->
eval_str(binary_to_list(Bin)).
all_white([$\s|T]) -> all_white(T);
all_white([$\n|T]) -> all_white(T);
all_white([$\t|T]) -> all_white(T);
all_white([]) -> true;
all_white(_) -> false.
Formatting of exceptions , : s and funs .
%% -> iolist() (no \n at end)
I is the current column , starting from 1 ( it will be used
%% as indentation whenever newline has been inserted);
Class , Reason and StackTrace are the exception ;
FormatFun = fun(Term , I ) - > iolist ( ) formats terms ;
%% StackFun = fun(Mod, Fun, Arity) -> bool() is used for trimming the
%% end of the stack (typically calls to erl_eval are skipped).
format_exception(I, Class, Reason, StackTrace, StackFun, FormatFun)
when is_integer(I), I >= 1, is_function(StackFun, 3),
is_function(FormatFun, 2) ->
S = n_spaces(I-1),
{Term,Trace1,Trace} = analyze_exception(Class, Reason, StackTrace),
Expl0 = explain_reason(Term, Class, Trace1, FormatFun, S),
Expl = io_lib:fwrite(<<"~s~s">>, [exited(Class), Expl0]),
case format_stacktrace1(S, Trace, FormatFun, StackFun) of
[] -> Expl;
Stack -> [Expl, $\n, Stack]
end.
%% -> iolist() (no \n at end)
format_stacktrace(I, StackTrace, StackFun, FormatFun)
when is_integer(I), I >= 1, is_function(StackFun, 3),
is_function(FormatFun, 2) ->
S = n_spaces(I-1),
format_stacktrace1(S, StackTrace, FormatFun, StackFun).
%% -> iolist() (no \n at end)
format_call(I, ForMForFun, As, FormatFun) when is_integer(I), I >= 1,
is_list(As),
is_function(FormatFun, 2) ->
format_call("", n_spaces(I-1), ForMForFun, As, FormatFun).
%% -> iolist() (no \n at end)
format_fun(Fun) when is_function(Fun) ->
{module, M} = erlang:fun_info(Fun, module),
{name, F} = erlang:fun_info(Fun, name),
{arity, A} = erlang:fun_info(Fun, arity),
case erlang:fun_info(Fun, type) of
{type, local} when F =:= "" ->
io_lib:fwrite(<<"~w">>, [Fun]);
{type, local} when M =:= erl_eval ->
io_lib:fwrite(<<"interpreted function with arity ~w">>, [A]);
{type, local} ->
mfa_to_string(M, F, A);
{type, external} ->
mfa_to_string(M, F, A)
end.
analyze_exception(error, Term, Stack) ->
case {is_stacktrace(Stack), Stack, Term} of
{true, [{_M,_F,As}=MFA|MFAs], function_clause} when is_list(As) ->
{Term,[MFA],MFAs};
{true, [{shell,F,A}], function_clause} when is_integer(A) ->
{Term, [{F,A}], []};
{true, [{_M,_F,_AorAs}=MFA|MFAs], undef} ->
{Term,[MFA],MFAs};
{true, _, _} ->
{Term,[],Stack};
{false, _, _} ->
{{Term,Stack},[],[]}
end;
analyze_exception(_Class, Term, Stack) ->
case is_stacktrace(Stack) of
true ->
{Term,[],Stack};
false ->
{{Term,Stack},[],[]}
end.
%% True if the argument is a well-formed stack trace: a list of
%% {Module, Function, Arity | Args} tuples.
is_stacktrace([]) ->
    true;
is_stacktrace([{M,F,A}|Fs]) when is_atom(M), is_atom(F), is_integer(A) ->
    is_stacktrace(Fs);
%% The length/1 guard also rejects improper argument lists, which a
%% plain is_list/1 test would not.
is_stacktrace([{M,F,As}|Fs]) when is_atom(M), is_atom(F), length(As) >= 0 ->
    is_stacktrace(Fs);
is_stacktrace(_) ->
    false.
%% ERTS exit codes (some of them are also returned by erl_eval):
%% Turn an exit reason into a human-readable explanation (an iolist).
%% The clause groups below mirror, in order: exit codes produced by the
%% ERTS runtime, codes produced only by erl_eval, codes local to the
%% (restricted) shell, and a catch-all that just pretty-prints the term.
%% NOTE: lines "Shell commands", "(but not when ...)" and "Exit codes
%% local to the shell module ..." had lost their %% markers in this copy
%% (a syntax error); they are restored as comments here.
explain_reason(badarg, error, [], _PF, _S) ->
    <<"bad argument">>;
explain_reason({badarg,V}, error=Cl, [], PF, S) -> % orelse, andalso
    format_value(V, <<"bad argument: ">>, Cl, PF, S);
explain_reason(badarith, error, [], _PF, _S) ->
    <<"bad argument in an arithmetic expression">>;
explain_reason({badarity,{Fun,As}}, error, [], _PF, _S)
  when is_function(Fun) ->
    %% Only the arity is displayed, not the arguments As.
    io_lib:fwrite(<<"~s called with ~s">>,
                  [format_fun(Fun), argss(length(As))]);
explain_reason({badfun,Term}, error=Cl, [], PF, S) ->
    format_value(Term, <<"bad function ">>, Cl, PF, S);
explain_reason({badmatch,Term}, error=Cl, [], PF, S) ->
    format_value(Term, <<"no match of right hand side value ">>, Cl, PF, S);
explain_reason({case_clause,V}, error=Cl, [], PF, S) ->
    %% "there is no case clause with a true guard sequence and a
    %% pattern matching..."
    format_value(V, <<"no case clause matching ">>, Cl, PF, S);
explain_reason(function_clause, error, [{F,A}], _PF, _S) ->
    %% Shell commands
    FAs = io_lib:fwrite(<<"~w/~w">>, [F, A]),
    [<<"no function clause matching call to ">> | FAs];
explain_reason(function_clause, error=Cl, [{M,F,As}], PF, S) ->
    Str = <<"no function clause matching ">>,
    format_errstr_call(Str, Cl, {M,F}, As, PF, S);
explain_reason(if_clause, error, [], _PF, _S) ->
    <<"no true branch found when evaluating an if expression">>;
explain_reason(noproc, error, [], _PF, _S) ->
    <<"no such process or port">>;
explain_reason(notalive, error, [], _PF, _S) ->
    <<"the node cannot be part of a distributed system">>;
explain_reason(system_limit, error, [], _PF, _S) ->
    <<"a system limit has been reached">>;
explain_reason(timeout_value, error, [], _PF, _S) ->
    <<"bad receive timeout value">>;
explain_reason({try_clause,V}, error=Cl, [], PF, S) ->
    %% "there is no try clause with a true guard sequence and a
    %% pattern matching..."
    format_value(V, <<"no try clause matching ">>, Cl, PF, S);
explain_reason(undef, error, [{M,F,A}], _PF, _S) ->
    %% Only the arity is displayed, not the arguments, if there are any.
    io_lib:fwrite(<<"undefined function ~s">>,
                  [mfa_to_string(M, F, n_args(A))]);
explain_reason({shell_undef,F,A}, error, [], _PF, _S) ->
    %% Give nicer reports for undefined shell functions
    %% (but not when the user actively calls shell_default:F(...)).
    io_lib:fwrite(<<"undefined shell command ~s/~w">>, [F, n_args(A)]);
%% Exit codes returned by erl_eval only:
explain_reason({argument_limit,_Fun}, error, [], _PF, _S) ->
    io_lib:fwrite(<<"limit of number of arguments to interpreted function"
                    " exceeded">>, []);
explain_reason({bad_filter,V}, error=Cl, [], PF, S) ->
    format_value(V, <<"bad filter ">>, Cl, PF, S);
explain_reason({bad_generator,V}, error=Cl, [], PF, S) ->
    format_value(V, <<"bad generator ">>, Cl, PF, S);
explain_reason({unbound,V}, error, [], _PF, _S) ->
    io_lib:fwrite(<<"variable ~w is unbound">>, [V]);
%% Exit codes local to the shell module (restricted shell):
explain_reason({restricted_shell_bad_return, V}, exit=Cl, [], PF, S) ->
    Str = <<"restricted shell module returned bad value ">>,
    format_value(V, Str, Cl, PF, S);
explain_reason({restricted_shell_disallowed,{ForMF,As}},
               exit=Cl, [], PF, S) ->
    %% ForMF can be a fun, but not a shell fun.
    Str = <<"restricted shell does not allow ">>,
    format_errstr_call(Str, Cl, ForMF, As, PF, S);
explain_reason(restricted_shell_started, exit, [], _PF, _S) ->
    <<"restricted shell starts now">>;
explain_reason(restricted_shell_stopped, exit, [], _PF, _S) ->
    <<"restricted shell stopped">>;
%% Other exit code:
explain_reason(Reason, Class, [], PF, S) ->
    PF(Reason, (iolist_size(S)+1) + exited_size(Class)).
%% Normalize an arity-or-arguments value: an integer is already an
%% arity; an argument list is converted to its length.
n_args(Arity) when is_integer(Arity) ->
    Arity;
n_args(Args) when is_list(Args) ->
    length(Args).
%% English description of an argument count, used in error messages.
argss(N) ->
    case N of
        0 -> <<"no arguments">>;
        1 -> <<"one argument">>;
        2 -> <<"two arguments">>;
        _ -> io_lib:fwrite(<<"~w arguments">>, [N])
    end.
%% Drop trailing frames for which SF(M, F, A) returns true (typically
%% erl_eval internals), then format the remaining frames with an extra
%% indentation step prepended to S0.
format_stacktrace1(S0, Stack0, PF, SF) ->
    Stack1 = lists:dropwhile(fun({M,F,A}) -> SF(M, F, A)
                             end, lists:reverse(Stack0)),
    S = [" " | S0],
    Stack = lists:reverse(Stack1),
    format_stacktrace2(S, Stack, 1, PF).
%% Format one frame per line.  A frame carrying an arity prints as
%% "in function M:F/A"; a frame carrying an argument list additionally
%% prints the reconstructed call on a "called as" line.  N counts frames
%% so sep/2 and origin/4 can treat the first frame specially.
format_stacktrace2(S, [{M,F,A}|Fs], N, PF) when is_integer(A) ->
    [io_lib:fwrite(<<"~s~s ~s">>,
                   [sep(N, S), origin(N, M, F, A), mfa_to_string(M, F, A)])
     | format_stacktrace2(S, Fs, N + 1, PF)];
format_stacktrace2(S, [{M,F,As}|Fs], N, PF) when is_list(As) ->
    A = length(As),
    CalledAs = [S,<<" called as ">>],
    C = format_call("", CalledAs, {M,F}, As, PF),
    [io_lib:fwrite(<<"~s~s ~s\n~s~s">>,
                   [sep(N, S), origin(N, M, F, A), mfa_to_string(M, F, A),
                    CalledAs, C])
     | format_stacktrace2(S, Fs, N + 1, PF)];
format_stacktrace2(_S, [], _N, _PF) ->
    "".
%% Line separator for stack frames: the first frame continues the
%% current line; every later frame starts on a fresh, indented line.
sep(N, Indent) when N =:= 1 -> Indent;
sep(_N, Indent) -> [$\n | Indent].
%% Label for stack frame N: the first frame is where the exception was
%% raised ("in operator"/"in function"); later frames are callers.
origin(1, M, F, A) ->
    case is_op({M, F}, n_args(A)) of
        {yes, F} -> <<"in operator ">>;
        no -> <<"in function ">>
    end;
origin(_N, _M, _F, _A) ->
    <<"in call from">>.
%% Format a call preceded by an error string, widening the indentation
%% prefix by the width of the "exception Class: " banner.
format_errstr_call(ErrStr, Class, ForMForFun, As, PF, Pre0) ->
    Pre1 = [Pre0 | n_spaces(exited_size(Class))],
    format_call(ErrStr, Pre1, ForMForFun, As, PF).
%% Format a call as either an operator expression or "M:F(Args)".
%% Tries the arguments on the current line first (S1); if a rendering
%% that starts on a fresh line (S2) needs fewer line breaks — or if even
%% two small atoms would not fit on the current line (Long) — the call
%% is moved to its own line.
format_call(ErrStr, Pre1, ForMForFun, As, PF) ->
    Arity = length(As),
    [ErrStr |
     case is_op(ForMForFun, Arity) of
         {yes,Op} ->
             format_op(ErrStr, Pre1, Op, As, PF);
         no ->
             MFs = mf_to_string(ForMForFun, Arity),
             I1 = iolist_size([Pre1,ErrStr|MFs]),
             S1 = pp_arguments(PF, As, I1),
             S2 = pp_arguments(PF, As, iolist_size([Pre1|MFs])),
             %% Probe with two fixed-width dummy atoms to detect a line
             %% that is already too full for any argument list.
             Long = count_nl(pp_arguments(PF, [a2345,b2345], I1)) > 0,
             case Long or (count_nl(S2) < count_nl(S1)) of
                 true ->
                     [$\n, Pre1, MFs, S2];
                 false ->
                     [MFs, S1]
             end
     end].
%% Format a unary or binary operator application.  For the binary case,
%% if either operand wraps, the expression is laid out vertically
%% (operand / operator / operand on separate lines); otherwise it stays
%% on one line as "A1 Op A2".
format_op(ErrStr, Pre, Op, [A1], PF) ->
    OpS = io_lib:fwrite(<<"~s ">>, [Op]),
    I1 = iolist_size([ErrStr,Pre,OpS]),
    [OpS | PF(A1, I1+1)];
format_op(ErrStr, Pre, Op, [A1, A2], PF) ->
    I1 = iolist_size([ErrStr,Pre]),
    S1 = PF(A1, I1+1),
    S2 = PF(A2, I1+1),
    OpS = atom_to_list(Op),
    Pre1 = [$\n | n_spaces(I1)],
    case count_nl(S1) > 0 of
        true ->
            %% Left operand already spans lines: stack all three parts.
            [S1,Pre1,OpS,Pre1|S2];
        false ->
            OpS2 = io_lib:fwrite(<<" ~s ">>, [Op]),
            %% Re-render the right operand at its true column; fall back
            %% to the stacked layout if that makes it wrap more.
            S2_2 = PF(A2, iolist_size([ErrStr,Pre,S1|OpS2])+1),
            case count_nl(S2) < count_nl(S2_2) of
                true ->
                    [S1,Pre1,OpS,Pre1|S2];
                false ->
                    [S1,OpS2|S2_2]
            end
    end.
%% Pretty-print an argument list, converting the surrounding list
%% brackets to parentheses.  If As happens to be a printable string
%% whose head is an integer, PF would render it as a string; to keep the
%% leading integer printed as a number, it is temporarily replaced by an
%% atom of the same printed width and spliced back in afterwards.
pp_arguments(PF, As, I) ->
    case {As, io_lib:printable_list(As)} of
        {[Int | T], true} ->
            L = integer_to_list(Int),
            Ll = length(L),
            A = list_to_atom(lists:duplicate(Ll, $a)),
            S0 = binary_to_list(iolist_to_binary(PF([A | T], I+1))),
            brackets_to_parens([$[,L,string:sub_string(S0, 2+Ll)]);
        _ ->
            brackets_to_parens(PF(As, I+1))
    end.
%% Replace the outermost "[...]" of the rendered list with "(...)".
%% The binary match asserts the input really is bracket-delimited.
brackets_to_parens(IoData) ->
    Bin = iolist_to_binary(IoData),
    InnerSize = byte_size(Bin) - 2,
    <<$[, Inner:InnerSize/binary, $]>> = Bin,
    [$(, Inner, $)].
%% Render a call target with its arity, e.g. "lists:map/2" or "hd/1".
mfa_to_string(M, F, A) ->
    io_lib:fwrite(<<"~s/~w">>, [mf_to_string({M, F}, A), A]).
%% Render a call target without arity: auto-imported BIFs and operators
%% are shown unqualified, other {M,F} pairs as "M:F", funs via
%% format_fun/1, and anything else with ~w.  '/' gets the ~w form so it
%% is not confused with the name/arity separator.
mf_to_string({M, F}, A) ->
    case erl_internal:bif(M, F, A) of
        true ->
            io_lib:fwrite(<<"~w">>, [F]);
        false ->
            case is_op({M, F}, A) of
                {yes, '/'} ->
                    io_lib:fwrite(<<"~w">>, [F]);
                {yes, F} ->
                    atom_to_list(F);
                no ->
                    io_lib:fwrite(<<"~w:~w">>, [M, F])
            end
    end;
mf_to_string(Fun, _A) when is_function(Fun) ->
    format_fun(Fun);
mf_to_string(F, _A) ->
    io_lib:fwrite(<<"~w">>, [F]).
%% Render ErrStr followed by term V.  V is first formatted on the same
%% line as ErrStr; if that takes more than two lines, a second attempt
%% placing V on its own (banner-aligned) line is used when it wraps
%% less.
format_value(V, ErrStr, Class, PF, S) ->
    Pre1Sz = exited_size(Class),
    S1 = PF(V, Pre1Sz + iolist_size([S, ErrStr])+1),
    [ErrStr | case count_nl(S1) of
                  N1 when N1 > 1 ->
                      S2 = PF(V, iolist_size(S) + 1 + Pre1Sz),
                      case count_nl(S2) < N1 of
                          true ->
                              [$\n, S, n_spaces(Pre1Sz) | S2];
                          false ->
                              S1
                      end;
                  _ ->
                      S1
              end].
%% Handles deep lists, but not all iolists.
%% (The %% marker above had been stripped in this copy, making the line
%% a syntax error; restored here.)  Counts newline characters in
%% chardata: descends into sublists and binaries, ignores anything else.
count_nl([E | Es]) ->
    count_nl(E) + count_nl(Es);
count_nl($\n) ->
    1;
count_nl(Bin) when is_binary(Bin) ->
    count_nl(binary_to_list(Bin));
count_nl(_) ->
    0.
%% A string of Count space characters, used for indentation.
n_spaces(Count) ->
    [$\s || _ <- lists:seq(1, Count)].
%% Decide whether a call target denotes an Erlang operator of the given
%% arity.  Returns {yes, Op} for operators; no for everything else
%% (funs, non-erlang modules, or non-operator functions).
is_op(Target, Arity) ->
    case Target of
        {erlang, Op} ->
            try erl_internal:op_type(Op, Arity) of
                _ -> {yes, Op}
            catch
                error:_ -> no
            end;
        _ ->
            no
    end.
%% Width in characters of the "exception Class: " banner for Class.
exited_size(Class) ->
    iolist_size(exited(Class)).
%% Banner prefix for each exception class.
exited(Class) ->
    case Class of
        error -> <<"exception error: ">>;
        exit  -> <<"exception exit: ">>;
        throw -> <<"exception throw: ">>
    end.
| null | https://raw.githubusercontent.com/simplegeo/erlang/15eda8de27ba73d176c7eeb3a70a64167f50e2c4/lib/stdlib/src/lib.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
Return the name of the script that starts (this) erlang
-> iolist() (no \n at end)
as indentation whenever newline has been inserted);
StackFun = fun(Mod, Fun, Arity) -> bool() is used for trimming the
end of the stack (typically calls to erl_eval are skipped).
-> iolist() (no \n at end)
-> iolist() (no \n at end)
-> iolist() (no \n at end)
ERTS exit codes (some of them are also returned by erl_eval):
orelse, andalso
Only the arity is displayed, not the arguments As.
"there is no case clause with a true guard sequence and a
pattern matching..."
"there is no try clause with a true guard sequence and a
pattern matching..."
Only the arity is displayed, not the arguments, if there are any.
Give nicer reports for undefined shell functions
Exit codes returned by erl_eval only:
ForMF can be a fun, but not a shell fun.
Other exit code: | Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(lib).
-export([flush_receive/0, error_message/2, progname/0, nonl/1, send/2,
sendw/2, eval_str/1]).
-export([format_exception/6, format_stacktrace/4,
format_call/4, format_fun/1]).
-spec flush_receive() -> 'ok'.
flush_receive() ->
receive
_Any ->
flush_receive()
after
0 ->
ok
end.
Functions for doing standard system format i / o.
-spec error_message(atom() | string() | binary(), [term()]) -> 'ok'.
error_message(Format, Args) ->
io:format(<<"** ~s **\n">>, [io_lib:format(Format, Args)]).
-spec progname() -> atom().
progname() ->
case init:get_argument(progname) of
{ok, [[Prog]]} ->
list_to_atom(Prog);
_Other ->
no_prog_name
end.
-spec nonl(string()) -> string().
nonl([10]) -> [];
nonl([]) -> [];
nonl([H|T]) -> [H|nonl(T)].
-spec send(pid() | atom() | {atom(), node()}, term()) -> term().
send(To, Msg) -> To ! Msg.
-spec sendw(pid() | atom() | {atom(), node()}, term()) -> term().
sendw(To, Msg) ->
To ! {self(), Msg},
receive
Reply -> Reply
end.
eval_str(InStr ) - > { ok , OutStr } | { error , ErrStr ' }
InStr must represent a body
-define(result(F,D), lists:flatten(io_lib:format(F, D))).
-spec eval_str(string() | binary()) -> {'ok', string()} | {'error', string()}.
eval_str(Str) when is_list(Str) ->
case erl_scan:tokens([], Str, 0) of
{more, _} ->
{error, "Incomplete form (missing .<cr>)??"};
{done, {ok, Toks, _}, Rest} ->
case all_white(Rest) of
true ->
case erl_parse:parse_exprs(Toks) of
{ok, Exprs} ->
case catch erl_eval:exprs(Exprs, []) of
{value, Val, _} ->
{ok, Val};
Other ->
{error, ?result("*** eval: ~p", [Other])}
end;
{error, {_Line, Mod, Args}} ->
Msg = ?result("*** ~s",[Mod:format_error(Args)]),
{error, Msg}
end;
false ->
{error, ?result("Non-white space found after "
"end-of-form :~s", [Rest])}
end
end;
eval_str(Bin) when is_binary(Bin) ->
eval_str(binary_to_list(Bin)).
all_white([$\s|T]) -> all_white(T);
all_white([$\n|T]) -> all_white(T);
all_white([$\t|T]) -> all_white(T);
all_white([]) -> true;
all_white(_) -> false.
Formatting of exceptions , : s and funs .
I is the current column , starting from 1 ( it will be used
Class , Reason and StackTrace are the exception ;
FormatFun = fun(Term , I ) - > iolist ( ) formats terms ;
format_exception(I, Class, Reason, StackTrace, StackFun, FormatFun)
when is_integer(I), I >= 1, is_function(StackFun, 3),
is_function(FormatFun, 2) ->
S = n_spaces(I-1),
{Term,Trace1,Trace} = analyze_exception(Class, Reason, StackTrace),
Expl0 = explain_reason(Term, Class, Trace1, FormatFun, S),
Expl = io_lib:fwrite(<<"~s~s">>, [exited(Class), Expl0]),
case format_stacktrace1(S, Trace, FormatFun, StackFun) of
[] -> Expl;
Stack -> [Expl, $\n, Stack]
end.
format_stacktrace(I, StackTrace, StackFun, FormatFun)
when is_integer(I), I >= 1, is_function(StackFun, 3),
is_function(FormatFun, 2) ->
S = n_spaces(I-1),
format_stacktrace1(S, StackTrace, FormatFun, StackFun).
format_call(I, ForMForFun, As, FormatFun) when is_integer(I), I >= 1,
is_list(As),
is_function(FormatFun, 2) ->
format_call("", n_spaces(I-1), ForMForFun, As, FormatFun).
format_fun(Fun) when is_function(Fun) ->
{module, M} = erlang:fun_info(Fun, module),
{name, F} = erlang:fun_info(Fun, name),
{arity, A} = erlang:fun_info(Fun, arity),
case erlang:fun_info(Fun, type) of
{type, local} when F =:= "" ->
io_lib:fwrite(<<"~w">>, [Fun]);
{type, local} when M =:= erl_eval ->
io_lib:fwrite(<<"interpreted function with arity ~w">>, [A]);
{type, local} ->
mfa_to_string(M, F, A);
{type, external} ->
mfa_to_string(M, F, A)
end.
analyze_exception(error, Term, Stack) ->
case {is_stacktrace(Stack), Stack, Term} of
{true, [{_M,_F,As}=MFA|MFAs], function_clause} when is_list(As) ->
{Term,[MFA],MFAs};
{true, [{shell,F,A}], function_clause} when is_integer(A) ->
{Term, [{F,A}], []};
{true, [{_M,_F,_AorAs}=MFA|MFAs], undef} ->
{Term,[MFA],MFAs};
{true, _, _} ->
{Term,[],Stack};
{false, _, _} ->
{{Term,Stack},[],[]}
end;
analyze_exception(_Class, Term, Stack) ->
case is_stacktrace(Stack) of
true ->
{Term,[],Stack};
false ->
{{Term,Stack},[],[]}
end.
is_stacktrace([]) ->
true;
is_stacktrace([{M,F,A}|Fs]) when is_atom(M), is_atom(F), is_integer(A) ->
is_stacktrace(Fs);
is_stacktrace([{M,F,As}|Fs]) when is_atom(M), is_atom(F), length(As) >= 0 ->
is_stacktrace(Fs);
is_stacktrace(_) ->
false.
explain_reason(badarg, error, [], _PF, _S) ->
<<"bad argument">>;
format_value(V, <<"bad argument: ">>, Cl, PF, S);
explain_reason(badarith, error, [], _PF, _S) ->
<<"bad argument in an arithmetic expression">>;
explain_reason({badarity,{Fun,As}}, error, [], _PF, _S)
when is_function(Fun) ->
io_lib:fwrite(<<"~s called with ~s">>,
[format_fun(Fun), argss(length(As))]);
explain_reason({badfun,Term}, error=Cl, [], PF, S) ->
format_value(Term, <<"bad function ">>, Cl, PF, S);
explain_reason({badmatch,Term}, error=Cl, [], PF, S) ->
format_value(Term, <<"no match of right hand side value ">>, Cl, PF, S);
explain_reason({case_clause,V}, error=Cl, [], PF, S) ->
format_value(V, <<"no case clause matching ">>, Cl, PF, S);
explain_reason(function_clause, error, [{F,A}], _PF, _S) ->
Shell commands
FAs = io_lib:fwrite(<<"~w/~w">>, [F, A]),
[<<"no function clause matching call to ">> | FAs];
explain_reason(function_clause, error=Cl, [{M,F,As}], PF, S) ->
Str = <<"no function clause matching ">>,
format_errstr_call(Str, Cl, {M,F}, As, PF, S);
explain_reason(if_clause, error, [], _PF, _S) ->
<<"no true branch found when evaluating an if expression">>;
explain_reason(noproc, error, [], _PF, _S) ->
<<"no such process or port">>;
explain_reason(notalive, error, [], _PF, _S) ->
<<"the node cannot be part of a distributed system">>;
explain_reason(system_limit, error, [], _PF, _S) ->
<<"a system limit has been reached">>;
explain_reason(timeout_value, error, [], _PF, _S) ->
<<"bad receive timeout value">>;
explain_reason({try_clause,V}, error=Cl, [], PF, S) ->
format_value(V, <<"no try clause matching ">>, Cl, PF, S);
explain_reason(undef, error, [{M,F,A}], _PF, _S) ->
io_lib:fwrite(<<"undefined function ~s">>,
[mfa_to_string(M, F, n_args(A))]);
explain_reason({shell_undef,F,A}, error, [], _PF, _S) ->
( but not when the user actively calls shell_default : F ( ... ) ) .
io_lib:fwrite(<<"undefined shell command ~s/~w">>, [F, n_args(A)]);
explain_reason({argument_limit,_Fun}, error, [], _PF, _S) ->
io_lib:fwrite(<<"limit of number of arguments to interpreted function"
" exceeded">>, []);
explain_reason({bad_filter,V}, error=Cl, [], PF, S) ->
format_value(V, <<"bad filter ">>, Cl, PF, S);
explain_reason({bad_generator,V}, error=Cl, [], PF, S) ->
format_value(V, <<"bad generator ">>, Cl, PF, S);
explain_reason({unbound,V}, error, [], _PF, _S) ->
io_lib:fwrite(<<"variable ~w is unbound">>, [V]);
Exit codes local to the shell module ( restricted shell ):
explain_reason({restricted_shell_bad_return, V}, exit=Cl, [], PF, S) ->
Str = <<"restricted shell module returned bad value ">>,
format_value(V, Str, Cl, PF, S);
explain_reason({restricted_shell_disallowed,{ForMF,As}},
exit=Cl, [], PF, S) ->
Str = <<"restricted shell does not allow ">>,
format_errstr_call(Str, Cl, ForMF, As, PF, S);
explain_reason(restricted_shell_started, exit, [], _PF, _S) ->
<<"restricted shell starts now">>;
explain_reason(restricted_shell_stopped, exit, [], _PF, _S) ->
<<"restricted shell stopped">>;
explain_reason(Reason, Class, [], PF, S) ->
PF(Reason, (iolist_size(S)+1) + exited_size(Class)).
n_args(A) when is_integer(A) ->
A;
n_args(As) when is_list(As) ->
length(As).
argss(0) ->
<<"no arguments">>;
argss(1) ->
<<"one argument">>;
argss(2) ->
<<"two arguments">>;
argss(I) ->
io_lib:fwrite(<<"~w arguments">>, [I]).
format_stacktrace1(S0, Stack0, PF, SF) ->
Stack1 = lists:dropwhile(fun({M,F,A}) -> SF(M, F, A)
end, lists:reverse(Stack0)),
S = [" " | S0],
Stack = lists:reverse(Stack1),
format_stacktrace2(S, Stack, 1, PF).
format_stacktrace2(S, [{M,F,A}|Fs], N, PF) when is_integer(A) ->
[io_lib:fwrite(<<"~s~s ~s">>,
[sep(N, S), origin(N, M, F, A), mfa_to_string(M, F, A)])
| format_stacktrace2(S, Fs, N + 1, PF)];
format_stacktrace2(S, [{M,F,As}|Fs], N, PF) when is_list(As) ->
A = length(As),
CalledAs = [S,<<" called as ">>],
C = format_call("", CalledAs, {M,F}, As, PF),
[io_lib:fwrite(<<"~s~s ~s\n~s~s">>,
[sep(N, S), origin(N, M, F, A), mfa_to_string(M, F, A),
CalledAs, C])
| format_stacktrace2(S, Fs, N + 1, PF)];
format_stacktrace2(_S, [], _N, _PF) ->
"".
sep(1, S) -> S;
sep(_, S) -> [$\n | S].
origin(1, M, F, A) ->
case is_op({M, F}, n_args(A)) of
{yes, F} -> <<"in operator ">>;
no -> <<"in function ">>
end;
origin(_N, _M, _F, _A) ->
<<"in call from">>.
format_errstr_call(ErrStr, Class, ForMForFun, As, PF, Pre0) ->
Pre1 = [Pre0 | n_spaces(exited_size(Class))],
format_call(ErrStr, Pre1, ForMForFun, As, PF).
format_call(ErrStr, Pre1, ForMForFun, As, PF) ->
Arity = length(As),
[ErrStr |
case is_op(ForMForFun, Arity) of
{yes,Op} ->
format_op(ErrStr, Pre1, Op, As, PF);
no ->
MFs = mf_to_string(ForMForFun, Arity),
I1 = iolist_size([Pre1,ErrStr|MFs]),
S1 = pp_arguments(PF, As, I1),
S2 = pp_arguments(PF, As, iolist_size([Pre1|MFs])),
Long = count_nl(pp_arguments(PF, [a2345,b2345], I1)) > 0,
case Long or (count_nl(S2) < count_nl(S1)) of
true ->
[$\n, Pre1, MFs, S2];
false ->
[MFs, S1]
end
end].
format_op(ErrStr, Pre, Op, [A1], PF) ->
OpS = io_lib:fwrite(<<"~s ">>, [Op]),
I1 = iolist_size([ErrStr,Pre,OpS]),
[OpS | PF(A1, I1+1)];
format_op(ErrStr, Pre, Op, [A1, A2], PF) ->
I1 = iolist_size([ErrStr,Pre]),
S1 = PF(A1, I1+1),
S2 = PF(A2, I1+1),
OpS = atom_to_list(Op),
Pre1 = [$\n | n_spaces(I1)],
case count_nl(S1) > 0 of
true ->
[S1,Pre1,OpS,Pre1|S2];
false ->
OpS2 = io_lib:fwrite(<<" ~s ">>, [Op]),
S2_2 = PF(A2, iolist_size([ErrStr,Pre,S1|OpS2])+1),
case count_nl(S2) < count_nl(S2_2) of
true ->
[S1,Pre1,OpS,Pre1|S2];
false ->
[S1,OpS2|S2_2]
end
end.
pp_arguments(PF, As, I) ->
case {As, io_lib:printable_list(As)} of
{[Int | T], true} ->
L = integer_to_list(Int),
Ll = length(L),
A = list_to_atom(lists:duplicate(Ll, $a)),
S0 = binary_to_list(iolist_to_binary(PF([A | T], I+1))),
brackets_to_parens([$[,L,string:sub_string(S0, 2+Ll)]);
_ ->
brackets_to_parens(PF(As, I+1))
end.
brackets_to_parens(S) ->
B = iolist_to_binary(S),
Sz = byte_size(B) - 2,
<<$[,R:Sz/binary,$]>> = B,
[$(,R,$)].
mfa_to_string(M, F, A) ->
io_lib:fwrite(<<"~s/~w">>, [mf_to_string({M, F}, A), A]).
mf_to_string({M, F}, A) ->
case erl_internal:bif(M, F, A) of
true ->
io_lib:fwrite(<<"~w">>, [F]);
false ->
case is_op({M, F}, A) of
{yes, '/'} ->
io_lib:fwrite(<<"~w">>, [F]);
{yes, F} ->
atom_to_list(F);
no ->
io_lib:fwrite(<<"~w:~w">>, [M, F])
end
end;
mf_to_string(Fun, _A) when is_function(Fun) ->
format_fun(Fun);
mf_to_string(F, _A) ->
io_lib:fwrite(<<"~w">>, [F]).
format_value(V, ErrStr, Class, PF, S) ->
Pre1Sz = exited_size(Class),
S1 = PF(V, Pre1Sz + iolist_size([S, ErrStr])+1),
[ErrStr | case count_nl(S1) of
N1 when N1 > 1 ->
S2 = PF(V, iolist_size(S) + 1 + Pre1Sz),
case count_nl(S2) < N1 of
true ->
[$\n, S, n_spaces(Pre1Sz) | S2];
false ->
S1
end;
_ ->
S1
end].
Handles deep lists , but not all iolists .
count_nl([E | Es]) ->
count_nl(E) + count_nl(Es);
count_nl($\n) ->
1;
count_nl(Bin) when is_binary(Bin) ->
count_nl(binary_to_list(Bin));
count_nl(_) ->
0.
n_spaces(N) ->
lists:duplicate(N, $\s).
is_op(ForMForFun, A) ->
try
{erlang,F} = ForMForFun,
_ = erl_internal:op_type(F, A),
{yes,F}
catch error:_ -> no
end.
exited_size(Class) ->
iolist_size(exited(Class)).
exited(error) ->
<<"exception error: ">>;
exited(exit) ->
<<"exception exit: ">>;
exited(throw) ->
<<"exception throw: ">>.
|
bf0d377d9758a4ecc4a45e17002b793187b8eb2287e0f01a76f5b14c41a0e75a | nonguix/nonguix | messaging.scm | SPDX - License - Identifier : GPL-3.0 - or - later
;;; Copyright © 2021, 2022 PantherX OS Team < >
;;; Copyright © 2022, 2023 < >
;;; Copyright © 2022 < >
(define-module (nongnu packages messaging)
#:use-module (gnu packages base)
#:use-module (gnu packages bash)
#:use-module (gnu packages compression)
#:use-module (gnu packages cups)
#:use-module (gnu packages databases)
#:use-module (gnu packages fontutils)
#:use-module (gnu packages gcc)
#:use-module (gnu packages gl)
#:use-module (gnu packages glib)
#:use-module (gnu packages gnome)
#:use-module (gnu packages gtk)
#:use-module (gnu packages linux)
#:use-module (gnu packages nss)
#:use-module (gnu packages pulseaudio)
#:use-module (gnu packages qt)
#:use-module (gnu packages xdisorg)
#:use-module (gnu packages xml)
#:use-module (gnu packages xorg)
#:use-module (guix download)
#:use-module (guix gexp)
#:use-module (guix packages)
#:use-module (guix utils)
#:use-module ((guix licenses) :prefix license:)
#:use-module (nonguix build-system binary)
#:use-module ((nonguix licenses) :prefix license:)
#:use-module (ice-9 match))
(define-public element-desktop
(package
(name "element-desktop")
(version "1.11.17")
(source
(origin
(method url-fetch)
(uri
(string-append
"/" name "/" name "_" version
"_amd64.deb"))
(sha256
(base32 "0by2ci5bbc42v71vsqrdigiv0ywqnpdd5mh63pgl9n7kmxqfdzhl"))))
(supported-systems '("x86_64-linux"))
(build-system binary-build-system)
(arguments
;; TODO: fails on wrapped binary and included other files
#:patchelf-plan
#~'(("lib/Element/element-desktop"
("alsa-lib" "at-spi2-atk" "at-spi2-core" "atk" "cairo" "cups"
"dbus" "expat" "fontconfig-minimal" "gcc" "gdk-pixbuf" "glib"
"gtk+" "libdrm" "libnotify" "libsecret" "libx11" "libxcb"
"libxcomposite" "libxcursor" "libxdamage" "libxext" "libxfixes"
"libxi" "libxkbcommon" "libxkbfile" "libxrandr" "libxrender"
"libxtst" "mesa" "nspr" "pango" "zlib")))
#:phases
#~(modify-phases %standard-phases
(replace 'unpack
(lambda _
(invoke "ar" "x" #$source)
(invoke "tar" "xvf" "data.tar.xz")
(copy-recursively "usr/" ".")
;; Use the more standard lib directory for everything.
(rename-file "opt/" "lib")
;; Remove unneeded files.
(delete-file-recursively "usr")
(delete-file "control.tar.gz")
(delete-file "data.tar.xz")
(delete-file "debian-binary")
;; Fix the .desktop file binary location.
(substitute* '("share/applications/element-desktop.desktop")
(("/opt/Element/")
(string-append #$output "/lib/Element/")))))
(add-after 'install 'symlink-binary-file-and-cleanup
(lambda _
(delete-file (string-append #$output "/environment-variables"))
(mkdir-p (string-append #$output "/bin"))
(symlink (string-append #$output "/lib/Element/element-desktop")
(string-append #$output "/bin/element-desktop"))))
(add-after 'install 'wrap-where-patchelf-does-not-work
(lambda _
(wrap-program (string-append #$output "/lib/Element/element-desktop")
`("FONTCONFIG_PATH" ":" prefix
(,(string-join
(list
(string-append #$(this-package-input "fontconfig-minimal") "/etc/fonts")
#$output)
":")))
`("LD_LIBRARY_PATH" ":" prefix
(,(string-join
(list
(string-append #$(this-package-input "nss") "/lib/nss")
(string-append #$(this-package-input "eudev") "/lib")
(string-append #$(this-package-input "gcc") "/lib")
(string-append #$(this-package-input "mesa") "/lib")
(string-append #$(this-package-input "libxkbfile") "/lib")
(string-append #$(this-package-input "zlib") "/lib")
(string-append #$(this-package-input "libsecret") "/lib")
(string-append #$(this-package-input "sqlcipher") "/lib")
(string-append #$(this-package-input "libnotify") "/lib")
(string-append #$output "/lib/Element")
#$output)
":")))))))))
(native-inputs (list tar))
(inputs
(list alsa-lib
at-spi2-atk
at-spi2-core
atk
cairo
cups
dbus
eudev
expat
fontconfig
`(,gcc "lib")
glib
gtk+
libdrm
libnotify
librsvg
libsecret
libx11
libxcb
libxcomposite
libxcursor
libxdamage
libxext
libxfixes
libxi
libxkbcommon
libxkbfile
libxrandr
libxrender
libxtst
mesa
nspr
nss
pango
sqlcipher
zlib))
(home-page "-im/element-desktop")
(synopsis "Matrix collaboration client for desktop")
(description "Element Desktop is a Matrix client for desktop with Element Web at
its core.")
;; FIXME: the release-monitoring-url property below does not appear to
;; be picked up by the updater; verify.
(properties
'((release-monitoring-url . "-im/element-desktop/releases")))
(license license:asl2.0)))
(define-public signal-desktop
(package
(name "signal-desktop")
(version "6.2.0")
(source
(origin
(method url-fetch)
(uri
(string-append
"/" name "/" name "_" version
"_amd64.deb"))
(sha256
(base32 "1ms2fv6hmg17vggbv3f2a730kvk15iz2nqbrn9mkwghabhr9rqva"))))
(supported-systems '("x86_64-linux"))
(build-system binary-build-system)
(arguments
;; TODO: fails on wrapped binary and included other files
#:patchelf-plan
#~'(("lib/Signal/signal-desktop"
("alsa-lib" "at-spi2-atk" "at-spi2-core" "atk" "cairo" "cups"
"dbus" "expat" "fontconfig-minimal" "gcc" "gdk-pixbuf" "glib"
"gtk+" "libdrm" "libsecret" "libx11" "libxcb" "libxcomposite"
"libxcursor" "libxdamage" "libxext" "libxfixes" "libxi"
"libxkbcommon" "libxkbfile" "libxrandr" "libxshmfence" "libxtst"
"mesa" "nspr" "pango" "pulseaudio" "zlib")))
#:phases
#~(modify-phases %standard-phases
(replace 'unpack
(lambda _
(invoke "ar" "x" #$source)
(invoke "tar" "xvf" "data.tar.xz")
(copy-recursively "usr/" ".")
;; Use the more standard lib directory for everything.
(rename-file "opt/" "lib")
;; Remove unneeded files.
(delete-file-recursively "usr")
(delete-file "control.tar.gz")
(delete-file "data.tar.xz")
(delete-file "debian-binary")
(delete-file "environment-variables")
;; Fix the .desktop file binary location.
(substitute* '("share/applications/signal-desktop.desktop")
(("/opt/Signal/")
(string-append #$output "/lib/Signal/")))))
(add-after 'install 'symlink-binary-file-and-cleanup
(lambda _
(delete-file (string-append #$output "/environment-variables"))
(mkdir-p (string-append #$output "/bin"))
(symlink (string-append #$output "/lib/Signal/signal-desktop")
(string-append #$output "/bin/signal-desktop"))))
(add-after 'install 'wrap-where-patchelf-does-not-work
(lambda _
(wrap-program (string-append #$output "/lib/Signal/signal-desktop")
`("FONTCONFIG_PATH" ":" prefix
(,(string-join
(list
(string-append #$(this-package-input "fontconfig-minimal") "/etc/fonts")
#$output)
":")))
`("LD_LIBRARY_PATH" ":" prefix
(,(string-join
(list
(string-append #$(this-package-input "nss") "/lib/nss")
(string-append #$(this-package-input "eudev") "/lib")
(string-append #$(this-package-input "gcc") "/lib")
(string-append #$(this-package-input "mesa") "/lib")
(string-append #$(this-package-input "libxkbfile") "/lib")
(string-append #$(this-package-input "pulseaudio") "/lib")
(string-append #$(this-package-input "zlib") "/lib")
(string-append #$(this-package-input "libsecret") "/lib")
(string-append #$output "/lib/Signal")
#$output)
":")))))))))
(native-inputs (list tar))
(inputs (list alsa-lib
at-spi2-atk
at-spi2-core
atk
cairo
cups
dbus
eudev
expat
fontconfig
`(,gcc "lib")
glib
gtk+
libdrm
librsvg
libsecret
libx11
libxcb
libxcomposite
libxdamage
libxext
libxfixes
libxkbcommon
libxkbfile
libxrandr
libxshmfence
mesa
nspr
nss
pango
pulseaudio
zlib))
(home-page "/")
(synopsis "Private messenger using the Signal protocol")
(description "Signal Desktop is an Electron application that links with Signal on Android
or iOS.")
;; FIXME: the release-monitoring-url property below does not appear to
;; be picked up by the updater; verify.
(properties
'((release-monitoring-url . "-Desktop/releases")))
(license license:agpl3)))
(define-public zoom
(package
(name "zoom")
(version "5.13.4.711")
(source
(origin
(method url-fetch)
(uri (string-append "/" version "/zoom_x86_64.tar.xz"))
(file-name (string-append name "-" version "-x86_64.tar.xz"))
(sha256
(base32 "0528ywkjl50vd0m0isxicg9mn5fv1w3lqrc10nkynb29hcqlacgv"))))
(supported-systems '("x86_64-linux"))
(build-system binary-build-system)
(arguments
;; TODO: fails on wrapped binary and included other files
#:patchelf-plan
#~(let ((libs '("alsa-lib"
"at-spi2-atk"
"at-spi2-core"
"atk"
"cairo"
"cups"
"dbus"
"eudev"
"expat"
"fontconfig-minimal"
"gcc"
"glib"
"gtk+"
"libdrm"
"libx11"
"libxcb"
"libxcomposite"
"libxcursor"
"libxdamage"
"libxext"
"libxfixes"
"libxi"
"libxkbcommon"
"libxkbfile"
"libxrandr"
"libxshmfence"
"libxtst"
"mesa"
"nspr"
"pango"
"pulseaudio"
"xcb-util-image"
"xcb-util-keysyms"
"zlib")))
`(("lib/zoom/ZoomLauncher"
,libs)
("lib/zoom/zoom"
,libs)
("lib/zoom/zopen"
,libs)))
#:phases
#~(modify-phases %standard-phases
(replace 'unpack
(lambda _
(invoke "tar" "xvf" #$source)
;; Use the more standard lib directory for everything.
(mkdir-p "lib")
(rename-file "zoom/" "lib/zoom")))
(add-after 'install 'wrap-where-patchelf-does-not-work
(lambda _
(wrap-program (string-append #$output "/lib/zoom/zopen")
`("LD_LIBRARY_PATH" prefix
,(list #$@(map (lambda (pkg)
(file-append (this-package-input pkg) "/lib"))
'("fontconfig-minimal"
"freetype"
"gcc"
"glib"
"libxcomposite"
"libxdamage"
"libxkbcommon"
"libxkbfile"
"libxrandr"
"libxrender"
"zlib")))))
(wrap-program (string-append #$output "/lib/zoom/zoom")
`("FONTCONFIG_PATH" ":" prefix
(,(string-join
(list
(string-append #$(this-package-input "fontconfig-minimal") "/etc/fonts")
#$output)
":")))
`("LD_LIBRARY_PATH" prefix
,(list (string-append #$(this-package-input "nss") "/lib/nss")
#$@(map (lambda (pkg)
(file-append (this-package-input pkg) "/lib"))
'("alsa-lib"
"atk"
"at-spi2-atk"
"at-spi2-core"
"cairo"
"cups"
"dbus"
"eudev"
"expat"
"gcc"
"glib"
"mesa"
"nspr"
"libxcomposite"
"libxdamage"
"libxkbcommon"
"libxkbfile"
"libxrandr"
"libxshmfence"
"pango"
"pulseaudio"
"zlib")))))))
(add-after 'wrap-where-patchelf-does-not-work 'rename-binary
;; IPC (for single sign-on and handling links) fails if the
;; name does not end in "zoom," so rename the real binary.
;; Thanks to the packagers for figuring this out.
(lambda _
(rename-file (string-append #$output "/lib/zoom/.zoom-real")
(string-append #$output "/lib/zoom/.zoom"))
(substitute* (string-append #$output "/lib/zoom/zoom")
(("zoom-real")
"zoom"))))
(add-after 'rename-binary 'symlink-binaries
(lambda _
(delete-file (string-append #$output "/environment-variables"))
(mkdir-p (string-append #$output "/bin"))
(symlink (string-append #$output "/lib/zoom/zoom")
(string-append #$output "/bin/zoom"))
(symlink (string-append #$output "/lib/zoom/zopen")
(string-append #$output "/bin/zopen"))
(symlink (string-append #$output "/lib/zoom/ZoomLauncher")
(string-append #$output "/bin/ZoomLauncher"))))
(add-after 'symlink-binaries 'create-desktop-file
(lambda _
(let ((apps (string-append #$output "/share/applications")))
(mkdir-p apps)
(make-desktop-entry-file
(string-append apps "/zoom.desktop")
#:name "Zoom"
#:generic-name "Zoom Client for Linux"
#:exec (string-append #$output "/bin/ZoomLauncher %U")
#:mime-type (list
"x-scheme-handler/zoommtg"
"x-scheme-handler/zoomus"
"x-scheme-handler/tel"
"x-scheme-handler/callto"
"x-scheme-handler/zoomphonecall"
"application/x-zoom")
#:categories '("Network" "InstantMessaging"
"VideoConference" "Telephony")
#:startup-w-m-class "zoom"
#:comment
'(("en" "Zoom Video Conference")
(#f "Zoom Video Conference")))))))))
(native-inputs (list tar))
(inputs (list alsa-lib
at-spi2-atk
at-spi2-core
atk
bash-minimal
cairo
cups
dbus
eudev
expat
fontconfig
freetype
`(,gcc "lib")
glib
gtk+
libdrm
librsvg
libx11
libxcb
libxcomposite
libxdamage
libxext
libxfixes
libxkbcommon
libxkbfile
libxrandr
libxrender
libxshmfence
mesa
nspr
nss
pango
pulseaudio
qtmultimedia
xcb-util-image
xcb-util-keysyms
zlib))
(home-page "/")
(synopsis "Video conference client")
(description "The Zoom video conferencing and messaging client. Zoom must be run via an
app launcher to use its .desktop file, or with @code{ZoomLauncher}.")
(license (license:nonfree "/"))))
| null | https://raw.githubusercontent.com/nonguix/nonguix/8c0a857cebd1d06f3ef1b61689e33060151885fa/nongnu/packages/messaging.scm | scheme | Use the more standard lib directory for everything.
Remove unneeded files.
Fix the .desktop file binary location.
not working?
Use the more standard lib directory for everything.
Remove unneeded files.
Fix the .desktop file binary location.
doesn't work?
Use the more standard lib directory for everything.
name does not end in "zoom," so rename the real binary. | SPDX - License - Identifier : GPL-3.0 - or - later
Copyright © 2021 , 2022 PantherX OS Team < >
Copyright © 2022 , 2023 < >
Copyright © 2022 < >
(define-module (nongnu packages messaging)
#:use-module (gnu packages base)
#:use-module (gnu packages bash)
#:use-module (gnu packages compression)
#:use-module (gnu packages cups)
#:use-module (gnu packages databases)
#:use-module (gnu packages fontutils)
#:use-module (gnu packages gcc)
#:use-module (gnu packages gl)
#:use-module (gnu packages glib)
#:use-module (gnu packages gnome)
#:use-module (gnu packages gtk)
#:use-module (gnu packages linux)
#:use-module (gnu packages nss)
#:use-module (gnu packages pulseaudio)
#:use-module (gnu packages qt)
#:use-module (gnu packages xdisorg)
#:use-module (gnu packages xml)
#:use-module (gnu packages xorg)
#:use-module (guix download)
#:use-module (guix gexp)
#:use-module (guix packages)
#:use-module (guix utils)
#:use-module ((guix licenses) :prefix license:)
#:use-module (nonguix build-system binary)
#:use-module ((nonguix licenses) :prefix license:)
#:use-module (ice-9 match))
(define-public element-desktop
(package
(name "element-desktop")
(version "1.11.17")
(source
(origin
(method url-fetch)
(uri
(string-append
"/" name "/" name "_" version
"_amd64.deb"))
(sha256
(base32 "0by2ci5bbc42v71vsqrdigiv0ywqnpdd5mh63pgl9n7kmxqfdzhl"))))
(supported-systems '("x86_64-linux"))
(build-system binary-build-system)
(arguments
TODO : fails on wrapped binary and included other files
#:patchelf-plan
#~'(("lib/Element/element-desktop"
("alsa-lib" "at-spi2-atk" "at-spi2-core" "atk" "cairo" "cups"
"dbus" "expat" "fontconfig-minimal" "gcc" "gdk-pixbuf" "glib"
"gtk+" "libdrm" "libnotify" "libsecret" "libx11" "libxcb"
"libxcomposite" "libxcursor" "libxdamage" "libxext" "libxfixes"
"libxi" "libxkbcommon" "libxkbfile" "libxrandr" "libxrender"
"libxtst" "mesa" "nspr" "pango" "zlib")))
#:phases
#~(modify-phases %standard-phases
(replace 'unpack
(lambda _
(invoke "ar" "x" #$source)
(invoke "tar" "xvf" "data.tar.xz")
(copy-recursively "usr/" ".")
(rename-file "opt/" "lib")
(delete-file-recursively "usr")
(delete-file "control.tar.gz")
(delete-file "data.tar.xz")
(delete-file "debian-binary")
(substitute* '("share/applications/element-desktop.desktop")
(("/opt/Element/")
(string-append #$output "/lib/Element/")))))
(add-after 'install 'symlink-binary-file-and-cleanup
(lambda _
(delete-file (string-append #$output "/environment-variables"))
(mkdir-p (string-append #$output "/bin"))
(symlink (string-append #$output "/lib/Element/element-desktop")
(string-append #$output "/bin/element-desktop"))))
(add-after 'install 'wrap-where-patchelf-does-not-work
(lambda _
(wrap-program (string-append #$output "/lib/Element/element-desktop")
`("FONTCONFIG_PATH" ":" prefix
(,(string-join
(list
(string-append #$(this-package-input "fontconfig-minimal") "/etc/fonts")
#$output)
":")))
`("LD_LIBRARY_PATH" ":" prefix
(,(string-join
(list
(string-append #$(this-package-input "nss") "/lib/nss")
(string-append #$(this-package-input "eudev") "/lib")
(string-append #$(this-package-input "gcc") "/lib")
(string-append #$(this-package-input "mesa") "/lib")
(string-append #$(this-package-input "libxkbfile") "/lib")
(string-append #$(this-package-input "zlib") "/lib")
(string-append #$(this-package-input "libsecret") "/lib")
(string-append #$(this-package-input "sqlcipher") "/lib")
(string-append #$(this-package-input "libnotify") "/lib")
(string-append #$output "/lib/Element")
#$output)
":")))))))))
(native-inputs (list tar))
(inputs
(list alsa-lib
at-spi2-atk
at-spi2-core
atk
cairo
cups
dbus
eudev
expat
fontconfig
`(,gcc "lib")
glib
gtk+
libdrm
libnotify
librsvg
libsecret
libx11
libxcb
libxcomposite
libxcursor
libxdamage
libxext
libxfixes
libxi
libxkbcommon
libxkbfile
libxrandr
libxrender
libxtst
mesa
nspr
nss
pango
sqlcipher
zlib))
(home-page "-im/element-desktop")
(synopsis "Matrix collaboration client for desktop")
(description "Element Desktop is a Matrix client for desktop with Element Web at
its core.")
(properties
'((release-monitoring-url . "-im/element-desktop/releases")))
(license license:asl2.0)))
(define-public signal-desktop
(package
(name "signal-desktop")
(version "6.2.0")
(source
(origin
(method url-fetch)
(uri
(string-append
"/" name "/" name "_" version
"_amd64.deb"))
(sha256
(base32 "1ms2fv6hmg17vggbv3f2a730kvk15iz2nqbrn9mkwghabhr9rqva"))))
(supported-systems '("x86_64-linux"))
(build-system binary-build-system)
(arguments
TODO : fails on wrapped binary and included other files
#:patchelf-plan
#~'(("lib/Signal/signal-desktop"
("alsa-lib" "at-spi2-atk" "at-spi2-core" "atk" "cairo" "cups"
"dbus" "expat" "fontconfig-minimal" "gcc" "gdk-pixbuf" "glib"
"gtk+" "libdrm" "libsecret" "libx11" "libxcb" "libxcomposite"
"libxcursor" "libxdamage" "libxext" "libxfixes" "libxi"
"libxkbcommon" "libxkbfile" "libxrandr" "libxshmfence" "libxtst"
"mesa" "nspr" "pango" "pulseaudio" "zlib")))
#:phases
#~(modify-phases %standard-phases
(replace 'unpack
(lambda _
(invoke "ar" "x" #$source)
(invoke "tar" "xvf" "data.tar.xz")
(copy-recursively "usr/" ".")
(rename-file "opt/" "lib")
(delete-file-recursively "usr")
(delete-file "control.tar.gz")
(delete-file "data.tar.xz")
(delete-file "debian-binary")
(delete-file "environment-variables")
(substitute* '("share/applications/signal-desktop.desktop")
(("/opt/Signal/")
(string-append #$output "/lib/Signal/")))))
(add-after 'install 'symlink-binary-file-and-cleanup
(lambda _
(delete-file (string-append #$output "/environment-variables"))
(mkdir-p (string-append #$output "/bin"))
(symlink (string-append #$output "/lib/Signal/signal-desktop")
(string-append #$output "/bin/signal-desktop"))))
(add-after 'install 'wrap-where-patchelf-does-not-work
(lambda _
(wrap-program (string-append #$output "/lib/Signal/signal-desktop")
`("FONTCONFIG_PATH" ":" prefix
(,(string-join
(list
(string-append #$(this-package-input "fontconfig-minimal") "/etc/fonts")
#$output)
":")))
`("LD_LIBRARY_PATH" ":" prefix
(,(string-join
(list
(string-append #$(this-package-input "nss") "/lib/nss")
(string-append #$(this-package-input "eudev") "/lib")
(string-append #$(this-package-input "gcc") "/lib")
(string-append #$(this-package-input "mesa") "/lib")
(string-append #$(this-package-input "libxkbfile") "/lib")
(string-append #$(this-package-input "pulseaudio") "/lib")
(string-append #$(this-package-input "zlib") "/lib")
(string-append #$(this-package-input "libsecret") "/lib")
(string-append #$output "/lib/Signal")
#$output)
":")))))))))
(native-inputs (list tar))
(inputs (list alsa-lib
at-spi2-atk
at-spi2-core
atk
cairo
cups
dbus
eudev
expat
fontconfig
`(,gcc "lib")
glib
gtk+
libdrm
librsvg
libsecret
libx11
libxcb
libxcomposite
libxdamage
libxext
libxfixes
libxkbcommon
libxkbfile
libxrandr
libxshmfence
mesa
nspr
nss
pango
pulseaudio
zlib))
(home-page "/")
(synopsis "Private messenger using the Signal protocol")
(description "Signal Desktop is an Electron application that links with Signal on Android
or iOS.")
(properties
'((release-monitoring-url . "-Desktop/releases")))
(license license:agpl3)))
(define-public zoom
(package
(name "zoom")
(version "5.13.4.711")
(source
(origin
(method url-fetch)
(uri (string-append "/" version "/zoom_x86_64.tar.xz"))
(file-name (string-append name "-" version "-x86_64.tar.xz"))
(sha256
(base32 "0528ywkjl50vd0m0isxicg9mn5fv1w3lqrc10nkynb29hcqlacgv"))))
(supported-systems '("x86_64-linux"))
(build-system binary-build-system)
(arguments
TODO : fails on wrapped binary and included other files
#:patchelf-plan
#~(let ((libs '("alsa-lib"
"at-spi2-atk"
"at-spi2-core"
"atk"
"cairo"
"cups"
"dbus"
"eudev"
"expat"
"fontconfig-minimal"
"gcc"
"glib"
"gtk+"
"libdrm"
"libx11"
"libxcb"
"libxcomposite"
"libxcursor"
"libxdamage"
"libxext"
"libxfixes"
"libxi"
"libxkbcommon"
"libxkbfile"
"libxrandr"
"libxshmfence"
"libxtst"
"mesa"
"nspr"
"pango"
"pulseaudio"
"xcb-util-image"
"xcb-util-keysyms"
"zlib")))
`(("lib/zoom/ZoomLauncher"
,libs)
("lib/zoom/zoom"
,libs)
("lib/zoom/zopen"
,libs)))
#:phases
#~(modify-phases %standard-phases
(replace 'unpack
(lambda _
(invoke "tar" "xvf" #$source)
(mkdir-p "lib")
(rename-file "zoom/" "lib/zoom")))
(add-after 'install 'wrap-where-patchelf-does-not-work
(lambda _
(wrap-program (string-append #$output "/lib/zoom/zopen")
`("LD_LIBRARY_PATH" prefix
,(list #$@(map (lambda (pkg)
(file-append (this-package-input pkg) "/lib"))
'("fontconfig-minimal"
"freetype"
"gcc"
"glib"
"libxcomposite"
"libxdamage"
"libxkbcommon"
"libxkbfile"
"libxrandr"
"libxrender"
"zlib")))))
(wrap-program (string-append #$output "/lib/zoom/zoom")
`("FONTCONFIG_PATH" ":" prefix
(,(string-join
(list
(string-append #$(this-package-input "fontconfig-minimal") "/etc/fonts")
#$output)
":")))
`("LD_LIBRARY_PATH" prefix
,(list (string-append #$(this-package-input "nss") "/lib/nss")
#$@(map (lambda (pkg)
(file-append (this-package-input pkg) "/lib"))
'("alsa-lib"
"atk"
"at-spi2-atk"
"at-spi2-core"
"cairo"
"cups"
"dbus"
"eudev"
"expat"
"gcc"
"glib"
"mesa"
"nspr"
"libxcomposite"
"libxdamage"
"libxkbcommon"
"libxkbfile"
"libxrandr"
"libxshmfence"
"pango"
"pulseaudio"
"zlib")))))))
(add-after 'wrap-where-patchelf-does-not-work 'rename-binary
IPC ( for single sign - on and handling links ) fails if the
Thanks to the packagers for figuring this out .
(lambda _
(rename-file (string-append #$output "/lib/zoom/.zoom-real")
(string-append #$output "/lib/zoom/.zoom"))
(substitute* (string-append #$output "/lib/zoom/zoom")
(("zoom-real")
"zoom"))))
(add-after 'rename-binary 'symlink-binaries
(lambda _
(delete-file (string-append #$output "/environment-variables"))
(mkdir-p (string-append #$output "/bin"))
(symlink (string-append #$output "/lib/zoom/zoom")
(string-append #$output "/bin/zoom"))
(symlink (string-append #$output "/lib/zoom/zopen")
(string-append #$output "/bin/zopen"))
(symlink (string-append #$output "/lib/zoom/ZoomLauncher")
(string-append #$output "/bin/ZoomLauncher"))))
(add-after 'symlink-binaries 'create-desktop-file
(lambda _
(let ((apps (string-append #$output "/share/applications")))
(mkdir-p apps)
(make-desktop-entry-file
(string-append apps "/zoom.desktop")
#:name "Zoom"
#:generic-name "Zoom Client for Linux"
#:exec (string-append #$output "/bin/ZoomLauncher %U")
#:mime-type (list
"x-scheme-handler/zoommtg"
"x-scheme-handler/zoomus"
"x-scheme-handler/tel"
"x-scheme-handler/callto"
"x-scheme-handler/zoomphonecall"
"application/x-zoom")
#:categories '("Network" "InstantMessaging"
"VideoConference" "Telephony")
#:startup-w-m-class "zoom"
#:comment
'(("en" "Zoom Video Conference")
(#f "Zoom Video Conference")))))))))
(native-inputs (list tar))
(inputs (list alsa-lib
at-spi2-atk
at-spi2-core
atk
bash-minimal
cairo
cups
dbus
eudev
expat
fontconfig
freetype
`(,gcc "lib")
glib
gtk+
libdrm
librsvg
libx11
libxcb
libxcomposite
libxdamage
libxext
libxfixes
libxkbcommon
libxkbfile
libxrandr
libxrender
libxshmfence
mesa
nspr
nss
pango
pulseaudio
qtmultimedia
xcb-util-image
xcb-util-keysyms
zlib))
(home-page "/")
(synopsis "Video conference client")
(description "The Zoom video conferencing and messaging client. Zoom must be run via an
app launcher to use its .desktop file, or with @code{ZoomLauncher}.")
(license (license:nonfree "/"))))
|
81c25f71fe8c0e6eecfcae7d2516c9eab3746ca0aea593c725ad937e60452471 | adamwalker/clash-riscv | Decode.hs | # LANGUAGE ScopedTypeVariables #
module Core.Decode where
import Clash.Prelude
import Data.Bool
import Core.ALU
--
--Extract parts of the instruction
--
opcode :: BitVector 32 -> BitVector 7
opcode = slice d6 d0
rOp1 :: BitVector 32 -> BitVector 7
rOp1 = slice d31 d25
rs2 :: BitVector 32 -> BitVector 5
rs2 = slice d24 d20
rs1 :: BitVector 32 -> BitVector 5
rs1 = slice d19 d15
rOp2 :: BitVector 32 -> BitVector 3
rOp2 = slice d14 d12
rd :: BitVector 32 -> BitVector 5
rd = slice d11 d7
--I type immediate
iImm :: BitVector 32 -> BitVector 12
iImm = slice d31 d20
--U type immediate
uImm :: BitVector 32 -> BitVector 20
uImm = slice d31 d12
--UJ type immediate
ujImm :: BitVector 32 -> BitVector 20
ujImm x = slice d31 d31 x ++# slice d19 d12 x ++# slice d20 d20 x ++# slice d30 d21 x
--S type immediate
sImm :: BitVector 32 -> BitVector 12
sImm x = slice d31 d25 x ++# slice d11 d7 x
SB type immediate - used for branches
sbImm :: BitVector 32 -> BitVector 12
sbImm x = slice d31 d31 x ++# slice d7 d7 x ++# slice d30 d25 x ++# slice d11 d8 x
--
Opcode predicates
--
rType :: BitVector 32 -> Bool
rType = (== 0b0110011) . opcode
iType :: BitVector 32 -> Bool
iType = (== 0b0010011) . opcode
lui :: BitVector 32 -> Bool
lui = (== 0b0110111) . opcode
jal :: BitVector 32 -> Bool
jal = (== 0b1101111) . opcode
jalr :: BitVector 32 -> Bool
jalr = (== 0b1100111) . opcode
branch :: BitVector 32 -> Bool
branch = (== 0b1100011) . opcode
load :: BitVector 32 -> Bool
load = (== 0b0000011) . opcode
store :: BitVector 32 -> Bool
store = (== 0b0100011) . opcode
auipc :: BitVector 32 -> Bool
auipc = (== 0b0010111) . opcode
sys :: BitVector 32 -> Bool
sys = (== 0b1110011) . opcode
--
--ALU operation decoding
--
decodeAluPrimaryOp :: BitVector 32 -> PrimaryOp
decodeAluPrimaryOp instr
| iType instr || rType instr =
case rOp2 instr of
0 -> ADDSUB
1 -> SLL
2 -> SLT
3 -> SLTU
4 -> XOR
5 -> SR
6 -> OR
7 -> AND
| auipc instr = ADDSUB
TODO
decodeAluSecondaryOp :: BitVector 32 -> SecondaryOp
decodeAluSecondaryOp instr
| rType instr
= unpack $ slice d30 d30 instr
| auipc instr
= False
TODO
--
--Control signal decoding
--
The first operand to the ALU can fome from
- The first read port of the register file for R and I type instructions
- The PC for AUIPC
The first operand to the ALU can fome from
- The first read port of the register file for R and I type instructions
- The PC for AUIPC
-}
firstOpIsRegister = not . auipc
The second operand to the ALU can come from
- The second read port of the register file for R type instructions
- The immediate field for I type instructions
The second operand to the ALU can come from
- The second read port of the register file for R type instructions
- The immediate field for I type instructions
-}
secondOpIsRegister instr = rType instr || branch instr
--Does the instruction actually use r1
usesRegister1 instr = rType instr || iType instr || jalr instr || branch instr || store instr || load instr
--Does the instruction actually use r2
usesRegister2 instr = rType instr || branch instr
--A memory write only happens for store instructions
enableMemWrite = store
{-
The instruction results in a register write to rd if the instruction is
- load
- lui
- iType
- rType
-}
enableRegWrite instr = load instr || iType instr || rType instr || auipc instr || lui instr || jal instr || jalr instr
{-
The source of the register write back is the alu (as opposed to a memory read) if the instruction is
- iType
- rType
-}
data DestRegSource
= SourceALU
| SourceMem
| SourceSpec
deriving (Show)
decodeDestRegSource instr
| iType instr || rType instr || auipc instr = SourceALU
| load instr = SourceMem
| lui instr = SourceALU
| jal instr || jalr instr = SourceALU
| specialReg instr = SourceSpec
TODO
--All immedates in RiscV are sign extended
signExtendImmediate :: forall n. KnownNat n => BitVector n -> BitVector 32
signExtendImmediate x = pack (resize (unpack x :: Signed n))
--Upper immediates
alignUpperImmediate :: BitVector 20 -> BitVector 32
alignUpperImmediate = (++# 0)
Where does the immediate ( always the second alu operand ) come from ?
extractImmediate :: BitVector 32 -> BitVector 32
extractImmediate instr
| auipc instr = alignUpperImmediate $ uImm instr
| load instr = signExtendImmediate $ iImm instr
| store instr = signExtendImmediate $ sImm instr
| iType instr = signExtendImmediate $ iImm instr
| jalr instr = signExtendImmediate $ iImm instr
TODO
--
--Load / Store
--
data MemSize
= Byte
| HalfWord
| Word
extractMemSize :: BitVector 32 -> MemSize
extractMemSize x
= case slice d13 d12 x of
0 -> Byte
1 -> HalfWord
otherwise -> Word
loadUnsigned :: BitVector 32 -> Bool
loadUnsigned = unpack . slice d14 d14
--
--Branch
--
extractBranchType :: BitVector 32 -> BitVector 3
extractBranchType = slice d14 d12
--
--System instructions
--
specialReg :: BitVector 32 -> Bool
specialReg instr
= slice d31 d28 instr == 0b1100
&& slice d26 d22 instr == 0
data SpecialReg
= Cycle
| Time
| Retired
extractSpecialReg :: BitVector 32 -> BitVector 2
extractSpecialReg = slice d21 d20
decodeSpecialReg :: BitVector 2 -> SpecialReg
decodeSpecialReg 0 = Cycle
decodeSpecialReg 1 = Time
decodeSpecialReg 2 = Retired
TODO
specialRegHigh :: BitVector 32 -> Bool
specialRegHigh = unpack . slice d27 d27
| null | https://raw.githubusercontent.com/adamwalker/clash-riscv/84a90731a07c3427695b4926d7159f9e9902c1a1/src/Core/Decode.hs | haskell |
Extract parts of the instruction
I type immediate
U type immediate
UJ type immediate
S type immediate
ALU operation decoding
Control signal decoding
Does the instruction actually use r1
Does the instruction actually use r2
A memory write only happens for store instructions
The instruction results in a register write to rd if the instruction is
- load
- lui
- iType
- rType
The source of the register write back is the alu (as opposed to a memory read) if the instruction is
- iType
- rType
All immedates in RiscV are sign extended
Upper immediates
Load / Store
Branch
System instructions
| # LANGUAGE ScopedTypeVariables #
module Core.Decode where
import Clash.Prelude
import Data.Bool
import Core.ALU
opcode :: BitVector 32 -> BitVector 7
opcode = slice d6 d0
rOp1 :: BitVector 32 -> BitVector 7
rOp1 = slice d31 d25
rs2 :: BitVector 32 -> BitVector 5
rs2 = slice d24 d20
rs1 :: BitVector 32 -> BitVector 5
rs1 = slice d19 d15
rOp2 :: BitVector 32 -> BitVector 3
rOp2 = slice d14 d12
rd :: BitVector 32 -> BitVector 5
rd = slice d11 d7
iImm :: BitVector 32 -> BitVector 12
iImm = slice d31 d20
uImm :: BitVector 32 -> BitVector 20
uImm = slice d31 d12
ujImm :: BitVector 32 -> BitVector 20
ujImm x = slice d31 d31 x ++# slice d19 d12 x ++# slice d20 d20 x ++# slice d30 d21 x
sImm :: BitVector 32 -> BitVector 12
sImm x = slice d31 d25 x ++# slice d11 d7 x
SB type immediate - used for branches
sbImm :: BitVector 32 -> BitVector 12
sbImm x = slice d31 d31 x ++# slice d7 d7 x ++# slice d30 d25 x ++# slice d11 d8 x
Opcode predicates
rType :: BitVector 32 -> Bool
rType = (== 0b0110011) . opcode
iType :: BitVector 32 -> Bool
iType = (== 0b0010011) . opcode
lui :: BitVector 32 -> Bool
lui = (== 0b0110111) . opcode
jal :: BitVector 32 -> Bool
jal = (== 0b1101111) . opcode
jalr :: BitVector 32 -> Bool
jalr = (== 0b1100111) . opcode
branch :: BitVector 32 -> Bool
branch = (== 0b1100011) . opcode
load :: BitVector 32 -> Bool
load = (== 0b0000011) . opcode
store :: BitVector 32 -> Bool
store = (== 0b0100011) . opcode
auipc :: BitVector 32 -> Bool
auipc = (== 0b0010111) . opcode
sys :: BitVector 32 -> Bool
sys = (== 0b1110011) . opcode
decodeAluPrimaryOp :: BitVector 32 -> PrimaryOp
decodeAluPrimaryOp instr
| iType instr || rType instr =
case rOp2 instr of
0 -> ADDSUB
1 -> SLL
2 -> SLT
3 -> SLTU
4 -> XOR
5 -> SR
6 -> OR
7 -> AND
| auipc instr = ADDSUB
TODO
decodeAluSecondaryOp :: BitVector 32 -> SecondaryOp
decodeAluSecondaryOp instr
| rType instr
= unpack $ slice d30 d30 instr
| auipc instr
= False
TODO
The first operand to the ALU can fome from
- The first read port of the register file for R and I type instructions
- The PC for AUIPC
The first operand to the ALU can fome from
- The first read port of the register file for R and I type instructions
- The PC for AUIPC
-}
firstOpIsRegister = not . auipc
The second operand to the ALU can come from
- The second read port of the register file for R type instructions
- The immediate field for I type instructions
The second operand to the ALU can come from
- The second read port of the register file for R type instructions
- The immediate field for I type instructions
-}
secondOpIsRegister instr = rType instr || branch instr
usesRegister1 instr = rType instr || iType instr || jalr instr || branch instr || store instr || load instr
usesRegister2 instr = rType instr || branch instr
enableMemWrite = store
enableRegWrite instr = load instr || iType instr || rType instr || auipc instr || lui instr || jal instr || jalr instr
data DestRegSource
= SourceALU
| SourceMem
| SourceSpec
deriving (Show)
decodeDestRegSource instr
| iType instr || rType instr || auipc instr = SourceALU
| load instr = SourceMem
| lui instr = SourceALU
| jal instr || jalr instr = SourceALU
| specialReg instr = SourceSpec
TODO
signExtendImmediate :: forall n. KnownNat n => BitVector n -> BitVector 32
signExtendImmediate x = pack (resize (unpack x :: Signed n))
alignUpperImmediate :: BitVector 20 -> BitVector 32
alignUpperImmediate = (++# 0)
Where does the immediate ( always the second alu operand ) come from ?
extractImmediate :: BitVector 32 -> BitVector 32
extractImmediate instr
| auipc instr = alignUpperImmediate $ uImm instr
| load instr = signExtendImmediate $ iImm instr
| store instr = signExtendImmediate $ sImm instr
| iType instr = signExtendImmediate $ iImm instr
| jalr instr = signExtendImmediate $ iImm instr
TODO
data MemSize
= Byte
| HalfWord
| Word
extractMemSize :: BitVector 32 -> MemSize
extractMemSize x
= case slice d13 d12 x of
0 -> Byte
1 -> HalfWord
otherwise -> Word
loadUnsigned :: BitVector 32 -> Bool
loadUnsigned = unpack . slice d14 d14
extractBranchType :: BitVector 32 -> BitVector 3
extractBranchType = slice d14 d12
specialReg :: BitVector 32 -> Bool
specialReg instr
= slice d31 d28 instr == 0b1100
&& slice d26 d22 instr == 0
data SpecialReg
= Cycle
| Time
| Retired
extractSpecialReg :: BitVector 32 -> BitVector 2
extractSpecialReg = slice d21 d20
decodeSpecialReg :: BitVector 2 -> SpecialReg
decodeSpecialReg 0 = Cycle
decodeSpecialReg 1 = Time
decodeSpecialReg 2 = Retired
TODO
specialRegHigh :: BitVector 32 -> Bool
specialRegHigh = unpack . slice d27 d27
|
ea300f3057f9261e093a756802e1f9797950579285084287efe7c42e31528b7a | 1Jajen1/Brokkr | TH.hs | # LANGUAGE DataKinds #
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
{-# LANGUAGE MultiWayIf #-}
# LANGUAGE MagicHash #
module Block.Internal.TH (
generateBlockStatePatterns
, genPaletteMapping
) where
import Data.Text ( Text )
import Data.ByteString ( ByteString )
import Data.Aeson
import Data.Maybe
import qualified Block.Internal.BlockEntry as BE
import Language.Haskell.TH
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Foldable (foldl')
import Data.Char (toUpper)
import qualified Language.Haskell.TH as TH
import Data.List (sortOn)
import Data.Semigroup
import Data.Coerce
import qualified Data.IntSet as IS
import qualified Data.Map.Strict as M
import Data.Hashable
import Data.Word
import GHC.Exts
import Foreign.ForeignPtr
import qualified Data.Vector.Storable as S
Generates two datatypes :
I d to nbt
Vector ( ByteString , [ ( ByteString , ByteString ) ] )
nbt to i d
( ByteString , [ ( ByteString , ByteString ) ] ) - > Int
Generates two datatypes:
Id to nbt
Vector (ByteString, [(ByteString, ByteString)])
nbt to id
(ByteString, [(ByteString, ByteString)]) -> Int
-}
genPaletteMapping :: Q [Dec]
genPaletteMapping = do
entries <- runIO $ readBlocks
let namesAndPropsToId = sortOn fst $ do
(nameSpacedName, BE.BlockEntry{..}) <- sortOn (\(_, BE.BlockEntry{blockStates}) -> BE.stateId $ head blockStates) $ M.toList entries
BE.BlockState{..} <- blockStates
pure (fromIntegral $ hash (T.encodeUtf8 nameSpacedName, maybe [] (sortOn fst . fmap (\(k,v) -> (T.encodeUtf8 k, T.encodeUtf8 v)) . M.toList) stateProperties), fromIntegral stateId)
highestId = maximum $ do
(_, BE.BlockEntry{blockStates}) <- M.toList entries
BE.BlockState{stateId} <- blockStates
pure stateId
setOfHashes = IS.size . IS.fromList $ fmap (fromIntegral . fst) namesAndPropsToId
hLiFptrSz = setOfHashes * 8
vLiFptrSz = setOfHashes * 4
hLitArr = S.fromListN setOfHashes $ fmap fst namesAndPropsToId
valLitArr = S.fromListN setOfHashes $ fmap snd namesAndPropsToId
(hLitFptr, _, _) = S.unsafeToForeignPtr hLitArr
(vLitFptr, _, _) = S.unsafeToForeignPtr valLitArr
error . show $ S.elem 5369188014199616130 hLitArr
let hashLit = litE . BytesPrimL . mkBytes (coerce @(ForeignPtr Word64) hLitFptr) 0 $ fromIntegral hLiFptrSz
valsLit = litE . BytesPrimL . mkBytes (coerce @(ForeignPtr Word32) vLitFptr) 0 $ fromIntegral vLiFptrSz
-- Quick check if we have no hash collisions
-- We will probably run into one at some point, but that's for future me to worry about
if (setOfHashes /= length namesAndPropsToId)
then error $ "Expected " <> show (length namesAndPropsToId) <> " but actual is " <> show setOfHashes
else
pure ()
[d|
hashProps :: ByteString -> [(ByteString, ByteString)] -> Int
hashProps !ns !arr = hash (ns, arr)
# INLINE hashProps #
propsToId :: ByteString -> [(ByteString, ByteString)] -> Int
propsToId !n !props = I# (int32ToInt# (loop 0 setOfHashes))
where
!hs = fromIntegral $ hashProps n props
!hsLit = $(hashLit)
!valLit = $(valsLit)
loop !l !u
| u <= l = error $ "Unknown hash" <> show hs <> " for name " <> show n <> " and props " <> show props
| otherwise =
let a = W# (word64ToWord# (indexWord64OffAddr# hsLit mid#))
in case compare a hs of
LT -> loop (mid + 1) u
EQ -> indexInt32OffAddr# valLit mid#
GT -> loop l mid
where
mid@(I# mid#) = (l + u) `div` 2
type HighestBlockStateId = $(pure . LitT . NumTyLit $ fromIntegral highestId)
|]
Generates bidirectional patterns of form :
pattern XXXX : : Arg1 - > ... ArgN - > BlockState
pattern XXXX x1 ... xN < - isStateXXXX - > Just ( x1 , ... , xN ) where
XXXX x1 ... xN = BlockState toBlockStateXXXX
isStateXXXX ( BlockState i ) = if inRange i lowestId highestId then Just ( getArg i 1 , ... , ) else Nothing
getArg i nr = ( i ` mod ` cardinalities ) ` div ` prevCardinalities
toBlockState x1 ... xN = foldl ' ( \acc x - > acc + toId x * cardinalities ) 0 properties
-- cardinalities and is precomputed at compile time and is the product of the cardinalities of each property that follows this one
Generates bidirectional patterns of form:
pattern XXXX :: Arg1 -> ... ArgN -> BlockState
pattern XXXX x1 ... xN <- isStateXXXX -> Just (x1, ..., xN) where
XXXX x1 ... xN = BlockState toBlockStateXXXX
isStateXXXX (BlockState i) = if inRange i lowestId highestId then Just (getArg i 1, ..., getArg i n) else Nothing
getArg i nr = (i `mod` cardinalities) `div` prevCardinalities
toBlockState x1 ... xN = foldl' (\acc x -> acc + toId x * cardinalities) 0 properties
-- cardinalities and prevCardinalities is precomputed at compile time and is the product of the cardinalities of each property that follows this one
-}
generateBlockStatePatterns :: Q [Dec]
generateBlockStatePatterns = do
entries <- runIO $ readBlocks
sequence $ do
(namespacedName, BE.BlockEntry{..}) <- M.toList entries
let name = fromSnakeCase $ T.drop 10 namespacedName
pName = mkName $ T.unpack name
let props = sortOn fst $ maybe [] M.toList blockProperties
propsDown = reverse props
lowId :: Int = coerce $ foldMap (\BE.BlockState{..} -> Min stateId) blockStates
highId :: Int = 1 + (coerce $ foldMap (\BE.BlockState{..} -> Max stateId) blockStates)
patType = foldl' (\ty x -> AppT (AppT ArrowT $ conFromProps name x) ty) (ConT $ mkName "BlockState") props
args = zipWith (\x _ -> mkName $ "x" <> show x) [1..] props
lN = mkName "l"
toArgs = [| \n -> if inRange (coerce n) lowId highId then let ($(varP lN) :: Int) = coerce n - lowId in Just $(mkTup) else Nothing |]
fromArgs = foldl' (\x (nr, arg) -> [| $(x) + (toId $(varE arg)) * $(getCard $ drop nr propsDown) |]) [| 0 |] $ zip [1..] args
viewPat = if | length props == 1 -> varP $ mkName "x1"
| otherwise -> tupP $ fmap varP args
constrPat = varP <$> args
getCard ps = foldl' (\x y -> [| $(x) * $(getCard1 y) |]) [| 1 |] ps
getCard1 prop = appTypeE [| cardinality |] . pure $ conFromProps name prop
mkTup = if | length props == 1 -> [| fromId $(varE lN) |]
| null props -> [| () |]
| otherwise -> tupE $ do
(nr, _) <- zip [1..] args
pure [| fromId $ ($(varE lN) `mod` $(getCard $ drop (nr - 1) propsDown)) `div` $(getCard $ drop nr propsDown) |]
[ patSynSigD pName (pure patType)
, patSynD pName
(prefixPatSyn args)
(explBidir [ clause constrPat (normalB [| BlockState $ $(fromArgs) |]) [] ])
(parensP $ viewP toArgs $ conP (mkName "Just") [viewPat])
-- TODO Add pragmas to inline these patterns?
]
conFromProps :: Text -> (Text, [Text]) -> TH.Type
conFromProps ty ("north", _)
| attachable ty = AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "False")) (PromotedT $ mkName "False")) (PromotedT $ mkName "North")
| isWall ty = AppT (ConT . mkName $ T.unpack "WallAttached") (PromotedT $ mkName "North")
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "North")
| ty == "RedstoneWire" = AppT (ConT . mkName $ T.unpack "RedstonePlacement") (PromotedT $ mkName "North")
| otherwise = error $ "Unknown north: " <> T.unpack ty
conFromProps ty ("south", _)
| attachable ty = AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "False")) (PromotedT $ mkName "False")) (PromotedT $ mkName "South")
| isWall ty = AppT (ConT . mkName $ T.unpack "WallAttached") (PromotedT $ mkName "South")
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "South")
| ty == "RedstoneWire" = AppT (ConT . mkName $ T.unpack "RedstonePlacement") (PromotedT $ mkName "South")
| otherwise = error $ "Unknown south: " <> T.unpack ty
conFromProps ty ("east", _)
| attachable ty = AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "False")) (PromotedT $ mkName "False")) (PromotedT $ mkName "East")
| isWall ty = AppT (ConT . mkName $ T.unpack "WallAttached") (PromotedT $ mkName "East")
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "East")
| ty == "RedstoneWire" = AppT (ConT . mkName $ T.unpack "RedstonePlacement") (PromotedT $ mkName "East")
| otherwise = error $ "Unknown east: " <> T.unpack ty
conFromProps ty ("west", _)
| attachable ty = AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "False")) (PromotedT $ mkName "False")) (PromotedT $ mkName "West")
| isWall ty = AppT (ConT . mkName $ T.unpack "WallAttached") (PromotedT $ mkName "West")
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "West")
| ty == "RedstoneWire" = AppT (ConT . mkName $ T.unpack "RedstonePlacement") (PromotedT $ mkName "West")
| otherwise = error $ "Unknown west: " <> T.unpack ty
conFromProps ty ("up", _)
| isWall ty || ty == "Fire" || ty == "ChorusPlant" || ty == "GlowLichen" || ty == "Vine" || ty == "SculkVein"
= AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "True")) (PromotedT $ mkName "False")) (PromotedT $ mkName "Up")
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "Up")
| otherwise = error $ "Unknown up: " <> T.unpack ty
conFromProps ty ("down", _)
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "Down")
| ty == "ChorusPlant" || ty == "GlowLichen" || ty == "SculkVein" = AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "False")) (PromotedT $ mkName "True")) (PromotedT $ mkName "Down")
| otherwise = error $ "Unknown down: " <> T.unpack ty
conFromProps _ ("waterlogged", _) = ConT $ mkName "Waterlogged"
conFromProps _ ("powered", _) = ConT $ mkName "Powered"
conFromProps ty ("facing", _)
| isBanner ty || isButton ty || isWallSkull ty || isStair ty || isBed ty || isWallTorch ty || ty == "BeeNest" || isChest ty ||
isTrapdoor ty || isGlazedTerracotta ty || isDoor ty || isWallSign ty || isWallFan ty || ty == "Smoker" || isFenceGate ty ||
ty == "BigDripleafStem" || ty == "Repeater" || ty == "Furnace" || ty == "EndPortalFrame" || ty == "Cocoa" || ty == "Bell" ||
ty == "Beehive" || ty == "JackOLantern" || isAnvil ty || ty == "Comparator" || ty == "AttachedMelonStem" || ty == "AttachedPumpkinStem" ||
ty == "Ladder" || ty == "Stonecutter" || ty == "Grindstone" || isDripleaf ty || isCampfire ty || ty == "Lectern" || ty == "Lever" ||
ty == "TripwireHook" || ty == "CarvedPumpkin" || ty == "BlastFurnace" || ty == "Loom" || isHangingSign ty || ty == "ChiseledBookshelf"
= facingNoUpNoDown
| isShulkerBox ty || isCommandBlock ty || ty == "LightningRod" || ty == "Dropper" || ty == "AmethystCluster" || isAmethystBud ty ||
isPiston ty || ty == "Dispenser" || ty == "Barrel" || ty == "EndRod" || ty == "Observer"
= facingUpAndDown
| ty == "Hopper" = facingNoUpDown
| otherwise = error $ "Unknown facing: " <> T.unpack ty
where
facingUpAndDown = AppT (AppT (ConT $ mkName "Facing") (PromotedT $ mkName "True")) (PromotedT $ mkName "True")
facingNoUpNoDown = AppT (AppT (ConT $ mkName "Facing") (PromotedT $ mkName "False")) (PromotedT $ mkName "False")
facingNoUpDown = AppT (AppT (ConT $ mkName "Facing") (PromotedT $ mkName "False")) (PromotedT $ mkName "True")
conFromProps _ ("face", _) = AppT (ConT $ mkName "Face") (PromotedT $ mkName "False")
conFromProps _ ("attachment", _) = AppT (ConT $ mkName "Face") (PromotedT $ mkName "True")
conFromProps ty ("type", _)
| isSlab ty = ConT $ mkName "SlabType"
| isChest ty = ConT $ mkName "ChestType"
| isPiston ty = ConT $ mkName "PistonType"
| otherwise = error $ "Unknown type: " <> T.unpack ty
conFromProps _ ("axis", _) = ConT $ mkName "Axis"
conFromProps _ ("stage", _) = AppT (ConT $ mkName "GrowthStage") (LitT $ NumTyLit 1)
conFromProps _ ("shape", _) = ConT $ mkName "StairShape"
conFromProps _ ("half", _) = ConT $ mkName "Half"
conFromProps _ ("part", _) = ConT $ mkName "BedPart"
conFromProps _ ("occupied", _) = ConT $ mkName "Occupied"
conFromProps _ ("persistent", _) = ConT $ mkName "Persistent"
conFromProps _ ("distance", _) = ConT $ mkName "LeafDistance"
conFromProps _ ("honey_level", _) = ConT $ mkName "HoneyLevel"
conFromProps _ ("lit", _) = ConT $ mkName "Lit"
conFromProps _ ("open", _) = ConT $ mkName "Open"
conFromProps _ ("conditional", _) = ConT $ mkName "Conditional"
conFromProps _ ("candles", _) = ConT $ mkName "Candles"
conFromProps _ ("berries", _) = ConT $ mkName "Berries"
conFromProps _ ("power", _) = ConT $ mkName "Power"
conFromProps _ ("hinge", _) = ConT $ mkName "HingeSide"
conFromProps _ ("snowy", _) = ConT $ mkName "Snowy"
conFromProps _ ("sculk_sensor_phase", _) = ConT $ mkName "SculkSensorPhase"
conFromProps _ ("in_wall", _) = ConT $ mkName "FenceGateInWall"
conFromProps _ ("rotation", _) = ConT $ mkName "Rotation"
conFromProps _ ("unstable", _) = ConT $ mkName "TnTStable"
conFromProps _ ("thickness", _) = ConT $ mkName "DripstoneThickness"
conFromProps _ ("has_record", _) = ConT $ mkName "HasRecord"
conFromProps _ ("orientation", _) = ConT $ mkName "Orientation"
conFromProps _ ("locked", _) = ConT $ mkName "Locked"
conFromProps _ ("delay", _) = ConT $ mkName "Delay"
conFromProps _ ("triggered", _) = ConT $ mkName "Triggered"
conFromProps _ ("eye", _) = ConT $ mkName "HasEye"
conFromProps _ ("inverted", _) = ConT $ mkName "Inverted"
conFromProps _ ("mode", _) = ConT $ mkName "StructureBlockMode"
conFromProps _ ("enabled", _) = ConT $ mkName "Enabled"
conFromProps _ ("short", _) = ConT $ mkName "Short"
conFromProps _ ("extended", _) = ConT $ mkName "Extended"
conFromProps _ ("hanging", _) = ConT $ mkName "Hanging"
conFromProps _ ("charges", _) = ConT $ mkName "Charges"
conFromProps _ ("bottom", _) = ConT $ mkName "Bottom"
conFromProps _ ("pickles", _) = ConT $ mkName "Pickles"
conFromProps _ ("layers", _) = ConT $ mkName "SnowLayers"
conFromProps _ ("bites", _) = ConT $ mkName "Bites"
conFromProps _ ("drag", _) = ConT $ mkName "Drag"
conFromProps _ ("leaves", _) = ConT $ mkName "BambooLeaves"
conFromProps _ ("signal_fire", _) = ConT $ mkName "SignalFire"
conFromProps _ ("has_book", _) = ConT $ mkName "HasBook"
conFromProps _ ("hatch", _) = ConT $ mkName "Hatch"
conFromProps _ ("eggs", _) = ConT $ mkName "Eggs"
conFromProps _ ("attached", _) = ConT $ mkName "WireAttached"
conFromProps _ ("tilt", _) = ConT $ mkName "Tilt"
conFromProps _ ("instrument", _) = ConT $ mkName "Instrument"
conFromProps _ ("note", _) = ConT $ mkName "Note"
conFromProps _ ("disarmed", _) = ConT $ mkName "Disarmed"
conFromProps _ ("moisture", _) = ConT $ mkName "Moisture"
conFromProps _ ("vertical_direction", _) = ConT $ mkName "VerticalDirection"
conFromProps _ ("has_bottle_0", _) = AppT (ConT $ mkName "HasBottle") (LitT $ NumTyLit 0)
conFromProps _ ("has_bottle_1", _) = AppT (ConT $ mkName "HasBottle") (LitT $ NumTyLit 1)
conFromProps _ ("has_bottle_2", _) = AppT (ConT $ mkName "HasBottle") (LitT $ NumTyLit 2)
conFromProps "Composter" ("level", _) = ConT $ mkName "ComposterFill"
conFromProps "Light" ("level", _) = ConT $ mkName "LightLevel"
conFromProps "WaterCauldron" ("level", _) = ConT $ mkName "CauldronFill"
conFromProps "PowderSnowCauldron" ("level", _) = ConT $ mkName "CauldronFill"
conFromProps "Water" ("level", _) = AppT (ConT $ mkName "FluidLevel") (LitT $ NumTyLit 15)
conFromProps "Lava" ("level", _) = AppT (ConT $ mkName "FluidLevel") (LitT $ NumTyLit 15)
conFromProps "MelonStem" ("age", _) = AppT (ConT $ mkName "Age") (LitT $ NumTyLit 7)
conFromProps ty ("age", _)
| isVines ty || ty == "Kelp" = AppT (ConT $ mkName "Age") (LitT $ NumTyLit 25)
| ty == "Fire" || ty == "SugarCane" || ty == "Cactus"
= AppT (ConT $ mkName "Age") (LitT $ NumTyLit 15)
| ty == "Beetroots" || ty == "FrostedIce" || ty == "NetherWart" || ty == "SweetBerryBush" || ty == "MangrovePropagule"
= AppT (ConT $ mkName "Age") (LitT $ NumTyLit 4)
| ty == "Cocoa" = AppT (ConT $ mkName "Age") (LitT $ NumTyLit 2)
| ty == "Bamboo" = AppT (ConT $ mkName "Age") (LitT $ NumTyLit 1)
| ty == "Potatoes" || ty == "PumpkinStem" || ty == "MelonStem" || ty == "Carrots" ||
ty == "Wheat"
= AppT (ConT $ mkName "Age") (LitT $ NumTyLit 7)
| ty == "ChorusFlower" = AppT (ConT $ mkName "Age") (LitT $ NumTyLit 5)
conFromProps "ChiseledBookshelf" (pTy, _)
| pTy == "slot_0_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 0)
| pTy == "slot_1_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 1)
| pTy == "slot_2_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 2)
| pTy == "slot_3_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 3)
| pTy == "slot_4_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 4)
| pTy == "slot_5_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 5)
conFromProps "SculkCatalyst" ("bloom", _) = ConT $ mkName "Bloom"
conFromProps "SculkShrieker" ("can_summon", _) = ConT $ mkName "CanSummon"
conFromProps "SculkShrieker" ("shrieking", _) = ConT $ mkName "Shrieking"
conFromProps ty (pTy, _) = error $ "Unknown datatype: " <> T.unpack ty <> " : " <> T.unpack pTy
attachable :: Text -> Bool
attachable ty =
T.isSuffixOf "GlassPane" ty || ty == "Fire" || T.isSuffixOf "Fence" ty || ty == "ChorusPlant" || ty == "GlowLichen" ||
ty == "Vine" || ty == "IronBars" || ty == "Tripwire" || ty == "SculkVein"
isSlab :: Text -> Bool
isSlab ty = T.isSuffixOf "Slab" ty
isBanner :: Text -> Bool
isBanner ty = T.isSuffixOf "Banner" ty
isButton :: Text -> Bool
isButton ty = T.isSuffixOf "Button" ty
isWall :: Text -> Bool
isWall ty = T.isSuffixOf "Wall" ty
isWallSkull :: Text -> Bool
isWallSkull ty = T.isSuffixOf "WallSkull" ty || T.isSuffixOf "WallHead" ty
isStair :: Text -> Bool
isStair ty = T.isSuffixOf "Stairs" ty
isBed :: Text -> Bool
isBed ty = T.isSuffixOf "Bed" ty
isShulkerBox :: Text -> Bool
isShulkerBox ty = T.isSuffixOf "ShulkerBox" ty
isChest :: Text -> Bool
isChest ty = T.isSuffixOf "Chest" ty
isTrapdoor :: Text -> Bool
isTrapdoor ty = T.isSuffixOf "Trapdoor" ty
isCommandBlock :: Text -> Bool
isCommandBlock ty = T.isSuffixOf "CommandBlock" ty
isMushroomBlock :: Text -> Bool
isMushroomBlock ty = T.isSuffixOf "MushroomBlock" ty || T.isSuffixOf "MushroomStem" ty
isGlazedTerracotta :: Text -> Bool
isGlazedTerracotta ty = T.isSuffixOf "GlazedTerracotta" ty
isVines :: Text -> Bool
isVines ty = T.isSuffixOf "Vines" ty
isDoor :: Text -> Bool
isDoor ty = T.isSuffixOf "Door" ty
isWallSign :: Text -> Bool
isWallSign ty = T.isSuffixOf "WallSign" ty
isWallFan :: Text -> Bool
isWallFan ty = T.isSuffixOf "WallFan" ty
isFenceGate :: Text -> Bool
isFenceGate ty = T.isSuffixOf "FenceGate" ty
isAmethystBud :: Text -> Bool
isAmethystBud ty = T.isSuffixOf "AmethystBud" ty
isPiston :: Text -> Bool
isPiston ty = T.isSuffixOf "Piston" ty || ty == "PistonHead"
isAnvil :: Text -> Bool
isAnvil ty = T.isSuffixOf "Anvil" ty
isWallTorch :: Text -> Bool
isWallTorch ty = T.isSuffixOf "WallTorch" ty
isDripleaf :: Text -> Bool
isDripleaf ty = T.isSuffixOf "Dripleaf" ty
isCampfire :: Text -> Bool
isCampfire ty = T.isSuffixOf "Campfire" ty
isHangingSign :: Text -> Bool
isHangingSign ty = T.isSuffixOf "HangingSign" ty
-- Reading blocks
type BlockEntries = M.Map Text BE.BlockEntry
readBlocks :: IO BlockEntries
readBlocks = fromJust <$> decodeFileStrict' @BlockEntries "./blocks.json"
fromSnakeCase :: Text -> Text
fromSnakeCase t = firstUpperCase $ replaceSnake t
where
replaceSnake t1 = T.concat $ firstUpperCase <$> T.splitOn "_" t1
firstUpperCase :: Text -> Text
firstUpperCase t = case T.uncons t of
Just (c, xs) -> T.cons (toUpper c) xs
Nothing -> t
| null | https://raw.githubusercontent.com/1Jajen1/Brokkr/04aa75433186e461feb7437c3694ace0204c54f8/data/src/Block/Internal/TH.hs | haskell | # LANGUAGE MultiWayIf #
Quick check if we have no hash collisions
We will probably run into one at some point, but that's for future me to worry about
cardinalities and is precomputed at compile time and is the product of the cardinalities of each property that follows this one
cardinalities and prevCardinalities is precomputed at compile time and is the product of the cardinalities of each property that follows this one
TODO Add pragmas to inline these patterns?
Reading blocks | # LANGUAGE DataKinds #
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
# LANGUAGE MagicHash #
module Block.Internal.TH (
generateBlockStatePatterns
, genPaletteMapping
) where
import Data.Text ( Text )
import Data.ByteString ( ByteString )
import Data.Aeson
import Data.Maybe
import qualified Block.Internal.BlockEntry as BE
import Language.Haskell.TH
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Foldable (foldl')
import Data.Char (toUpper)
import qualified Language.Haskell.TH as TH
import Data.List (sortOn)
import Data.Semigroup
import Data.Coerce
import qualified Data.IntSet as IS
import qualified Data.Map.Strict as M
import Data.Hashable
import Data.Word
import GHC.Exts
import Foreign.ForeignPtr
import qualified Data.Vector.Storable as S
Generates two datatypes :
I d to nbt
Vector ( ByteString , [ ( ByteString , ByteString ) ] )
nbt to i d
( ByteString , [ ( ByteString , ByteString ) ] ) - > Int
Generates two datatypes:
Id to nbt
Vector (ByteString, [(ByteString, ByteString)])
nbt to id
(ByteString, [(ByteString, ByteString)]) -> Int
-}
genPaletteMapping :: Q [Dec]
genPaletteMapping = do
entries <- runIO $ readBlocks
let namesAndPropsToId = sortOn fst $ do
(nameSpacedName, BE.BlockEntry{..}) <- sortOn (\(_, BE.BlockEntry{blockStates}) -> BE.stateId $ head blockStates) $ M.toList entries
BE.BlockState{..} <- blockStates
pure (fromIntegral $ hash (T.encodeUtf8 nameSpacedName, maybe [] (sortOn fst . fmap (\(k,v) -> (T.encodeUtf8 k, T.encodeUtf8 v)) . M.toList) stateProperties), fromIntegral stateId)
highestId = maximum $ do
(_, BE.BlockEntry{blockStates}) <- M.toList entries
BE.BlockState{stateId} <- blockStates
pure stateId
setOfHashes = IS.size . IS.fromList $ fmap (fromIntegral . fst) namesAndPropsToId
hLiFptrSz = setOfHashes * 8
vLiFptrSz = setOfHashes * 4
hLitArr = S.fromListN setOfHashes $ fmap fst namesAndPropsToId
valLitArr = S.fromListN setOfHashes $ fmap snd namesAndPropsToId
(hLitFptr, _, _) = S.unsafeToForeignPtr hLitArr
(vLitFptr, _, _) = S.unsafeToForeignPtr valLitArr
error . show $ S.elem 5369188014199616130 hLitArr
let hashLit = litE . BytesPrimL . mkBytes (coerce @(ForeignPtr Word64) hLitFptr) 0 $ fromIntegral hLiFptrSz
valsLit = litE . BytesPrimL . mkBytes (coerce @(ForeignPtr Word32) vLitFptr) 0 $ fromIntegral vLiFptrSz
if (setOfHashes /= length namesAndPropsToId)
then error $ "Expected " <> show (length namesAndPropsToId) <> " but actual is " <> show setOfHashes
else
pure ()
[d|
hashProps :: ByteString -> [(ByteString, ByteString)] -> Int
hashProps !ns !arr = hash (ns, arr)
# INLINE hashProps #
propsToId :: ByteString -> [(ByteString, ByteString)] -> Int
propsToId !n !props = I# (int32ToInt# (loop 0 setOfHashes))
where
!hs = fromIntegral $ hashProps n props
!hsLit = $(hashLit)
!valLit = $(valsLit)
loop !l !u
| u <= l = error $ "Unknown hash" <> show hs <> " for name " <> show n <> " and props " <> show props
| otherwise =
let a = W# (word64ToWord# (indexWord64OffAddr# hsLit mid#))
in case compare a hs of
LT -> loop (mid + 1) u
EQ -> indexInt32OffAddr# valLit mid#
GT -> loop l mid
where
mid@(I# mid#) = (l + u) `div` 2
type HighestBlockStateId = $(pure . LitT . NumTyLit $ fromIntegral highestId)
|]
Generates bidirectional patterns of form :
pattern XXXX : : Arg1 - > ... ArgN - > BlockState
pattern XXXX x1 ... xN < - isStateXXXX - > Just ( x1 , ... , xN ) where
XXXX x1 ... xN = BlockState toBlockStateXXXX
isStateXXXX ( BlockState i ) = if inRange i lowestId highestId then Just ( getArg i 1 , ... , ) else Nothing
getArg i nr = ( i ` mod ` cardinalities ) ` div ` prevCardinalities
toBlockState x1 ... xN = foldl ' ( \acc x - > acc + toId x * cardinalities ) 0 properties
Generates bidirectional patterns of form:
pattern XXXX :: Arg1 -> ... ArgN -> BlockState
pattern XXXX x1 ... xN <- isStateXXXX -> Just (x1, ..., xN) where
XXXX x1 ... xN = BlockState toBlockStateXXXX
isStateXXXX (BlockState i) = if inRange i lowestId highestId then Just (getArg i 1, ..., getArg i n) else Nothing
getArg i nr = (i `mod` cardinalities) `div` prevCardinalities
toBlockState x1 ... xN = foldl' (\acc x -> acc + toId x * cardinalities) 0 properties
-}
generateBlockStatePatterns :: Q [Dec]
generateBlockStatePatterns = do
entries <- runIO $ readBlocks
sequence $ do
(namespacedName, BE.BlockEntry{..}) <- M.toList entries
let name = fromSnakeCase $ T.drop 10 namespacedName
pName = mkName $ T.unpack name
let props = sortOn fst $ maybe [] M.toList blockProperties
propsDown = reverse props
lowId :: Int = coerce $ foldMap (\BE.BlockState{..} -> Min stateId) blockStates
highId :: Int = 1 + (coerce $ foldMap (\BE.BlockState{..} -> Max stateId) blockStates)
patType = foldl' (\ty x -> AppT (AppT ArrowT $ conFromProps name x) ty) (ConT $ mkName "BlockState") props
args = zipWith (\x _ -> mkName $ "x" <> show x) [1..] props
lN = mkName "l"
toArgs = [| \n -> if inRange (coerce n) lowId highId then let ($(varP lN) :: Int) = coerce n - lowId in Just $(mkTup) else Nothing |]
fromArgs = foldl' (\x (nr, arg) -> [| $(x) + (toId $(varE arg)) * $(getCard $ drop nr propsDown) |]) [| 0 |] $ zip [1..] args
viewPat = if | length props == 1 -> varP $ mkName "x1"
| otherwise -> tupP $ fmap varP args
constrPat = varP <$> args
getCard ps = foldl' (\x y -> [| $(x) * $(getCard1 y) |]) [| 1 |] ps
getCard1 prop = appTypeE [| cardinality |] . pure $ conFromProps name prop
mkTup = if | length props == 1 -> [| fromId $(varE lN) |]
| null props -> [| () |]
| otherwise -> tupE $ do
(nr, _) <- zip [1..] args
pure [| fromId $ ($(varE lN) `mod` $(getCard $ drop (nr - 1) propsDown)) `div` $(getCard $ drop nr propsDown) |]
[ patSynSigD pName (pure patType)
, patSynD pName
(prefixPatSyn args)
(explBidir [ clause constrPat (normalB [| BlockState $ $(fromArgs) |]) [] ])
(parensP $ viewP toArgs $ conP (mkName "Just") [viewPat])
]
conFromProps :: Text -> (Text, [Text]) -> TH.Type
conFromProps ty ("north", _)
| attachable ty = AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "False")) (PromotedT $ mkName "False")) (PromotedT $ mkName "North")
| isWall ty = AppT (ConT . mkName $ T.unpack "WallAttached") (PromotedT $ mkName "North")
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "North")
| ty == "RedstoneWire" = AppT (ConT . mkName $ T.unpack "RedstonePlacement") (PromotedT $ mkName "North")
| otherwise = error $ "Unknown north: " <> T.unpack ty
conFromProps ty ("south", _)
| attachable ty = AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "False")) (PromotedT $ mkName "False")) (PromotedT $ mkName "South")
| isWall ty = AppT (ConT . mkName $ T.unpack "WallAttached") (PromotedT $ mkName "South")
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "South")
| ty == "RedstoneWire" = AppT (ConT . mkName $ T.unpack "RedstonePlacement") (PromotedT $ mkName "South")
| otherwise = error $ "Unknown south: " <> T.unpack ty
conFromProps ty ("east", _)
| attachable ty = AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "False")) (PromotedT $ mkName "False")) (PromotedT $ mkName "East")
| isWall ty = AppT (ConT . mkName $ T.unpack "WallAttached") (PromotedT $ mkName "East")
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "East")
| ty == "RedstoneWire" = AppT (ConT . mkName $ T.unpack "RedstonePlacement") (PromotedT $ mkName "East")
| otherwise = error $ "Unknown east: " <> T.unpack ty
conFromProps ty ("west", _)
| attachable ty = AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "False")) (PromotedT $ mkName "False")) (PromotedT $ mkName "West")
| isWall ty = AppT (ConT . mkName $ T.unpack "WallAttached") (PromotedT $ mkName "West")
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "West")
| ty == "RedstoneWire" = AppT (ConT . mkName $ T.unpack "RedstonePlacement") (PromotedT $ mkName "West")
| otherwise = error $ "Unknown west: " <> T.unpack ty
conFromProps ty ("up", _)
| isWall ty || ty == "Fire" || ty == "ChorusPlant" || ty == "GlowLichen" || ty == "Vine" || ty == "SculkVein"
= AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "True")) (PromotedT $ mkName "False")) (PromotedT $ mkName "Up")
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "Up")
| otherwise = error $ "Unknown up: " <> T.unpack ty
conFromProps ty ("down", _)
| isMushroomBlock ty = AppT (ConT . mkName $ T.unpack "MushroomExposed") (PromotedT $ mkName "Down")
| ty == "ChorusPlant" || ty == "GlowLichen" || ty == "SculkVein" = AppT (AppT (AppT (ConT . mkName $ T.unpack "Attached") (PromotedT $ mkName "False")) (PromotedT $ mkName "True")) (PromotedT $ mkName "Down")
| otherwise = error $ "Unknown down: " <> T.unpack ty
conFromProps _ ("waterlogged", _) = ConT $ mkName "Waterlogged"
conFromProps _ ("powered", _) = ConT $ mkName "Powered"
conFromProps ty ("facing", _)
| isBanner ty || isButton ty || isWallSkull ty || isStair ty || isBed ty || isWallTorch ty || ty == "BeeNest" || isChest ty ||
isTrapdoor ty || isGlazedTerracotta ty || isDoor ty || isWallSign ty || isWallFan ty || ty == "Smoker" || isFenceGate ty ||
ty == "BigDripleafStem" || ty == "Repeater" || ty == "Furnace" || ty == "EndPortalFrame" || ty == "Cocoa" || ty == "Bell" ||
ty == "Beehive" || ty == "JackOLantern" || isAnvil ty || ty == "Comparator" || ty == "AttachedMelonStem" || ty == "AttachedPumpkinStem" ||
ty == "Ladder" || ty == "Stonecutter" || ty == "Grindstone" || isDripleaf ty || isCampfire ty || ty == "Lectern" || ty == "Lever" ||
ty == "TripwireHook" || ty == "CarvedPumpkin" || ty == "BlastFurnace" || ty == "Loom" || isHangingSign ty || ty == "ChiseledBookshelf"
= facingNoUpNoDown
| isShulkerBox ty || isCommandBlock ty || ty == "LightningRod" || ty == "Dropper" || ty == "AmethystCluster" || isAmethystBud ty ||
isPiston ty || ty == "Dispenser" || ty == "Barrel" || ty == "EndRod" || ty == "Observer"
= facingUpAndDown
| ty == "Hopper" = facingNoUpDown
| otherwise = error $ "Unknown facing: " <> T.unpack ty
where
facingUpAndDown = AppT (AppT (ConT $ mkName "Facing") (PromotedT $ mkName "True")) (PromotedT $ mkName "True")
facingNoUpNoDown = AppT (AppT (ConT $ mkName "Facing") (PromotedT $ mkName "False")) (PromotedT $ mkName "False")
facingNoUpDown = AppT (AppT (ConT $ mkName "Facing") (PromotedT $ mkName "False")) (PromotedT $ mkName "True")
conFromProps _ ("face", _) = AppT (ConT $ mkName "Face") (PromotedT $ mkName "False")
conFromProps _ ("attachment", _) = AppT (ConT $ mkName "Face") (PromotedT $ mkName "True")
conFromProps ty ("type", _)
| isSlab ty = ConT $ mkName "SlabType"
| isChest ty = ConT $ mkName "ChestType"
| isPiston ty = ConT $ mkName "PistonType"
| otherwise = error $ "Unknown type: " <> T.unpack ty
conFromProps _ ("axis", _) = ConT $ mkName "Axis"
conFromProps _ ("stage", _) = AppT (ConT $ mkName "GrowthStage") (LitT $ NumTyLit 1)
conFromProps _ ("shape", _) = ConT $ mkName "StairShape"
conFromProps _ ("half", _) = ConT $ mkName "Half"
conFromProps _ ("part", _) = ConT $ mkName "BedPart"
conFromProps _ ("occupied", _) = ConT $ mkName "Occupied"
conFromProps _ ("persistent", _) = ConT $ mkName "Persistent"
conFromProps _ ("distance", _) = ConT $ mkName "LeafDistance"
conFromProps _ ("honey_level", _) = ConT $ mkName "HoneyLevel"
conFromProps _ ("lit", _) = ConT $ mkName "Lit"
conFromProps _ ("open", _) = ConT $ mkName "Open"
conFromProps _ ("conditional", _) = ConT $ mkName "Conditional"
conFromProps _ ("candles", _) = ConT $ mkName "Candles"
conFromProps _ ("berries", _) = ConT $ mkName "Berries"
conFromProps _ ("power", _) = ConT $ mkName "Power"
conFromProps _ ("hinge", _) = ConT $ mkName "HingeSide"
conFromProps _ ("snowy", _) = ConT $ mkName "Snowy"
conFromProps _ ("sculk_sensor_phase", _) = ConT $ mkName "SculkSensorPhase"
conFromProps _ ("in_wall", _) = ConT $ mkName "FenceGateInWall"
conFromProps _ ("rotation", _) = ConT $ mkName "Rotation"
conFromProps _ ("unstable", _) = ConT $ mkName "TnTStable"
conFromProps _ ("thickness", _) = ConT $ mkName "DripstoneThickness"
conFromProps _ ("has_record", _) = ConT $ mkName "HasRecord"
conFromProps _ ("orientation", _) = ConT $ mkName "Orientation"
conFromProps _ ("locked", _) = ConT $ mkName "Locked"
conFromProps _ ("delay", _) = ConT $ mkName "Delay"
conFromProps _ ("triggered", _) = ConT $ mkName "Triggered"
conFromProps _ ("eye", _) = ConT $ mkName "HasEye"
conFromProps _ ("inverted", _) = ConT $ mkName "Inverted"
conFromProps _ ("mode", _) = ConT $ mkName "StructureBlockMode"
conFromProps _ ("enabled", _) = ConT $ mkName "Enabled"
conFromProps _ ("short", _) = ConT $ mkName "Short"
conFromProps _ ("extended", _) = ConT $ mkName "Extended"
conFromProps _ ("hanging", _) = ConT $ mkName "Hanging"
conFromProps _ ("charges", _) = ConT $ mkName "Charges"
conFromProps _ ("bottom", _) = ConT $ mkName "Bottom"
conFromProps _ ("pickles", _) = ConT $ mkName "Pickles"
conFromProps _ ("layers", _) = ConT $ mkName "SnowLayers"
conFromProps _ ("bites", _) = ConT $ mkName "Bites"
conFromProps _ ("drag", _) = ConT $ mkName "Drag"
conFromProps _ ("leaves", _) = ConT $ mkName "BambooLeaves"
conFromProps _ ("signal_fire", _) = ConT $ mkName "SignalFire"
conFromProps _ ("has_book", _) = ConT $ mkName "HasBook"
conFromProps _ ("hatch", _) = ConT $ mkName "Hatch"
conFromProps _ ("eggs", _) = ConT $ mkName "Eggs"
conFromProps _ ("attached", _) = ConT $ mkName "WireAttached"
conFromProps _ ("tilt", _) = ConT $ mkName "Tilt"
conFromProps _ ("instrument", _) = ConT $ mkName "Instrument"
conFromProps _ ("note", _) = ConT $ mkName "Note"
conFromProps _ ("disarmed", _) = ConT $ mkName "Disarmed"
conFromProps _ ("moisture", _) = ConT $ mkName "Moisture"
conFromProps _ ("vertical_direction", _) = ConT $ mkName "VerticalDirection"
conFromProps _ ("has_bottle_0", _) = AppT (ConT $ mkName "HasBottle") (LitT $ NumTyLit 0)
conFromProps _ ("has_bottle_1", _) = AppT (ConT $ mkName "HasBottle") (LitT $ NumTyLit 1)
conFromProps _ ("has_bottle_2", _) = AppT (ConT $ mkName "HasBottle") (LitT $ NumTyLit 2)
conFromProps "Composter" ("level", _) = ConT $ mkName "ComposterFill"
conFromProps "Light" ("level", _) = ConT $ mkName "LightLevel"
conFromProps "WaterCauldron" ("level", _) = ConT $ mkName "CauldronFill"
conFromProps "PowderSnowCauldron" ("level", _) = ConT $ mkName "CauldronFill"
conFromProps "Water" ("level", _) = AppT (ConT $ mkName "FluidLevel") (LitT $ NumTyLit 15)
conFromProps "Lava" ("level", _) = AppT (ConT $ mkName "FluidLevel") (LitT $ NumTyLit 15)
conFromProps "MelonStem" ("age", _) = AppT (ConT $ mkName "Age") (LitT $ NumTyLit 7)
conFromProps ty ("age", _)
| isVines ty || ty == "Kelp" = AppT (ConT $ mkName "Age") (LitT $ NumTyLit 25)
| ty == "Fire" || ty == "SugarCane" || ty == "Cactus"
= AppT (ConT $ mkName "Age") (LitT $ NumTyLit 15)
| ty == "Beetroots" || ty == "FrostedIce" || ty == "NetherWart" || ty == "SweetBerryBush" || ty == "MangrovePropagule"
= AppT (ConT $ mkName "Age") (LitT $ NumTyLit 4)
| ty == "Cocoa" = AppT (ConT $ mkName "Age") (LitT $ NumTyLit 2)
| ty == "Bamboo" = AppT (ConT $ mkName "Age") (LitT $ NumTyLit 1)
| ty == "Potatoes" || ty == "PumpkinStem" || ty == "MelonStem" || ty == "Carrots" ||
ty == "Wheat"
= AppT (ConT $ mkName "Age") (LitT $ NumTyLit 7)
| ty == "ChorusFlower" = AppT (ConT $ mkName "Age") (LitT $ NumTyLit 5)
conFromProps "ChiseledBookshelf" (pTy, _)
| pTy == "slot_0_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 0)
| pTy == "slot_1_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 1)
| pTy == "slot_2_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 2)
| pTy == "slot_3_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 3)
| pTy == "slot_4_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 4)
| pTy == "slot_5_occupied" = AppT (ConT $ mkName "SlotOccupied") (LitT $ NumTyLit 5)
conFromProps "SculkCatalyst" ("bloom", _) = ConT $ mkName "Bloom"
conFromProps "SculkShrieker" ("can_summon", _) = ConT $ mkName "CanSummon"
conFromProps "SculkShrieker" ("shrieking", _) = ConT $ mkName "Shrieking"
conFromProps ty (pTy, _) = error $ "Unknown datatype: " <> T.unpack ty <> " : " <> T.unpack pTy
attachable :: Text -> Bool
attachable ty =
T.isSuffixOf "GlassPane" ty || ty == "Fire" || T.isSuffixOf "Fence" ty || ty == "ChorusPlant" || ty == "GlowLichen" ||
ty == "Vine" || ty == "IronBars" || ty == "Tripwire" || ty == "SculkVein"
isSlab :: Text -> Bool
isSlab ty = T.isSuffixOf "Slab" ty
isBanner :: Text -> Bool
isBanner ty = T.isSuffixOf "Banner" ty
isButton :: Text -> Bool
isButton ty = T.isSuffixOf "Button" ty
isWall :: Text -> Bool
isWall ty = T.isSuffixOf "Wall" ty
isWallSkull :: Text -> Bool
isWallSkull ty = T.isSuffixOf "WallSkull" ty || T.isSuffixOf "WallHead" ty
isStair :: Text -> Bool
isStair ty = T.isSuffixOf "Stairs" ty
isBed :: Text -> Bool
isBed ty = T.isSuffixOf "Bed" ty
isShulkerBox :: Text -> Bool
isShulkerBox ty = T.isSuffixOf "ShulkerBox" ty
isChest :: Text -> Bool
isChest ty = T.isSuffixOf "Chest" ty
isTrapdoor :: Text -> Bool
isTrapdoor ty = T.isSuffixOf "Trapdoor" ty
isCommandBlock :: Text -> Bool
isCommandBlock ty = T.isSuffixOf "CommandBlock" ty
isMushroomBlock :: Text -> Bool
isMushroomBlock ty = T.isSuffixOf "MushroomBlock" ty || T.isSuffixOf "MushroomStem" ty
isGlazedTerracotta :: Text -> Bool
isGlazedTerracotta ty = T.isSuffixOf "GlazedTerracotta" ty
isVines :: Text -> Bool
isVines ty = T.isSuffixOf "Vines" ty
isDoor :: Text -> Bool
isDoor ty = T.isSuffixOf "Door" ty
isWallSign :: Text -> Bool
isWallSign ty = T.isSuffixOf "WallSign" ty
isWallFan :: Text -> Bool
isWallFan ty = T.isSuffixOf "WallFan" ty
isFenceGate :: Text -> Bool
isFenceGate ty = T.isSuffixOf "FenceGate" ty
isAmethystBud :: Text -> Bool
isAmethystBud ty = T.isSuffixOf "AmethystBud" ty
isPiston :: Text -> Bool
isPiston ty = T.isSuffixOf "Piston" ty || ty == "PistonHead"
isAnvil :: Text -> Bool
isAnvil ty = T.isSuffixOf "Anvil" ty
isWallTorch :: Text -> Bool
isWallTorch ty = T.isSuffixOf "WallTorch" ty
isDripleaf :: Text -> Bool
isDripleaf ty = T.isSuffixOf "Dripleaf" ty
isCampfire :: Text -> Bool
isCampfire ty = T.isSuffixOf "Campfire" ty
isHangingSign :: Text -> Bool
isHangingSign ty = T.isSuffixOf "HangingSign" ty
type BlockEntries = M.Map Text BE.BlockEntry
readBlocks :: IO BlockEntries
readBlocks = fromJust <$> decodeFileStrict' @BlockEntries "./blocks.json"
fromSnakeCase :: Text -> Text
fromSnakeCase t = firstUpperCase $ replaceSnake t
where
replaceSnake t1 = T.concat $ firstUpperCase <$> T.splitOn "_" t1
firstUpperCase :: Text -> Text
firstUpperCase t = case T.uncons t of
Just (c, xs) -> T.cons (toUpper c) xs
Nothing -> t
|
e71035acb286cb06fee6a8629b078cae31385b98251f88e871d27d196a5fdb02 | raptazure/experiments | Common.hs | # LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
-- | Common handler functions.
module Handler.Common where
import Data.FileEmbed (embedFile)
import Import
-- These handlers embed files in the executable at compile time to avoid a
-- runtime dependency, and for efficiency.
getFaviconR :: Handler TypedContent
cache for a month
return $ TypedContent "image/x-icon"
$ toContent $(embedFile "config/favicon.ico")
getRobotsR :: Handler TypedContent
getRobotsR = return $ TypedContent typePlain
$ toContent $(embedFile "config/robots.txt")
| null | https://raw.githubusercontent.com/raptazure/experiments/c48263980d1ce22ee9407ff8dcf0cf5091b01c70/haskell/yesodapp/src/Handler/Common.hs | haskell | # LANGUAGE OverloadedStrings #
| Common handler functions.
These handlers embed files in the executable at compile time to avoid a
runtime dependency, and for efficiency. | # LANGUAGE NoImplicitPrelude #
# LANGUAGE TemplateHaskell #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
module Handler.Common where
import Data.FileEmbed (embedFile)
import Import
getFaviconR :: Handler TypedContent
cache for a month
return $ TypedContent "image/x-icon"
$ toContent $(embedFile "config/favicon.ico")
getRobotsR :: Handler TypedContent
getRobotsR = return $ TypedContent typePlain
$ toContent $(embedFile "config/robots.txt")
|
4a5be34ad5289177a0f5d7f4ac68c02b0600c8a1d9377b52fb138ac4f34d1f63 | cs136/seashell | http-dispatchers.rkt | #lang racket/base
Seashell 's backend server .
Copyright ( C ) 2013 - 2015 The Seashell Maintainers .
;;
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; See also 'ADDITIONAL TERMS' at the end of the included LICENSE file.
;;
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;a;
You should have received a copy of the GNU General Public License
;; along with this program. If not, see </>.
(require
racket/contract
racket/string
racket/match
json
net/url
seashell/log
seashell/backend/authenticate
seashell/backend/project
seashell/backend/files
seashell/seashell-config
web-server/http/xexpr
web-server/http/response-structs
web-server/http/request-structs
web-server/dispatchers/dispatch
web-server/private/connection-manager
(prefix-in lift: web-server/dispatchers/dispatch-lift)
(prefix-in log: web-server/dispatchers/dispatch-log))
(provide request-logging-dispatcher
standard-error-dispatcher)
;; Default headers.
(define default-headers
`(,(make-header #"Server" (string->bytes/utf-8 (format "Seashell/~a" SEASHELL_VERSION)))
,(make-header #"Access-Control-Allow-Origin" #"*")))
;; Default footer.
(define (make-default-footer request)
`((hr)
(address ,(format "Seashell/~a running on Racket ~a on ~a"
SEASHELL_VERSION (version) (request-host-ip request)))))
;; (make-headers name value ...)
;; Creates the HTTP response headers.
;;
;; Arguments:
;; headers -> List of pairs of name/value bytes.
;; Returns:
;; List of HTTP headers.
(define make-headers
(case-lambda
[(name value . rest)
(cons (make-header name value) (apply make-headers rest))]
[()
default-headers]))
;; (request-logging-dispatcher) -> (void?)
;; Dispatcher that logs all incoming requests to the standard log.
(define/contract (request-logging-dispatcher connection request)
(-> connection? request? void?)
(logf 'info (string-trim (log:apache-default-format request)))
(next-dispatcher))
;; (standard-empty-response request?) -> response?
Sends the standard Seashell empty response .
(define/contract (standard-empty-response request)
(-> request? response?)
(response/xexpr
`(html
(head
(title "Nothing here."))
(body
(h1 "So down")
,@(make-default-footer request)))
#:code 200
#:headers (make-headers)
#:message #"OK"
#:preamble #"<!DOCTYPE HTML>"))
;; (standard-error-page request?) -> response?
Sends the standard Seashell error page .
(define/contract (standard-error-page request)
(-> request? response?)
(response/xexpr
`(html
(head
(title "404 Not Found"))
(body
(h1 "Not Found")
(p ,(format "The requested URL ~a was not found on this server." (url->string (request-uri request))))
,@(make-default-footer request)))
#:code 404
#:headers (make-headers)
#:message #"Not Found"
#:preamble #"<!DOCTYPE HTML>"))
(define standard-error-dispatcher (lift:make standard-error-page))
;; (standard-unauthenticated-page exn request?) -> response?
Sends the standard Seashell unauthenticated page .
(define/contract (standard-unauthorized-page exn request)
(-> exn? request? response?)
(response/xexpr
`(html
(head
(title "403 Forbidden"))
(body
(h1 "Forbidden")
(p ,(format "You are not authorized to view the request URL ~a.
The reason provided was: ~a" (url->string (request-uri request))
(exn-message exn)))
,@(make-default-footer request)))
#:code 403
#:message #"Forbidden"
#:headers (make-headers)
#:preamble #"<!DOCTYPE HTML>"))
;; (standard-server-error-page exn request?) -> response?
Sends the standard Seashell server error page .
(define/contract (standard-server-error-page exn request)
(-> exn? request? response?)
(logf 'error "Error in handling ~a ~a: ~a." (request-method request)
(url->string (request-uri request)) (exn-message exn))
(response/xexpr
`(html
(head
(title "500 Internal Server Error"))
(body
(h1 "Internal Server Error")
(p ,(format "An internal server error was encountered while processing your request for URL ~a." (url->string (request-uri request))))
,@(if (and (read-config 'debug) exn)
`((hr)
(pre
,(format "Message: ~a\n" (exn-message exn))
,(format-stack-trace (exn-continuation-marks exn))))
'(""))
,@(make-default-footer request)))
#:code 500
#:message #"Internal Server Error"
#:headers (make-headers)
#:preamble #"<!DOCTYPE HTML>"))
| null | https://raw.githubusercontent.com/cs136/seashell/17cc2b0a6d2cdac270d7168e03aa5fed88f9eb02/src/collects/seashell/backend/http-dispatchers.rkt | racket |
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
See also 'ADDITIONAL TERMS' at the end of the included LICENSE file.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
a;
along with this program. If not, see </>.
Default headers.
Default footer.
(make-headers name value ...)
Creates the HTTP response headers.
Arguments:
headers -> List of pairs of name/value bytes.
Returns:
List of HTTP headers.
(request-logging-dispatcher) -> (void?)
Dispatcher that logs all incoming requests to the standard log.
(standard-empty-response request?) -> response?
(standard-error-page request?) -> response?
(standard-unauthenticated-page exn request?) -> response?
(standard-server-error-page exn request?) -> response? | #lang racket/base
Seashell 's backend server .
Copyright ( C ) 2013 - 2015 The Seashell Maintainers .
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(require
racket/contract
racket/string
racket/match
json
net/url
seashell/log
seashell/backend/authenticate
seashell/backend/project
seashell/backend/files
seashell/seashell-config
web-server/http/xexpr
web-server/http/response-structs
web-server/http/request-structs
web-server/dispatchers/dispatch
web-server/private/connection-manager
(prefix-in lift: web-server/dispatchers/dispatch-lift)
(prefix-in log: web-server/dispatchers/dispatch-log))
(provide request-logging-dispatcher
standard-error-dispatcher)
(define default-headers
`(,(make-header #"Server" (string->bytes/utf-8 (format "Seashell/~a" SEASHELL_VERSION)))
,(make-header #"Access-Control-Allow-Origin" #"*")))
(define (make-default-footer request)
`((hr)
(address ,(format "Seashell/~a running on Racket ~a on ~a"
SEASHELL_VERSION (version) (request-host-ip request)))))
(define make-headers
(case-lambda
[(name value . rest)
(cons (make-header name value) (apply make-headers rest))]
[()
default-headers]))
(define/contract (request-logging-dispatcher connection request)
(-> connection? request? void?)
(logf 'info (string-trim (log:apache-default-format request)))
(next-dispatcher))
Sends the standard Seashell empty response .
(define/contract (standard-empty-response request)
(-> request? response?)
(response/xexpr
`(html
(head
(title "Nothing here."))
(body
(h1 "So down")
,@(make-default-footer request)))
#:code 200
#:headers (make-headers)
#:message #"OK"
#:preamble #"<!DOCTYPE HTML>"))
Sends the standard Seashell error page .
(define/contract (standard-error-page request)
(-> request? response?)
(response/xexpr
`(html
(head
(title "404 Not Found"))
(body
(h1 "Not Found")
(p ,(format "The requested URL ~a was not found on this server." (url->string (request-uri request))))
,@(make-default-footer request)))
#:code 404
#:headers (make-headers)
#:message #"Not Found"
#:preamble #"<!DOCTYPE HTML>"))
(define standard-error-dispatcher (lift:make standard-error-page))
Sends the standard Seashell unauthenticated page .
(define/contract (standard-unauthorized-page exn request)
(-> exn? request? response?)
(response/xexpr
`(html
(head
(title "403 Forbidden"))
(body
(h1 "Forbidden")
(p ,(format "You are not authorized to view the request URL ~a.
The reason provided was: ~a" (url->string (request-uri request))
(exn-message exn)))
,@(make-default-footer request)))
#:code 403
#:message #"Forbidden"
#:headers (make-headers)
#:preamble #"<!DOCTYPE HTML>"))
Sends the standard Seashell server error page .
(define/contract (standard-server-error-page exn request)
(-> exn? request? response?)
(logf 'error "Error in handling ~a ~a: ~a." (request-method request)
(url->string (request-uri request)) (exn-message exn))
(response/xexpr
`(html
(head
(title "500 Internal Server Error"))
(body
(h1 "Internal Server Error")
(p ,(format "An internal server error was encountered while processing your request for URL ~a." (url->string (request-uri request))))
,@(if (and (read-config 'debug) exn)
`((hr)
(pre
,(format "Message: ~a\n" (exn-message exn))
,(format-stack-trace (exn-continuation-marks exn))))
'(""))
,@(make-default-footer request)))
#:code 500
#:message #"Internal Server Error"
#:headers (make-headers)
#:preamble #"<!DOCTYPE HTML>"))
|
886a390832abedd6f0509627c6a2e296d3be7b731ac63e5a8c9e8ffbf664e51d | project-oak/hafnium-verification | interproc.mli |
* Copyright ( c ) 2009 - 2013 , Monoidics ltd .
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) 2009-2013, Monoidics ltd.
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
(** Interprocedural Analysis *)
val analyze_procedure : Callbacks.proc_callback_t
(** Run the biabduction analysis on the given procedure *)
| null | https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/infer/src/biabduction/interproc.mli | ocaml | * Interprocedural Analysis
* Run the biabduction analysis on the given procedure |
* Copyright ( c ) 2009 - 2013 , Monoidics ltd .
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) 2009-2013, Monoidics ltd.
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
val analyze_procedure : Callbacks.proc_callback_t
|
0bb4b83db21004e277f043c36bc9fd5eb8946427989682cf8896db7741da1b52 | disco-framework/disco | pw_app.erl | -module(pw_app).
-behavior(application).
-export([start/2,
stop/1]).
-export([solve/4,
printer/0]).
-define(PROVIDERS, [provider_a, provider_b]).
start(_Type, _Args) ->
Numbers = parse_line(io:get_line([])),
Target = parse_line(io:get_line([])),
PrinterPid = spawn(?MODULE, printer, []),
register(printer, PrinterPid),
debug("finished parsing"),
_ = [subs:spawn_provider(Name, Numbers, 1) || Name <- ?PROVIDERS],
NumSchedulers = erlang:system_info(schedulers_online),
start_solvers(NumSchedulers div 2 + 1, Target, provider_a, with_outer_div),
start_solvers(NumSchedulers div 2 - 1, Target, provider_b, without_outer_div),
receive
nothing_whatsoever -> ok
end,
{ok, self()}.
stop(_State) ->
ok.
-spec debug(string()) -> ok.
debug(S) ->
printer ! {debug, self(), S},
ok.
-spec debug(string(), [term()]) -> ok.
debug(Format, Args) ->
printer ! {debug, self(), Format, Args},
ok.
-spec start_solvers(non_neg_integer(), pos_integer(), atom(), constraint()) -> ok.
start_solvers(0, _, _, _) ->
ok;
start_solvers(Num, Target, Provider, Constraint) ->
spawn(?MODULE, solve, [Target, Provider, Constraint, []]),
start_solvers(Num-1, Target, Provider, Constraint).
-spec parse_line(string()) -> term().
parse_line(Line) ->
TermString = lists:sublist(Line, length(Line)-1) ++ ".",
{ok, Tokens, _Rest} = erl_scan:string(TermString),
{ok, Term} = erl_parse:parse_term(Tokens),
Term.
-type operator() :: '+' | '-' | '*' | '/' | '<+>' | '<->' | '<*>'.
-type expr() :: {val, pos_integer()} | {app, operator(), expr(), expr()}.
-type val_comp() :: {Value :: pos_integer(), Complexity :: pos_integer()}.
-type result() :: {expr(), val_comp()}.
-type constraint() :: with_outer_div | without_outer_div | none.
-ifdef(modulo).
-define(OPS, ['<+>', '/', '<*>', '+', '<->', '*', '-', '%']).
-else.
-define(OPS, ['<+>', '/', '<*>', '+', '<->', '*', '-']).
-endif.
%-define(OPS, ['<+>', '/', '<*>', '+', '<->']).
-spec valid(operator(), pos_integer(), pos_integer()) -> boolean().
valid('+', A, B) -> A >= B;
valid('-', A, B) -> A > B;
valid('*', A, B) -> A >= B;
valid('/', A, B) -> A rem B == 0;
valid('<+>', A, B) -> A >= B;
valid('<->', A, B) -> A > B;
valid('<*>', A, B) -> A > B;
valid('%', A, B) -> A rem B /= 0.
-spec appl(operator(), pos_integer(), pos_integer()) -> pos_integer().
appl('+', A, B) -> A+B;
appl('-', A, B) -> A-B;
appl('*', A, B) -> A*B;
appl('/', A, B) -> A div B;
appl('<+>', A, B) -> C = A+B, C*C;
appl('<->', A, B) -> C = A-B, C*C;
appl('<*>', A, B) -> (A+B)*(A-B);
appl('%', A, B) -> A rem B.
-spec interleave(A, [A]) -> [[A]].
interleave(X, []) -> [[X]];
interleave(X, [X|T]) -> lists:map(fun(L) -> [X|L] end, interleave(X, T));
interleave(X, Y=[H|T]) -> [[X|Y]|lists:map(fun(L) -> [H|L] end, interleave(X, T))].
TODO do not permute same numbers ( [ 1,1,1,1,1,1,1,1,1 ] )
beziehungsweise : keine doppelten , bitte .
evtl auch für subs interessant
-spec perms([A]) -> [[A]].
perms(L) ->
uniq(perms_(L)).
perms_([]) -> [[]];
perms_([H|T]) -> lists:flatmap(fun(Perm) -> interleave(H, Perm) end, perms_(T)).
-spec choices([[A]]) -> [[A]].
choices(Subs) -> lists:flatmap(fun perms/1, Subs).
-spec split([A]) -> [{[A], [A]}].
split([]) -> [];
split([_]) -> [];
split([H|T]) -> [{[H], T}|[{[H|Ls], Rs} || {Ls, Rs} <- split(T)]].
%%% TODO: try depth-first
-spec results([pos_integer()]) -> [result()].
results([]) -> [];
results([N]) -> [{{val, N}, {N, N}}];
results(Ns) ->
[Res || {Ls, Rs} <- split(Ns),
LX <- results(Ls),
RY <- results(Rs),
Res <- combine_(LX, RY, ?OPS)].
-spec results_wo_outer_div([pos_integer()]) -> [result()].
results_wo_outer_div([]) -> [];
results_wo_outer_div([N]) -> [{{val, N}, {N, N}}];
results_wo_outer_div(Ns) ->
[Res || {Ls, Rs} <- split(Ns),
LX <- results(Ls),
RY <- results(Rs),
Res <- combine_(LX, RY, ?OPS -- ['/'])].
-spec results_w_outer_div([pos_integer()]) -> [result()].
results_w_outer_div([]) -> [];
results_w_outer_div([N]) -> [{{val, N}, {N, N}}];
results_w_outer_div(Ns) ->
[Res || {Ls, Rs} <- split(Ns),
LX <- results(Ls),
RY <- results(Rs),
Res <- combine_(LX, RY, ['/'])].
-spec combine_(result(), result(), [operator()]) -> [result()].
combine_({L,X={XVal, _}},{R,Y={YVal, _}}, Operators) ->
[{{app, Op, L, R}, apply_comp(Op, X, Y)} ||
Op <- Operators,
valid(Op, XVal, YVal)].
-spec apply_comp(operator(), val_comp(), val_comp()) -> val_comp().
apply_comp(Op, {XVal, XComp}, {YVal, YComp}) ->
Val = appl(Op, XVal, YVal),
{Val, Val + XComp + YComp}.
TODO : try spawning a process for second half of choices
-spec gen_solutions(pos_integer(), constraint(), [[pos_integer()]]) -> ok.
gen_solutions(Target, Constraint, Subs) ->
Self = self(),
ResultFun = case Constraint of
none -> fun results/1;
without_outer_div -> fun results_wo_outer_div/1;
with_outer_div -> fun results_w_outer_div/1
end,
lists:foreach(fun(Choice) ->
Results = [{Expr, Comp} || {Expr, {Val, Comp}} <- ResultFun(Choice),
Val == Target],
lists:foreach(fun(S) -> printer ! {print, Self, S} end, Results)
end, choices(Subs)),
ok.
-spec other_constraint(constraint()) -> constraint().
other_constraint(with_outer_div) -> without_outer_div;
other_constraint(without_outer_div) -> with_outer_div;
other_constraint(none) -> none.
-spec solve(pos_integer(), atom(), constraint(), [atom()]) -> ok.
solve(Target, Provider, Constraint, EmptyProviders) ->
case subs:get_package(Provider) of
[] -> % provider empty
%% try to switch to another provider
NewEmptyProviders = EmptyProviders ++ [Provider],
case ?PROVIDERS -- NewEmptyProviders of
[NewProvider|_] ->
solve(Target, NewProvider, other_constraint(Constraint), NewEmptyProviders);
_ ->
%% just block, so the printer process has time to finish
receive
none -> ok
end
end;
SubPackage ->
gen_solutions(Target, Constraint, SubPackage),
solve(Target, Provider, Constraint, EmptyProviders)
end.
-spec expr_to_s(expr()) -> string().
expr_to_s({val, N}) -> integer_to_list(N);
expr_to_s({app, Op, A, B}) ->
"(" ++ expr_to_s(A) ++ " " ++ atom_to_list(Op) ++ " " ++ expr_to_s(B) ++ ")".
print_prefix(StartTime, Process, Comp) ->
Age = timer:now_diff(erlang:timestamp(), StartTime) / 1000 / 1000,
io:format(standard_error, "~n[~7.3fs, ~p, ~12B] ", [Age, Process, Comp]).
printer() ->
process_flag(priority, high),
printer_loop(erlang:timestamp(), 0).
-spec printer_loop(erlang:timestamp(), non_neg_integer()) -> no_return().
printer_loop(StartTime, Best) ->
receive
{print, Asker, {Expr, Comp}} when Comp > Best ->
print_prefix(StartTime, Asker, Comp),
io:format("~s~n", [expr_to_s(Expr)]),
printer_loop(StartTime, Comp);
{print, _, _} ->
printer_loop(StartTime, Best);
{debug, Asker, String} ->
print_prefix(StartTime, Asker, 0),
io:format(standard_error, String ++ "~n", []),
printer_loop(StartTime, Best);
{debug, Asker, Format, Args} ->
print_prefix(StartTime, Asker, 0),
io:format(standard_error, Format ++ "~n", Args),
printer_loop(StartTime, Best);
Other ->
io:format(standard_error, "Printer does not understand ~p~n", [Other]),
printer_loop(StartTime, Best)
end.
-spec uniq([A]) -> [A].
uniq(List) ->
lists:usort(List).
| null | https://raw.githubusercontent.com/disco-framework/disco/f55f35d46d43ef5f4fa1466bdf8d662f5f01f30f/priv/countdown2/workers/pwb_72/src/pw_app.erl | erlang | ']).
-define(OPS, ['<+>', '/', '<*>', '+', '<->']).
', A, B) -> A rem B /= 0.
', A, B) -> A rem B.
TODO: try depth-first
provider empty
try to switch to another provider
just block, so the printer process has time to finish | -module(pw_app).
-behavior(application).
-export([start/2,
stop/1]).
-export([solve/4,
printer/0]).
-define(PROVIDERS, [provider_a, provider_b]).
start(_Type, _Args) ->
Numbers = parse_line(io:get_line([])),
Target = parse_line(io:get_line([])),
PrinterPid = spawn(?MODULE, printer, []),
register(printer, PrinterPid),
debug("finished parsing"),
_ = [subs:spawn_provider(Name, Numbers, 1) || Name <- ?PROVIDERS],
NumSchedulers = erlang:system_info(schedulers_online),
start_solvers(NumSchedulers div 2 + 1, Target, provider_a, with_outer_div),
start_solvers(NumSchedulers div 2 - 1, Target, provider_b, without_outer_div),
receive
nothing_whatsoever -> ok
end,
{ok, self()}.
stop(_State) ->
ok.
-spec debug(string()) -> ok.
debug(S) ->
printer ! {debug, self(), S},
ok.
-spec debug(string(), [term()]) -> ok.
debug(Format, Args) ->
printer ! {debug, self(), Format, Args},
ok.
-spec start_solvers(non_neg_integer(), pos_integer(), atom(), constraint()) -> ok.
start_solvers(0, _, _, _) ->
ok;
start_solvers(Num, Target, Provider, Constraint) ->
spawn(?MODULE, solve, [Target, Provider, Constraint, []]),
start_solvers(Num-1, Target, Provider, Constraint).
-spec parse_line(string()) -> term().
parse_line(Line) ->
TermString = lists:sublist(Line, length(Line)-1) ++ ".",
{ok, Tokens, _Rest} = erl_scan:string(TermString),
{ok, Term} = erl_parse:parse_term(Tokens),
Term.
-type operator() :: '+' | '-' | '*' | '/' | '<+>' | '<->' | '<*>'.
-type expr() :: {val, pos_integer()} | {app, operator(), expr(), expr()}.
-type val_comp() :: {Value :: pos_integer(), Complexity :: pos_integer()}.
-type result() :: {expr(), val_comp()}.
-type constraint() :: with_outer_div | without_outer_div | none.
-ifdef(modulo).
-else.
-define(OPS, ['<+>', '/', '<*>', '+', '<->', '*', '-']).
-endif.
-spec valid(operator(), pos_integer(), pos_integer()) -> boolean().
valid('+', A, B) -> A >= B;
valid('-', A, B) -> A > B;
valid('*', A, B) -> A >= B;
valid('/', A, B) -> A rem B == 0;
valid('<+>', A, B) -> A >= B;
valid('<->', A, B) -> A > B;
valid('<*>', A, B) -> A > B;
-spec appl(operator(), pos_integer(), pos_integer()) -> pos_integer().
appl('+', A, B) -> A+B;
appl('-', A, B) -> A-B;
appl('*', A, B) -> A*B;
appl('/', A, B) -> A div B;
appl('<+>', A, B) -> C = A+B, C*C;
appl('<->', A, B) -> C = A-B, C*C;
appl('<*>', A, B) -> (A+B)*(A-B);
-spec interleave(A, [A]) -> [[A]].
interleave(X, []) -> [[X]];
interleave(X, [X|T]) -> lists:map(fun(L) -> [X|L] end, interleave(X, T));
interleave(X, Y=[H|T]) -> [[X|Y]|lists:map(fun(L) -> [H|L] end, interleave(X, T))].
TODO do not permute same numbers ( [ 1,1,1,1,1,1,1,1,1 ] )
beziehungsweise : keine doppelten , bitte .
evtl auch für subs interessant
-spec perms([A]) -> [[A]].
perms(L) ->
uniq(perms_(L)).
perms_([]) -> [[]];
perms_([H|T]) -> lists:flatmap(fun(Perm) -> interleave(H, Perm) end, perms_(T)).
-spec choices([[A]]) -> [[A]].
choices(Subs) -> lists:flatmap(fun perms/1, Subs).
-spec split([A]) -> [{[A], [A]}].
split([]) -> [];
split([_]) -> [];
split([H|T]) -> [{[H], T}|[{[H|Ls], Rs} || {Ls, Rs} <- split(T)]].
-spec results([pos_integer()]) -> [result()].
results([]) -> [];
results([N]) -> [{{val, N}, {N, N}}];
results(Ns) ->
[Res || {Ls, Rs} <- split(Ns),
LX <- results(Ls),
RY <- results(Rs),
Res <- combine_(LX, RY, ?OPS)].
-spec results_wo_outer_div([pos_integer()]) -> [result()].
results_wo_outer_div([]) -> [];
results_wo_outer_div([N]) -> [{{val, N}, {N, N}}];
results_wo_outer_div(Ns) ->
[Res || {Ls, Rs} <- split(Ns),
LX <- results(Ls),
RY <- results(Rs),
Res <- combine_(LX, RY, ?OPS -- ['/'])].
-spec results_w_outer_div([pos_integer()]) -> [result()].
results_w_outer_div([]) -> [];
results_w_outer_div([N]) -> [{{val, N}, {N, N}}];
results_w_outer_div(Ns) ->
[Res || {Ls, Rs} <- split(Ns),
LX <- results(Ls),
RY <- results(Rs),
Res <- combine_(LX, RY, ['/'])].
-spec combine_(result(), result(), [operator()]) -> [result()].
combine_({L,X={XVal, _}},{R,Y={YVal, _}}, Operators) ->
[{{app, Op, L, R}, apply_comp(Op, X, Y)} ||
Op <- Operators,
valid(Op, XVal, YVal)].
-spec apply_comp(operator(), val_comp(), val_comp()) -> val_comp().
apply_comp(Op, {XVal, XComp}, {YVal, YComp}) ->
Val = appl(Op, XVal, YVal),
{Val, Val + XComp + YComp}.
TODO : try spawning a process for second half of choices
-spec gen_solutions(pos_integer(), constraint(), [[pos_integer()]]) -> ok.
gen_solutions(Target, Constraint, Subs) ->
Self = self(),
ResultFun = case Constraint of
none -> fun results/1;
without_outer_div -> fun results_wo_outer_div/1;
with_outer_div -> fun results_w_outer_div/1
end,
lists:foreach(fun(Choice) ->
Results = [{Expr, Comp} || {Expr, {Val, Comp}} <- ResultFun(Choice),
Val == Target],
lists:foreach(fun(S) -> printer ! {print, Self, S} end, Results)
end, choices(Subs)),
ok.
-spec other_constraint(constraint()) -> constraint().
other_constraint(with_outer_div) -> without_outer_div;
other_constraint(without_outer_div) -> with_outer_div;
other_constraint(none) -> none.
-spec solve(pos_integer(), atom(), constraint(), [atom()]) -> ok.
solve(Target, Provider, Constraint, EmptyProviders) ->
case subs:get_package(Provider) of
NewEmptyProviders = EmptyProviders ++ [Provider],
case ?PROVIDERS -- NewEmptyProviders of
[NewProvider|_] ->
solve(Target, NewProvider, other_constraint(Constraint), NewEmptyProviders);
_ ->
receive
none -> ok
end
end;
SubPackage ->
gen_solutions(Target, Constraint, SubPackage),
solve(Target, Provider, Constraint, EmptyProviders)
end.
-spec expr_to_s(expr()) -> string().
expr_to_s({val, N}) -> integer_to_list(N);
expr_to_s({app, Op, A, B}) ->
"(" ++ expr_to_s(A) ++ " " ++ atom_to_list(Op) ++ " " ++ expr_to_s(B) ++ ")".
print_prefix(StartTime, Process, Comp) ->
Age = timer:now_diff(erlang:timestamp(), StartTime) / 1000 / 1000,
io:format(standard_error, "~n[~7.3fs, ~p, ~12B] ", [Age, Process, Comp]).
printer() ->
process_flag(priority, high),
printer_loop(erlang:timestamp(), 0).
-spec printer_loop(erlang:timestamp(), non_neg_integer()) -> no_return().
printer_loop(StartTime, Best) ->
receive
{print, Asker, {Expr, Comp}} when Comp > Best ->
print_prefix(StartTime, Asker, Comp),
io:format("~s~n", [expr_to_s(Expr)]),
printer_loop(StartTime, Comp);
{print, _, _} ->
printer_loop(StartTime, Best);
{debug, Asker, String} ->
print_prefix(StartTime, Asker, 0),
io:format(standard_error, String ++ "~n", []),
printer_loop(StartTime, Best);
{debug, Asker, Format, Args} ->
print_prefix(StartTime, Asker, 0),
io:format(standard_error, Format ++ "~n", Args),
printer_loop(StartTime, Best);
Other ->
io:format(standard_error, "Printer does not understand ~p~n", [Other]),
printer_loop(StartTime, Best)
end.
-spec uniq([A]) -> [A].
uniq(List) ->
lists:usort(List).
|
f2feac36520802be45ab13ab7ba676229e69414174f499bdf84c79562c15e0c0 | lspitzner/brittany | Test93.hs | func = x
describe "infix op" $ do
| null | https://raw.githubusercontent.com/lspitzner/brittany/a15eed5f3608bf1fa7084fcf008c6ecb79542562/data/Test93.hs | haskell | func = x
describe "infix op" $ do
| |
2c19472b179df8cb5fefbd104695399fe5319b7c8c4ef11f6f4c5a250b165fb9 | ezyang/reflex-backpack | Reflex.hs | # LANGUAGE CPP #
| This module exports all of the commonly - used functionality of Reflex ; if
you are just getting started with Reflex , this is probably what you want .
module Reflex
( module X
) where
import Reflex.Basics as X
import Reflex.EventWriter as X
import Reflex.Dynamic as X
#ifdef USE_TEMPLATE_HASKELL
import Reflex.Dynamic.TH as X
#endif
import Reflex.Dynamic.Uniq as X
import Reflex.DynamicWriter as X
import Reflex.PerformEvent.Base as X
import Reflex.PerformEvent.Class as X
import Reflex.PostBuild.Base as X
import Reflex.PostBuild.Class as X
import Reflex.Requester.Base as X
import Reflex.Requester.Class as X
import Reflex.Spider as X
import Reflex.TriggerEvent.Base as X
import Reflex.TriggerEvent.Class as X
| null | https://raw.githubusercontent.com/ezyang/reflex-backpack/247898fde872d8909392ebe2539f4623c7449067/Reflex.hs | haskell | # LANGUAGE CPP #
| This module exports all of the commonly - used functionality of Reflex ; if
you are just getting started with Reflex , this is probably what you want .
module Reflex
( module X
) where
import Reflex.Basics as X
import Reflex.EventWriter as X
import Reflex.Dynamic as X
#ifdef USE_TEMPLATE_HASKELL
import Reflex.Dynamic.TH as X
#endif
import Reflex.Dynamic.Uniq as X
import Reflex.DynamicWriter as X
import Reflex.PerformEvent.Base as X
import Reflex.PerformEvent.Class as X
import Reflex.PostBuild.Base as X
import Reflex.PostBuild.Class as X
import Reflex.Requester.Base as X
import Reflex.Requester.Class as X
import Reflex.Spider as X
import Reflex.TriggerEvent.Base as X
import Reflex.TriggerEvent.Class as X
| |
bf4e5d3aada8b873ac840ca546ddef0d667bc7740490a3be3862203d93bb2433 | luontola/varjocafe | settings_test.clj | (ns varjocafe.settings-test
(:use midje.sweet
varjocafe.testutil)
(:require [varjocafe.settings :as settings]
[clojure.java.io :as io])
(:import (java.util.concurrent TimeUnit)))
(fact "#read-properties-file"
(with-silent-logger
(settings/read-properties-file (io/file "no-such-file"))) => {}
(settings/read-properties-file (io/resource "varjocafe/settings_test.properties")) => {"foo" "bar"})
(fact "#dotkeys->tree"
(fact "flat structures"
(settings/dotkeys->tree {}) => {}
(settings/dotkeys->tree {"k" "v"}) => {:k "v"}
(settings/dotkeys->tree {:k "v"}) => {:k "v"}
(settings/dotkeys->tree {:k1 "v1", :k2 "v2"}) => {:k1 "v1", :k2 "v2"})
(fact "hierarchial structures"
(settings/dotkeys->tree {"parent.k" "v"}) => {:parent {:k "v"}}
(settings/dotkeys->tree {:parent.k "v"}) => {:parent {:k "v"}}
(settings/dotkeys->tree {:parent.k1 "v1", :parent.k2 "v2"}) => {:parent {:k1 "v1", :k2 "v2"}})
(fact "merges with defaults"
(fact "flat"
(settings/dotkeys->tree {} {:a "default"}) => {:a "default"}
(settings/dotkeys->tree {"a" "override"} {:a "default"}) => {:a "override"}
(settings/dotkeys->tree {"b" "added"} {:a "default"}) => {:a "default", :b "added"})
(fact "hierarchial"
(settings/dotkeys->tree {} {:a {:b "default"}}) => {:a {:b "default"}}
(settings/dotkeys->tree {"a.b" "override"} {:a {:b "default"}}) => {:a {:b "override"}}
(settings/dotkeys->tree {"a.c" "added"} {:a {:b "default"}}) => {:a {:b "default", :c "added"}})))
(fact "#merge-with-defaults"
(fact "Coerces to int"
(-> {"server.port" "8081"}
(settings/merge-with-defaults settings/default-settings))
=> (contains {:server (contains {:port 8081})}))
(fact "Coerces to boolean"
(-> {"development-mode" "true"}
(settings/merge-with-defaults settings/default-settings))
=> (contains {:development-mode true}))
(fact "Coerces to TimeUnit"
(-> {"updater.interval-unit" "SECONDS"}
(settings/merge-with-defaults settings/default-settings))
=> (contains {:updater (contains {:interval-unit TimeUnit/SECONDS})})
(-> {"updater.interval-unit" "FOO"}
(settings/merge-with-defaults settings/default-settings))
=> (throws IllegalArgumentException (str "Invalid settings: {:updater {:interval-unit "
"\"No enum constant java.util.concurrent.TimeUnit.FOO\"}}")))
(fact "Throws up if settings are not valid"
(-> {"foo" "bar"}
(settings/merge-with-defaults settings/default-settings))
=> (throws IllegalArgumentException "Invalid settings: {:foo disallowed-key}")))
(fact "Validate schemas"
(fact "default-settings"
(settings/validate settings/default-settings) => truthy)
(fact "dev-settings"
(settings/validate settings/dev-settings) => truthy))
| null | https://raw.githubusercontent.com/luontola/varjocafe/cb4c68dce0ef5dd2031ba379dd63b63e8b9433b7/test/varjocafe/settings_test.clj | clojure | (ns varjocafe.settings-test
(:use midje.sweet
varjocafe.testutil)
(:require [varjocafe.settings :as settings]
[clojure.java.io :as io])
(:import (java.util.concurrent TimeUnit)))
(fact "#read-properties-file"
(with-silent-logger
(settings/read-properties-file (io/file "no-such-file"))) => {}
(settings/read-properties-file (io/resource "varjocafe/settings_test.properties")) => {"foo" "bar"})
(fact "#dotkeys->tree"
(fact "flat structures"
(settings/dotkeys->tree {}) => {}
(settings/dotkeys->tree {"k" "v"}) => {:k "v"}
(settings/dotkeys->tree {:k "v"}) => {:k "v"}
(settings/dotkeys->tree {:k1 "v1", :k2 "v2"}) => {:k1 "v1", :k2 "v2"})
(fact "hierarchial structures"
(settings/dotkeys->tree {"parent.k" "v"}) => {:parent {:k "v"}}
(settings/dotkeys->tree {:parent.k "v"}) => {:parent {:k "v"}}
(settings/dotkeys->tree {:parent.k1 "v1", :parent.k2 "v2"}) => {:parent {:k1 "v1", :k2 "v2"}})
(fact "merges with defaults"
(fact "flat"
(settings/dotkeys->tree {} {:a "default"}) => {:a "default"}
(settings/dotkeys->tree {"a" "override"} {:a "default"}) => {:a "override"}
(settings/dotkeys->tree {"b" "added"} {:a "default"}) => {:a "default", :b "added"})
(fact "hierarchial"
(settings/dotkeys->tree {} {:a {:b "default"}}) => {:a {:b "default"}}
(settings/dotkeys->tree {"a.b" "override"} {:a {:b "default"}}) => {:a {:b "override"}}
(settings/dotkeys->tree {"a.c" "added"} {:a {:b "default"}}) => {:a {:b "default", :c "added"}})))
(fact "#merge-with-defaults"
(fact "Coerces to int"
(-> {"server.port" "8081"}
(settings/merge-with-defaults settings/default-settings))
=> (contains {:server (contains {:port 8081})}))
(fact "Coerces to boolean"
(-> {"development-mode" "true"}
(settings/merge-with-defaults settings/default-settings))
=> (contains {:development-mode true}))
(fact "Coerces to TimeUnit"
(-> {"updater.interval-unit" "SECONDS"}
(settings/merge-with-defaults settings/default-settings))
=> (contains {:updater (contains {:interval-unit TimeUnit/SECONDS})})
(-> {"updater.interval-unit" "FOO"}
(settings/merge-with-defaults settings/default-settings))
=> (throws IllegalArgumentException (str "Invalid settings: {:updater {:interval-unit "
"\"No enum constant java.util.concurrent.TimeUnit.FOO\"}}")))
(fact "Throws up if settings are not valid"
(-> {"foo" "bar"}
(settings/merge-with-defaults settings/default-settings))
=> (throws IllegalArgumentException "Invalid settings: {:foo disallowed-key}")))
(fact "Validate schemas"
(fact "default-settings"
(settings/validate settings/default-settings) => truthy)
(fact "dev-settings"
(settings/validate settings/dev-settings) => truthy))
| |
1235c73d56ab37cbe67b77f17cc8f33ae78ed17278eeada7381c9077352abbac | BinRoot/Haskell-Data-Analysis-Cookbook | Main.hs | module Main where
-- Reads a file as input
main :: IO ()
main = do
input <- readFile "input.txt"
print $ countWords input
-- Counts the number of words per line
countWords input = map (length.words) (lines input)
| null | https://raw.githubusercontent.com/BinRoot/Haskell-Data-Analysis-Cookbook/f8c46987d78f4a6c1828b353c5f906b9314c2ef9/Ch01/Code01_txt/Main.hs | haskell | Reads a file as input
Counts the number of words per line | module Main where
main :: IO ()
main = do
input <- readFile "input.txt"
print $ countWords input
countWords input = map (length.words) (lines input)
|
23269c8e75bacae34dc181a54c6e83954c177c73146608209e739cac50e3126c | project-oak/hafnium-verification | reachingDefs.mli |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module Defs : module type of AbstractDomain.FiniteSet (Procdesc.Node)
* The node in which the reaching definition x : = e is defined .
A definition x : = e , declared at node N , reaches the current node if there is a path from node N
to the current node such that x is not modified along the path *
A definition x :=e, declared at node N, reaches the current node if there is a path from node N
to the current node such that x is not modified along the path **)
module ReachingDefsMap : module type of AbstractDomain.Map (Var) (Defs)
(** Map var -> its reaching definition *)
type invariant_map
val compute_invariant_map : Summary.t -> Tenv.t -> invariant_map
val extract_post : Procdesc.Node.id -> invariant_map -> ReachingDefsMap.t option
| null | https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/infer/src/checkers/reachingDefs.mli | ocaml | * Map var -> its reaching definition |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module Defs : module type of AbstractDomain.FiniteSet (Procdesc.Node)
* The node in which the reaching definition x : = e is defined .
A definition x : = e , declared at node N , reaches the current node if there is a path from node N
to the current node such that x is not modified along the path *
A definition x :=e, declared at node N, reaches the current node if there is a path from node N
to the current node such that x is not modified along the path **)
module ReachingDefsMap : module type of AbstractDomain.Map (Var) (Defs)
type invariant_map
val compute_invariant_map : Summary.t -> Tenv.t -> invariant_map
val extract_post : Procdesc.Node.id -> invariant_map -> ReachingDefsMap.t option
|
cad9721dd836531ba639fdc5d1ab7b52a242dc511865c20cf7b78bc8fd278786 | VisionsGlobalEmpowerment/webchange | website.clj | (ns webchange.auth.website
(:require [buddy.hashers :as hashers]
[webchange.db.core :refer [*db*] :as db]
[webchange.common.hmac-sha256 :as sign]
[clojure.tools.logging :as log]
[clj-http.client :as http]
[config.core :refer [env]]
[buddy.auth :as buddy]
[webchange.auth.core :as core]))
(defn website-sign-up-page
[]
(let [website-host (env :website-host)]
(str "https://" website-host "/sign-up/")))
(defn website-logout-page
[]
(let [website-host (env :website-host)]
(str "https://" website-host "/user/logout")))
(defn coerce-user-types
[{id :id :as user}]
(assoc user :id (if (int? id) id (Integer/parseInt id))))
(defn- success?
[response]
(and (http/success? response) (= "success" (-> response :body :status))))
(defn website-user-resource
[]
(let [website-host (env :website-host)]
(str "https://" website-host "/api/user")))
(defn get-user-by-id
[website-user-id]
(let [url (website-user-resource)
response (http/with-additional-middleware [#'sign/wrap-apikey]
(http/post url {:accept :json
:as :json
:form-params {:user_id website-user-id}}))]
(if (success? response)
(-> response :body :data coerce-user-types))))
(defn website-token-resource
[]
(let [website-host (env :website-host)]
(str "https://" website-host "/wp-json/api/authenticate")))
(defn get-user-by-token
[token]
(let [url (website-token-resource)
response (http/with-additional-middleware [#'sign/wrap-apikey]
(http/post url {:accept :json
:as :json
:form-params {:token token}}))]
(if (success? response)
(-> response :body :data coerce-user-types))))
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/54c32bdf58a2ad0a2d160966720250232033220e/src/clj/webchange/auth/website.clj | clojure | (ns webchange.auth.website
(:require [buddy.hashers :as hashers]
[webchange.db.core :refer [*db*] :as db]
[webchange.common.hmac-sha256 :as sign]
[clojure.tools.logging :as log]
[clj-http.client :as http]
[config.core :refer [env]]
[buddy.auth :as buddy]
[webchange.auth.core :as core]))
(defn website-sign-up-page
[]
(let [website-host (env :website-host)]
(str "https://" website-host "/sign-up/")))
(defn website-logout-page
[]
(let [website-host (env :website-host)]
(str "https://" website-host "/user/logout")))
(defn coerce-user-types
[{id :id :as user}]
(assoc user :id (if (int? id) id (Integer/parseInt id))))
(defn- success?
[response]
(and (http/success? response) (= "success" (-> response :body :status))))
(defn website-user-resource
[]
(let [website-host (env :website-host)]
(str "https://" website-host "/api/user")))
(defn get-user-by-id
[website-user-id]
(let [url (website-user-resource)
response (http/with-additional-middleware [#'sign/wrap-apikey]
(http/post url {:accept :json
:as :json
:form-params {:user_id website-user-id}}))]
(if (success? response)
(-> response :body :data coerce-user-types))))
(defn website-token-resource
[]
(let [website-host (env :website-host)]
(str "https://" website-host "/wp-json/api/authenticate")))
(defn get-user-by-token
[token]
(let [url (website-token-resource)
response (http/with-additional-middleware [#'sign/wrap-apikey]
(http/post url {:accept :json
:as :json
:form-params {:token token}}))]
(if (success? response)
(-> response :body :data coerce-user-types))))
| |
91ed9e32acf3b8f5cae52d824425508aeafbeb0082f0adda54ce1e0dfde1330b | egobrain/equery | tree_m.erl | -module(tree_m).
-export([schema/0]).
schema() ->
#{
fields => #{
id => #{type => serial},
parentId => #{type => integer, required => true},
value => #{type => varchar}
},
table => <<"tree">>
}.
| null | https://raw.githubusercontent.com/egobrain/equery/448c89dede0b19fc22f50a31063f0bb24d8fdf9c/test/tree_m.erl | erlang | -module(tree_m).
-export([schema/0]).
schema() ->
#{
fields => #{
id => #{type => serial},
parentId => #{type => integer, required => true},
value => #{type => varchar}
},
table => <<"tree">>
}.
| |
825eade1e43125f940c6de4966a710c07d92a0b21b9457eb18ad48f5d46a4941 | TheAlgorithms/Haskell | InsertionSort.hs | module Sorts.InsertionSort where
listToSort = [13, 2, 3, 14, 17, 4, 1, 5, 16, 12, 9, 10, 15, 8, 7, 11, 18, 19, 6, 20]
insertionSort:: (Ord a) => [a] -> [a]
insertionSort [] = [] -- Empty list is empty
Singleton lists are trivially sorted .
insertionSort (x:xs) = insert x (insertionSort xs)
Assumes that the second argument is an alread - sorted list ,
and inserts the first argument in the appropriate position
insert :: (Ord a) => a -> [a] -> [a]
insert x [] = [x]
insert x lst@(y:ys) = if x <= y then x:lst else y:(insert x ys)
main = do
putStrLn $ "Unsorted: " ++ show listToSort
putStrLn $ "Sorted: " ++ show (insertionSort listToSort)
| null | https://raw.githubusercontent.com/TheAlgorithms/Haskell/9dcabef99fb8995a760ff25a9e0d659114c0b9d3/src/Sorts/InsertionSort.hs | haskell | Empty list is empty | module Sorts.InsertionSort where
listToSort = [13, 2, 3, 14, 17, 4, 1, 5, 16, 12, 9, 10, 15, 8, 7, 11, 18, 19, 6, 20]
insertionSort:: (Ord a) => [a] -> [a]
Singleton lists are trivially sorted .
insertionSort (x:xs) = insert x (insertionSort xs)
Assumes that the second argument is an alread - sorted list ,
and inserts the first argument in the appropriate position
insert :: (Ord a) => a -> [a] -> [a]
insert x [] = [x]
insert x lst@(y:ys) = if x <= y then x:lst else y:(insert x ys)
main = do
putStrLn $ "Unsorted: " ++ show listToSort
putStrLn $ "Sorted: " ++ show (insertionSort listToSort)
|
1fcdc29d2a0af36c2adb9e705f56200f5d41f64863feb701fac660c9ca4f18e7 | semmons99/clojure-euler | prob-057.clj | problem 057 ; ; ; ; ; ; ; ; ; ;
(defn numerator [r]
(bigint (first (.split (str r) "/"))))
(defn denominator [r]
(bigint (last (.split (str r) "/"))))
(defn num-digits [n]
(count (rest (.split (str n) ""))))
(defn more-digits [r]
(let [nd (num-digits (numerator r))
dd (num-digits (denominator r))]
(cond
(> nd dd) :numerator
(< nd dd) :denominator
:else :same)))
(defn next-expansion [pex]
(/ (denominator pex) (+ (* 2 (denominator pex)) (numerator pex))))
(defn prob-057 []
(count
(filter #(= :numerator %)
(map more-digits (take 1000 (map inc (iterate next-expansion (/ 1 2)))))))) | null | https://raw.githubusercontent.com/semmons99/clojure-euler/3480bc313b9df7f282dadf6e0b48d96230f1bfc1/prob-057.clj | clojure | ; ; ; ; ; ; ; ; ; | (defn numerator [r]
(bigint (first (.split (str r) "/"))))
(defn denominator [r]
(bigint (last (.split (str r) "/"))))
(defn num-digits [n]
(count (rest (.split (str n) ""))))
(defn more-digits [r]
(let [nd (num-digits (numerator r))
dd (num-digits (denominator r))]
(cond
(> nd dd) :numerator
(< nd dd) :denominator
:else :same)))
(defn next-expansion [pex]
(/ (denominator pex) (+ (* 2 (denominator pex)) (numerator pex))))
(defn prob-057 []
(count
(filter #(= :numerator %)
(map more-digits (take 1000 (map inc (iterate next-expansion (/ 1 2)))))))) |
9beb56ca70bba75c5a75b5b42d9d46a93f14dbe5582984f928e7bcfde2bc6991 | igrep/typesafe-precure | Profiles.hs | # OPTIONS_GHC -fno - warn - missing - signatures #
{-# LANGUAGE OverloadedStrings #-}
module ACME.PreCure.Textbook.DeliciousParty.Profiles where
import Data.String (IsString, fromString)
import ACME.PreCure.Index.Types
girls :: [Girl]
girls =
[ mkGirl "Yui Nagomi" "和実 ゆい"
, mkGirl "Kokone Fuwa" "芙羽 ここね"
, mkGirl "Ran Hanamichi" "華満 らん"
, mkGirl "Amane Kasai" "菓彩 あまね"
]
transformees :: [Transformee]
transformees =
[ mkTransformee
"Cure Precious"
""
"キュアプレシャス"
""
introducesHerselfAs_CurePrecious
, mkTransformee
"Cure Spicy"
""
"キュアスパイシー"
""
introducesHerselfAs_CureSpicy
, mkTransformee
"Cure Yum-Yum"
""
"キュアヤムヤム"
""
introducesHerselfAs_CureYumYum
, mkTransformee
"Cure Finale"
""
"キュアフィナーレ"
""
introducesHerselfAs_CureFinale
, mkTransformee
"Cure Precious"
"Party Up Style"
"キュアプレシャス"
"パーティアップスタイル"
introducesHerselfAs_CurePrecious
, mkTransformee
"Cure Spicy"
"Party Up Style"
"キュアスパイシー"
"パーティアップスタイル"
introducesHerselfAs_CureSpicy
, mkTransformee
"Cure Yum-Yum"
"Party Up Style"
"キュアヤムヤム"
"パーティアップスタイル"
introducesHerselfAs_CureYumYum
, mkTransformee
"Cure Finale"
"Party Up Style"
"キュアフィナーレ"
"パーティアップスタイル"
introducesHerselfAs_CureFinale
]
transformedGroups :: [TransformedGroup]
transformedGroups =
[ mkTransformedGroup groupMembers1 ne "" nj ""
]
where
ne = "Delicious Party♡PreCure"
nj = "デリシャスパーティ♡プリキュア"
specialItems :: [SpecialItem]
specialItems =
[ mkSpecialItem "Heart Cure Watch" "ハートキュアウォッチ"
, mkSpecialItem "Kome-Kome" "コメコメ"
, mkSpecialItem "Pam-Pam" "パムパム"
, mkSpecialItem "Mem-Mem" "メンメン"
, mkSpecialItem "Heart Fruits Pendant" "ハートフルーツペンダント"
, mkSpecialItem "Creamy Fleuret" "クリーミーフルーレ"
, mkSpecialItem "Heart Juicy Mixer" "ハートジューシーミキサー"
, mkSpecialItem "Party Up Ring" "パーティーアップリング"
, mkSpecialItem "Party Candle Tact" "パーティーキャンドルタクト"
]
transformations :: [Transformation]
transformations =
[ mkTransformation
["Yui"]
["KomeKome"]
["CurePrecious"]
( "(コメ!)"
: preCureDeliciouStan'ByPartyGo
++ nigiNigiKomeKomeHeartWoKomeKome
++ [sharin'Energy, kome_, komeKome, introducesHerselfAs_CurePrecious]
)
, mkTransformation
["Kokone"]
["PamPam"]
["CureSpicy"]
( "(パム!)"
: preCureDeliciouStan'ByPartyGo
++ openPamPamSandPamPam
++ [sharin'Energy, tasty, pamPam, introducesHerselfAs_CureSpicy]
)
, mkTransformation
["Ran"]
["MemMem"]
["CureYumYum"]
( "(メーン!)"
: preCureDeliciouStan'ByPartyGo
++ kuruKuruMemMemMiracleMemMem
++ [sharin'Energy, wonton, memMem, introducesHerselfAs_CureYumYum]
)
, mkTransformation
["Amane"]
["HeartFruitsPendant"]
["CureFinale"]
( preCureDeliciouStan'ByPartyGo
++ fruitsFabulousOrder
++ [sharin'Energy]
++ toppingBrilliantShineMore
++ [introducesHerselfAs_CureFinale]
)
, mkTransformation
["Yui", "Kokone"]
["KomeKome", "PamPam"]
["CurePrecious", "CureSpicy"]
( preCureDeliciouStan'ByPartyGo
++ nigiNigiKomeKomeHeartWoKomeKome
++ openPamPamSandPamPam
++ [sharin'Energy]
++ [kome_, tasty]
++ [komeKome, pamPam]
++ [introducesHerselfAs_CurePrecious, introducesHerselfAs_CureSpicy]
)
, mkTransformation
girls1
items1
groupMembers1
( preCureDeliciouStan'ByPartyGo
++ nigiNigiKomeKomeHeartWoKomeKome
++ openPamPamSandPamPam
++ kuruKuruMemMemMiracleMemMem
++ [sharin'Energy]
++ [kome_, tasty, wonton]
++ [komeKome, pamPam, memMem]
++ [introducesHerselfAs_CurePrecious, introducesHerselfAs_CureSpicy, introducesHerselfAs_CureYumYum]
++ ["デリシャスパーティ♡プリキュア!"]
)
, mkTransformation
girls2
items2
groupMembers2
( preCureDeliciouStan'ByPartyGo
++ nigiNigiKomeKomeHeartWoKomeKome
++ openPamPamSandPamPam
++ kuruKuruMemMemMiracleMemMem
++ fruitsFabulousOrder
++ [sharin'Energy]
++ [kome_, tasty, wonton]
++ toppingBrilliantShineMore
++ [komeKome, pamPam, memMem]
++ [introducesHerselfAs_CurePrecious, introducesHerselfAs_CureSpicy, introducesHerselfAs_CureYumYum, introducesHerselfAs_CureFinale]
++ ["デリシャスパーティ♡プリキュア!"]
)
, mkTransformation
groupMembers2
itemsLightMyDelicious
groupMembers_PartyUp
[ "(コメコメの力をみんなに!)"
, "パーティキャンドルタクト!"
, "笑顔のパワー!"
, "分け合うパワー!"
, "情熱のパワー!"
, "正義のパワー!"
, "(プリキュア・パーティアップ!)"
]
]
where
preCureDeliciouStan'ByPartyGo = ["プリキュア!デリシャスタンバイ!", "パーティーゴー!"]
nigiNigiKomeKomeHeartWoKomeKome = ["にぎにぎ!", komeKome, "ハートを!", komeKome]
openPamPamSandPamPam = ["オープン!", pamPam, "サンド!", pamPam]
kuruKuruMemMemMiracleMemMem = ["クルクル!", memMem, "ミラクル!", memMem]
fruitsFabulousOrder = ["フルーツ!", "ファビュラス・オーダー!"]
toppingBrilliantShineMore = ["トッピング!", "ブリリアント!", "シャインモア!"]
sharin'Energy = "シェアリンエナジー!"
komeKome = "(コメコメ!)"
pamPam = "(パムパム!)"
memMem = "(メンメン!!)"
kome_ = "(コメー!)"
tasty = "(テイスティー!)"
wonton = "(ワンターン!)"
girls1 = ["Yui", "Kokone", "Ran"]
girls2 = girls1 ++ ["Amane"]
items1 = ["KomeKome", "PamPam", "MemMem"]
items2 = items1 ++ ["HeartFruitsPendant"]
purifications :: [Purification]
purifications =
[ mkPurification
["CurePrecious"]
["HeartCureWatch"]
( [ "プリキュア!プレシャストライアングル!"
, "はぁーっ!かぁーっ!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
["CureSpicy"]
["HeartCureWatch"]
( [ "プリキュア!スパイシーサークル!"
, "はぁーっ!たぁーっ!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
["CureYumYum"]
["HeartCureWatch"]
( [ "プリキュア!ヤムヤムラインズ!"
, "はぁーっ!かぁーっ!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
[ " CureFinale " ]
--["CreamyFleuret"]
--[ "プリキュア!フィナーレブーケ!"
--]
, mkPurification
["CurePrecious"]
["HeartJuicyMixer"]
( ["キュアプレシャス!"]
++ heartJuicyMixerSharinEnergyMix
++
[ "(コメー!)"
, "プリキュア!デリシャスプレシャス・ヒート!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
["CureSpicy"]
["HeartJuicyMixer"]
( ["キュアスパイシー!"]
++ heartJuicyMixerSharinEnergyMix
++
[ "(パム!)"
, "プリキュア!デリシャススパイシー・ベイキン!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
["CureYumYum"]
["HeartJuicyMixer"]
( ["キュアヤムヤム!"]
++ heartJuicyMixerSharinEnergyMix
++
[ "(メーン!)"
, "プリキュア!デリシャスヤムヤム・ドレイン!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
["CureFinale"]
["CreamyFleuret"]
( [ "クリーミーフルーレ!"
, "ブルーミン・ダンシンフルーツ!"
, "プリキュア!デリシャスフィナーレ・ファンファーレ!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
groupMembers1
(replicate 3 "HeartJuicyMixer")
( [ "トリプルミックス!デリシャスチャージ!"
, "プレシャスフレイバー!"
, "スパイシーフレイバー!"
, "ヤムヤムフレイバー!"
, "プリキュア!ミックスハート・アタック!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
groupMembers_PartyUp
itemsLightMyDelicious
( [ "心をひとつに!"
, "プリキュア!ライト・マイ・デリシャス!"
]
++ onakaIppaiGochisosamaDeshita
)
]
where
onakaIppaiGochisosamaDeshita =
[ "(オナカ、イッパ~イ!)"
, "ごちそうさまでした!"
]
heartJuicyMixerSharinEnergyMix =
[ "ハートジューシーミキサー!"
, "シェアリン!"
, "エナジー!"
, "ミックス!"
]
groupMembers1 :: IsString s => [s]
groupMembers1 = map fromString groupMembers1'
groupMembers2 :: IsString s => [s]
groupMembers2 = map fromString groupMembers2'
groupMembers1' :: [String]
groupMembers1' = ["CurePrecious", "CureSpicy", "CureYumYum"]
groupMembers2' :: [String]
groupMembers2' = groupMembers1' ++ ["CureFinale"]
groupMembers_PartyUp :: IsString s => [s]
groupMembers_PartyUp = map (fromString . (++ "_PartyUpStyle")) groupMembers2'
itemsLightMyDelicious :: [IdAttachments]
itemsLightMyDelicious = "KomeKome" : concat (replicate 4 ["PartyUpRing", "PartyCandleTact"])
introducesHerselfAs_CurePrecious =
"あつあつごはんで、みなぎるパワー!キュアプレシャス!おいしい笑顔で満たしてあげる!"
introducesHerselfAs_CureSpicy =
"ふわふわサンドde心にスパイス!キュアスパイシー!分け合うおいしさ、焼きつけるわ!"
introducesHerselfAs_CureYumYum =
"きらめくヌードル・エモーション!キュアヤムヤム!おいしいの独り占め、ゆるさないよ!"
introducesHerselfAs_CureFinale =
"ジェントルに、ゴージャスに!咲き誇るスウィートネス!キュアフィナーレ!食卓の最後を、このわたしが飾ろう。"
| null | https://raw.githubusercontent.com/igrep/typesafe-precure/727efe419008cd63ab04a7b8d491f94d781e2339/src/ACME/PreCure/Textbook/DeliciousParty/Profiles.hs | haskell | # LANGUAGE OverloadedStrings #
["CreamyFleuret"]
[ "プリキュア!フィナーレブーケ!"
] | # OPTIONS_GHC -fno - warn - missing - signatures #
module ACME.PreCure.Textbook.DeliciousParty.Profiles where
import Data.String (IsString, fromString)
import ACME.PreCure.Index.Types
girls :: [Girl]
girls =
[ mkGirl "Yui Nagomi" "和実 ゆい"
, mkGirl "Kokone Fuwa" "芙羽 ここね"
, mkGirl "Ran Hanamichi" "華満 らん"
, mkGirl "Amane Kasai" "菓彩 あまね"
]
transformees :: [Transformee]
transformees =
[ mkTransformee
"Cure Precious"
""
"キュアプレシャス"
""
introducesHerselfAs_CurePrecious
, mkTransformee
"Cure Spicy"
""
"キュアスパイシー"
""
introducesHerselfAs_CureSpicy
, mkTransformee
"Cure Yum-Yum"
""
"キュアヤムヤム"
""
introducesHerselfAs_CureYumYum
, mkTransformee
"Cure Finale"
""
"キュアフィナーレ"
""
introducesHerselfAs_CureFinale
, mkTransformee
"Cure Precious"
"Party Up Style"
"キュアプレシャス"
"パーティアップスタイル"
introducesHerselfAs_CurePrecious
, mkTransformee
"Cure Spicy"
"Party Up Style"
"キュアスパイシー"
"パーティアップスタイル"
introducesHerselfAs_CureSpicy
, mkTransformee
"Cure Yum-Yum"
"Party Up Style"
"キュアヤムヤム"
"パーティアップスタイル"
introducesHerselfAs_CureYumYum
, mkTransformee
"Cure Finale"
"Party Up Style"
"キュアフィナーレ"
"パーティアップスタイル"
introducesHerselfAs_CureFinale
]
transformedGroups :: [TransformedGroup]
transformedGroups =
[ mkTransformedGroup groupMembers1 ne "" nj ""
]
where
ne = "Delicious Party♡PreCure"
nj = "デリシャスパーティ♡プリキュア"
specialItems :: [SpecialItem]
specialItems =
[ mkSpecialItem "Heart Cure Watch" "ハートキュアウォッチ"
, mkSpecialItem "Kome-Kome" "コメコメ"
, mkSpecialItem "Pam-Pam" "パムパム"
, mkSpecialItem "Mem-Mem" "メンメン"
, mkSpecialItem "Heart Fruits Pendant" "ハートフルーツペンダント"
, mkSpecialItem "Creamy Fleuret" "クリーミーフルーレ"
, mkSpecialItem "Heart Juicy Mixer" "ハートジューシーミキサー"
, mkSpecialItem "Party Up Ring" "パーティーアップリング"
, mkSpecialItem "Party Candle Tact" "パーティーキャンドルタクト"
]
transformations :: [Transformation]
transformations =
[ mkTransformation
["Yui"]
["KomeKome"]
["CurePrecious"]
( "(コメ!)"
: preCureDeliciouStan'ByPartyGo
++ nigiNigiKomeKomeHeartWoKomeKome
++ [sharin'Energy, kome_, komeKome, introducesHerselfAs_CurePrecious]
)
, mkTransformation
["Kokone"]
["PamPam"]
["CureSpicy"]
( "(パム!)"
: preCureDeliciouStan'ByPartyGo
++ openPamPamSandPamPam
++ [sharin'Energy, tasty, pamPam, introducesHerselfAs_CureSpicy]
)
, mkTransformation
["Ran"]
["MemMem"]
["CureYumYum"]
( "(メーン!)"
: preCureDeliciouStan'ByPartyGo
++ kuruKuruMemMemMiracleMemMem
++ [sharin'Energy, wonton, memMem, introducesHerselfAs_CureYumYum]
)
, mkTransformation
["Amane"]
["HeartFruitsPendant"]
["CureFinale"]
( preCureDeliciouStan'ByPartyGo
++ fruitsFabulousOrder
++ [sharin'Energy]
++ toppingBrilliantShineMore
++ [introducesHerselfAs_CureFinale]
)
, mkTransformation
["Yui", "Kokone"]
["KomeKome", "PamPam"]
["CurePrecious", "CureSpicy"]
( preCureDeliciouStan'ByPartyGo
++ nigiNigiKomeKomeHeartWoKomeKome
++ openPamPamSandPamPam
++ [sharin'Energy]
++ [kome_, tasty]
++ [komeKome, pamPam]
++ [introducesHerselfAs_CurePrecious, introducesHerselfAs_CureSpicy]
)
, mkTransformation
girls1
items1
groupMembers1
( preCureDeliciouStan'ByPartyGo
++ nigiNigiKomeKomeHeartWoKomeKome
++ openPamPamSandPamPam
++ kuruKuruMemMemMiracleMemMem
++ [sharin'Energy]
++ [kome_, tasty, wonton]
++ [komeKome, pamPam, memMem]
++ [introducesHerselfAs_CurePrecious, introducesHerselfAs_CureSpicy, introducesHerselfAs_CureYumYum]
++ ["デリシャスパーティ♡プリキュア!"]
)
, mkTransformation
girls2
items2
groupMembers2
( preCureDeliciouStan'ByPartyGo
++ nigiNigiKomeKomeHeartWoKomeKome
++ openPamPamSandPamPam
++ kuruKuruMemMemMiracleMemMem
++ fruitsFabulousOrder
++ [sharin'Energy]
++ [kome_, tasty, wonton]
++ toppingBrilliantShineMore
++ [komeKome, pamPam, memMem]
++ [introducesHerselfAs_CurePrecious, introducesHerselfAs_CureSpicy, introducesHerselfAs_CureYumYum, introducesHerselfAs_CureFinale]
++ ["デリシャスパーティ♡プリキュア!"]
)
, mkTransformation
groupMembers2
itemsLightMyDelicious
groupMembers_PartyUp
[ "(コメコメの力をみんなに!)"
, "パーティキャンドルタクト!"
, "笑顔のパワー!"
, "分け合うパワー!"
, "情熱のパワー!"
, "正義のパワー!"
, "(プリキュア・パーティアップ!)"
]
]
where
preCureDeliciouStan'ByPartyGo = ["プリキュア!デリシャスタンバイ!", "パーティーゴー!"]
nigiNigiKomeKomeHeartWoKomeKome = ["にぎにぎ!", komeKome, "ハートを!", komeKome]
openPamPamSandPamPam = ["オープン!", pamPam, "サンド!", pamPam]
kuruKuruMemMemMiracleMemMem = ["クルクル!", memMem, "ミラクル!", memMem]
fruitsFabulousOrder = ["フルーツ!", "ファビュラス・オーダー!"]
toppingBrilliantShineMore = ["トッピング!", "ブリリアント!", "シャインモア!"]
sharin'Energy = "シェアリンエナジー!"
komeKome = "(コメコメ!)"
pamPam = "(パムパム!)"
memMem = "(メンメン!!)"
kome_ = "(コメー!)"
tasty = "(テイスティー!)"
wonton = "(ワンターン!)"
girls1 = ["Yui", "Kokone", "Ran"]
girls2 = girls1 ++ ["Amane"]
items1 = ["KomeKome", "PamPam", "MemMem"]
items2 = items1 ++ ["HeartFruitsPendant"]
purifications :: [Purification]
purifications =
[ mkPurification
["CurePrecious"]
["HeartCureWatch"]
( [ "プリキュア!プレシャストライアングル!"
, "はぁーっ!かぁーっ!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
["CureSpicy"]
["HeartCureWatch"]
( [ "プリキュア!スパイシーサークル!"
, "はぁーっ!たぁーっ!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
["CureYumYum"]
["HeartCureWatch"]
( [ "プリキュア!ヤムヤムラインズ!"
, "はぁーっ!かぁーっ!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
[ " CureFinale " ]
, mkPurification
["CurePrecious"]
["HeartJuicyMixer"]
( ["キュアプレシャス!"]
++ heartJuicyMixerSharinEnergyMix
++
[ "(コメー!)"
, "プリキュア!デリシャスプレシャス・ヒート!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
["CureSpicy"]
["HeartJuicyMixer"]
( ["キュアスパイシー!"]
++ heartJuicyMixerSharinEnergyMix
++
[ "(パム!)"
, "プリキュア!デリシャススパイシー・ベイキン!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
["CureYumYum"]
["HeartJuicyMixer"]
( ["キュアヤムヤム!"]
++ heartJuicyMixerSharinEnergyMix
++
[ "(メーン!)"
, "プリキュア!デリシャスヤムヤム・ドレイン!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
["CureFinale"]
["CreamyFleuret"]
( [ "クリーミーフルーレ!"
, "ブルーミン・ダンシンフルーツ!"
, "プリキュア!デリシャスフィナーレ・ファンファーレ!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
groupMembers1
(replicate 3 "HeartJuicyMixer")
( [ "トリプルミックス!デリシャスチャージ!"
, "プレシャスフレイバー!"
, "スパイシーフレイバー!"
, "ヤムヤムフレイバー!"
, "プリキュア!ミックスハート・アタック!"
]
++ onakaIppaiGochisosamaDeshita
)
, mkPurification
groupMembers_PartyUp
itemsLightMyDelicious
( [ "心をひとつに!"
, "プリキュア!ライト・マイ・デリシャス!"
]
++ onakaIppaiGochisosamaDeshita
)
]
where
onakaIppaiGochisosamaDeshita =
[ "(オナカ、イッパ~イ!)"
, "ごちそうさまでした!"
]
heartJuicyMixerSharinEnergyMix =
[ "ハートジューシーミキサー!"
, "シェアリン!"
, "エナジー!"
, "ミックス!"
]
groupMembers1 :: IsString s => [s]
groupMembers1 = map fromString groupMembers1'
groupMembers2 :: IsString s => [s]
groupMembers2 = map fromString groupMembers2'
groupMembers1' :: [String]
groupMembers1' = ["CurePrecious", "CureSpicy", "CureYumYum"]
groupMembers2' :: [String]
groupMembers2' = groupMembers1' ++ ["CureFinale"]
groupMembers_PartyUp :: IsString s => [s]
groupMembers_PartyUp = map (fromString . (++ "_PartyUpStyle")) groupMembers2'
itemsLightMyDelicious :: [IdAttachments]
itemsLightMyDelicious = "KomeKome" : concat (replicate 4 ["PartyUpRing", "PartyCandleTact"])
introducesHerselfAs_CurePrecious =
"あつあつごはんで、みなぎるパワー!キュアプレシャス!おいしい笑顔で満たしてあげる!"
introducesHerselfAs_CureSpicy =
"ふわふわサンドde心にスパイス!キュアスパイシー!分け合うおいしさ、焼きつけるわ!"
introducesHerselfAs_CureYumYum =
"きらめくヌードル・エモーション!キュアヤムヤム!おいしいの独り占め、ゆるさないよ!"
introducesHerselfAs_CureFinale =
"ジェントルに、ゴージャスに!咲き誇るスウィートネス!キュアフィナーレ!食卓の最後を、このわたしが飾ろう。"
|
fb1b48c8f950abdbe331dd59328786c5b22e03c6311155077aaaf6d5f4419084 | blajzer/dib | C.hs | Copyright ( c ) 2010 - 2018
-- See LICENSE for license information.
-- | A builder for C/C++ code.
module Dib.Builders.C (
CTargetInfo(CTargetInfo, outputName, targetName, srcDir, outputLocation, compiler, linker, archiver, inFileOption, outFileOption, commonCompileFlags, cCompileFlags, cxxCompileFlags, linkFlags, archiverFlags, includeDirs, extraCompileDeps, extraLinkDeps, exclusions, staticLibrary),
BuildLocation(InPlace, BuildDir, ObjAndBinDirs),
makeCTarget,
makeCleanTarget,
makeBuildDirs,
emptyConfig,
defaultGCCConfig,
defaultGXXConfig,
defaultClangConfig
) where
import Dib.Gatherers
import Dib.Target
import Dib.Types
import Dib.Util
import Dib.Scanners.CDepScanner
import Data.List as L
import Data.Monoid
import Data.Word
import System.Process (system)
import System.Directory as D
import System.FilePath as F
import qualified Data.Digest.CRC32 as Hash
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified System.Console.ANSI as ANSI
-- | The record type that is used to pass configuration info for the C builder.
data CTargetInfo = CTargetInfo {
  -- | The name of the output file.
  outputName :: T.Text,
  -- | The name of the 'Target'. Should be unique among all 'Target's in a given build.
  targetName :: T.Text,
  -- | The directory containing the source for this target.
  srcDir :: T.Text,
  -- | A 'BuildLocation' that defines where the object and executable files go.
  outputLocation :: BuildLocation,
  -- | The compiler executable.
  compiler :: T.Text,
  -- | The linker executable.
  linker :: T.Text,
  -- | The archiver executable.
  archiver :: T.Text,
  -- | The command line option for the input file.
  inFileOption :: T.Text,
  -- | The command line option for the output file.
  outFileOption :: T.Text,
  -- | The compiler's include option.
  includeOption :: T.Text,
  -- | Common compiler flags.
  commonCompileFlags :: T.Text,
  -- | C compiler flags.
  cCompileFlags :: T.Text,
  -- | C++ compiler flags.
  cxxCompileFlags :: T.Text,
  -- | Linker flags.
  linkFlags :: T.Text,
  -- | Archiver flags.
  archiverFlags :: T.Text,
  -- | A list of directories where include files can be found. Used for
  -- dependency scanning and automatically appended to the compile line.
  includeDirs :: [T.Text],
  -- | Extra compilation dependencies.
  extraCompileDeps :: [T.Text],
  -- | Extra linking dependencies.
  extraLinkDeps :: [T.Text],
  -- | Files to exclude from the build.
  exclusions :: [T.Text],
  -- | Whether or not to build a static lib (using the archiver).
  staticLibrary :: Bool
  }
-- | Given a 'CTargetInfo' and a 'Target', produces a checksum.
-- Every configuration field that influences the generated build commands is
-- folded into the hash, so a config change invalidates cached results.
cTargetHash :: CTargetInfo -> Target -> Word32
cTargetHash info _ =
  -- "^" separates field name/value pairs and "^^" separates list elements,
  -- keeping distinct configurations from collapsing to the same string.
  Hash.crc32 $ TE.encodeUtf8 $ T.intercalate "^" [
    "srcDir",
    srcDir info,
    "compiler",
    compiler info,
    "linker",
    linker info,
    "archiver",
    archiver info,
    "inFileOption",
    inFileOption info,
    "outFileOption",
    outFileOption info,
    "includeOption",
    includeOption info,
    "commonCompileFlags",
    commonCompileFlags info,
    "cCompileFlags",
    cCompileFlags info,
    "cxxCompileFlags",
    cxxCompileFlags info,
    "linkFlags",
    linkFlags info,
    "archiverFlags",
    archiverFlags info,
    "includeDirs",
    T.intercalate "^^" $ includeDirs info,
    "extraCompileDeps",
    T.intercalate "^^" $ extraCompileDeps info,
    "extraLinkDeps",
    T.intercalate "^^" $ extraLinkDeps info,
    "exclusions",
    T.intercalate "^^" $ exclusions info,
    "staticLibrary",
    if staticLibrary info then "True" else "False"
    ]
-- | The data type for specifying where built files end up.
data BuildLocation =
    -- | Specifies that object files will end up adjacent to their source files
    -- and the executable will be in the same directory as the dib.hs file.
    InPlace
    -- | Specifies that the object files and executable will go in a certain directory.
  | BuildDir T.Text
    -- | Specifies that the object files will go in the first directory and the
    -- executable in the second directory.
  | ObjAndBinDirs T.Text T.Text
-- | An empty configuration: every textual field is blank, all lists are
-- empty, and a dynamic (non-static) build is assumed.  Callers override the
-- fields they need (see 'defaultGCCConfig' and friends).
emptyConfig :: CTargetInfo
emptyConfig = CTargetInfo {
  outputName = "",
  targetName = "",
  srcDir = "",
  outputLocation = InPlace,
  compiler = "",
  linker = "",
  archiver = "",
  inFileOption = "",
  outFileOption = "",
  includeOption = "",
  commonCompileFlags = "",
  cCompileFlags = "",
  cxxCompileFlags = "",
  linkFlags = "",
  archiverFlags = "",
  includeDirs = [],
  extraCompileDeps = [],
  extraLinkDeps = [],
  exclusions = [],
  staticLibrary = False
  }
-- | A default configuration for gcc.
defaultGCCConfig :: CTargetInfo
defaultGCCConfig = emptyConfig {
  compiler = "gcc",
  linker = "gcc",
  archiver = "ar",
  inFileOption = "-c",
  outFileOption = "-o",
  includeOption = "-I",
  archiverFlags = "rs"
  }

-- | A default configuration for g++.  The compiler stays \"gcc\" while only
-- the linker is switched to \"g++\" -- presumably because the gcc driver can
-- compile C++ sources by extension but linking needs libstdc++; TODO confirm
-- this is intentional rather than a typo.
defaultGXXConfig :: CTargetInfo
defaultGXXConfig = defaultGCCConfig {
  compiler = "gcc",
  linker = "g++"
  }

-- | A default configuration for clang.
defaultClangConfig :: CTargetInfo
defaultClangConfig = defaultGCCConfig {
  compiler = "clang",
  linker = "clang"
  }
-- | Flattens a path into a single file-name-safe component by turning both
-- forward and backward slashes into underscores.
massageFilePath :: T.Text -> T.Text
massageFilePath = T.replace "\\" "_" . T.replace "/" "_"
-- | Places an object file according to the 'BuildLocation'; paths redirected
-- into a build directory are flattened with 'massageFilePath'.
remapObjFile :: BuildLocation -> T.Text -> T.Text
remapObjFile loc file =
  case loc of
    InPlace                 -> file
    BuildDir dir            -> dir `T.snoc` F.pathSeparator <> massageFilePath file
    ObjAndBinDirs objDir _  -> objDir `T.snoc` F.pathSeparator <> massageFilePath file

-- | Places a binary (executable/library) according to the 'BuildLocation';
-- unlike object files, the file name is kept as-is.
remapBinFile :: BuildLocation -> T.Text -> T.Text
remapBinFile loc file =
  case loc of
    InPlace                 -> file
    BuildDir dir            -> dir `T.snoc` F.pathSeparator <> file
    ObjAndBinDirs _ binDir  -> binDir `T.snoc` F.pathSeparator <> file
-- | Given a 'CTargetInfo', will make the directories required to build the
-- project.  'InPlace' builds need no directories; the other locations are
-- created with @createDirectoryIfMissing True@ (mkdir -p semantics).
--
-- Cleanup vs. the original: drops a redundant trailing @return ()@ and the
-- surrounding @do@/@let@ in favour of a @where@ helper; behavior unchanged.
makeBuildDirs :: CTargetInfo -> IO ()
makeBuildDirs info = ensure (outputLocation info)
  where
    ensure InPlace = return ()
    ensure (BuildDir dir) = D.createDirectoryIfMissing True (T.unpack dir)
    ensure (ObjAndBinDirs objDir binDir) =
      D.createDirectoryIfMissing True (T.unpack objDir) >>
      D.createDirectoryIfMissing True (T.unpack binDir)
-- | True when the file ends with any of the exclusion suffixes.
excludeFiles :: [T.Text] -> T.Text -> Bool
excludeFiles excl file = L.any (\suffix -> suffix `T.isSuffixOf` file) excl

-- | Picks the C or C++ flag set based on the source file's extension.
getCorrectCompileFlags :: CTargetInfo -> T.Text -> T.Text
getCorrectCompileFlags info source
  | ".c" `T.isSuffixOf` source = cCompileFlags info
  | otherwise                  = cxxCompileFlags info
-- | Given a 'CTargetInfo', produces a 'Target'.
-- The target has a compile stage feeding either a link stage or, for
-- 'staticLibrary' builds, an archive stage.
makeCTarget :: CTargetInfo -> Target
makeCTarget info =
  let -- "-Ia -Ib ..." string built from 'includeDirs'.
      includeDirString = includeOption info <> T.intercalate (" " <> includeOption info) (includeDirs info)
      -- Shell command lines for compiling, linking, and archiving.
      makeBuildString source target = T.unpack $ T.concat [compiler info, " ", inFileOption info, " ", source, " ", outFileOption info, " ", target, " ", includeDirString, " ", commonCompileFlags info, " ", getCorrectCompileFlags info source]
      makeLinkString sources target = T.unpack $ T.concat [linker info, " ", T.unwords sources, " ", outFileOption info, " ", target, " ", linkFlags info]
      makeArchiveString sources target = T.unpack $ T.concat [archiver info, " ", archiverFlags info, " ", target, " ", T.unwords sources]
      buildCmd (ManyToOne sources target) = do
        -- Only the head is compiled; the remaining sources are presumably the
        -- header dependencies added by cDepScanner -- TODO confirm.
        let sourceFile = head sources
        let buildString = makeBuildString sourceFile target
        ANSI.setSGR [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.White]
        putStr "Building: "
        ANSI.setSGR [ANSI.Reset]
        putStrLn $ T.unpack sourceFile
        exitCode <- system buildString
        handleExitCode exitCode target buildString
      buildCmd _ = return $ Left "Unhandled SrcTransform."
      linkCmd (ManyToOne sources target) = do
        let linkString = makeLinkString sources target
        ANSI.setSGR [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.White]
        putStr "Linking: "
        ANSI.setSGR [ANSI.Reset]
        putStrLn $ T.unpack target
        exitCode <- system linkString
        handleExitCode exitCode target linkString
      linkCmd _ = return $ Left "Unhandled SrcTransform."
      archiveCmd (ManyToOne sources target) = do
        let archiveString = makeArchiveString sources target
        ANSI.setSGR [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.White]
        putStr "Archiving: "
        ANSI.setSGR [ANSI.Reset]
        putStrLn $ T.unpack target
        exitCode <- system archiveString
        handleExitCode exitCode target archiveString
      archiveCmd _ = return $ Left "Unhandled SrcTransform."
      buildDirGatherer = makeCommandGatherer $ makeBuildDirs info
      -- Compile stage renames sources to .o and scans C header dependencies.
      cppStage = Stage "compile" (map (changeExt "o" (outputLocation info))) (cDepScanner (map T.unpack $ includeDirs info)) (extraCompileDeps info) buildCmd
      linkStage = Stage "link" (combineTransforms (remapBinFile (outputLocation info) $ outputName info)) return (extraLinkDeps info) linkCmd
      archiveStage = Stage "archive" (combineTransforms (remapBinFile (outputLocation info) $ outputName info)) return [] archiveCmd
  in Target (targetName info) (cTargetHash info) [] [cppStage, if staticLibrary info then archiveStage else linkStage] [buildDirGatherer, makeFileTreeGatherer (srcDir info) (matchExtensionsExcluded [".cpp", ".c"] [excludeFiles $ exclusions info])]
-- | Rewrites a 'OneToOne' transform so the target is the input file with its
-- extension swapped for @newExt@ and remapped into the object directory.
-- Fix: the catch-all case previously hit 'undefined', which aborts with no
-- context; it now raises a named error identifying the function and cause.
changeExt :: T.Text -> BuildLocation -> SrcTransform -> SrcTransform
changeExt newExt buildLoc (OneToOne input _) = OneToOne input $ remapObjFile buildLoc $ T.dropWhileEnd (/='.') input <> newExt
changeExt _ _ _ = error "Dib.Builders.C.changeExt: expected a OneToOne transform"
-- | Collapses the per-file 'OneToOne' transforms produced by the compile
-- stage into a single sorted 'ManyToOne' for the link/archive step.
-- NOTE(review): the fold's lambda is partial -- it assumes every element is
-- a 'OneToOne' and will crash with a pattern-match failure otherwise.
combineTransforms :: T.Text -> [SrcTransform] -> [SrcTransform]
combineTransforms target transforms = [ManyToOne (L.sort sources) target]
  where sources = foldl' (\acc (OneToOne input _) -> acc ++ [input]) [] transforms
-- | Given a 'CTargetInfo', produces a 'Target' that will clean the project:
-- it removes every gathered .o file plus the produced executable.
makeCleanTarget :: CTargetInfo -> Target
makeCleanTarget info =
  let -- Deletes one gathered file, echoing its name first.
      cleanCmd (OneToOne input _) = do
        ANSI.setSGR [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.White]
        putStr "Removing: "
        ANSI.setSGR [ANSI.Reset]
        putStrLn $ T.unpack input
        D.removeFile (T.unpack input)
        return $ Right $ OneToOne "" ""
      cleanCmd _ = error "Should never hit this."
      -- Directory holding object files for each 'BuildLocation'.
      -- (In the last equation the pattern variable objDir shadows the local
      -- function of the same name.)
      objDir InPlace = srcDir info
      objDir (BuildDir dir) = dir
      objDir (ObjAndBinDirs objDir _) = objDir
      -- Full path of the produced executable for each 'BuildLocation'.
      programFile InPlace = outputName info
      programFile (BuildDir dir) = dir `T.snoc` F.pathSeparator <> outputName info
      programFile (ObjAndBinDirs _ binDir) = binDir `T.snoc` F.pathSeparator <> outputName info
      cleanStage = Stage "clean" id return [] cleanCmd
      objectGatherer = makeFileTreeGatherer (objDir $ outputLocation info) (matchExtension ".o")
      programGatherer = makeSingleFileGatherer (programFile $ outputLocation info)
  in Target ("clean-" <> targetName info) (const 0) [] [cleanStage] [objectGatherer, programGatherer]
| null | https://raw.githubusercontent.com/blajzer/dib/750253c972668bb0d849239f94b96050bae74f2a/src/Dib/Builders/C.hs | haskell | See LICENSE for license information.
| A builder for C/C++ code.
| The record type that is used to pass configuration info for the C builder.
| The name of the output file.
| The directory containing the source for this target.
| The compiler executable.
| The linker executable.
| The archiver executable.
| The command line option for the input file.
| The command line option for the output file.
| The compiler's include option
| Common compiler flags.
| C compiler flags.
| C++ compiler flags.
| Linker flags.
| Archiver flags.
| A list of directories where include files can be found. Used for
dependency scanning and automatically appended to the compile line.
| Extra compilation dependencies.
| Extra linking dependencies.
| Files to exclude from the build.
| Whether or not to build a static lib (using the archiver)
| The data type for specifying where built files end up.
| Specifies that object files will end up adjacent to their source files
and the executable will be in the same directory as the dib.hs file.
| Specifies that the object files and executable will go in a certain directory.
| An empty configuration.
| A default configuration for gcc.
| A default configuration for g++.
| A default configuration for clang.
| Given a 'CTargetInfo', will make the directories required to build the project. | Copyright ( c ) 2010 - 2018
module Dib.Builders.C (
CTargetInfo(CTargetInfo, outputName, targetName, srcDir, outputLocation, compiler, linker, archiver, inFileOption, outFileOption, commonCompileFlags, cCompileFlags, cxxCompileFlags, linkFlags, archiverFlags, includeDirs, extraCompileDeps, extraLinkDeps, exclusions, staticLibrary),
BuildLocation(InPlace, BuildDir, ObjAndBinDirs),
makeCTarget,
makeCleanTarget,
makeBuildDirs,
emptyConfig,
defaultGCCConfig,
defaultGXXConfig,
defaultClangConfig
) where
import Dib.Gatherers
import Dib.Target
import Dib.Types
import Dib.Util
import Dib.Scanners.CDepScanner
import Data.List as L
import Data.Monoid
import Data.Word
import System.Process (system)
import System.Directory as D
import System.FilePath as F
import qualified Data.Digest.CRC32 as Hash
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified System.Console.ANSI as ANSI
data CTargetInfo = CTargetInfo {
outputName :: T.Text,
| The name of the ' Target ' . Should be unique among all ' Target 's in a given build .
targetName :: T.Text,
srcDir :: T.Text,
| A ' BuildLocation ' that defines where the object and executable files go .
outputLocation :: BuildLocation,
compiler :: T.Text,
linker :: T.Text,
archiver :: T.Text,
inFileOption :: T.Text,
outFileOption :: T.Text,
includeOption :: T.Text,
commonCompileFlags :: T.Text,
cCompileFlags :: T.Text,
cxxCompileFlags :: T.Text,
linkFlags :: T.Text,
archiverFlags :: T.Text,
includeDirs :: [T.Text],
extraCompileDeps :: [T.Text],
extraLinkDeps :: [T.Text],
exclusions :: [T.Text],
staticLibrary :: Bool
}
| Given a ' CTargetInfo ' and a ' Target ' , produces a checksum
cTargetHash :: CTargetInfo -> Target -> Word32
cTargetHash info _ =
Hash.crc32 $ TE.encodeUtf8 $ T.intercalate "^" [
"srcDir",
srcDir info,
"compiler",
compiler info,
"linker",
linker info,
"archiver",
archiver info,
"inFileOption",
inFileOption info,
"outFileOption",
outFileOption info,
"includeOption",
includeOption info,
"commonCompileFlags",
commonCompileFlags info,
"cCompileFlags",
cCompileFlags info,
"cxxCompileFlags",
cxxCompileFlags info,
"linkFlags",
linkFlags info,
"archiverFlags",
archiverFlags info,
"includeDirs",
T.intercalate "^^" $ includeDirs info,
"extraCompileDeps",
T.intercalate "^^" $ extraCompileDeps info,
"extraLinkDeps",
T.intercalate "^^" $ extraLinkDeps info,
"exclusions",
T.intercalate "^^" $ exclusions info,
"staticLibrary",
if staticLibrary info then "True" else "False"
]
data BuildLocation =
InPlace
| BuildDir T.Text
| Specifies that the object files will go in the first directory and the
executable in the second directory .
| ObjAndBinDirs T.Text T.Text
emptyConfig :: CTargetInfo
emptyConfig = CTargetInfo {
outputName = "",
targetName = "",
srcDir = "",
outputLocation = InPlace,
compiler = "",
linker = "",
archiver = "",
inFileOption = "",
outFileOption = "",
includeOption = "",
commonCompileFlags = "",
cCompileFlags = "",
cxxCompileFlags = "",
linkFlags = "",
archiverFlags = "",
includeDirs = [],
extraCompileDeps = [],
extraLinkDeps = [],
exclusions = [],
staticLibrary = False
}
defaultGCCConfig :: CTargetInfo
defaultGCCConfig = emptyConfig {
compiler = "gcc",
linker = "gcc",
archiver = "ar",
inFileOption = "-c",
outFileOption = "-o",
includeOption = "-I",
archiverFlags = "rs"
}
defaultGXXConfig :: CTargetInfo
defaultGXXConfig = defaultGCCConfig {
compiler = "gcc",
linker = "g++"
}
defaultClangConfig :: CTargetInfo
defaultClangConfig = defaultGCCConfig {
compiler = "clang",
linker = "clang"
}
massageFilePath :: T.Text -> T.Text
massageFilePath path = T.replace "\\" "_" $ T.replace "/" "_" path
remapObjFile :: BuildLocation -> T.Text -> T.Text
remapObjFile InPlace file = file
remapObjFile (BuildDir dir) file = dir `T.snoc` F.pathSeparator <> massageFilePath file
remapObjFile (ObjAndBinDirs objDir _) file = objDir `T.snoc` F.pathSeparator <> massageFilePath file
remapBinFile :: BuildLocation -> T.Text -> T.Text
remapBinFile InPlace file = file
remapBinFile (BuildDir dir) file = dir `T.snoc` F.pathSeparator <> file
remapBinFile (ObjAndBinDirs _ binDir) file = binDir `T.snoc` F.pathSeparator <> file
makeBuildDirs :: CTargetInfo -> IO ()
makeBuildDirs info = do
let helper InPlace = return ()
helper (BuildDir dir) = D.createDirectoryIfMissing True (T.unpack dir)
helper (ObjAndBinDirs objDir binDir) = D.createDirectoryIfMissing True (T.unpack objDir) >> D.createDirectoryIfMissing True (T.unpack binDir)
helper (outputLocation info)
return ()
excludeFiles :: [T.Text] -> T.Text -> Bool
excludeFiles excl file = L.any (`T.isSuffixOf` file) excl
getCorrectCompileFlags :: CTargetInfo -> T.Text -> T.Text
getCorrectCompileFlags info source = if ".c" `T.isSuffixOf` source then cCompileFlags info else cxxCompileFlags info
| Given a ' CTargetInfo ' , produces a ' Target '
makeCTarget :: CTargetInfo -> Target
makeCTarget info =
let includeDirString = includeOption info <> T.intercalate (" " <> includeOption info) (includeDirs info)
makeBuildString source target = T.unpack $ T.concat [compiler info, " ", inFileOption info, " ", source, " ", outFileOption info, " ", target, " ", includeDirString, " ", commonCompileFlags info, " ", getCorrectCompileFlags info source]
makeLinkString sources target = T.unpack $ T.concat [linker info, " ", T.unwords sources, " ", outFileOption info, " ", target, " ", linkFlags info]
makeArchiveString sources target = T.unpack $ T.concat [archiver info, " ", archiverFlags info, " ", target, " ", T.unwords sources]
buildCmd (ManyToOne sources target) = do
let sourceFile = head sources
let buildString = makeBuildString sourceFile target
ANSI.setSGR [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.White]
putStr "Building: "
ANSI.setSGR [ANSI.Reset]
putStrLn $ T.unpack sourceFile
exitCode <- system buildString
handleExitCode exitCode target buildString
buildCmd _ = return $ Left "Unhandled SrcTransform."
linkCmd (ManyToOne sources target) = do
let linkString = makeLinkString sources target
ANSI.setSGR [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.White]
putStr "Linking: "
ANSI.setSGR [ANSI.Reset]
putStrLn $ T.unpack target
exitCode <- system linkString
handleExitCode exitCode target linkString
linkCmd _ = return $ Left "Unhandled SrcTransform."
archiveCmd (ManyToOne sources target) = do
let archiveString = makeArchiveString sources target
ANSI.setSGR [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.White]
putStr "Archiving: "
ANSI.setSGR [ANSI.Reset]
putStrLn $ T.unpack target
exitCode <- system archiveString
handleExitCode exitCode target archiveString
archiveCmd _ = return $ Left "Unhandled SrcTransform."
buildDirGatherer = makeCommandGatherer $ makeBuildDirs info
cppStage = Stage "compile" (map (changeExt "o" (outputLocation info))) (cDepScanner (map T.unpack $ includeDirs info)) (extraCompileDeps info) buildCmd
linkStage = Stage "link" (combineTransforms (remapBinFile (outputLocation info) $ outputName info)) return (extraLinkDeps info) linkCmd
archiveStage = Stage "archive" (combineTransforms (remapBinFile (outputLocation info) $ outputName info)) return [] archiveCmd
in Target (targetName info) (cTargetHash info) [] [cppStage, if staticLibrary info then archiveStage else linkStage] [buildDirGatherer, makeFileTreeGatherer (srcDir info) (matchExtensionsExcluded [".cpp", ".c"] [excludeFiles $ exclusions info])]
changeExt :: T.Text -> BuildLocation -> SrcTransform -> SrcTransform
changeExt newExt buildLoc (OneToOne input _) = OneToOne input $ remapObjFile buildLoc $ T.dropWhileEnd (/='.') input <> newExt
changeExt _ _ _ = undefined
combineTransforms :: T.Text -> [SrcTransform] -> [SrcTransform]
combineTransforms target transforms = [ManyToOne (L.sort sources) target]
where sources = foldl' (\acc (OneToOne input _) -> acc ++ [input]) [] transforms
| Given a ' CTargetInfo ' , produces a ' Target ' that will clean the project .
makeCleanTarget :: CTargetInfo -> Target
makeCleanTarget info =
let cleanCmd (OneToOne input _) = do
ANSI.setSGR [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.White]
putStr "Removing: "
ANSI.setSGR [ANSI.Reset]
putStrLn $ T.unpack input
D.removeFile (T.unpack input)
return $ Right $ OneToOne "" ""
cleanCmd _ = error "Should never hit this."
objDir InPlace = srcDir info
objDir (BuildDir dir) = dir
objDir (ObjAndBinDirs objDir _) = objDir
programFile InPlace = outputName info
programFile (BuildDir dir) = dir `T.snoc` F.pathSeparator <> outputName info
programFile (ObjAndBinDirs _ binDir) = binDir `T.snoc` F.pathSeparator <> outputName info
cleanStage = Stage "clean" id return [] cleanCmd
objectGatherer = makeFileTreeGatherer (objDir $ outputLocation info) (matchExtension ".o")
programGatherer = makeSingleFileGatherer (programFile $ outputLocation info)
in Target ("clean-" <> targetName info) (const 0) [] [cleanStage] [objectGatherer, programGatherer]
|
b6aef56136c226e3b41cedda5e5c5a583c59c36980f2a148fc0a1fcf43cc67f7 | borkdude/quickblog | api.clj | (ns quickblog.api
{:org.babashka/cli
{:spec
{
;; Blog metadata
:blog-title
{:desc "Title of the blog"
:ref "<title>"
:default "quickblog"
:require true
:group :blog-metadata}
:blog-author
{:desc "Author's name"
:ref "<name>"
:default "Quick Blogger"
:require true
:group :blog-metadata}
:blog-description
{:desc "Blog description for subtitle and RSS feeds"
:ref "<text>"
:default "A blog about blogging quickly"
:require true
:group :blog-metadata}
:blog-root
{:desc "Base URL of the blog"
:ref "<url>"
:default ""
:require true
:group :blog-metadata}
;; Optional metadata
:about-link
{:desc "Link to about the author page"
:ref "<url>"
:group :optional-metadata}
:discuss-link
{:desc "Link to discussion forum for posts"
:ref "<url>"
:group :optional-metadata}
:twitter-handle
{:desc "Author's Twitter handle"
:ref "<handle>"
:group :optional-metadata}
Post config
:default-metadata
{:desc "Default metadata to add to posts"
:default {:tags ["clojure"]}
:group :post-config}
:num-index-posts
{:desc "Number of most recent posts to show on the index page"
:ref "<num>"
:default 3
:group :post-config}
:posts-file
{:desc "File containing deprecated post metadata (used only for `migrate`)"
:ref "<file>"
:default "posts.edn"
:group :post-config}
;; Input directories
:assets-dir
{:desc "Directory to copy assets (images, etc.) from"
:ref "<dir>"
:default "assets"
:require true
:group :input-directories}
:posts-dir
{:desc "Directory to read posts from"
:ref "<dir>"
:default "posts"
:require true
:group :input-directories}
:templates-dir
{:desc "Directory to read templates from; see Templates section in README"
:ref "<dir>"
:default "templates"
:require true
:group :input-directories}
;; Output directories
:out-dir
{:desc "Base directory for outputting static site"
:ref "<dir>"
:default "public"
:require true
:group :output-directories}
:assets-out-dir
{:desc "Directory to write assets to (relative to :out-dir)"
:ref "<dir>"
:default "assets"
:require true
:group :output-directories}
:tags-dir
{:desc "Directory to write tags to (relative to :out-dir)"
:ref "<dir>"
:default "tags"
:require true
:group :output-directories}
;; Caching
:force-render
{:desc "If true, pages will be re-rendered regardless of cache status"
:default false
:group :caching}
:cache-dir
{:desc "Directory to use for caching"
:ref "<dir>"
:default ".work"
:require true
:group :caching}
:rendering-system-files
{:desc "Files involved in rendering pages (only set if you know what you're doing!)"
:ref "<file1> <file2>..."
:default ["bb.edn" "deps.edn"]
:coerce []
:require true
:group :caching}
;; Social sharing
:blog-image
{:desc "Blog thumbnail image URL; see Features > Social sharing in README"
:ref "<url>"
:group :social-sharing}
Favicon
:favicon
{:desc "If true, favicon will be added to all pages"
:default false
:group :favicon}
:favicon-dir
{:desc "Directory to read favicon assets from"
:ref "<dir>"
:default "assets/favicon"
:group :favicon}
:favicon-out-dir
{:desc "Directory to write favicon assets to (relative to :out-dir)"
:ref "<dir>"
:default "assets/favicon"
:group :favicon}
;; Command-specific opts
}}}
(:require
[babashka.fs :as fs]
[clojure.data.xml :as xml]
[clojure.edn :as edn]
[clojure.set :as set]
[clojure.string :as str]
[quickblog.internal :as lib]
[selmer.parser :as selmer]
[selmer.filters :as filters]))
;; Add filter for tag page links; see the Selmer docs on custom filters
;; (the original URL anchor here was "#filters").
(filters/add-filter! :escape-tag lib/escape-tag)
(defn- update-out-dirs
  "Re-roots :assets-out-dir and :favicon-out-dir under :out-dir unless they
  already start with it, so they may be given relative to :out-dir."
  [{:keys [out-dir assets-out-dir favicon-out-dir] :as opts}]
  (let [under-out-dir (fn [dir]
                        (if (str/starts-with? (str dir) (str out-dir))
                          dir
                          (fs/file out-dir dir)))]
    (assoc opts
           :assets-out-dir (under-out-dir assets-out-dir)
           :favicon-out-dir (under-out-dir favicon-out-dir))))
(defn- update-opts
  "Normalizes opts: prepends :templates-dir to :rendering-system-files (all
  coerced to files) and re-roots the output directories via `update-out-dirs`."
  [opts]
  (update-out-dirs
   (update opts :rendering-system-files
           (fn [files] (map fs/file (cons (:templates-dir opts) files))))))
(defn- get-defaults
  "Extracts {option-key default-value} from a babashka.cli spec found in
  `metadata` (e.g. namespace metadata). Entries without :default are skipped."
  [metadata]
  (into {}
        (keep (fn [[k spec]]
                (when (contains? spec :default)
                  [k (:default spec)])))
        (get-in metadata [:org.babashka/cli :spec])))
(defn- apply-default-opts
  "Fills in every option the caller omitted with the default declared in this
  namespace's CLI spec metadata, then normalizes with `update-opts`.
  (Caller-supplied values always win, including explicit nils.)"
  [opts]
  (let [defaults (get-defaults (meta (the-ns 'quickblog.api)))]
    (update-opts (merge defaults opts))))
;; File names of the bundled favicon resources; `ensure-favicon-assets`
;; copies each of these into the configured favicon dir when :favicon is set.
(def ^:private favicon-assets
  ["android-chrome-192x192.png"
   "android-chrome-512x512.png"
   "apple-touch-icon.png"
   "browserconfig.xml"
   "favicon-16x16.png"
   "favicon-32x32.png"
   "favicon.ico"
   "mstile-150x150.png"
   "safari-pinned-tab.svg"
   "site.webmanifest"])
;; Minimal HTML page that instantly redirects a legacy post URL to its new
;; location; {{new_url}} is substituted by Selmer in `gen-posts`.
(def ^:private legacy-template "
<html><head>
<meta http-equiv=\"refresh\" content=\"0; URL=/{{new_url}}\" />
</head></html>")
(defn- base-html
  "Returns the contents of the base page template, presumably creating it
  from the bundled default when missing (see `lib/ensure-template`)."
  [opts]
  (slurp (lib/ensure-template opts "base.html")))
(defn- ensure-favicon-assets
  "When the :favicon option is enabled, ensures each favicon asset exists in
  `favicon-dir`, sourcing it from the bundled assets/favicon resources."
  [{:keys [favicon favicon-dir]}]
  (when favicon
    (doseq [asset favicon-assets]
      (lib/ensure-resource (fs/file favicon-dir asset)
                           (fs/file "assets" "favicon" asset)))))
(defn- gen-posts
  "Writes HTML for every modified post into `out-dir`, emits redirect pages
  for posts flagged :legacy, and removes cache/output files of deleted posts."
  [{:keys [deleted-posts modified-posts posts
           cache-dir out-dir]
    :as opts}]
  (let [posts-to-write (set/union modified-posts
                                  (lib/modified-post-pages opts))
        page-template (base-html opts)
        post-template (slurp (lib/ensure-template opts "post.html"))]
    (fs/create-dirs cache-dir)
    (fs/create-dirs out-dir)
    (doseq [[file post] posts
            :when (contains? posts-to-write file)
            ;; NOTE(review): this shadows `file` from the outer binding --
            ;; presumably with the same value; confirm against lib's post map.
            :let [{:keys [file date legacy]} post
                  html-file (lib/html-file file)]]
      (lib/write-post! (assoc opts
                              :page-template page-template
                              :post-template post-template)
                       post)
      ;; Legacy posts also get /yyyy/mm/dd/<slug>/index.html redirecting to
      ;; the new flat URL.
      (let [legacy-dir (fs/file out-dir (str/replace date "-" "/")
                                (str/replace file ".md" ""))]
        (when legacy
          (fs/create-dirs legacy-dir)
          (let [legacy-file (fs/file (fs/file legacy-dir "index.html"))
                redirect-html (selmer/render legacy-template
                                             {:new_url html-file})]
            (println "Writing legacy redirect:" (str legacy-file))
            (spit legacy-file redirect-html)))))
    (doseq [file deleted-posts]
      (println "Post deleted; removing from cache and outdir:" (str file))
      (fs/delete-if-exists (fs/file cache-dir (lib/cache-file file)))
      (fs/delete-if-exists (fs/file out-dir (lib/html-file file))))))
(defn- gen-tags
  "Writes the tag index page and one page per tag under `tags-dir`, and
  deletes pages for modified tags that no longer have any posts."
  [{:keys [blog-title blog-description
           blog-image blog-image-alt twitter-handle
           modified-tags posts out-dir tags-dir]
    :as opts}]
  (let [tags-out-dir (fs/create-dirs (fs/file out-dir tags-dir))
        posts-by-tag (lib/posts-by-tag posts)
        tags-file (fs/file tags-out-dir "index.html")
        template (base-html opts)]
    ;; Regenerate only when some tag changed or the index file is missing.
    ;; (Idiom note: (seq modified-tags) would replace (not (empty? ...)).)
    (when (or (not (empty? modified-tags))
              (not (fs/exists? tags-file)))
      (lib/write-page! opts tags-file template
                       {:skip-archive true
                        :title (str blog-title " - Tags")
                        :relative-path "../"
                        :body (lib/tag-links "Tags" posts-by-tag opts)
                        :sharing {:description (format "Tags - %s"
                                                       blog-description)
                                  :author twitter-handle
                                  :twitter-handle twitter-handle
                                  :image (lib/blog-link opts blog-image)
                                  :image-alt blog-image-alt
                                  :url (lib/blog-link opts "tags/index.html")}})
      (doseq [tag-and-posts posts-by-tag]
        (lib/write-tag! opts tags-out-dir template tag-and-posts))
      ;; Delete tags pages for removed tags
      (doseq [tag (remove posts-by-tag modified-tags)
              :let [tag-filename (fs/file tags-out-dir (lib/tag-file tag))]]
        (println "Deleting removed tag:" (str tag-filename))
        (fs/delete-if-exists tag-filename)))))
;;;; Generate index page with last `num-index-posts` posts

(defn- index
  "Renders the index template body for the given posts.  Each post body is
  cut at the first `<!-- end-of-preview -->` marker; :truncated flags posts
  that were cut, presumably so the template can show a read-more link."
  [{:keys [posts] :as opts}]
  (let [posts (for [{:keys [file html] :as post} posts
                    ;; `html` is deref'd -- a delayed render from lib.
                    :let [preview (first (str/split @html #"<!-- end-of-preview -->" 2))]]
                (assoc post
                       :post-link (str/replace file ".md" ".html")
                       :body preview
                       :truncated (not= preview @html)))
        index-template (lib/ensure-template opts "index.html")]
    (selmer/render (slurp index-template) {:posts posts})))
(defn- spit-index
  "Writes index.html containing the `num-index-posts` most recent posts.
  Skipped when the recent-post set, their contents, and the output file are
  all unchanged since the cached run."
  [{:keys [blog-title blog-description blog-image blog-image-alt twitter-handle
           posts cached-posts deleted-posts modified-posts num-index-posts
           out-dir]
    :as opts}]
  (let [index-posts #(->> (vals %)
                          lib/sort-posts
                          (take num-index-posts))
        posts (index-posts posts)
        cached-posts (index-posts cached-posts)
        out-file (fs/file out-dir "index.html")
        ;; Stale when the set of front-page posts changed, one of them was
        ;; modified, a cached one was deleted, or the file is missing.
        stale? (or (not= (map :file posts)
                         (map :file cached-posts))
                   (some modified-posts (map :file posts))
                   (some deleted-posts (map :file cached-posts))
                   (not (fs/exists? out-file)))]
    (when stale?
      (let [body (index (assoc opts :posts posts))]
        (lib/write-page! opts out-file
                         (base-html opts)
                         {:title blog-title
                          :body body
                          :sharing {:description blog-description
                                    :author twitter-handle
                                    :twitter-handle twitter-handle
                                    :image (lib/blog-link opts blog-image)
                                    :image-alt blog-image-alt
                                    :url (lib/blog-link opts "index.html")}})))))
;;;; Generate archive page with links to all posts

(defn- spit-archive
  "Writes archive.html linking to every post (ordered by `lib/sort-posts`).
  Rewritten only when any post metadata changed or the file is missing."
  [{:keys [blog-title blog-description
           blog-image blog-image-alt twitter-handle
           modified-metadata posts out-dir]
    :as opts}]
  (let [out-file (fs/file out-dir "archive.html")
        stale? (or (some not-empty (vals modified-metadata))
                   (not (fs/exists? out-file)))]
    (when stale?
      (let [title (str blog-title " - Archive")
            posts (lib/sort-posts (vals posts))]
        (lib/write-page! opts out-file
                         (base-html opts)
                         {:skip-archive true
                          :title title
                          :body (lib/post-links "Archive" posts opts)
                          :sharing {:description (format "Archive - %s"
                                                         blog-description)
                                    :author twitter-handle
                                    :twitter-handle twitter-handle
                                    :image (lib/blog-link opts blog-image)
                                    :image-alt blog-image-alt
                                    :url (lib/blog-link opts "archive.html")}})))))
;;;; Generate atom feeds

;; Fix: the Atom namespace URI had been lost (empty string), which breaks the
;; ::atom/* element keywords used below.  Restore the standard Atom namespace.
(xml/alias-uri 'atom "http://www.w3.org/2005/Atom")
(import java.time.format.DateTimeFormatter)
(defn- rfc-3339-now
  "Current UTC time as an RFC 3339 timestamp with a numeric offset (+00:00)."
  []
  (.format (java.time.ZonedDateTime/now java.time.ZoneOffset/UTC)
           (DateTimeFormatter/ofPattern "yyyy-MM-dd'T'HH:mm:ssxxx")))
(defn- rfc-3339
  "Converts a yyyy-MM-dd date string into an RFC 3339 timestamp pinned to the
  end of that day (23:59:59) in UTC."
  [yyyy-MM-dd]
  (let [date (java.time.LocalDate/parse
              yyyy-MM-dd (DateTimeFormatter/ofPattern "yyyy-MM-dd"))
        end-of-day (java.time.ZonedDateTime/of (.atTime date 23 59 59)
                                               java.time.ZoneOffset/UTC)]
    (.format end-of-day (DateTimeFormatter/ofPattern "yyyy-MM-dd'T'HH:mm:ssxxx"))))
(defn- atom-feed
  "Builds an Atom (RFC 4287) feed document string for `posts`; posts flagged
  :preview are excluded.  Validate at https://validator.w3.org/feed/"
  [{:keys [blog-title blog-author blog-root] :as opts} posts]
  (-> (xml/sexp-as-element
       [::atom/feed
        ;; Fix: the xmlns value had been lost (empty string); a feed without
        ;; the Atom namespace is invalid.  Restore the standard URI.
        {:xmlns "http://www.w3.org/2005/Atom"}
        [::atom/title blog-title]
        [::atom/link {:href (lib/blog-link opts "atom.xml") :rel "self"}]
        [::atom/link {:href blog-root}]
        [::atom/updated (rfc-3339-now)]
        [::atom/id blog-root]
        [::atom/author
         [::atom/name blog-author]]
        (for [{:keys [title date file preview html]} posts
              :when (not preview)
              :let [html-file (str/replace file ".md" ".html")
                    link (lib/blog-link opts html-file)]]
          [::atom/entry
           [::atom/id link]
           [::atom/link {:href link}]
           [::atom/title title]
           [::atom/updated (rfc-3339 date)]
           ;; Post bodies are embedded as CDATA-wrapped HTML.
           [::atom/content {:type "html"}
            [:-cdata @html]]])])
      xml/indent-str))
(defn- clojure-post?
  "Truthy when any tag is clojure or clojurescript, case-insensitively.
  Returns the first matched (lower-cased) tag, or nil."
  [{:keys [tags]}]
  (some #{"clojure" "clojurescript"} (map str/lower-case tags)))
(defn- spit-feeds
  "Writes the main Atom feed (atom.xml) and the Clojure-only feed
  (planetclojure.xml); each is skipped when no relevant post changed and the
  file already exists."
  [{:keys [out-dir modified-posts posts] :as opts}]
  (let [feed-file (fs/file out-dir "atom.xml")
        clojure-feed-file (fs/file out-dir "planetclojure.xml")
        all-posts (lib/sort-posts (vals posts))
        clojure-posts (->> (vals posts)
                           (filter clojure-post?)
                           lib/sort-posts)
        clojure-posts-modified? (->> modified-posts
                                     (map posts)
                                     (some clojure-post?))]
    (if (and (not clojure-posts-modified?) (fs/exists? clojure-feed-file))
      (println "No Clojure posts modified; skipping Clojure feed")
      (do
        (println "Writing Clojure feed" (str clojure-feed-file))
        (spit clojure-feed-file (atom-feed opts clojure-posts))))
    (if (and (empty? modified-posts) (fs/exists? feed-file))
      (println "No posts modified; skipping main feed")
      (do
        (println "Writing feed" (str feed-file))
        (spit feed-file (atom-feed opts all-posts))))))
(defn render
  "Renders posts declared in `posts.edn` to `out-dir`.
  Returns the fully-resolved opts map."
  [opts]
  (let [{:keys [assets-dir
                assets-out-dir
                cache-dir
                favicon-dir
                favicon-out-dir
                out-dir
                posts-file
                templates-dir]
         :as opts}
        (-> opts apply-default-opts lib/refresh-cache)]
    (if (empty? (:posts opts))
      ;; Nothing to render: tell the user (on stderr) how to get posts.
      (binding [*out* *err*]
        (println
         (if (fs/exists? posts-file)
           (format "Run `bb migrate` to move metadata from `%s` to post files"
                   posts-file)
           "No posts found; run `bb new` to create one")))
      (do
        ;; Ensure default template/assets exist, then copy static files.
        (lib/ensure-template opts "style.css")
        (ensure-favicon-assets opts)
        (when (fs/exists? assets-dir)
          (lib/copy-tree-modified assets-dir assets-out-dir))
        (when (fs/exists? favicon-dir)
          (lib/copy-tree-modified favicon-dir favicon-out-dir))
        (doseq [file (fs/glob templates-dir "*.{css,svg}")]
          (lib/copy-modified file (fs/file out-dir (.getFileName file))))
        (fs/create-dirs (fs/file cache-dir))
        ;; Generate all pages, then persist the cache for the next run.
        (gen-posts opts)
        (gen-tags opts)
        (spit-archive opts)
        (spit-index opts)
        (spit-feeds opts)
        (lib/write-cache! opts)))
    opts))
(defn quickblog
"Alias for `render`"
[opts]
(render opts))
(defn- now []
(.format (java.time.LocalDate/now)
(java.time.format.DateTimeFormatter/ofPattern "yyyy-MM-dd")))
(defn new
"Creates new `file` in posts dir."
{:org.babashka/cli
{:spec
{:file
{:desc "Filename of post (relative to posts-dir)"
:ref "<filename>"
:require true}
:title
{:desc "Title of post"
:ref "<title>"
:require true}
:tags
{:desc "List of tags (default: 'clojure'; example: --tags tag1 tag2 \"tag3 has spaces\")"
:ref "<tags>"
:coerce []}}}}
[opts]
(let [{:keys [file title posts-dir tags default-metadata]
:as opts} (apply-default-opts opts)
tags (cond (empty? tags) (:tags default-metadata)
(= tags [true]) [] ;; `--tags` without arguments
:else tags)]
(doseq [k [:file :title]]
(assert (contains? opts k) (format "Missing required option: %s" k)))
(let [file (if (re-matches #"^.+[.][^.]+$" file)
file
(str file ".md"))
post-file (fs/file posts-dir file)]
(when-not (fs/exists? post-file)
(fs/create-dirs posts-dir)
(spit (fs/file posts-dir file)
(format "Title: %s\nDate: %s\nTags: %s\n\nWrite a blog post here!"
title (now) (str/join "," tags)))))))
(defn clean
"Removes cache and output directories"
[opts]
(let [{:keys [cache-dir out-dir]} (apply-default-opts opts)]
(doseq [dir [cache-dir out-dir]]
(println "Removing dir:" dir)
(fs/delete-tree dir))))
(defn migrate
"Migrates from `posts.edn` to post-local metadata"
[opts]
(let [{:keys [posts-file] :as opts} (apply-default-opts opts)]
(if (fs/exists? posts-file)
(do
(doseq [post (->> (slurp posts-file) (format "[%s]") edn/read-string)]
(lib/migrate-post opts post))
(println "If all posts were successfully migrated, you should now delete"
(str posts-file)))
(println (format "Posts file %s does not exist; no posts to migrate"
(str posts-file))))))
(defn refresh-templates
"Updates to latest default templates"
[opts]
(lib/refresh-templates (apply-default-opts opts)))
(defn serve
"Runs file-server on `port`."
{:org.babashka/cli
{:spec
{:port
{:desc "Port for HTTP server to listen on"
:ref "<port>"
:default 1888}}}}
([opts] (serve opts true))
([opts block?]
(let [{:keys [port out-dir]} (merge (get-defaults (meta #'serve))
(apply-default-opts opts))
serve (requiring-resolve 'babashka.http-server/serve)]
(serve {:port port
:dir out-dir})
(when block? @(promise)))))
(def ^:private posts-cache (atom nil))
(defn watch
"Watches posts, templates, and assets for changes. Runs file server using
`serve`."
{:org.babashka/cli
{:spec
{:port
{:desc "Port for HTTP server to listen on"
:ref "<port>"
:default 1888}}}}
[opts]
(let [{:keys [assets-dir assets-out-dir posts-dir templates-dir]
:as opts}
(-> opts
apply-default-opts
(assoc :watch "<script type=\"text/javascript\" src=\"\"></script>")
render)]
(reset! posts-cache (:posts opts))
(serve opts false)
(let [load-pod (requiring-resolve 'babashka.pods/load-pod)]
(load-pod 'org.babashka/fswatcher "0.0.3")
(let [watch (requiring-resolve 'pod.babashka.fswatcher/watch)]
(watch posts-dir
(fn [{:keys [path type]}]
(println "Change detected:" (name type) (str path))
(when (#{:create :remove :rename :write :write|chmod} type)
(let [post-filename (-> (fs/file path) fs/file-name)]
skip Emacs backup files and the like
(when-not (str/starts-with? post-filename ".")
(println "Re-rendering" post-filename)
(let [post (lib/load-post opts path)
posts (cond
(contains? #{:remove :rename} type)
(dissoc @posts-cache post-filename)
(:quickblog/error post)
(do
(println (:quickblog/error post))
(dissoc @posts-cache post-filename))
:else
(assoc @posts-cache post-filename post))
opts (-> opts
(assoc :cached-posts @posts-cache
:posts posts)
render)]
(reset! posts-cache (:posts opts))))))))
(watch templates-dir
(fn [{:keys [path type]}]
(println "Template change detected; re-rendering all posts:"
(name type) (str path))
(let [opts (-> opts
(dissoc :cached-posts :posts)
render)]
(reset! posts-cache (:posts opts)))))
(when (fs/exists? assets-dir)
(watch assets-dir
(fn [{:keys [path type]}]
(println "Asset change detected:"
(name type) (str path))
(when (contains? #{:remove :rename} type)
(let [file (fs/file assets-out-dir (fs/file-name path))]
(println "Removing deleted asset:" (str file))
(fs/delete-if-exists file)))
(lib/copy-tree-modified assets-dir assets-out-dir)))))))
@(promise))
| null | https://raw.githubusercontent.com/borkdude/quickblog/a011122f1654206aa9a5d411010807d319ea0ad8/src/quickblog/api.clj | clojure | Blog metadata
Optional metadata
Input directories
Output directories
Caching
Social sharing
Command-specific opts
Add filter for tag page links; see:
#filters
Delete tags pages for removed tags
Generate index page with last `num-index-posts` posts
Generate archive page with links to all posts
Generate atom feeds
validate at
`--tags` without arguments | (ns quickblog.api
{:org.babashka/cli
{:spec
{
:blog-title
{:desc "Title of the blog"
:ref "<title>"
:default "quickblog"
:require true
:group :blog-metadata}
:blog-author
{:desc "Author's name"
:ref "<name>"
:default "Quick Blogger"
:require true
:group :blog-metadata}
:blog-description
{:desc "Blog description for subtitle and RSS feeds"
:ref "<text>"
:default "A blog about blogging quickly"
:require true
:group :blog-metadata}
:blog-root
{:desc "Base URL of the blog"
:ref "<url>"
:default ""
:require true
:group :blog-metadata}
:about-link
{:desc "Link to about the author page"
:ref "<url>"
:group :optional-metadata}
:discuss-link
{:desc "Link to discussion forum for posts"
:ref "<url>"
:group :optional-metadata}
:twitter-handle
{:desc "Author's Twitter handle"
:ref "<handle>"
:group :optional-metadata}
Post config
:default-metadata
{:desc "Default metadata to add to posts"
:default {:tags ["clojure"]}
:group :post-config}
:num-index-posts
{:desc "Number of most recent posts to show on the index page"
:ref "<num>"
:default 3
:group :post-config}
:posts-file
{:desc "File containing deprecated post metadata (used only for `migrate`)"
:ref "<file>"
:default "posts.edn"
:group :post-config}
:assets-dir
{:desc "Directory to copy assets (images, etc.) from"
:ref "<dir>"
:default "assets"
:require true
:group :input-directories}
:posts-dir
{:desc "Directory to read posts from"
:ref "<dir>"
:default "posts"
:require true
:group :input-directories}
:templates-dir
{:desc "Directory to read templates from; see Templates section in README"
:ref "<dir>"
:default "templates"
:require true
:group :input-directories}
:out-dir
{:desc "Base directory for outputting static site"
:ref "<dir>"
:default "public"
:require true
:group :output-directories}
:assets-out-dir
{:desc "Directory to write assets to (relative to :out-dir)"
:ref "<dir>"
:default "assets"
:require true
:group :output-directories}
:tags-dir
{:desc "Directory to write tags to (relative to :out-dir)"
:ref "<dir>"
:default "tags"
:require true
:group :output-directories}
:force-render
{:desc "If true, pages will be re-rendered regardless of cache status"
:default false
:group :caching}
:cache-dir
{:desc "Directory to use for caching"
:ref "<dir>"
:default ".work"
:require true
:group :caching}
:rendering-system-files
{:desc "Files involved in rendering pages (only set if you know what you're doing!)"
:ref "<file1> <file2>..."
:default ["bb.edn" "deps.edn"]
:coerce []
:require true
:group :caching}
:blog-image
{:desc "Blog thumbnail image URL; see Features > Social sharing in README"
:ref "<url>"
:group :social-sharing}
Favicon
:favicon
{:desc "If true, favicon will be added to all pages"
:default false
:group :favicon}
:favicon-dir
{:desc "Directory to read favicon assets from"
:ref "<dir>"
:default "assets/favicon"
:group :favicon}
:favicon-out-dir
{:desc "Directory to write favicon assets to (relative to :out-dir)"
:ref "<dir>"
:default "assets/favicon"
:group :favicon}
}}}
(:require
[babashka.fs :as fs]
[clojure.data.xml :as xml]
[clojure.edn :as edn]
[clojure.set :as set]
[clojure.string :as str]
[quickblog.internal :as lib]
[selmer.parser :as selmer]
[selmer.filters :as filters]))
(filters/add-filter! :escape-tag lib/escape-tag)
(defn- update-out-dirs
[{:keys [out-dir assets-out-dir favicon-out-dir] :as opts}]
(let [out-dir-ify (fn [dir]
(if-not (str/starts-with? (str dir) (str out-dir))
(fs/file out-dir dir)
dir))]
(assoc opts
:assets-out-dir (out-dir-ify assets-out-dir)
:favicon-out-dir (out-dir-ify favicon-out-dir))))
(defn- update-opts [opts]
(-> opts
(update :rendering-system-files #(map fs/file (cons (:templates-dir opts) %)))
update-out-dirs))
(defn- get-defaults [metadata]
(->> (get-in metadata [:org.babashka/cli :spec])
(filter (fn [[_ m]] (contains? m :default)))
(map (fn [[k m]] [k (:default m)]))
(into {})))
(defn- apply-default-opts [opts]
(let [defaults (get-defaults (meta (the-ns 'quickblog.api)))]
(-> (->> defaults
(map (fn [[k default]] [k (if (contains? opts k) (opts k) default)]))
(into {}))
(merge opts)
update-opts)))
(def ^:private favicon-assets
["android-chrome-192x192.png"
"android-chrome-512x512.png"
"apple-touch-icon.png"
"browserconfig.xml"
"favicon-16x16.png"
"favicon-32x32.png"
"favicon.ico"
"mstile-150x150.png"
"safari-pinned-tab.svg"
"site.webmanifest"])
(def ^:private legacy-template "
<html><head>
<meta http-equiv=\"refresh\" content=\"0; URL=/{{new_url}}\" />
</head></html>")
(defn- base-html [opts]
(slurp (lib/ensure-template opts "base.html")))
(defn- ensure-favicon-assets [{:keys [favicon favicon-dir]}]
(when favicon
(doseq [asset favicon-assets]
(lib/ensure-resource (fs/file favicon-dir asset)
(fs/file "assets" "favicon" asset)))))
(defn- gen-posts [{:keys [deleted-posts modified-posts posts
cache-dir out-dir]
:as opts}]
(let [posts-to-write (set/union modified-posts
(lib/modified-post-pages opts))
page-template (base-html opts)
post-template (slurp (lib/ensure-template opts "post.html"))]
(fs/create-dirs cache-dir)
(fs/create-dirs out-dir)
(doseq [[file post] posts
:when (contains? posts-to-write file)
:let [{:keys [file date legacy]} post
html-file (lib/html-file file)]]
(lib/write-post! (assoc opts
:page-template page-template
:post-template post-template)
post)
(let [legacy-dir (fs/file out-dir (str/replace date "-" "/")
(str/replace file ".md" ""))]
(when legacy
(fs/create-dirs legacy-dir)
(let [legacy-file (fs/file (fs/file legacy-dir "index.html"))
redirect-html (selmer/render legacy-template
{:new_url html-file})]
(println "Writing legacy redirect:" (str legacy-file))
(spit legacy-file redirect-html)))))
(doseq [file deleted-posts]
(println "Post deleted; removing from cache and outdir:" (str file))
(fs/delete-if-exists (fs/file cache-dir (lib/cache-file file)))
(fs/delete-if-exists (fs/file out-dir (lib/html-file file))))))
(defn- gen-tags [{:keys [blog-title blog-description
blog-image blog-image-alt twitter-handle
modified-tags posts out-dir tags-dir]
:as opts}]
(let [tags-out-dir (fs/create-dirs (fs/file out-dir tags-dir))
posts-by-tag (lib/posts-by-tag posts)
tags-file (fs/file tags-out-dir "index.html")
template (base-html opts)]
(when (or (not (empty? modified-tags))
(not (fs/exists? tags-file)))
(lib/write-page! opts tags-file template
{:skip-archive true
:title (str blog-title " - Tags")
:relative-path "../"
:body (lib/tag-links "Tags" posts-by-tag opts)
:sharing {:description (format "Tags - %s"
blog-description)
:author twitter-handle
:twitter-handle twitter-handle
:image (lib/blog-link opts blog-image)
:image-alt blog-image-alt
:url (lib/blog-link opts "tags/index.html")}})
(doseq [tag-and-posts posts-by-tag]
(lib/write-tag! opts tags-out-dir template tag-and-posts))
(doseq [tag (remove posts-by-tag modified-tags)
:let [tag-filename (fs/file tags-out-dir (lib/tag-file tag))]]
(println "Deleting removed tag:" (str tag-filename))
(fs/delete-if-exists tag-filename)))))
(defn- index [{:keys [posts] :as opts}]
(let [posts (for [{:keys [file html] :as post} posts
:let [preview (first (str/split @html #"<!-- end-of-preview -->" 2))]]
(assoc post
:post-link (str/replace file ".md" ".html")
:body preview
:truncated (not= preview @html)))
index-template (lib/ensure-template opts "index.html")]
(selmer/render (slurp index-template) {:posts posts})))
(defn- spit-index
[{:keys [blog-title blog-description blog-image blog-image-alt twitter-handle
posts cached-posts deleted-posts modified-posts num-index-posts
out-dir]
:as opts}]
(let [index-posts #(->> (vals %)
lib/sort-posts
(take num-index-posts))
posts (index-posts posts)
cached-posts (index-posts cached-posts)
out-file (fs/file out-dir "index.html")
stale? (or (not= (map :file posts)
(map :file cached-posts))
(some modified-posts (map :file posts))
(some deleted-posts (map :file cached-posts))
(not (fs/exists? out-file)))]
(when stale?
(let [body (index (assoc opts :posts posts))]
(lib/write-page! opts out-file
(base-html opts)
{:title blog-title
:body body
:sharing {:description blog-description
:author twitter-handle
:twitter-handle twitter-handle
:image (lib/blog-link opts blog-image)
:image-alt blog-image-alt
:url (lib/blog-link opts "index.html")}})))))
(defn- spit-archive [{:keys [blog-title blog-description
blog-image blog-image-alt twitter-handle
modified-metadata posts out-dir]
:as opts}]
(let [out-file (fs/file out-dir "archive.html")
stale? (or (some not-empty (vals modified-metadata))
(not (fs/exists? out-file)))]
(when stale?
(let [title (str blog-title " - Archive")
posts (lib/sort-posts (vals posts))]
(lib/write-page! opts out-file
(base-html opts)
{:skip-archive true
:title title
:body (lib/post-links "Archive" posts opts)
:sharing {:description (format "Archive - %s"
blog-description)
:author twitter-handle
:twitter-handle twitter-handle
:image (lib/blog-link opts blog-image)
:image-alt blog-image-alt
:url (lib/blog-link opts "archive.html")}})))))
(xml/alias-uri 'atom "")
(import java.time.format.DateTimeFormatter)
(defn- rfc-3339-now []
(let [fmt (DateTimeFormatter/ofPattern "yyyy-MM-dd'T'HH:mm:ssxxx")
now (java.time.ZonedDateTime/now java.time.ZoneOffset/UTC)]
(.format now fmt)))
(defn- rfc-3339 [yyyy-MM-dd]
(let [in-fmt (DateTimeFormatter/ofPattern "yyyy-MM-dd")
local-date (java.time.LocalDate/parse yyyy-MM-dd in-fmt)
fmt (DateTimeFormatter/ofPattern "yyyy-MM-dd'T'HH:mm:ssxxx")
now (java.time.ZonedDateTime/of (.atTime local-date 23 59 59) java.time.ZoneOffset/UTC)]
(.format now fmt)))
(defn- atom-feed
[{:keys [blog-title blog-author blog-root] :as opts} posts]
(-> (xml/sexp-as-element
[::atom/feed
{:xmlns ""}
[::atom/title blog-title]
[::atom/link {:href (lib/blog-link opts "atom.xml") :rel "self"}]
[::atom/link {:href blog-root}]
[::atom/updated (rfc-3339-now)]
[::atom/id blog-root]
[::atom/author
[::atom/name blog-author]]
(for [{:keys [title date file preview html]} posts
:when (not preview)
:let [html-file (str/replace file ".md" ".html")
link (lib/blog-link opts html-file)]]
[::atom/entry
[::atom/id link]
[::atom/link {:href link}]
[::atom/title title]
[::atom/updated (rfc-3339 date)]
[::atom/content {:type "html"}
[:-cdata @html]]])])
xml/indent-str))
(defn- clojure-post? [{:keys [tags]}]
(let [clojure-tags #{"clojure" "clojurescript"}
lowercase-tags (map str/lower-case tags)]
(some clojure-tags lowercase-tags)))
(defn- spit-feeds [{:keys [out-dir modified-posts posts] :as opts}]
(let [feed-file (fs/file out-dir "atom.xml")
clojure-feed-file (fs/file out-dir "planetclojure.xml")
all-posts (lib/sort-posts (vals posts))
clojure-posts (->> (vals posts)
(filter clojure-post?)
lib/sort-posts)
clojure-posts-modified? (->> modified-posts
(map posts)
(some clojure-post?))]
(if (and (not clojure-posts-modified?) (fs/exists? clojure-feed-file))
(println "No Clojure posts modified; skipping Clojure feed")
(do
(println "Writing Clojure feed" (str clojure-feed-file))
(spit clojure-feed-file (atom-feed opts clojure-posts))))
(if (and (empty? modified-posts) (fs/exists? feed-file))
(println "No posts modified; skipping main feed")
(do
(println "Writing feed" (str feed-file))
(spit feed-file (atom-feed opts all-posts))))))
(defn render
"Renders posts declared in `posts.edn` to `out-dir`."
[opts]
(let [{:keys [assets-dir
assets-out-dir
cache-dir
favicon-dir
favicon-out-dir
out-dir
posts-file
templates-dir]
:as opts}
(-> opts apply-default-opts lib/refresh-cache)]
(if (empty? (:posts opts))
(binding [*out* *err*]
(println
(if (fs/exists? posts-file)
(format "Run `bb migrate` to move metadata from `%s` to post files"
posts-file)
"No posts found; run `bb new` to create one")))
(do
(lib/ensure-template opts "style.css")
(ensure-favicon-assets opts)
(when (fs/exists? assets-dir)
(lib/copy-tree-modified assets-dir assets-out-dir))
(when (fs/exists? favicon-dir)
(lib/copy-tree-modified favicon-dir favicon-out-dir))
(doseq [file (fs/glob templates-dir "*.{css,svg}")]
(lib/copy-modified file (fs/file out-dir (.getFileName file))))
(fs/create-dirs (fs/file cache-dir))
(gen-posts opts)
(gen-tags opts)
(spit-archive opts)
(spit-index opts)
(spit-feeds opts)
(lib/write-cache! opts)))
opts))
(defn quickblog
"Alias for `render`"
[opts]
(render opts))
(defn- now []
(.format (java.time.LocalDate/now)
(java.time.format.DateTimeFormatter/ofPattern "yyyy-MM-dd")))
(defn new
"Creates new `file` in posts dir."
{:org.babashka/cli
{:spec
{:file
{:desc "Filename of post (relative to posts-dir)"
:ref "<filename>"
:require true}
:title
{:desc "Title of post"
:ref "<title>"
:require true}
:tags
{:desc "List of tags (default: 'clojure'; example: --tags tag1 tag2 \"tag3 has spaces\")"
:ref "<tags>"
:coerce []}}}}
[opts]
(let [{:keys [file title posts-dir tags default-metadata]
:as opts} (apply-default-opts opts)
tags (cond (empty? tags) (:tags default-metadata)
:else tags)]
(doseq [k [:file :title]]
(assert (contains? opts k) (format "Missing required option: %s" k)))
(let [file (if (re-matches #"^.+[.][^.]+$" file)
file
(str file ".md"))
post-file (fs/file posts-dir file)]
(when-not (fs/exists? post-file)
(fs/create-dirs posts-dir)
(spit (fs/file posts-dir file)
(format "Title: %s\nDate: %s\nTags: %s\n\nWrite a blog post here!"
title (now) (str/join "," tags)))))))
(defn clean
"Removes cache and output directories"
[opts]
(let [{:keys [cache-dir out-dir]} (apply-default-opts opts)]
(doseq [dir [cache-dir out-dir]]
(println "Removing dir:" dir)
(fs/delete-tree dir))))
(defn migrate
"Migrates from `posts.edn` to post-local metadata"
[opts]
(let [{:keys [posts-file] :as opts} (apply-default-opts opts)]
(if (fs/exists? posts-file)
(do
(doseq [post (->> (slurp posts-file) (format "[%s]") edn/read-string)]
(lib/migrate-post opts post))
(println "If all posts were successfully migrated, you should now delete"
(str posts-file)))
(println (format "Posts file %s does not exist; no posts to migrate"
(str posts-file))))))
(defn refresh-templates
"Updates to latest default templates"
[opts]
(lib/refresh-templates (apply-default-opts opts)))
(defn serve
"Runs file-server on `port`."
{:org.babashka/cli
{:spec
{:port
{:desc "Port for HTTP server to listen on"
:ref "<port>"
:default 1888}}}}
([opts] (serve opts true))
([opts block?]
(let [{:keys [port out-dir]} (merge (get-defaults (meta #'serve))
(apply-default-opts opts))
serve (requiring-resolve 'babashka.http-server/serve)]
(serve {:port port
:dir out-dir})
(when block? @(promise)))))
(def ^:private posts-cache (atom nil))
(defn watch
"Watches posts, templates, and assets for changes. Runs file server using
`serve`."
{:org.babashka/cli
{:spec
{:port
{:desc "Port for HTTP server to listen on"
:ref "<port>"
:default 1888}}}}
[opts]
(let [{:keys [assets-dir assets-out-dir posts-dir templates-dir]
:as opts}
(-> opts
apply-default-opts
(assoc :watch "<script type=\"text/javascript\" src=\"\"></script>")
render)]
(reset! posts-cache (:posts opts))
(serve opts false)
(let [load-pod (requiring-resolve 'babashka.pods/load-pod)]
(load-pod 'org.babashka/fswatcher "0.0.3")
(let [watch (requiring-resolve 'pod.babashka.fswatcher/watch)]
(watch posts-dir
(fn [{:keys [path type]}]
(println "Change detected:" (name type) (str path))
(when (#{:create :remove :rename :write :write|chmod} type)
(let [post-filename (-> (fs/file path) fs/file-name)]
skip Emacs backup files and the like
(when-not (str/starts-with? post-filename ".")
(println "Re-rendering" post-filename)
(let [post (lib/load-post opts path)
posts (cond
(contains? #{:remove :rename} type)
(dissoc @posts-cache post-filename)
(:quickblog/error post)
(do
(println (:quickblog/error post))
(dissoc @posts-cache post-filename))
:else
(assoc @posts-cache post-filename post))
opts (-> opts
(assoc :cached-posts @posts-cache
:posts posts)
render)]
(reset! posts-cache (:posts opts))))))))
(watch templates-dir
(fn [{:keys [path type]}]
(println "Template change detected; re-rendering all posts:"
(name type) (str path))
(let [opts (-> opts
(dissoc :cached-posts :posts)
render)]
(reset! posts-cache (:posts opts)))))
(when (fs/exists? assets-dir)
(watch assets-dir
(fn [{:keys [path type]}]
(println "Asset change detected:"
(name type) (str path))
(when (contains? #{:remove :rename} type)
(let [file (fs/file assets-out-dir (fs/file-name path))]
(println "Removing deleted asset:" (str file))
(fs/delete-if-exists file)))
(lib/copy-tree-modified assets-dir assets-out-dir)))))))
@(promise))
|
ad2823b950680cf5c8b1f31306c46aab023fac843543c1e57c6167a9bd3c1b12 | jwiegley/notes | Challenge.hs | module Challenge where
import Data.IORef
foo n i = do
r <- newIORef n
return $ \i -> atomicModifyIORef r (\x -> (x,x+i))
| null | https://raw.githubusercontent.com/jwiegley/notes/24574b02bfd869845faa1521854f90e4e8bf5e9a/haskell/Challenge.hs | haskell | module Challenge where
import Data.IORef
foo n i = do
r <- newIORef n
return $ \i -> atomicModifyIORef r (\x -> (x,x+i))
| |
4b53bdc2ef65df343b6274f3169edaab8e5b16565544d92f92b738a0d7529b1a | janestreet/memtrace_viewer_with_deps | perfect_hash.ml | open Core_kernel
open Poly
This is not intended as a realistic candidate hash function since it allocates when
hashing , but as some kind of ` perfect ' baseline against which other hash functions can
be compared and judged .
It is a perfect hash in the sense that it produces no collisions of intermediate state
( trivially ) . It 's also achieving about as good quality as possible in [ get_hash_value ]
by virtue of using a ( former ) cryptographically secure hash function .
Additionally , it tries to enforce the invariant well - behaved [ hash_fold_t ] functions
must obey : different values of the same type must produce mix - in sequences
of form ( a @ [ b1 ] @ c1 ) and ( a @ [ b2 ] @ c2 ) where b1 and b2 are " meaningfully "
different ( see the checks in [ compare ] ) .
This requirement is a way to resolve possible systematic collisions resulting from e.g.
forgetting to write down a tag of a variant , or a length of an array .
It 's not crazy to think about relaxing this requirement , but you ca n't relax it
too much : you ca n't allow some [ hash_fold_t ] functions to write their tag
to the end because that leads to a problem :
even though [ 1 ; 2 ] differs from [ 1 ] in last position and [ 3 ] differs from [ 2 ; 3 ] in
first position , ( [ 1 ; 2 ] @ [ 3 ] ) and ( [ 1 ] @ [ 2 ; 3 ] ) is a collision !
hashing, but as some kind of `perfect' baseline against which other hash functions can
be compared and judged.
It is a perfect hash in the sense that it produces no collisions of intermediate state
(trivially). It's also achieving about as good quality as possible in [get_hash_value]
by virtue of using a (former) cryptographically secure hash function.
Additionally, it tries to enforce the invariant well-behaved [hash_fold_t] functions
must obey: different values of the same type must produce mix-in sequences
of form (a @ [b1] @ c1) and (a @ [b2] @ c2) where b1 and b2 are "meaningfully"
different (see the checks in [compare]).
This requirement is a way to resolve possible systematic collisions resulting from e.g.
forgetting to write down a tag of a variant, or a length of an array.
It's not crazy to think about relaxing this requirement, but you can't relax it
too much: you can't allow some [hash_fold_t] functions to write their tag
to the end because that leads to a problem:
even though [1; 2] differs from [1] in last position and [3] differs from [2; 3] in
first position, ([1; 2] @ [3]) and ([1] @ [2; 3]) is a collision!
*)
let description = "perfect hash"
type hash_value = int
type v =
| Int of int
| Int64 of int64
| String of string
| Float of float
[@@deriving sexp]
let compare_v a b = match (a, b) with
| Int a, Int b -> compare a b
| Int64 a, Int64 b -> compare a b
| String a, String b -> compare a b
| Float a, Float b -> compare a b
| _, _ -> failwith "uncomparable"
module State = struct
module T = struct
type t = v list [@@deriving sexp]
let rec compare a b = match (a, b) with
| (x :: xs), (y :: ys) -> [%compare : v * t] (x, xs) (y, ys)
| [], [] -> 0
| _, _ -> failwith "perfect hashes of different lengths"
let compare a b = compare (List.rev a) (List.rev b)
end
include T
include Comparable.Make (T)
end
type state = State.t
let fold_int t i = Int i :: t
let fold_int64 t i = Int64 i :: t
let fold_string t i = String i :: t
let fold_float t i = Float i :: t
type seed = unit
let alloc () = []
let reset ?seed:_ _ = []
let get_hash_value t =
Caml.Int64.to_int (Int64.of_string (
"0x" ^ String.prefix (Md5.to_hex (Md5.digest_string (
Sexplib.Sexp.to_string (State.sexp_of_t t)))) 16))
module For_tests = struct
let state_to_string t = Sexplib.Sexp.to_string (State.sexp_of_t t)
let compare_state = State.compare
end
| null | https://raw.githubusercontent.com/janestreet/memtrace_viewer_with_deps/5a9e1f927f5f8333e2d71c8d3ca03a45587422c4/vendor/ppx_hash/runtime-lib/test/perfect_hash.ml | ocaml | open Core_kernel
open Poly
This is not intended as a realistic candidate hash function since it allocates when
hashing , but as some kind of ` perfect ' baseline against which other hash functions can
be compared and judged .
It is a perfect hash in the sense that it produces no collisions of intermediate state
( trivially ) . It 's also achieving about as good quality as possible in [ get_hash_value ]
by virtue of using a ( former ) cryptographically secure hash function .
Additionally , it tries to enforce the invariant well - behaved [ hash_fold_t ] functions
must obey : different values of the same type must produce mix - in sequences
of form ( a @ [ b1 ] @ c1 ) and ( a @ [ b2 ] @ c2 ) where b1 and b2 are " meaningfully "
different ( see the checks in [ compare ] ) .
This requirement is a way to resolve possible systematic collisions resulting from e.g.
forgetting to write down a tag of a variant , or a length of an array .
It 's not crazy to think about relaxing this requirement , but you ca n't relax it
too much : you ca n't allow some [ hash_fold_t ] functions to write their tag
to the end because that leads to a problem :
even though [ 1 ; 2 ] differs from [ 1 ] in last position and [ 3 ] differs from [ 2 ; 3 ] in
first position , ( [ 1 ; 2 ] @ [ 3 ] ) and ( [ 1 ] @ [ 2 ; 3 ] ) is a collision !
hashing, but as some kind of `perfect' baseline against which other hash functions can
be compared and judged.
It is a perfect hash in the sense that it produces no collisions of intermediate state
(trivially). It's also achieving about as good quality as possible in [get_hash_value]
by virtue of using a (former) cryptographically secure hash function.
Additionally, it tries to enforce the invariant well-behaved [hash_fold_t] functions
must obey: different values of the same type must produce mix-in sequences
of form (a @ [b1] @ c1) and (a @ [b2] @ c2) where b1 and b2 are "meaningfully"
different (see the checks in [compare]).
This requirement is a way to resolve possible systematic collisions resulting from e.g.
forgetting to write down a tag of a variant, or a length of an array.
It's not crazy to think about relaxing this requirement, but you can't relax it
too much: you can't allow some [hash_fold_t] functions to write their tag
to the end because that leads to a problem:
even though [1; 2] differs from [1] in last position and [3] differs from [2; 3] in
first position, ([1; 2] @ [3]) and ([1] @ [2; 3]) is a collision!
*)
let description = "perfect hash"
type hash_value = int
type v =
| Int of int
| Int64 of int64
| String of string
| Float of float
[@@deriving sexp]
let compare_v a b = match (a, b) with
| Int a, Int b -> compare a b
| Int64 a, Int64 b -> compare a b
| String a, String b -> compare a b
| Float a, Float b -> compare a b
| _, _ -> failwith "uncomparable"
module State = struct
module T = struct
type t = v list [@@deriving sexp]
let rec compare a b = match (a, b) with
| (x :: xs), (y :: ys) -> [%compare : v * t] (x, xs) (y, ys)
| [], [] -> 0
| _, _ -> failwith "perfect hashes of different lengths"
let compare a b = compare (List.rev a) (List.rev b)
end
include T
include Comparable.Make (T)
end
type state = State.t
let fold_int t i = Int i :: t
let fold_int64 t i = Int64 i :: t
let fold_string t i = String i :: t
let fold_float t i = Float i :: t
type seed = unit
let alloc () = []
let reset ?seed:_ _ = []
let get_hash_value t =
Caml.Int64.to_int (Int64.of_string (
"0x" ^ String.prefix (Md5.to_hex (Md5.digest_string (
Sexplib.Sexp.to_string (State.sexp_of_t t)))) 16))
module For_tests = struct
let state_to_string t = Sexplib.Sexp.to_string (State.sexp_of_t t)
let compare_state = State.compare
end
| |
22d116a086b11f9cf688de01b3065390c7321e2e571e2410898d1998c9d7861b | simonmar/par-tutorial | KMeansCommon.hs | {-# LANGUAGE DeriveDataTypeable #-}
module KMeansCommon where
import Data.List (foldl')
import Data.Typeable (Typeable)
import Data.Data (Data)
import qualified Data.ByteString.Char8 as B
import Data.Binary
import Control.DeepSeq
data Vector = Vector {-#UNPACK#-}!Double {-#UNPACK#-}!Double deriving (Show,Read,Typeable,Data,Eq)
instance Binary Vector where put (Vector a b) = put a>>put b
get = do a<-get
b<-get
return $ Vector a b
data Cluster = Cluster
{
clId :: {-#UNPACK#-}!Int,
clCount :: {-#UNPACK#-}!Int,
clSum :: {-#UNPACK#-}!Vector,
clCent :: {-#UNPACK#-}!Vector
} deriving (Show,Read,Typeable,Data,Eq)
instance NFData Cluster -- default should be fine
sqDistance :: Vector -> Vector -> Double
sqDistance (Vector x1 y1) (Vector x2 y2) = ((x1-x2)^2) + ((y1-y2)^2)
makeCluster :: Int -> [Vector] -> Cluster
makeCluster clid vecs
= Cluster { clId = clid,
clCount = count,
clSum = vecsum,
clCent = centre
}
where vecsum@(Vector a b) = foldl' addVector zeroVector vecs
centre = Vector (a / fromIntegral count) (b / fromIntegral count)
count = length vecs
combineClusters c1 c2 =
Cluster {clId = clId c1,
clCount = count,
clSum = vecsum,
clCent = Vector (a / fromIntegral count) (b / fromIntegral count)}
where count = clCount c1 + clCount c2
vecsum@(Vector a b) = addVector (clSum c1) (clSum c2)
addVector (Vector a b) (Vector c d) = Vector (a+c) (b+d)
zeroVector = Vector 0 0
getPoints :: FilePath -> IO [Vector]
getPoints fp = do c <- readFile fp
return $ read c
getClusters :: FilePath -> IO [Cluster]
getClusters fp = do c <- readFile fp
return $ read c
readPoints :: FilePath -> IO [Vector]
readPoints f = do
s <- B.readFile f
let ls = map B.words $ B.lines s
points = [ Vector (read (B.unpack sx)) (read (B.unpack sy))
| (sx:sy:_) <- ls ]
--
return points
| null | https://raw.githubusercontent.com/simonmar/par-tutorial/f9061ea177800eb4ed9660bcabc8d8d836e1c73c/code/kmeans/KMeansCommon.hs | haskell | # LANGUAGE DeriveDataTypeable #
#UNPACK#
#UNPACK#
#UNPACK#
#UNPACK#
#UNPACK#
#UNPACK#
default should be fine
| module KMeansCommon where
import Data.List (foldl')
import Data.Typeable (Typeable)
import Data.Data (Data)
import qualified Data.ByteString.Char8 as B
import Data.Binary
import Control.DeepSeq
instance Binary Vector where put (Vector a b) = put a>>put b
get = do a<-get
b<-get
return $ Vector a b
data Cluster = Cluster
{
} deriving (Show,Read,Typeable,Data,Eq)
sqDistance :: Vector -> Vector -> Double
sqDistance (Vector x1 y1) (Vector x2 y2) = ((x1-x2)^2) + ((y1-y2)^2)
makeCluster :: Int -> [Vector] -> Cluster
makeCluster clid vecs
= Cluster { clId = clid,
clCount = count,
clSum = vecsum,
clCent = centre
}
where vecsum@(Vector a b) = foldl' addVector zeroVector vecs
centre = Vector (a / fromIntegral count) (b / fromIntegral count)
count = length vecs
combineClusters c1 c2 =
Cluster {clId = clId c1,
clCount = count,
clSum = vecsum,
clCent = Vector (a / fromIntegral count) (b / fromIntegral count)}
where count = clCount c1 + clCount c2
vecsum@(Vector a b) = addVector (clSum c1) (clSum c2)
addVector (Vector a b) (Vector c d) = Vector (a+c) (b+d)
zeroVector = Vector 0 0
getPoints :: FilePath -> IO [Vector]
getPoints fp = do c <- readFile fp
return $ read c
getClusters :: FilePath -> IO [Cluster]
getClusters fp = do c <- readFile fp
return $ read c
readPoints :: FilePath -> IO [Vector]
readPoints f = do
s <- B.readFile f
let ls = map B.words $ B.lines s
points = [ Vector (read (B.unpack sx)) (read (B.unpack sy))
| (sx:sy:_) <- ls ]
return points
|
1cc5667c1889214cbb6078616ac7f3b6e5fe794247fba0f5bd7ad337b9b25707 | mpickering/apply-refact | Bracket4.hs | yes = foo (bar) | null | https://raw.githubusercontent.com/mpickering/apply-refact/a4343ea0f4f9d8c2e16d6b16b9068f321ba4f272/tests/examples/Bracket4.hs | haskell | yes = foo (bar) | |
9650b1012ddd2f87516af61373f66b39b3a66ec901031d6ae35f1f316d6e9493 | NetComposer/nksip | ipv6_test_client3.erl | %% -------------------------------------------------------------------
%%
%% ipv6_test: IPv6 Tests and RFC5118 Torture Tests
%%
Copyright ( c ) 2013 . All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(ipv6_test_client3).
-include_lib("nkserver/include/nkserver_module.hrl").
-export([sip_invite/2, sip_ack/2, sip_bye/2]).
sip_invite(Req, _Call) ->
tests_util:save_ref(Req),
{ok, Ids} = nksip_request:header(<<"x-nk-id">>, Req),
{ok, SrvId} = nksip_request:srv_id(Req),
Hds = [{add, "x-nk-id", nklib_util:bjoin([SrvId|Ids])}],
{reply, {ok, Hds}}.
sip_ack(Req, _Call) ->
tests_util:send_ref(ack, Req),
ok.
sip_bye(Req, _Call) ->
tests_util:send_ref(bye, Req),
{reply, ok}.
| null | https://raw.githubusercontent.com/NetComposer/nksip/7fbcc66806635dc8ecc5d11c30322e4d1df36f0a/test/callbacks/ipv6_test_client3.erl | erlang | -------------------------------------------------------------------
ipv6_test: IPv6 Tests and RFC5118 Torture Tests
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
------------------------------------------------------------------- | Copyright ( c ) 2013 . All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(ipv6_test_client3).
-include_lib("nkserver/include/nkserver_module.hrl").
-export([sip_invite/2, sip_ack/2, sip_bye/2]).
sip_invite(Req, _Call) ->
tests_util:save_ref(Req),
{ok, Ids} = nksip_request:header(<<"x-nk-id">>, Req),
{ok, SrvId} = nksip_request:srv_id(Req),
Hds = [{add, "x-nk-id", nklib_util:bjoin([SrvId|Ids])}],
{reply, {ok, Hds}}.
sip_ack(Req, _Call) ->
tests_util:send_ref(ack, Req),
ok.
sip_bye(Req, _Call) ->
tests_util:send_ref(bye, Req),
{reply, ok}.
|
24ef8871d750c3af146d87a5d337d22d9410ea40342e918f0a767eb4e11fcd76 | yurug/ocaml4.04.0-copatterns | lexer.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* The lexical analyzer *)
val init : unit -> unit
val token: Lexing.lexbuf -> Parser.token
val skip_hash_bang: Lexing.lexbuf -> unit
type error =
| Illegal_character of char
| Illegal_escape of string
| Unterminated_comment of Location.t
| Unterminated_string
| Unterminated_string_in_comment of Location.t * Location.t
| Keyword_as_label of string
| Invalid_literal of string
| Invalid_directive of string * string option
;;
exception Error of error * Location.t
open Format
val report_error: formatter -> error -> unit
(* Deprecated. Use Location.{error_of_exn, report_error}. *)
val in_comment : unit -> bool;;
val in_string : unit -> bool;;
val print_warnings : bool ref
val handle_docstrings: bool ref
val comments : unit -> (string * Location.t) list
val token_with_comments : Lexing.lexbuf -> Parser.token
[ set_preprocessor init preprocessor ] registers [ init ] as the function
to call to initialize the preprocessor when the lexer is initialized ,
and [ preprocessor ] a function that is called when a new token is needed
by the parser , as [ preprocessor ] where [ lexer ] is the
lexing function .
When a preprocessor is configured by calling [ set_preprocessor ] , the lexer
changes its behavior to accept backslash - newline as a token - separating blank .
[set_preprocessor init preprocessor] registers [init] as the function
to call to initialize the preprocessor when the lexer is initialized,
and [preprocessor] a function that is called when a new token is needed
by the parser, as [preprocessor lexer lexbuf] where [lexer] is the
lexing function.
When a preprocessor is configured by calling [set_preprocessor], the lexer
changes its behavior to accept backslash-newline as a token-separating blank.
*)
val set_preprocessor :
(unit -> unit) ->
((Lexing.lexbuf -> Parser.token) -> Lexing.lexbuf -> Parser.token) ->
unit
| null | https://raw.githubusercontent.com/yurug/ocaml4.04.0-copatterns/b3ec6a3cc203bd2cde3b618546d29e10f1102323/parsing/lexer.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
The lexical analyzer
Deprecated. Use Location.{error_of_exn, report_error}. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
val init : unit -> unit
val token: Lexing.lexbuf -> Parser.token
val skip_hash_bang: Lexing.lexbuf -> unit
type error =
| Illegal_character of char
| Illegal_escape of string
| Unterminated_comment of Location.t
| Unterminated_string
| Unterminated_string_in_comment of Location.t * Location.t
| Keyword_as_label of string
| Invalid_literal of string
| Invalid_directive of string * string option
;;
exception Error of error * Location.t
open Format
val report_error: formatter -> error -> unit
val in_comment : unit -> bool;;
val in_string : unit -> bool;;
val print_warnings : bool ref
val handle_docstrings: bool ref
val comments : unit -> (string * Location.t) list
val token_with_comments : Lexing.lexbuf -> Parser.token
[ set_preprocessor init preprocessor ] registers [ init ] as the function
to call to initialize the preprocessor when the lexer is initialized ,
and [ preprocessor ] a function that is called when a new token is needed
by the parser , as [ preprocessor ] where [ lexer ] is the
lexing function .
When a preprocessor is configured by calling [ set_preprocessor ] , the lexer
changes its behavior to accept backslash - newline as a token - separating blank .
[set_preprocessor init preprocessor] registers [init] as the function
to call to initialize the preprocessor when the lexer is initialized,
and [preprocessor] a function that is called when a new token is needed
by the parser, as [preprocessor lexer lexbuf] where [lexer] is the
lexing function.
When a preprocessor is configured by calling [set_preprocessor], the lexer
changes its behavior to accept backslash-newline as a token-separating blank.
*)
val set_preprocessor :
(unit -> unit) ->
((Lexing.lexbuf -> Parser.token) -> Lexing.lexbuf -> Parser.token) ->
unit
|
5ff16e5eb1c27aa752173112a5a3e2e0923ac66d65a344c61422fb751681ebb1 | bozsahin/cogs542 | hw1.ccg.lisp | (DEFPARAMETER *CCG-GRAMMAR*
'(((KEY 1) (PHON OKCUNUN) (MORPH N)
(SYN
(((BCAT S) (FEATS NIL)) (DIR FS) (MODAL ALL)
(((BCAT S) (FEATS NIL)) (DIR BS) (MODAL ALL)
((BCAT NP) (FEATS ((CASE NOM)))))))
(SEM (LAM P (P "OKCU"))) (PARAM 1.0))
((KEY 2) (PHON VUR) (MORPH V)
(SYN
((((BCAT S) (FEATS NIL)) (DIR BS) (MODAL ALL)
((BCAT NP) (FEATS ((CASE NOM)))))
(DIR BS) (MODAL ALL) ((BCAT NP) (FEATS ((CASE ACC))))))
(SEM (LAM X1 (LAM X2 (("HIT" X1) X2)))) (PARAM 1.0))
((KEY 3) (PHON HEDEF) (MORPH N) (SYN ((BCAT NP) (FEATS NIL))) (SEM "HEDEF")
(PARAM 1.0))
((KEY 4) (PHON -DUGU) (MORPH REL)
(SYN
((((BCAT NP) (FEATS NIL)) (DIR FS) (MODAL ALL) ((BCAT NP) (FEATS NIL)))
(DIR BS) (MODAL ALL)
(((BCAT S) (FEATS NIL)) (DIR BS) (MODAL ALL)
((BCAT NP) (FEATS ((CASE ACC)))))))
(SEM (LAM P (LAM Q (LAM X (("AND" (P X)) (Q X)))))) (PARAM 1.0)))) | null | https://raw.githubusercontent.com/bozsahin/cogs542/3093fe6dc720405096af2e18d7ed9edd5f2ef47d/ccglab-examples/hw1/hw1.ccg.lisp | lisp | (DEFPARAMETER *CCG-GRAMMAR*
'(((KEY 1) (PHON OKCUNUN) (MORPH N)
(SYN
(((BCAT S) (FEATS NIL)) (DIR FS) (MODAL ALL)
(((BCAT S) (FEATS NIL)) (DIR BS) (MODAL ALL)
((BCAT NP) (FEATS ((CASE NOM)))))))
(SEM (LAM P (P "OKCU"))) (PARAM 1.0))
((KEY 2) (PHON VUR) (MORPH V)
(SYN
((((BCAT S) (FEATS NIL)) (DIR BS) (MODAL ALL)
((BCAT NP) (FEATS ((CASE NOM)))))
(DIR BS) (MODAL ALL) ((BCAT NP) (FEATS ((CASE ACC))))))
(SEM (LAM X1 (LAM X2 (("HIT" X1) X2)))) (PARAM 1.0))
((KEY 3) (PHON HEDEF) (MORPH N) (SYN ((BCAT NP) (FEATS NIL))) (SEM "HEDEF")
(PARAM 1.0))
((KEY 4) (PHON -DUGU) (MORPH REL)
(SYN
((((BCAT NP) (FEATS NIL)) (DIR FS) (MODAL ALL) ((BCAT NP) (FEATS NIL)))
(DIR BS) (MODAL ALL)
(((BCAT S) (FEATS NIL)) (DIR BS) (MODAL ALL)
((BCAT NP) (FEATS ((CASE ACC)))))))
(SEM (LAM P (LAM Q (LAM X (("AND" (P X)) (Q X)))))) (PARAM 1.0)))) | |
bd0bf33e4efb8954ca1beb7772a41cd0d19260b4360f2c26d15666e8204f10ba | LaurentMazare/tensorflow-ocaml | mnist_dcgan.ml | Deep Convolutional Generative Adverserial Networks trained on the MNIST dataset .
open Base
open Tensorflow
open Tensorflow_core
module O = Ops
let image_dim = Mnist_helper.image_dim
let latent_dim = 100
let batch_size = 256
let learning_rate = 1e-5
let batches = 10 ** 8
(* No need to keep running averages as this is only used in training mode. *)
let batch_norm xs =
let nb_dims = Node.shape xs |> List.length in
let batch_moments = Ops.moments xs ~dims:(List.init (nb_dims - 1) ~f:Fn.id) in
Ops.normalize xs batch_moments ~epsilon:1e-8
(** [create_generator rand_input] creates a Generator network taking as
input [rand_input]. This returns both the network and the variables
that it contains.
*)
let create_generator rand_input =
let linear1 = Layer.Linear.create 1024 in
let linear2 = Layer.Linear.create (7 * 7 * 128) in
let conv2dt1 =
Layer.Conv2DTranspose.create ~ksize:(5, 5) ~strides:(2, 2) ~padding:Same 64
in
let conv2dt2 =
Layer.Conv2DTranspose.create ~ksize:(5, 5) ~strides:(2, 2) ~padding:Same 1
in
let output =
Layer.Linear.apply linear1 rand_input ~activation:Relu
|> Layer.Linear.apply linear2
|> batch_norm
|> O.relu
|> Layer.reshape ~shape:[ -1; 7; 7; 128 ]
|> Layer.Conv2DTranspose.apply conv2dt1
|> batch_norm
|> O.relu
|> Layer.Conv2DTranspose.apply conv2dt2
|> Layer.flatten
|> O.tanh
in
let vars =
List.concat_map [ linear1; linear2 ] ~f:Layer.Linear.vars
@ List.concat_map [ conv2dt1; conv2dt2 ] ~f:Layer.Conv2DTranspose.vars
in
output, vars
* [ create_discriminator xs1 xs2 ] creates two Discriminator networks taking as
input [ xs1 ] and [ xs2 ] , the two networks share the same weights .
This returns the two networks as well as their ( shared ) variables .
input [xs1] and [xs2], the two networks share the same weights.
This returns the two networks as well as their (shared) variables.
*)
let create_discriminator xs1 xs2 =
let conv2d1 = Layer.Conv2D.create ~ksize:(5, 5) ~strides:(2, 2) ~padding:Same 16 in
let conv2d2 = Layer.Conv2D.create ~ksize:(5, 5) ~strides:(2, 2) ~padding:Same 32 in
let linear1 = Layer.Linear.create 1 in
let linear2 = Layer.Linear.create 1 in
let model xs =
Layer.reshape xs ~shape:[ -1; 28; 28; 1 ]
|> Layer.Conv2D.apply conv2d1
|> O.leaky_relu ~alpha:0.1
|> Layer.Conv2D.apply conv2d2
|> batch_norm
|> O.leaky_relu ~alpha:0.1
|> Layer.reshape ~shape:[ -1; 7 * 7 * 32 ]
|> Layer.Linear.apply linear1
|> batch_norm
|> O.leaky_relu ~alpha:0.1
|> Layer.Linear.apply linear2 ~activation:Sigmoid
in
let ys1 = model xs1 in
let ys2 = model xs2 in
let vars =
List.concat_map [ linear1; linear2 ] ~f:Layer.Linear.vars
@ List.concat_map [ conv2d1; conv2d2 ] ~f:Layer.Conv2D.vars
in
ys1, ys2, vars
let write_samples samples ~filename =
Stdio.Out_channel.with_file filename ~f:(fun channel ->
for sample_index = 0 to 99 do
List.init image_dim ~f:(fun pixel_index ->
Tensorflow_core.Tensor.get samples [| sample_index; pixel_index |]
|> Printf.sprintf "%.2f")
|> String.concat ~sep:", "
|> Printf.sprintf "data%d = [%s]\n" sample_index
|> Stdio.Out_channel.output_string channel
done)
let () =
let mnist = Mnist_helper.read_files () in
(* Create a placeholder for random latent data used by the generator and for the actual
MNIST data used by the discriminator. *)
let rand_data_ph = O.placeholder [ batch_size; latent_dim ] ~type_:Float in
let real_data_ph = O.placeholder [ batch_size; image_dim ] ~type_:Float in
Create the Generator and Discriminator networks .
let generated, gen_variables = create_generator (O.Placeholder.to_node rand_data_ph) in
let real_doutput, fake_doutput, discriminator_variables =
create_discriminator O.((Placeholder.to_node real_data_ph * f 2.) - f 1.) generated
in
The Generator loss is based on the Discriminator making mistakes on the generated
output . The Discriminator loss is based on being right on both real and generated
data .
output. The Discriminator loss is based on being right on both real and generated
data. *)
let real_loss =
O.binary_cross_entropy ~labels:(O.f 0.9) ~model_values:real_doutput `mean
in
let fake_loss =
O.binary_cross_entropy ~labels:(O.f 0.) ~model_values:fake_doutput `mean
in
let discriminator_loss = O.(real_loss + fake_loss) in
let generator_loss =
O.binary_cross_entropy ~labels:(O.f 1.) ~model_values:fake_doutput `mean
in
let learning_rate = O.f learning_rate in
let discriminator_opt =
Optimizers.adam_minimizer
~learning_rate
discriminator_loss
~varsf:discriminator_variables
in
let generator_opt =
Optimizers.adam_minimizer ~learning_rate generator_loss ~varsf:gen_variables
in
(* Create tensor for random data both for training and validation. *)
let batch_rand = Tensor.create2 Float32 batch_size latent_dim in
let samples_rand = Tensor.create2 Float32 batch_size latent_dim in
(* Always reuse the same random latent space for validation samples. *)
Tensor.fill_uniform samples_rand ~lower_bound:(-1.) ~upper_bound:1.;
Checkpointing.loop
~start_index:1
~end_index:batches
~save_vars_from:discriminator_opt
~checkpoint_base:"./tf-mnist-dcgan.ckpt"
(fun ~index:batch_idx ->
let batch_images, _ = Mnist_helper.train_batch mnist ~batch_size ~batch_idx in
let discriminator_loss =
Tensor.fill_uniform batch_rand ~lower_bound:(-1.) ~upper_bound:1.;
Session.run
~inputs:
Session.Input.
[ float real_data_ph batch_images; float rand_data_ph batch_rand ]
~targets:discriminator_opt
(Session.Output.scalar_float discriminator_loss)
in
let generator_loss =
Tensor.fill_uniform batch_rand ~lower_bound:(-1.) ~upper_bound:1.;
Session.run
~inputs:Session.Input.[ float rand_data_ph batch_rand ]
~targets:generator_opt
(Session.Output.scalar_float generator_loss)
in
if batch_idx % 100 = 0
then
Stdio.printf
"batch %4d d-loss: %12.6f g-loss: %12.6f\n%!"
batch_idx
discriminator_loss
generator_loss;
if batch_idx % 100000 = 0 || (batch_idx < 100000 && batch_idx % 25000 = 0)
then
Session.run
(Session.Output.float generated)
~inputs:Session.Input.[ float rand_data_ph samples_rand ]
|> write_samples ~filename:(Printf.sprintf "out%d.txt" batch_idx))
| null | https://raw.githubusercontent.com/LaurentMazare/tensorflow-ocaml/52c5f1dec1a8b7dc9bc6ef06abbc07da6cd90d39/examples/gan/mnist_dcgan.ml | ocaml | No need to keep running averages as this is only used in training mode.
* [create_generator rand_input] creates a Generator network taking as
input [rand_input]. This returns both the network and the variables
that it contains.
Create a placeholder for random latent data used by the generator and for the actual
MNIST data used by the discriminator.
Create tensor for random data both for training and validation.
Always reuse the same random latent space for validation samples. | Deep Convolutional Generative Adverserial Networks trained on the MNIST dataset .
open Base
open Tensorflow
open Tensorflow_core
module O = Ops
let image_dim = Mnist_helper.image_dim
let latent_dim = 100
let batch_size = 256
let learning_rate = 1e-5
let batches = 10 ** 8
let batch_norm xs =
let nb_dims = Node.shape xs |> List.length in
let batch_moments = Ops.moments xs ~dims:(List.init (nb_dims - 1) ~f:Fn.id) in
Ops.normalize xs batch_moments ~epsilon:1e-8
let create_generator rand_input =
let linear1 = Layer.Linear.create 1024 in
let linear2 = Layer.Linear.create (7 * 7 * 128) in
let conv2dt1 =
Layer.Conv2DTranspose.create ~ksize:(5, 5) ~strides:(2, 2) ~padding:Same 64
in
let conv2dt2 =
Layer.Conv2DTranspose.create ~ksize:(5, 5) ~strides:(2, 2) ~padding:Same 1
in
let output =
Layer.Linear.apply linear1 rand_input ~activation:Relu
|> Layer.Linear.apply linear2
|> batch_norm
|> O.relu
|> Layer.reshape ~shape:[ -1; 7; 7; 128 ]
|> Layer.Conv2DTranspose.apply conv2dt1
|> batch_norm
|> O.relu
|> Layer.Conv2DTranspose.apply conv2dt2
|> Layer.flatten
|> O.tanh
in
let vars =
List.concat_map [ linear1; linear2 ] ~f:Layer.Linear.vars
@ List.concat_map [ conv2dt1; conv2dt2 ] ~f:Layer.Conv2DTranspose.vars
in
output, vars
* [ create_discriminator xs1 xs2 ] creates two Discriminator networks taking as
input [ xs1 ] and [ xs2 ] , the two networks share the same weights .
This returns the two networks as well as their ( shared ) variables .
input [xs1] and [xs2], the two networks share the same weights.
This returns the two networks as well as their (shared) variables.
*)
let create_discriminator xs1 xs2 =
let conv2d1 = Layer.Conv2D.create ~ksize:(5, 5) ~strides:(2, 2) ~padding:Same 16 in
let conv2d2 = Layer.Conv2D.create ~ksize:(5, 5) ~strides:(2, 2) ~padding:Same 32 in
let linear1 = Layer.Linear.create 1 in
let linear2 = Layer.Linear.create 1 in
let model xs =
Layer.reshape xs ~shape:[ -1; 28; 28; 1 ]
|> Layer.Conv2D.apply conv2d1
|> O.leaky_relu ~alpha:0.1
|> Layer.Conv2D.apply conv2d2
|> batch_norm
|> O.leaky_relu ~alpha:0.1
|> Layer.reshape ~shape:[ -1; 7 * 7 * 32 ]
|> Layer.Linear.apply linear1
|> batch_norm
|> O.leaky_relu ~alpha:0.1
|> Layer.Linear.apply linear2 ~activation:Sigmoid
in
let ys1 = model xs1 in
let ys2 = model xs2 in
let vars =
List.concat_map [ linear1; linear2 ] ~f:Layer.Linear.vars
@ List.concat_map [ conv2d1; conv2d2 ] ~f:Layer.Conv2D.vars
in
ys1, ys2, vars
let write_samples samples ~filename =
Stdio.Out_channel.with_file filename ~f:(fun channel ->
for sample_index = 0 to 99 do
List.init image_dim ~f:(fun pixel_index ->
Tensorflow_core.Tensor.get samples [| sample_index; pixel_index |]
|> Printf.sprintf "%.2f")
|> String.concat ~sep:", "
|> Printf.sprintf "data%d = [%s]\n" sample_index
|> Stdio.Out_channel.output_string channel
done)
let () =
let mnist = Mnist_helper.read_files () in
let rand_data_ph = O.placeholder [ batch_size; latent_dim ] ~type_:Float in
let real_data_ph = O.placeholder [ batch_size; image_dim ] ~type_:Float in
Create the Generator and Discriminator networks .
let generated, gen_variables = create_generator (O.Placeholder.to_node rand_data_ph) in
let real_doutput, fake_doutput, discriminator_variables =
create_discriminator O.((Placeholder.to_node real_data_ph * f 2.) - f 1.) generated
in
The Generator loss is based on the Discriminator making mistakes on the generated
output . The Discriminator loss is based on being right on both real and generated
data .
output. The Discriminator loss is based on being right on both real and generated
data. *)
let real_loss =
O.binary_cross_entropy ~labels:(O.f 0.9) ~model_values:real_doutput `mean
in
let fake_loss =
O.binary_cross_entropy ~labels:(O.f 0.) ~model_values:fake_doutput `mean
in
let discriminator_loss = O.(real_loss + fake_loss) in
let generator_loss =
O.binary_cross_entropy ~labels:(O.f 1.) ~model_values:fake_doutput `mean
in
let learning_rate = O.f learning_rate in
let discriminator_opt =
Optimizers.adam_minimizer
~learning_rate
discriminator_loss
~varsf:discriminator_variables
in
let generator_opt =
Optimizers.adam_minimizer ~learning_rate generator_loss ~varsf:gen_variables
in
let batch_rand = Tensor.create2 Float32 batch_size latent_dim in
let samples_rand = Tensor.create2 Float32 batch_size latent_dim in
Tensor.fill_uniform samples_rand ~lower_bound:(-1.) ~upper_bound:1.;
Checkpointing.loop
~start_index:1
~end_index:batches
~save_vars_from:discriminator_opt
~checkpoint_base:"./tf-mnist-dcgan.ckpt"
(fun ~index:batch_idx ->
let batch_images, _ = Mnist_helper.train_batch mnist ~batch_size ~batch_idx in
let discriminator_loss =
Tensor.fill_uniform batch_rand ~lower_bound:(-1.) ~upper_bound:1.;
Session.run
~inputs:
Session.Input.
[ float real_data_ph batch_images; float rand_data_ph batch_rand ]
~targets:discriminator_opt
(Session.Output.scalar_float discriminator_loss)
in
let generator_loss =
Tensor.fill_uniform batch_rand ~lower_bound:(-1.) ~upper_bound:1.;
Session.run
~inputs:Session.Input.[ float rand_data_ph batch_rand ]
~targets:generator_opt
(Session.Output.scalar_float generator_loss)
in
if batch_idx % 100 = 0
then
Stdio.printf
"batch %4d d-loss: %12.6f g-loss: %12.6f\n%!"
batch_idx
discriminator_loss
generator_loss;
if batch_idx % 100000 = 0 || (batch_idx < 100000 && batch_idx % 25000 = 0)
then
Session.run
(Session.Output.float generated)
~inputs:Session.Input.[ float rand_data_ph samples_rand ]
|> write_samples ~filename:(Printf.sprintf "out%d.txt" batch_idx))
|
92e7ea16ef4e5e407ad82d85d54d6fd4f5d58e94ea43df5d265a9fc1cf9f77ac | Metaxal/MrEd-Designer | templates.rkt | #lang racket
(require racket/gui/base
"misc.rkt"
"mred-id.rkt"
"mred-plugin.rkt"
"code-generation.rkt"
"template-load.rkt"
)
(module+ test
(require rackunit))
(define template-dir (build-path "templates"))
;; Dictionary of (template-file . template-name)
(define/provide template-dict #f)
(define/provide (template-file f) (build-path template-dir f))
;; We should make a class for templates !
;; Avoid loading several times...
(define/provide (get-template-name file)
(and (file-exists? file)
(with-input-from-file file
(λ()(let* ([name (read)])
(and (string? name)
name))))
))
(define template-name-pattern
"med-template-~a.med")
(define template-name-regexp
(format (regexp-quote template-name-pattern) ".*"))
;; Call this function to set the template-dict to the correct value
;; or to update it (e.g., if the directory structure has changed)
(define/provide (make-template-dict)
(set! template-dict
(append-map (λ(f)
(let ([f (build-path template-dir f)])
(if (and (file-exists? f) ; it may be a directory
(regexp-match template-name-regexp (path->string f)))
(list (cons f (get-template-name f)))
'()
)))
(directory-list template-dir))))
(define (print-template mid name)
; writes the code that will be executed
(write name) (newline)
(parameterize ([print-as-expression #f])
(pretty-print
`(list
(cons 'name
,name)
(cons 'parent-class
,(send (send mid get-plugin) get-parent-widget-class-symbol))
(cons 'med-version
,(list 'list application-version-maj application-version-min))
(cons 'code
,(write-mred-id-code mid))))))
(define/provide (save-template mid name [file #f])
(debug-printf "save-template: ~a\n" name)
(when name
(let ([file (or file
(make-temporary-file template-name-pattern #f
template-dir))])
; write the name of the template
(with-output-to-file file
(λ() (print-template mid name))
#:exists 'replace)
))
(debug-printf "save-template: exit\n")
)
; returns the result of executing the code stored in the template, or #f on error.
(define/provide (load-template file parent-mid)
(debug-printf "load-template: ~a\n" file)
(and file
(let ([dico (template-load-file file)])
(debug-printf "load-template: load done\n")
(when dico
(let ([name (dict-ref dico 'name)]
[parent-class (dict-ref dico 'parent-class)]
if not found ( # f ) , then file was created with version < 3.9
[proc (dict-ref dico 'code)])
(if med-version
(printf "MED template version: ~a\n" med-version)
(printf "No MED template version found\n"))
(and (check-template-version med-version)
(procedure? proc)
(equal? (procedure-arity proc) 1)
(or (can-instantiate-under? parent-mid parent-class)
(begin
(printf "Cannot insert template at this node\n")
#f))
(proc parent-mid)
)
)))
)
)
(define/provide (delete-template file)
(when file
(delete-file file)))
(define (newer-version-than-current? vers)
(and vers
(or (> (first vers) application-version-maj)
(and (= (first vers) application-version-maj)
(> (second vers) application-version-min)))))
(define (check-template-version vers)
(or (not (newer-version-than-current? vers))
(eq?
'yes
(message-box "Object created with newer version"
(format "The object you are loading was made with version ~a.~a of ~a which is newer than you current version ~a.~a. There may be problems loading it. Do you still want to proceed?"
(first vers) (second vers)
application-name
application-version-maj application-version-min)
#f '(yes-no)))))
| null | https://raw.githubusercontent.com/Metaxal/MrEd-Designer/220833b738a1d46fbe309ea124ef61b825e42e68/mred-designer/templates.rkt | racket | Dictionary of (template-file . template-name)
We should make a class for templates !
Avoid loading several times...
Call this function to set the template-dict to the correct value
or to update it (e.g., if the directory structure has changed)
it may be a directory
writes the code that will be executed
write the name of the template
returns the result of executing the code stored in the template, or #f on error. | #lang racket
(require racket/gui/base
"misc.rkt"
"mred-id.rkt"
"mred-plugin.rkt"
"code-generation.rkt"
"template-load.rkt"
)
(module+ test
(require rackunit))
(define template-dir (build-path "templates"))
(define/provide template-dict #f)
(define/provide (template-file f) (build-path template-dir f))
(define/provide (get-template-name file)
(and (file-exists? file)
(with-input-from-file file
(λ()(let* ([name (read)])
(and (string? name)
name))))
))
(define template-name-pattern
"med-template-~a.med")
(define template-name-regexp
(format (regexp-quote template-name-pattern) ".*"))
(define/provide (make-template-dict)
(set! template-dict
(append-map (λ(f)
(let ([f (build-path template-dir f)])
(regexp-match template-name-regexp (path->string f)))
(list (cons f (get-template-name f)))
'()
)))
(directory-list template-dir))))
(define (print-template mid name)
(write name) (newline)
(parameterize ([print-as-expression #f])
(pretty-print
`(list
(cons 'name
,name)
(cons 'parent-class
,(send (send mid get-plugin) get-parent-widget-class-symbol))
(cons 'med-version
,(list 'list application-version-maj application-version-min))
(cons 'code
,(write-mred-id-code mid))))))
(define/provide (save-template mid name [file #f])
(debug-printf "save-template: ~a\n" name)
(when name
(let ([file (or file
(make-temporary-file template-name-pattern #f
template-dir))])
(with-output-to-file file
(λ() (print-template mid name))
#:exists 'replace)
))
(debug-printf "save-template: exit\n")
)
(define/provide (load-template file parent-mid)
(debug-printf "load-template: ~a\n" file)
(and file
(let ([dico (template-load-file file)])
(debug-printf "load-template: load done\n")
(when dico
(let ([name (dict-ref dico 'name)]
[parent-class (dict-ref dico 'parent-class)]
if not found ( # f ) , then file was created with version < 3.9
[proc (dict-ref dico 'code)])
(if med-version
(printf "MED template version: ~a\n" med-version)
(printf "No MED template version found\n"))
(and (check-template-version med-version)
(procedure? proc)
(equal? (procedure-arity proc) 1)
(or (can-instantiate-under? parent-mid parent-class)
(begin
(printf "Cannot insert template at this node\n")
#f))
(proc parent-mid)
)
)))
)
)
(define/provide (delete-template file)
(when file
(delete-file file)))
(define (newer-version-than-current? vers)
(and vers
(or (> (first vers) application-version-maj)
(and (= (first vers) application-version-maj)
(> (second vers) application-version-min)))))
(define (check-template-version vers)
(or (not (newer-version-than-current? vers))
(eq?
'yes
(message-box "Object created with newer version"
(format "The object you are loading was made with version ~a.~a of ~a which is newer than you current version ~a.~a. There may be problems loading it. Do you still want to proceed?"
(first vers) (second vers)
application-name
application-version-maj application-version-min)
#f '(yes-no)))))
|
6ba3ee8accc8ebd0aaa23b9bb5a6c31aa4b85766388e67ae599fdfbb300df3be | backtracking/mlpost | mlpost_dot.mli | open Mlpost
* Place figures , boxes or boxlikes with graphviz
module Dot : sig
module Make (B : Signature.Boxlike) : sig
type node
type edge = node * node
val mknode : B.t -> node
(** creates an abstract node from a boxlike *)
val place :
?orient:[ `TB | `LR | `BT | `RL ] ->
node list ->
edge list ->
B.t list * Path.t list
* [ place ~orient nodes edges ] returns a concrete
representation of the abstract directed graph composed by
[ nodes ] linked by [ edges ] . The concrete representation is
composed by the list of all the boxlikes of [ nodes ] placed
by dot and by the list of paths representing the [ edges ]
drawn by dot
@param orient specifies the orientation of the graph :
- ` TB top to bottom ( default )
- ` LR left to right
- ` BT bottom to top
- ` RL right to left
representation of the abstract directed graph composed by
[nodes] linked by [edges]. The concrete representation is
composed by the list of all the boxlikes of [nodes] placed
by dot and by the list of paths representing the [edges]
drawn by dot
@param orient specifies the orientation of the graph :
- `TB top to bottom (default)
- `LR left to right
- `BT bottom to top
- `RL right to left
*)
end
end
| null | https://raw.githubusercontent.com/backtracking/mlpost/bd4305289fd64d531b9f42d64dd641d72ab82fd5/contrib/dot/mlpost_dot.mli | ocaml | * creates an abstract node from a boxlike | open Mlpost
(** Place figures, boxes or boxlikes with graphviz *)
module Dot : sig
  module Make (B : Signature.Boxlike) : sig
    type node
    (** An abstract graph node carrying a boxlike payload. *)

    type edge = node * node
    (** A directed edge from the first node to the second. *)

    val mknode : B.t -> node
    (** creates an abstract node from a boxlike *)

    val place :
      ?orient:[ `TB | `LR | `BT | `RL ] ->
      node list ->
      edge list ->
      B.t list * Path.t list
    (** [place ~orient nodes edges] returns a concrete
        representation of the abstract directed graph composed by
        [nodes] linked by [edges]. The concrete representation is
        composed by the list of all the boxlikes of [nodes] placed
        by dot and by the list of paths representing the [edges]
        drawn by dot
        @param orient specifies the orientation of the graph :
        - `TB top to bottom (default)
        - `LR left to right
        - `BT bottom to top
        - `RL right to left
    *)
  end
end
|
a0a647cd4ce2d9ff0e397887da2c463d5ba7a868872070bd5151591ae336c489 | clj-commons/useful | config_test.clj | (ns flatland.useful.config-test
(:use clojure.test flatland.useful.config))
(deftest reading
  ;; A present resource is read back as plain EDN data.
  (is (= '{size 1} (read-config "config1.clj")))
  ;; A missing resource is an error by default...
  (is (thrown-with-msg? java.io.FileNotFoundException
                        #"Cannot find config resource config3.clj"
        (read-config "config3.clj")))
  ;; ...but yields nil when marked :optional.
  (is (nil? (read-config "config3.clj" :optional true))))
(deftest loading
  ;; load-config evaluates the resource rather than just reading it.
  (is (= {:x [1 1] :y [1 1]}
         (load-config "config2.clj")))
  ;; Same missing-resource contract as read-config.
  (is (thrown-with-msg? java.io.FileNotFoundException
                        #"Cannot find config resource config3.clj"
        (load-config "config3.clj")))
  (is (nil? (load-config "config3.clj" :optional true))))
| null | https://raw.githubusercontent.com/clj-commons/useful/dc5cdebf8983a2e2ea24ec8951fbb4dfb037da45/test/flatland/useful/config_test.clj | clojure | (ns flatland.useful.config-test
(:use clojure.test flatland.useful.config))
(deftest reading
  ;; A present resource is read back as plain EDN data.
  (is (= '{size 1} (read-config "config1.clj")))
  ;; A missing resource is an error by default...
  (is (thrown-with-msg? java.io.FileNotFoundException
                        #"Cannot find config resource config3.clj"
        (read-config "config3.clj")))
  ;; ...but yields nil when marked :optional.
  (is (nil? (read-config "config3.clj" :optional true))))
(deftest loading
  ;; load-config evaluates the resource rather than just reading it.
  (is (= {:x [1 1] :y [1 1]}
         (load-config "config2.clj")))
  ;; Same missing-resource contract as read-config.
  (is (thrown-with-msg? java.io.FileNotFoundException
                        #"Cannot find config resource config3.clj"
        (load-config "config3.clj")))
  (is (nil? (load-config "config3.clj" :optional true))))
| |
b4cd0cb2c173cbbad381bedadbf1e01b879d13caf2768e1d51394d37454d4c12 | achirkin/vulkan | VK_KHR_device_group_creation.hs | # OPTIONS_GHC -fno - warn - missing - pattern - synonym - signatures #
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_HADDOCK not-home #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
module Graphics.Vulkan.Ext.VK_KHR_device_group_creation
       (-- * Vulkan extension: @VK_KHR_device_group_creation@
-- |
--
-- supported: @vulkan@
--
        -- contact: @Jeff Bolz @jeffbolznv@
--
-- author: @KHR@
--
-- type: @instance@
--
-- Extension number: @71@
VkDeviceGroupDeviceCreateInfoKHR,
VkPhysicalDeviceGroupPropertiesKHR,
VkEnumeratePhysicalDeviceGroupsKHR,
pattern VkEnumeratePhysicalDeviceGroupsKHR,
HS_vkEnumeratePhysicalDeviceGroupsKHR,
PFN_vkEnumeratePhysicalDeviceGroupsKHR,
module Graphics.Vulkan.Marshal, AHardwareBuffer(), ANativeWindow(),
CAMetalLayer(), VkBool32(..), VkDeviceAddress(..),
VkDeviceSize(..), VkFlags(..), VkSampleMask(..), VkResult(..),
VkStructureType(..), VkAccelerationStructureKHR,
VkAccelerationStructureKHR_T(), VkAccelerationStructureNV,
VkAccelerationStructureNV_T(), VkBuffer, VkBufferView,
VkBufferView_T(), VkBuffer_T(), VkCommandBuffer,
VkCommandBuffer_T(), VkCommandPool, VkCommandPool_T(),
VkDebugReportCallbackEXT, VkDebugReportCallbackEXT_T(),
VkDebugUtilsMessengerEXT, VkDebugUtilsMessengerEXT_T(),
VkDeferredOperationKHR, VkDeferredOperationKHR_T(),
VkDescriptorPool, VkDescriptorPool_T(), VkDescriptorSet,
VkDescriptorSetLayout, VkDescriptorSetLayout_T(),
VkDescriptorSet_T(), VkDescriptorUpdateTemplate,
VkDescriptorUpdateTemplateKHR, VkDescriptorUpdateTemplateKHR_T(),
VkDescriptorUpdateTemplate_T(), VkDevice, VkDeviceMemory,
VkDeviceMemory_T(), VkDevice_T(), VkDisplayKHR, VkDisplayKHR_T(),
VkDisplayModeKHR, VkDisplayModeKHR_T(), VkEvent, VkEvent_T(),
VkFence, VkFence_T(), VkFramebuffer, VkFramebuffer_T(), VkImage,
VkImageView, VkImageView_T(), VkImage_T(),
VkIndirectCommandsLayoutNV, VkIndirectCommandsLayoutNV_T(),
VkInstance, VkInstance_T(), VkPerformanceConfigurationINTEL,
VkPerformanceConfigurationINTEL_T(), VkPhysicalDevice,
VkPhysicalDevice_T(), VkPipeline, VkPipelineCache,
VkPipelineCache_T(), VkPipelineLayout, VkPipelineLayout_T(),
VkPipeline_T(), VkPrivateDataSlotEXT, VkPrivateDataSlotEXT_T(),
VkQueryPool, VkQueryPool_T(), VkQueue, VkQueue_T(), VkRenderPass,
VkRenderPass_T(), VkSampler, VkSamplerYcbcrConversion,
VkSamplerYcbcrConversionKHR, VkSamplerYcbcrConversionKHR_T(),
VkSamplerYcbcrConversion_T(), VkSampler_T(), VkSemaphore,
VkSemaphore_T(), VkShaderModule, VkShaderModule_T(), VkSurfaceKHR,
VkSurfaceKHR_T(), VkSwapchainKHR, VkSwapchainKHR_T(),
VkValidationCacheEXT, VkValidationCacheEXT_T(),
VkPhysicalDevice16BitStorageFeatures,
VkPhysicalDevice16BitStorageFeaturesKHR,
VkPhysicalDevice4444FormatsFeaturesEXT,
VkPhysicalDevice8BitStorageFeatures,
VkPhysicalDevice8BitStorageFeaturesKHR,
VkPhysicalDeviceASTCDecodeFeaturesEXT,
VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT,
VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT,
VkPhysicalDeviceBufferAddressFeaturesEXT,
VkPhysicalDeviceBufferDeviceAddressFeatures,
VkPhysicalDeviceBufferDeviceAddressFeaturesEXT,
VkPhysicalDeviceBufferDeviceAddressFeaturesKHR,
VkPhysicalDeviceCoherentMemoryFeaturesAMD,
VkPhysicalDeviceComputeShaderDerivativesFeaturesNV,
VkPhysicalDeviceConditionalRenderingFeaturesEXT,
VkPhysicalDeviceConservativeRasterizationPropertiesEXT,
VkPhysicalDeviceCooperativeMatrixFeaturesNV,
VkPhysicalDeviceCooperativeMatrixPropertiesNV,
VkPhysicalDeviceCornerSampledImageFeaturesNV,
VkPhysicalDeviceCoverageReductionModeFeaturesNV,
VkPhysicalDeviceCustomBorderColorFeaturesEXT,
VkPhysicalDeviceCustomBorderColorPropertiesEXT,
VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV,
VkPhysicalDeviceDepthClipEnableFeaturesEXT,
VkPhysicalDeviceDepthStencilResolveProperties,
VkPhysicalDeviceDepthStencilResolvePropertiesKHR,
VkPhysicalDeviceDescriptorIndexingFeatures,
VkPhysicalDeviceDescriptorIndexingFeaturesEXT,
VkPhysicalDeviceDescriptorIndexingProperties,
VkPhysicalDeviceDescriptorIndexingPropertiesEXT,
VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV,
VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV,
VkPhysicalDeviceDiagnosticsConfigFeaturesNV,
VkPhysicalDeviceDiscardRectanglePropertiesEXT,
VkPhysicalDeviceDriverProperties,
VkPhysicalDeviceDriverPropertiesKHR,
VkPhysicalDeviceExclusiveScissorFeaturesNV,
VkPhysicalDeviceExtendedDynamicStateFeaturesEXT,
VkPhysicalDeviceExternalBufferInfo,
VkPhysicalDeviceExternalBufferInfoKHR,
VkPhysicalDeviceExternalFenceInfo,
VkPhysicalDeviceExternalFenceInfoKHR,
VkPhysicalDeviceExternalImageFormatInfo,
VkPhysicalDeviceExternalImageFormatInfoKHR,
VkPhysicalDeviceExternalMemoryHostPropertiesEXT,
VkPhysicalDeviceExternalSemaphoreInfo,
VkPhysicalDeviceExternalSemaphoreInfoKHR,
VkPhysicalDeviceFeatures2, VkPhysicalDeviceFeatures2KHR,
VkPhysicalDeviceFloat16Int8FeaturesKHR,
VkPhysicalDeviceFloatControlsProperties,
VkPhysicalDeviceFloatControlsPropertiesKHR,
VkPhysicalDeviceFragmentDensityMap2FeaturesEXT,
VkPhysicalDeviceFragmentDensityMap2PropertiesEXT,
VkPhysicalDeviceFragmentDensityMapFeaturesEXT,
VkPhysicalDeviceFragmentDensityMapPropertiesEXT,
VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV,
VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT,
VkPhysicalDeviceGroupProperties,
VkPhysicalDeviceHostQueryResetFeatures,
VkPhysicalDeviceHostQueryResetFeaturesEXT,
VkPhysicalDeviceIDProperties, VkPhysicalDeviceIDPropertiesKHR,
VkPhysicalDeviceImageDrmFormatModifierInfoEXT,
VkPhysicalDeviceImageFormatInfo2,
VkPhysicalDeviceImageFormatInfo2KHR,
VkPhysicalDeviceImageRobustnessFeaturesEXT,
VkPhysicalDeviceImageViewImageFormatInfoEXT,
VkPhysicalDeviceImagelessFramebufferFeatures,
VkPhysicalDeviceImagelessFramebufferFeaturesKHR,
VkPhysicalDeviceIndexTypeUint8FeaturesEXT,
VkPhysicalDeviceInlineUniformBlockFeaturesEXT,
VkPhysicalDeviceInlineUniformBlockPropertiesEXT,
VkPhysicalDeviceLimits,
VkPhysicalDeviceLineRasterizationFeaturesEXT,
VkPhysicalDeviceLineRasterizationPropertiesEXT,
VkPhysicalDeviceMaintenance3Properties,
VkPhysicalDeviceMaintenance3PropertiesKHR,
VkPhysicalDeviceMemoryBudgetPropertiesEXT,
VkPhysicalDeviceMemoryPriorityFeaturesEXT,
VkPhysicalDeviceMemoryProperties,
VkPhysicalDeviceMemoryProperties2,
VkPhysicalDeviceMemoryProperties2KHR,
VkPhysicalDeviceMeshShaderFeaturesNV,
VkPhysicalDeviceMeshShaderPropertiesNV,
VkPhysicalDeviceMultiviewFeatures,
VkPhysicalDeviceMultiviewFeaturesKHR,
VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX,
VkPhysicalDeviceMultiviewProperties,
VkPhysicalDeviceMultiviewPropertiesKHR,
VkPhysicalDevicePCIBusInfoPropertiesEXT,
VkPhysicalDevicePerformanceQueryFeaturesKHR,
VkPhysicalDevicePerformanceQueryPropertiesKHR,
VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT,
VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR,
VkPhysicalDevicePointClippingProperties,
VkPhysicalDevicePointClippingPropertiesKHR,
VkPhysicalDevicePrivateDataFeaturesEXT, VkPhysicalDeviceProperties,
VkPhysicalDeviceProperties2, VkPhysicalDeviceProperties2KHR,
VkPhysicalDeviceProtectedMemoryFeatures,
VkPhysicalDeviceProtectedMemoryProperties,
VkPhysicalDevicePushDescriptorPropertiesKHR,
VkPhysicalDeviceRayTracingPropertiesNV,
VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV,
VkPhysicalDeviceRobustness2FeaturesEXT,
VkPhysicalDeviceRobustness2PropertiesEXT,
VkPhysicalDeviceSampleLocationsPropertiesEXT,
VkPhysicalDeviceSamplerFilterMinmaxProperties,
VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT,
VkPhysicalDeviceSamplerYcbcrConversionFeatures,
VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR,
VkPhysicalDeviceScalarBlockLayoutFeatures,
VkPhysicalDeviceScalarBlockLayoutFeaturesEXT,
VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures,
VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR,
VkPhysicalDeviceShaderAtomicFloatFeaturesEXT,
VkPhysicalDeviceShaderAtomicInt64Features,
VkPhysicalDeviceShaderAtomicInt64FeaturesKHR,
VkPhysicalDeviceShaderClockFeaturesKHR,
VkPhysicalDeviceShaderCoreProperties2AMD,
VkPhysicalDeviceShaderCorePropertiesAMD,
VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT,
VkPhysicalDeviceShaderDrawParameterFeatures,
VkPhysicalDeviceShaderDrawParametersFeatures,
VkPhysicalDeviceShaderFloat16Int8Features,
VkPhysicalDeviceShaderFloat16Int8FeaturesKHR,
VkPhysicalDeviceShaderImageFootprintFeaturesNV,
VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL,
VkPhysicalDeviceShaderSMBuiltinsFeaturesNV,
VkPhysicalDeviceShaderSMBuiltinsPropertiesNV,
VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures,
VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR,
VkPhysicalDeviceShadingRateImageFeaturesNV,
VkPhysicalDeviceShadingRateImagePropertiesNV,
VkPhysicalDeviceSparseImageFormatInfo2,
VkPhysicalDeviceSparseImageFormatInfo2KHR,
VkPhysicalDeviceSparseProperties,
VkPhysicalDeviceSubgroupProperties,
VkPhysicalDeviceSubgroupSizeControlFeaturesEXT,
VkPhysicalDeviceSubgroupSizeControlPropertiesEXT,
VkPhysicalDeviceSurfaceInfo2KHR,
VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT,
VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT,
VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT,
VkPhysicalDeviceTimelineSemaphoreFeatures,
VkPhysicalDeviceTimelineSemaphoreFeaturesKHR,
VkPhysicalDeviceTimelineSemaphoreProperties,
VkPhysicalDeviceTimelineSemaphorePropertiesKHR,
VkPhysicalDeviceToolPropertiesEXT,
VkPhysicalDeviceTransformFeedbackFeaturesEXT,
VkPhysicalDeviceTransformFeedbackPropertiesEXT,
VkPhysicalDeviceUniformBufferStandardLayoutFeatures,
VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR,
VkPhysicalDeviceVariablePointerFeatures,
VkPhysicalDeviceVariablePointerFeaturesKHR,
VkPhysicalDeviceVariablePointersFeatures,
VkPhysicalDeviceVariablePointersFeaturesKHR,
VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT,
VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT,
VkPhysicalDeviceVulkan11Features,
VkPhysicalDeviceVulkan11Properties,
VkPhysicalDeviceVulkan12Features,
VkPhysicalDeviceVulkan12Properties,
VkPhysicalDeviceVulkanMemoryModelFeatures,
VkPhysicalDeviceVulkanMemoryModelFeaturesKHR,
VkPhysicalDeviceYcbcrImageArraysFeaturesEXT,
VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION,
pattern VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION,
VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME,
pattern VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME,
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
pattern VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
pattern VK_MAX_DEVICE_GROUP_SIZE_KHR,
pattern VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR)
where
import GHC.Ptr (Ptr (..))
import Graphics.Vulkan.Constants (pattern VK_MAX_DEVICE_GROUP_SIZE_KHR)
import Graphics.Vulkan.Core_1_1 (pattern VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
pattern VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES)
import Graphics.Vulkan.Marshal
import Graphics.Vulkan.Marshal.Proc (VulkanProc (..))
import Graphics.Vulkan.Types.BaseTypes
import Graphics.Vulkan.Types.Enum.Result
import Graphics.Vulkan.Types.Enum.StructureType
import Graphics.Vulkan.Types.Handles
import Graphics.Vulkan.Types.Struct.Device (VkDeviceGroupDeviceCreateInfoKHR)
import Graphics.Vulkan.Types.Struct.PhysicalDevice
-- | The procedure name @vkEnumeratePhysicalDeviceGroupsKHR@ as a
--   NUL-terminated 'CString'.  The explicitly bidirectional pattern
--   matches by content ('cmpCStrings'), so it also matches equal
--   strings obtained at run time, not just this exact pointer.
pattern VkEnumeratePhysicalDeviceGroupsKHR :: CString

pattern VkEnumeratePhysicalDeviceGroupsKHR <-
        (is_VkEnumeratePhysicalDeviceGroupsKHR -> True)
  where
    VkEnumeratePhysicalDeviceGroupsKHR
      = _VkEnumeratePhysicalDeviceGroupsKHR

{-# INLINE _VkEnumeratePhysicalDeviceGroupsKHR #-}

-- | Static storage for the symbol name; the embedded @\\NUL@ makes it a
--   valid C string for the FFI lookup machinery.
_VkEnumeratePhysicalDeviceGroupsKHR :: CString
_VkEnumeratePhysicalDeviceGroupsKHR
  = Ptr "vkEnumeratePhysicalDeviceGroupsKHR\NUL"#

{-# INLINE is_VkEnumeratePhysicalDeviceGroupsKHR #-}

-- | Content comparison against the stored symbol name.
is_VkEnumeratePhysicalDeviceGroupsKHR :: CString -> Bool
is_VkEnumeratePhysicalDeviceGroupsKHR
  = (EQ ==) . cmpCStrings _VkEnumeratePhysicalDeviceGroupsKHR

-- | Type-level spelling of the symbol, used by the 'VulkanProc' instance.
type VkEnumeratePhysicalDeviceGroupsKHR =
     "vkEnumeratePhysicalDeviceGroupsKHR"
-- | This is an alias for `vkEnumeratePhysicalDeviceGroups`.
--
-- Success codes: 'VK_SUCCESS', 'VK_INCOMPLETE'.
--
-- Error codes: 'VK_ERROR_OUT_OF_HOST_MEMORY', 'VK_ERROR_OUT_OF_DEVICE_MEMORY', 'VK_ERROR_INITIALIZATION_FAILED'.
--
-- > vkEnumeratePhysicalDeviceGroupsKHR
-- >     ( VkInstance instance
-- >     , uint32_t* pPhysicalDeviceGroupCount
-- >     , VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties
-- >     )
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR registry at www.khronos.org>
type HS_vkEnumeratePhysicalDeviceGroupsKHR =
     VkInstance -- ^ instance
     ->
     Ptr Word32 -- ^ pPhysicalDeviceGroupCount
     -> Ptr VkPhysicalDeviceGroupProperties -- ^ pPhysicalDeviceGroupProperties
     -> IO VkResult

-- | A function pointer to the procedure, e.g. as returned by
--   @vkGetInstanceProcAddr@.
type PFN_vkEnumeratePhysicalDeviceGroupsKHR =
     FunPtr HS_vkEnumeratePhysicalDeviceGroupsKHR

-- Unsafe dynamic wrapper: lower call overhead, but the C call must not
-- call back into Haskell.
foreign import ccall unsafe "dynamic"
               unwrapVkEnumeratePhysicalDeviceGroupsKHRUnsafe ::
               PFN_vkEnumeratePhysicalDeviceGroupsKHR ->
                 HS_vkEnumeratePhysicalDeviceGroupsKHR

-- Safe dynamic wrapper: allows the RTS to continue running other
-- Haskell threads during the foreign call.
foreign import ccall safe "dynamic"
               unwrapVkEnumeratePhysicalDeviceGroupsKHRSafe ::
               PFN_vkEnumeratePhysicalDeviceGroupsKHR ->
                 HS_vkEnumeratePhysicalDeviceGroupsKHR

-- | Ties together the symbol name, its Haskell signature, and the two
--   FunPtr unwrappers so the procedure can be resolved generically.
instance VulkanProc "vkEnumeratePhysicalDeviceGroupsKHR" where
    type VkProcType "vkEnumeratePhysicalDeviceGroupsKHR" =
         HS_vkEnumeratePhysicalDeviceGroupsKHR
    vkProcSymbol = _VkEnumeratePhysicalDeviceGroupsKHR
    {-# INLINE vkProcSymbol #-}
    unwrapVkProcPtrUnsafe
      = unwrapVkEnumeratePhysicalDeviceGroupsKHRUnsafe
    {-# INLINE unwrapVkProcPtrUnsafe #-}
    unwrapVkProcPtrSafe = unwrapVkEnumeratePhysicalDeviceGroupsKHRSafe
    {-# INLINE unwrapVkProcPtrSafe #-}
-- | Revision of the @VK_KHR_device_group_creation@ extension.
pattern VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION ::
        (Num a, Eq a) => a

pattern VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION = 1

type VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION = 1

-- | Extension name as a NUL-terminated 'CString'; matches by content
--   ('cmpCStrings'), so equal strings built at run time also match.
pattern VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME :: CString

pattern VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME <-
        (is_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME -> True)
  where
    VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME
      = _VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME

{-# INLINE _VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME #-}

-- | Static storage for the extension name string.
_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME :: CString
_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME
  = Ptr "VK_KHR_device_group_creation\NUL"#

{-# INLINE is_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME #-}

-- | Content comparison against the stored extension name.
is_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME :: CString -> Bool
is_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME
  = (EQ ==) .
      cmpCStrings _VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME

type VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME =
     "VK_KHR_device_group_creation"

-- KHR-suffixed aliases for the core Vulkan 1.1 names (imported from
-- "Graphics.Vulkan.Core_1_1" above).
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES

pattern VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR =
        VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO

pattern VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR =
        VK_MEMORY_HEAP_MULTI_INSTANCE_BIT
| null | https://raw.githubusercontent.com/achirkin/vulkan/b2e0568c71b5135010f4bba939cd8dcf7a05c361/vulkan-api/src-gen/Graphics/Vulkan/Ext/VK_KHR_device_group_creation.hs | haskell | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE MagicHash #
# LANGUAGE PatternSynonyms #
# LANGUAGE Strict #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
|
supported: @vulkan@
author: @KHR@
type: @instance@
Extension number: @71@
# INLINE _VkEnumeratePhysicalDeviceGroupsKHR #
| This is an alias for `vkEnumeratePhysicalDeviceGroups`.
Error codes: 'VK_ERROR_OUT_OF_HOST_MEMORY', 'VK_ERROR_OUT_OF_DEVICE_MEMORY', 'VK_ERROR_INITIALIZATION_FAILED'.
> , uint32_t* pPhysicalDeviceGroupCount
> )
^ instance
^ pPhysicalDeviceGroupCount
^ pPhysicalDeviceGroupProperties | # OPTIONS_GHC -fno - warn - missing - pattern - synonym - signatures #
{-# OPTIONS_GHC -fno-warn-missing-pattern-synonym-signatures #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_HADDOCK not-home #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ForeignFunctionInterface #-}
* Vulkan extension : @VK_KHR_device_group_creation@
contact : @Jeff Bolz @jeffbolznv@
VkDeviceGroupDeviceCreateInfoKHR,
VkPhysicalDeviceGroupPropertiesKHR,
VkEnumeratePhysicalDeviceGroupsKHR,
pattern VkEnumeratePhysicalDeviceGroupsKHR,
HS_vkEnumeratePhysicalDeviceGroupsKHR,
PFN_vkEnumeratePhysicalDeviceGroupsKHR,
module Graphics.Vulkan.Marshal, AHardwareBuffer(), ANativeWindow(),
CAMetalLayer(), VkBool32(..), VkDeviceAddress(..),
VkDeviceSize(..), VkFlags(..), VkSampleMask(..), VkResult(..),
VkStructureType(..), VkAccelerationStructureKHR,
VkAccelerationStructureKHR_T(), VkAccelerationStructureNV,
VkAccelerationStructureNV_T(), VkBuffer, VkBufferView,
VkBufferView_T(), VkBuffer_T(), VkCommandBuffer,
VkCommandBuffer_T(), VkCommandPool, VkCommandPool_T(),
VkDebugReportCallbackEXT, VkDebugReportCallbackEXT_T(),
VkDebugUtilsMessengerEXT, VkDebugUtilsMessengerEXT_T(),
VkDeferredOperationKHR, VkDeferredOperationKHR_T(),
VkDescriptorPool, VkDescriptorPool_T(), VkDescriptorSet,
VkDescriptorSetLayout, VkDescriptorSetLayout_T(),
VkDescriptorSet_T(), VkDescriptorUpdateTemplate,
VkDescriptorUpdateTemplateKHR, VkDescriptorUpdateTemplateKHR_T(),
VkDescriptorUpdateTemplate_T(), VkDevice, VkDeviceMemory,
VkDeviceMemory_T(), VkDevice_T(), VkDisplayKHR, VkDisplayKHR_T(),
VkDisplayModeKHR, VkDisplayModeKHR_T(), VkEvent, VkEvent_T(),
VkFence, VkFence_T(), VkFramebuffer, VkFramebuffer_T(), VkImage,
VkImageView, VkImageView_T(), VkImage_T(),
VkIndirectCommandsLayoutNV, VkIndirectCommandsLayoutNV_T(),
VkInstance, VkInstance_T(), VkPerformanceConfigurationINTEL,
VkPerformanceConfigurationINTEL_T(), VkPhysicalDevice,
VkPhysicalDevice_T(), VkPipeline, VkPipelineCache,
VkPipelineCache_T(), VkPipelineLayout, VkPipelineLayout_T(),
VkPipeline_T(), VkPrivateDataSlotEXT, VkPrivateDataSlotEXT_T(),
VkQueryPool, VkQueryPool_T(), VkQueue, VkQueue_T(), VkRenderPass,
VkRenderPass_T(), VkSampler, VkSamplerYcbcrConversion,
VkSamplerYcbcrConversionKHR, VkSamplerYcbcrConversionKHR_T(),
VkSamplerYcbcrConversion_T(), VkSampler_T(), VkSemaphore,
VkSemaphore_T(), VkShaderModule, VkShaderModule_T(), VkSurfaceKHR,
VkSurfaceKHR_T(), VkSwapchainKHR, VkSwapchainKHR_T(),
VkValidationCacheEXT, VkValidationCacheEXT_T(),
VkPhysicalDevice16BitStorageFeatures,
VkPhysicalDevice16BitStorageFeaturesKHR,
VkPhysicalDevice4444FormatsFeaturesEXT,
VkPhysicalDevice8BitStorageFeatures,
VkPhysicalDevice8BitStorageFeaturesKHR,
VkPhysicalDeviceASTCDecodeFeaturesEXT,
VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT,
VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT,
VkPhysicalDeviceBufferAddressFeaturesEXT,
VkPhysicalDeviceBufferDeviceAddressFeatures,
VkPhysicalDeviceBufferDeviceAddressFeaturesEXT,
VkPhysicalDeviceBufferDeviceAddressFeaturesKHR,
VkPhysicalDeviceCoherentMemoryFeaturesAMD,
VkPhysicalDeviceComputeShaderDerivativesFeaturesNV,
VkPhysicalDeviceConditionalRenderingFeaturesEXT,
VkPhysicalDeviceConservativeRasterizationPropertiesEXT,
VkPhysicalDeviceCooperativeMatrixFeaturesNV,
VkPhysicalDeviceCooperativeMatrixPropertiesNV,
VkPhysicalDeviceCornerSampledImageFeaturesNV,
VkPhysicalDeviceCoverageReductionModeFeaturesNV,
VkPhysicalDeviceCustomBorderColorFeaturesEXT,
VkPhysicalDeviceCustomBorderColorPropertiesEXT,
VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV,
VkPhysicalDeviceDepthClipEnableFeaturesEXT,
VkPhysicalDeviceDepthStencilResolveProperties,
VkPhysicalDeviceDepthStencilResolvePropertiesKHR,
VkPhysicalDeviceDescriptorIndexingFeatures,
VkPhysicalDeviceDescriptorIndexingFeaturesEXT,
VkPhysicalDeviceDescriptorIndexingProperties,
VkPhysicalDeviceDescriptorIndexingPropertiesEXT,
VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV,
VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV,
VkPhysicalDeviceDiagnosticsConfigFeaturesNV,
VkPhysicalDeviceDiscardRectanglePropertiesEXT,
VkPhysicalDeviceDriverProperties,
VkPhysicalDeviceDriverPropertiesKHR,
VkPhysicalDeviceExclusiveScissorFeaturesNV,
VkPhysicalDeviceExtendedDynamicStateFeaturesEXT,
VkPhysicalDeviceExternalBufferInfo,
VkPhysicalDeviceExternalBufferInfoKHR,
VkPhysicalDeviceExternalFenceInfo,
VkPhysicalDeviceExternalFenceInfoKHR,
VkPhysicalDeviceExternalImageFormatInfo,
VkPhysicalDeviceExternalImageFormatInfoKHR,
VkPhysicalDeviceExternalMemoryHostPropertiesEXT,
VkPhysicalDeviceExternalSemaphoreInfo,
VkPhysicalDeviceExternalSemaphoreInfoKHR,
VkPhysicalDeviceFeatures2, VkPhysicalDeviceFeatures2KHR,
VkPhysicalDeviceFloat16Int8FeaturesKHR,
VkPhysicalDeviceFloatControlsProperties,
VkPhysicalDeviceFloatControlsPropertiesKHR,
VkPhysicalDeviceFragmentDensityMap2FeaturesEXT,
VkPhysicalDeviceFragmentDensityMap2PropertiesEXT,
VkPhysicalDeviceFragmentDensityMapFeaturesEXT,
VkPhysicalDeviceFragmentDensityMapPropertiesEXT,
VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV,
VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT,
VkPhysicalDeviceGroupProperties,
VkPhysicalDeviceHostQueryResetFeatures,
VkPhysicalDeviceHostQueryResetFeaturesEXT,
VkPhysicalDeviceIDProperties, VkPhysicalDeviceIDPropertiesKHR,
VkPhysicalDeviceImageDrmFormatModifierInfoEXT,
VkPhysicalDeviceImageFormatInfo2,
VkPhysicalDeviceImageFormatInfo2KHR,
VkPhysicalDeviceImageRobustnessFeaturesEXT,
VkPhysicalDeviceImageViewImageFormatInfoEXT,
VkPhysicalDeviceImagelessFramebufferFeatures,
VkPhysicalDeviceImagelessFramebufferFeaturesKHR,
VkPhysicalDeviceIndexTypeUint8FeaturesEXT,
VkPhysicalDeviceInlineUniformBlockFeaturesEXT,
VkPhysicalDeviceInlineUniformBlockPropertiesEXT,
VkPhysicalDeviceLimits,
VkPhysicalDeviceLineRasterizationFeaturesEXT,
VkPhysicalDeviceLineRasterizationPropertiesEXT,
VkPhysicalDeviceMaintenance3Properties,
VkPhysicalDeviceMaintenance3PropertiesKHR,
VkPhysicalDeviceMemoryBudgetPropertiesEXT,
VkPhysicalDeviceMemoryPriorityFeaturesEXT,
VkPhysicalDeviceMemoryProperties,
VkPhysicalDeviceMemoryProperties2,
VkPhysicalDeviceMemoryProperties2KHR,
VkPhysicalDeviceMeshShaderFeaturesNV,
VkPhysicalDeviceMeshShaderPropertiesNV,
VkPhysicalDeviceMultiviewFeatures,
VkPhysicalDeviceMultiviewFeaturesKHR,
VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX,
VkPhysicalDeviceMultiviewProperties,
VkPhysicalDeviceMultiviewPropertiesKHR,
VkPhysicalDevicePCIBusInfoPropertiesEXT,
VkPhysicalDevicePerformanceQueryFeaturesKHR,
VkPhysicalDevicePerformanceQueryPropertiesKHR,
VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT,
VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR,
VkPhysicalDevicePointClippingProperties,
VkPhysicalDevicePointClippingPropertiesKHR,
VkPhysicalDevicePrivateDataFeaturesEXT, VkPhysicalDeviceProperties,
VkPhysicalDeviceProperties2, VkPhysicalDeviceProperties2KHR,
VkPhysicalDeviceProtectedMemoryFeatures,
VkPhysicalDeviceProtectedMemoryProperties,
VkPhysicalDevicePushDescriptorPropertiesKHR,
VkPhysicalDeviceRayTracingPropertiesNV,
VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV,
VkPhysicalDeviceRobustness2FeaturesEXT,
VkPhysicalDeviceRobustness2PropertiesEXT,
VkPhysicalDeviceSampleLocationsPropertiesEXT,
VkPhysicalDeviceSamplerFilterMinmaxProperties,
VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT,
VkPhysicalDeviceSamplerYcbcrConversionFeatures,
VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR,
VkPhysicalDeviceScalarBlockLayoutFeatures,
VkPhysicalDeviceScalarBlockLayoutFeaturesEXT,
VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures,
VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR,
VkPhysicalDeviceShaderAtomicFloatFeaturesEXT,
VkPhysicalDeviceShaderAtomicInt64Features,
VkPhysicalDeviceShaderAtomicInt64FeaturesKHR,
VkPhysicalDeviceShaderClockFeaturesKHR,
VkPhysicalDeviceShaderCoreProperties2AMD,
VkPhysicalDeviceShaderCorePropertiesAMD,
VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT,
VkPhysicalDeviceShaderDrawParameterFeatures,
VkPhysicalDeviceShaderDrawParametersFeatures,
VkPhysicalDeviceShaderFloat16Int8Features,
VkPhysicalDeviceShaderFloat16Int8FeaturesKHR,
VkPhysicalDeviceShaderImageFootprintFeaturesNV,
VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL,
VkPhysicalDeviceShaderSMBuiltinsFeaturesNV,
VkPhysicalDeviceShaderSMBuiltinsPropertiesNV,
VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures,
VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR,
VkPhysicalDeviceShadingRateImageFeaturesNV,
VkPhysicalDeviceShadingRateImagePropertiesNV,
VkPhysicalDeviceSparseImageFormatInfo2,
VkPhysicalDeviceSparseImageFormatInfo2KHR,
VkPhysicalDeviceSparseProperties,
VkPhysicalDeviceSubgroupProperties,
VkPhysicalDeviceSubgroupSizeControlFeaturesEXT,
VkPhysicalDeviceSubgroupSizeControlPropertiesEXT,
VkPhysicalDeviceSurfaceInfo2KHR,
VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT,
VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT,
VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT,
VkPhysicalDeviceTimelineSemaphoreFeatures,
VkPhysicalDeviceTimelineSemaphoreFeaturesKHR,
VkPhysicalDeviceTimelineSemaphoreProperties,
VkPhysicalDeviceTimelineSemaphorePropertiesKHR,
VkPhysicalDeviceToolPropertiesEXT,
VkPhysicalDeviceTransformFeedbackFeaturesEXT,
VkPhysicalDeviceTransformFeedbackPropertiesEXT,
VkPhysicalDeviceUniformBufferStandardLayoutFeatures,
VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR,
VkPhysicalDeviceVariablePointerFeatures,
VkPhysicalDeviceVariablePointerFeaturesKHR,
VkPhysicalDeviceVariablePointersFeatures,
VkPhysicalDeviceVariablePointersFeaturesKHR,
VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT,
VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT,
VkPhysicalDeviceVulkan11Features,
VkPhysicalDeviceVulkan11Properties,
VkPhysicalDeviceVulkan12Features,
VkPhysicalDeviceVulkan12Properties,
VkPhysicalDeviceVulkanMemoryModelFeatures,
VkPhysicalDeviceVulkanMemoryModelFeaturesKHR,
VkPhysicalDeviceYcbcrImageArraysFeaturesEXT,
VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION,
pattern VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION,
VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME,
pattern VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME,
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
pattern VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
pattern VK_MAX_DEVICE_GROUP_SIZE_KHR,
pattern VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR)
where
import GHC.Ptr (Ptr (..))
import Graphics.Vulkan.Constants (pattern VK_MAX_DEVICE_GROUP_SIZE_KHR)
import Graphics.Vulkan.Core_1_1 (pattern VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
pattern VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES)
import Graphics.Vulkan.Marshal
import Graphics.Vulkan.Marshal.Proc (VulkanProc (..))
import Graphics.Vulkan.Types.BaseTypes
import Graphics.Vulkan.Types.Enum.Result
import Graphics.Vulkan.Types.Enum.StructureType
import Graphics.Vulkan.Types.Handles
import Graphics.Vulkan.Types.Struct.Device (VkDeviceGroupDeviceCreateInfoKHR)
import Graphics.Vulkan.Types.Struct.PhysicalDevice
-- | The procedure name @vkEnumeratePhysicalDeviceGroupsKHR@ as a
--   NUL-terminated 'CString'.  The explicitly bidirectional pattern
--   matches by content ('cmpCStrings'), so it also matches equal
--   strings obtained at run time, not just this exact pointer.
pattern VkEnumeratePhysicalDeviceGroupsKHR :: CString

pattern VkEnumeratePhysicalDeviceGroupsKHR <-
        (is_VkEnumeratePhysicalDeviceGroupsKHR -> True)
  where
    VkEnumeratePhysicalDeviceGroupsKHR
      = _VkEnumeratePhysicalDeviceGroupsKHR

{-# INLINE _VkEnumeratePhysicalDeviceGroupsKHR #-}

-- | Static storage for the symbol name; the embedded @\\NUL@ makes it a
--   valid C string for the FFI lookup machinery.
_VkEnumeratePhysicalDeviceGroupsKHR :: CString
_VkEnumeratePhysicalDeviceGroupsKHR
  = Ptr "vkEnumeratePhysicalDeviceGroupsKHR\NUL"#

{-# INLINE is_VkEnumeratePhysicalDeviceGroupsKHR #-}

-- | Content comparison against the stored symbol name.
is_VkEnumeratePhysicalDeviceGroupsKHR :: CString -> Bool
is_VkEnumeratePhysicalDeviceGroupsKHR
  = (EQ ==) . cmpCStrings _VkEnumeratePhysicalDeviceGroupsKHR

-- | Type-level spelling of the symbol, used by the 'VulkanProc' instance.
type VkEnumeratePhysicalDeviceGroupsKHR =
     "vkEnumeratePhysicalDeviceGroupsKHR"
-- | This is an alias for `vkEnumeratePhysicalDeviceGroups`.
--
-- Success codes: 'VK_SUCCESS', 'VK_INCOMPLETE'.
--
-- Error codes: 'VK_ERROR_OUT_OF_HOST_MEMORY', 'VK_ERROR_OUT_OF_DEVICE_MEMORY', 'VK_ERROR_INITIALIZATION_FAILED'.
--
-- > vkEnumeratePhysicalDeviceGroupsKHR
-- >     ( VkInstance instance
-- >     , uint32_t* pPhysicalDeviceGroupCount
-- >     , VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties
-- >     )
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR registry at www.khronos.org>
--
-- NOTE(review): the argument types of this signature (@VkInstance@,
-- @Ptr Word32@, @Ptr VkPhysicalDeviceGroupProperties@) were missing in
-- this copy; restored to match the intact definition of the same
-- module earlier in this file.
type HS_vkEnumeratePhysicalDeviceGroupsKHR =
     VkInstance -- ^ instance
     ->
     Ptr Word32 -- ^ pPhysicalDeviceGroupCount
     -> Ptr VkPhysicalDeviceGroupProperties -- ^ pPhysicalDeviceGroupProperties
     -> IO VkResult

-- | A function pointer to the procedure, e.g. as returned by
--   @vkGetInstanceProcAddr@.
type PFN_vkEnumeratePhysicalDeviceGroupsKHR =
     FunPtr HS_vkEnumeratePhysicalDeviceGroupsKHR

-- Unsafe dynamic wrapper: lower call overhead, but the C call must not
-- call back into Haskell.
foreign import ccall unsafe "dynamic"
               unwrapVkEnumeratePhysicalDeviceGroupsKHRUnsafe ::
               PFN_vkEnumeratePhysicalDeviceGroupsKHR ->
                 HS_vkEnumeratePhysicalDeviceGroupsKHR

-- Safe dynamic wrapper: allows the RTS to continue running other
-- Haskell threads during the foreign call.
foreign import ccall safe "dynamic"
               unwrapVkEnumeratePhysicalDeviceGroupsKHRSafe ::
               PFN_vkEnumeratePhysicalDeviceGroupsKHR ->
                 HS_vkEnumeratePhysicalDeviceGroupsKHR

-- | Ties together the symbol name, its Haskell signature, and the two
--   FunPtr unwrappers so the procedure can be resolved generically.
instance VulkanProc "vkEnumeratePhysicalDeviceGroupsKHR" where
    type VkProcType "vkEnumeratePhysicalDeviceGroupsKHR" =
         HS_vkEnumeratePhysicalDeviceGroupsKHR
    vkProcSymbol = _VkEnumeratePhysicalDeviceGroupsKHR
    {-# INLINE vkProcSymbol #-}
    unwrapVkProcPtrUnsafe
      = unwrapVkEnumeratePhysicalDeviceGroupsKHRUnsafe
    {-# INLINE unwrapVkProcPtrUnsafe #-}
    unwrapVkProcPtrSafe = unwrapVkEnumeratePhysicalDeviceGroupsKHRSafe
    {-# INLINE unwrapVkProcPtrSafe #-}
pattern VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION ::
(Num a, Eq a) => a
pattern VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION = 1
type VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION = 1
-- | The extension name as a C string, usable as a bidirectional pattern:
--   matching compares byte-by-byte via 'cmpCStrings'; construction yields
--   the primitive address literal below.
--
-- NOTE(review): the two INLINE pragmas in this cluster were mangled by
-- extraction (@# INLINE _ VK... #@ / @# INLINE is_... #@); restored to the
-- standard @{-# INLINE name #-}@ form.
pattern VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME :: CString
pattern VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME <-
        (is_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME -> True)
  where
    VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME
      = _VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME

{-# INLINE _VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME #-}
-- | The extension name as a NUL-terminated primitive address literal.
_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME :: CString
_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME
  = Ptr "VK_KHR_device_group_creation\NUL"#

{-# INLINE is_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME #-}
-- | True iff the argument equals the extension name byte-for-byte.
is_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME :: CString -> Bool
is_VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME
  = (EQ ==) .
      cmpCStrings _VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME

-- | Type-level spelling of the extension name.
type VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME =
     "VK_KHR_device_group_creation"
-- | KHR-suffixed aliases of the corresponding unsuffixed values —
--   presumably kept for source compatibility with extension-era code;
--   confirm against the upstream generator.
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES

pattern VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR =
        VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO

pattern VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR =
        VK_MEMORY_HEAP_MULTI_INSTANCE_BIT
|
16ff665762ed09a2a6643204505196c2112fd61fcc9e38af8f9b2f2d4fd20c4c | Plutonomicon/plutarch-plutus | PLamSpec.hs | module Plutarch.PLamSpec (spec) where
import Data.ByteString (ByteString)
import PlutusCore qualified as PLC
import Plutarch.Prelude
import Plutarch.Test
import Plutarch.Unsafe (punsafeBuiltin)
import Test.Hspec
-- | Golden tests for 'plam' and friends: each labelled entry records the
--   compiled output of a Plutarch term.  Labels of the form @"a => b"@
--   name the rewrite the golden output is expected to exhibit.
spec :: Spec
spec = do
  describe "plam" . pgoldenSpec $ do
    -- Plain lambda construction.
    "id" @| plam id
    "flip.const" @| plam (\_ y -> y)
    "plet" @| plam (\x _ -> plet x $ const perror)
    -- Constants of each primitive type, tucked under a lambda.
    "primitives" @\ do
      "bool" @\ do
        "true" @| plam $ \_ -> pconstant True
      "int" @\ do
        "0" @| plam $ const (0 :: Term _ PInteger)
        "1" @| plam $ const (1 :: Term _ PInteger)
        "512" @| plam $ const (512 :: Term _ PInteger)
        "1048576" @| plam $ const (1048576 :: Term _ PInteger)
      "bytestring" @\ do
        "1" @| plam $ \_ -> pconstant ("1" :: ByteString)
        "1111111" @| plam $ \_ -> pconstant ("1111111" :: ByteString)
      "unit" @\ do
        "list" @| plam $ \_ -> pconstant ([()] :: [()])
        "()" @| plam $ \_ -> pconstant ()
      "id" @| plam $ id
      "fun" @\ do
        "lam+" @| plam $ const (plam (+) :: Term _ (PInteger :--> PInteger :--> PInteger))
        "+" @| (plam (+) :: Term _ (PInteger :--> PInteger :--> PInteger))
    -- Hoisting / eta-reduction optimisations applied during compilation.
    "η-reduction-optimisations" @\ do
      "λx y. addInteger x y => addInteger"
        @| plam
        $ \x y -> (x :: Term _ PInteger) + y
      "λx y. hoist (force mkCons) x y => force mkCons"
        @| plam
        $ \x y -> pforce (punsafeBuiltin PLC.MkCons) # x # y
      "λx y. hoist mkCons x y => mkCons x y"
        @| plam
        $ \x y -> punsafeBuiltin PLC.MkCons # x # y
      "λx y. hoist (λx y. x + y - y - x) x y => λx y. x + y - y - x"
        @| plam
        $ \x y -> phoistAcyclic (plam $ \(x :: Term _ PInteger) y -> x + y - y - x) # x # y
      "λx y. x + x"
        @| plam
        $ \(x :: Term _ PInteger) (_ :: Term _ PInteger) -> x + x
      "let x = addInteger in x 1 1"
        @| plet (punsafeBuiltin PLC.AddInteger)
        $ \x -> x # (1 :: Term _ PInteger) # (1 :: Term _ PInteger)
      "let x = 0 in x => 0"
        @| plet 0
        $ \(x :: Term _ PInteger) -> x
      "let x = hoist (λx. x + x) in 0 => 0"
        @| plet (phoistAcyclic $ plam $ \(x :: Term _ PInteger) -> x + x)
        $ const (0 :: Term _ PInteger)
      "let x = hoist (λx. x + x) in x"
        @| plet (phoistAcyclic $ plam $ \(x :: Term _ PInteger) -> x + x)
        $ id
      -- "=>!" marks a term that is expected NOT to be reduced.
      "λx y. sha2_256 x y =>!"
        @| plam (\x y -> punsafeBuiltin PLC.Sha2_256 # x # y)
      "let f = hoist (λx. x) in λx y. f x y => λx y. x y"
        @| plam (\x y -> phoistAcyclic (plam id) # x # y)
      "let f = hoist (λx. x True) in λx y. f x y => λx y. (λz. z True) x y"
        @| plam (\x y -> phoistAcyclic (plam $ \x -> x # pcon PTrue) # x # y)
      "λy. (λx. x + x) y"
        @| plam
        $ \y -> plam (\(x :: Term _ PInteger) -> x + x) # y
| null | https://raw.githubusercontent.com/Plutonomicon/plutarch-plutus/9b83892057f2aaaed76e3af6193ad1ae242244cc/plutarch-test/tests/Plutarch/PLamSpec.hs | haskell | > PInteger :--> PInteger))
> PInteger :--> PInteger)) | module Plutarch.PLamSpec (spec) where
import Data.ByteString (ByteString)
import PlutusCore qualified as PLC
import Plutarch.Prelude
import Plutarch.Test
import Plutarch.Unsafe (punsafeBuiltin)
import Test.Hspec
spec :: Spec
spec = do
describe "plam" . pgoldenSpec $ do
"id" @| plam id
"flip.const" @| plam (\_ y -> y)
"plet" @| plam (\x _ -> plet x $ const perror)
"primitives" @\ do
"bool" @\ do
"true" @| plam $ \_ -> pconstant True
"int" @\ do
"0" @| plam $ const (0 :: Term _ PInteger)
"1" @| plam $ const (1 :: Term _ PInteger)
"512" @| plam $ const (512 :: Term _ PInteger)
"1048576" @| plam $ const (1048576 :: Term _ PInteger)
"bytestring" @\ do
"1" @| plam $ \_ -> pconstant ("1" :: ByteString)
"1111111" @| plam $ \_ -> pconstant ("1111111" :: ByteString)
"unit" @\ do
"list" @| plam $ \_ -> pconstant ([()] :: [()])
"()" @| plam $ \_ -> pconstant ()
"id" @| plam $ id
"fun" @\ do
"η-reduction-optimisations" @\ do
"λx y. addInteger x y => addInteger"
@| plam
$ \x y -> (x :: Term _ PInteger) + y
"λx y. hoist (force mkCons) x y => force mkCons"
@| plam
$ \x y -> pforce (punsafeBuiltin PLC.MkCons) # x # y
"λx y. hoist mkCons x y => mkCons x y"
@| plam
$ \x y -> punsafeBuiltin PLC.MkCons # x # y
"λx y. hoist (λx y. x + y - y - x) x y => λx y. x + y - y - x"
@| plam
$ \x y -> phoistAcyclic (plam $ \(x :: Term _ PInteger) y -> x + y - y - x) # x # y
"λx y. x + x"
@| plam
$ \(x :: Term _ PInteger) (_ :: Term _ PInteger) -> x + x
"let x = addInteger in x 1 1"
@| plet (punsafeBuiltin PLC.AddInteger)
$ \x -> x # (1 :: Term _ PInteger) # (1 :: Term _ PInteger)
"let x = 0 in x => 0"
@| plet 0
$ \(x :: Term _ PInteger) -> x
"let x = hoist (λx. x + x) in 0 => 0"
@| plet (phoistAcyclic $ plam $ \(x :: Term _ PInteger) -> x + x)
$ const (0 :: Term _ PInteger)
"let x = hoist (λx. x + x) in x"
@| plet (phoistAcyclic $ plam $ \(x :: Term _ PInteger) -> x + x)
$ id
"λx y. sha2_256 x y =>!"
@| plam (\x y -> punsafeBuiltin PLC.Sha2_256 # x # y)
"let f = hoist (λx. x) in λx y. f x y => λx y. x y"
@| plam (\x y -> phoistAcyclic (plam id) # x # y)
"let f = hoist (λx. x True) in λx y. f x y => λx y. (λz. z True) x y"
@| plam (\x y -> phoistAcyclic (plam $ \x -> x # pcon PTrue) # x # y)
"λy. (λx. x + x) y"
@| plam
$ \y -> plam (\(x :: Term _ PInteger) -> x + x) # y
|
5a213d985e815cdc74f1af46c20feb7316ef735c433468d0129d858576005806 | jaked/froc | main.ml | open OUnit
let tests = "Froc" >::: [
Sa.tests
]
;;
OUnit.run_test_tt_main tests
| null | https://raw.githubusercontent.com/jaked/froc/6068a1fab883ed9254bfeb53a1f9c15e8af0bb20/test/froc/main.ml | ocaml | open OUnit
let tests = "Froc" >::: [
Sa.tests
]
;;
OUnit.run_test_tt_main tests
| |
d17bd1a77f8c52c65ecc3f6c3dd846a384179965e40709b5034a94bb67646a65 | ghc/packages-dph | Tuple.hs | {-# OPTIONS_HADDOCK hide #-}
# LANGUAGE CPP #
#include "fusion-phases.h"
-- | PRepr instance for tuples
-- and PD wrappers for other functions defined in D.A.P.PArray.PData.Tuple.
module Data.Array.Parallel.PArray.PRepr.Tuple
( PRepr
, ziplPA)
where
import Data.Array.Parallel.PArray.Types
import Data.Array.Parallel.PArray.PRepr.Base
import Data.Array.Parallel.PArray.PData.Base
import Data.Array.Parallel.PArray.PData.Tuple2
import Data.Array.Parallel.PArray.PData.Tuple3
import Data.Array.Parallel.PArray.PData.Tuple4
import Data.Array.Parallel.PArray.PData.Tuple5
import Data.Array.Parallel.PArray.PData.Tuple6
import Data.Array.Parallel.PArray.PData.Tuple7
import Data.Array.Parallel.PArray.PData.Nested
import Data.Array.Parallel.PArray.PData.Wrap
-- Tuple2 --------------------------------------------------------------------
-- | A pair is represented as a pair of wrapped components.
type instance PRepr (a, b)
        = (Wrap a, Wrap b)

instance (PA a, PA b) => PA (a, b) where
  -- NOTE(review): the first two INLINE_PA pragmas had their {-# ... #-}
  -- brackets stripped by extraction (compare the surviving
  -- {-# INLINE_PA toArrPRepr #-} below); restored here.
  {-# INLINE_PA toPRepr #-}
  -- Wrap each component of a scalar pair.
  toPRepr (a, b)
        = (Wrap a, Wrap b)

  {-# INLINE_PA fromPRepr #-}
  -- Unwrap each component of a scalar pair.
  fromPRepr (Wrap a, Wrap b)
        = (a, b)

  {-# INLINE_PA toArrPRepr #-}
  -- Wrap the element arrays of a flat array of pairs.
  toArrPRepr (PTuple2 as bs)
        = PTuple2 (PWrap as) (PWrap bs)

  {-# INLINE_PA fromArrPRepr #-}
  fromArrPRepr (PTuple2 (PWrap as) (PWrap bs))
        = PTuple2 as bs

  {-# INLINE_PA toArrPReprs #-}
  -- Same wrapping, lifted to collections of arrays (PDatas level).
  toArrPReprs (PTuple2s as bs)
        = PTuple2s (PWraps as) (PWraps bs)

  {-# INLINE_PA fromArrPReprs #-}
  fromArrPReprs (PTuple2s (PWraps as) (PWraps bs))
        = PTuple2s as bs
-- | Lifted zip on PData arrays: zips two nested arrays element-wise into a
--   nested array of pairs, reusing the segment descriptors of the zipped
--   representation.
ziplPA :: (PA a, PA b)
        => PData (PArray a) -> PData (PArray b) -> PData (PArray (a, b))
ziplPA xs ys
 = let
        -- TODO: can we use the flat version here?
        -- Zip at the PRepr level; the flat component of the result is
        -- ignored (matched with _) and recomputed below from pdatas.
        PNested vsegd (PTuple2s xs' ys') segd _
         = ziplPR (toNestedArrPRepr xs) (toNestedArrPRepr ys)
        -- Re-wrap the zipped element arrays at the (a, b) type.
        pdatas = PTuple2s (fromArrPReprs xs') (fromArrPReprs ys')
        -- Rebuild the flat view from the virtual segment descriptor
        -- (extractvs_delay — delayed per its name; confirm semantics).
        flat = fromArrPRepr $ extractvs_delay (toArrPReprs pdatas) vsegd
   in PNested vsegd pdatas segd flat
-- Tuple3 --------------------------------------------------------------------
type instance PRepr (a, b, c)
= (Wrap a, Wrap b, Wrap c)
instance (PA a, PA b, PA c) => PA (a, b, c) where
# INLINE_PA toPRepr #
toPRepr (a, b, c)
= (Wrap a, Wrap b, Wrap c)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b, Wrap c)
= (a, b, c)
{-# INLINE_PA toArrPRepr #-}
toArrPRepr (PTuple3 as bs cs)
= PTuple3 (PWrap as) (PWrap bs) (PWrap cs)
{-# INLINE_PA fromArrPRepr #-}
fromArrPRepr (PTuple3 (PWrap as) (PWrap bs) (PWrap cs))
= PTuple3 as bs cs
{-# INLINE_PA toArrPReprs #-}
toArrPReprs (PTuple3s as bs cs)
= PTuple3s (PWraps as) (PWraps bs) (PWraps cs)
{-# INLINE_PA fromArrPReprs #-}
fromArrPReprs (PTuple3s (PWraps as) (PWraps bs) (PWraps cs))
= PTuple3s as bs cs
-- Tuple4 --------------------------------------------------------------------
type instance PRepr (a, b, c, d)
= (Wrap a, Wrap b, Wrap c, Wrap d)
instance (PA a, PA b, PA c, PA d) => PA (a, b, c, d) where
# INLINE_PA toPRepr #
toPRepr (a, b, c, d)
= (Wrap a, Wrap b, Wrap c, Wrap d)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b, Wrap c, Wrap d)
= (a, b, c, d)
{-# INLINE_PA toArrPRepr #-}
toArrPRepr (PTuple4 as bs cs ds)
= PTuple4 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds)
{-# INLINE_PA fromArrPRepr #-}
fromArrPRepr (PTuple4 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds))
= PTuple4 as bs cs ds
{-# INLINE_PA toArrPReprs #-}
toArrPReprs (PTuple4s as bs cs ds)
= PTuple4s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds)
{-# INLINE_PA fromArrPReprs #-}
fromArrPReprs (PTuple4s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds))
= PTuple4s as bs cs ds
-- Tuple5 --------------------------------------------------------------------
type instance PRepr (a, b, c, d, e)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e)
instance (PA a, PA b, PA c, PA d, PA e) => PA (a, b, c, d, e) where
# INLINE_PA toPRepr #
toPRepr (a, b, c, d, e)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e)
= (a, b, c, d, e)
{-# INLINE_PA toArrPRepr #-}
toArrPRepr (PTuple5 as bs cs ds es)
= PTuple5 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es)
{-# INLINE_PA fromArrPRepr #-}
fromArrPRepr (PTuple5 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es))
= PTuple5 as bs cs ds es
{-# INLINE_PA toArrPReprs #-}
toArrPReprs (PTuple5s as bs cs ds es)
= PTuple5s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es)
{-# INLINE_PA fromArrPReprs #-}
fromArrPReprs (PTuple5s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es))
= PTuple5s as bs cs ds es
-- Tuple6 --------------------------------------------------------------------
type instance PRepr (a, b, c, d, e, f)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f)
instance (PA a, PA b, PA c, PA d, PA e, PA f) => PA (a, b, c, d, e, f) where
# INLINE_PA toPRepr #
toPRepr (a, b, c, d, e, f)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f)
= (a, b, c, d, e, f)
{-# INLINE_PA toArrPRepr #-}
toArrPRepr (PTuple6 as bs cs ds es fs)
= PTuple6 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es) (PWrap fs)
{-# INLINE_PA fromArrPRepr #-}
fromArrPRepr (PTuple6 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es) (PWrap fs))
= PTuple6 as bs cs ds es fs
{-# INLINE_PA toArrPReprs #-}
toArrPReprs (PTuple6s as bs cs ds es fs)
= PTuple6s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es) (PWraps fs)
{-# INLINE_PA fromArrPReprs #-}
fromArrPReprs (PTuple6s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es) (PWraps fs))
= PTuple6s as bs cs ds es fs
-- Tuple7 --------------------------------------------------------------------
type instance PRepr (a, b, c, d, e, f, g)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f, Wrap g)
instance (PA a, PA b, PA c, PA d, PA e, PA f, PA g) => PA (a, b, c, d, e, f, g) where
# INLINE_PA toPRepr #
toPRepr (a, b, c, d, e, f, g)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f, Wrap g)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f, Wrap g)
= (a, b, c, d, e, f, g)
{-# INLINE_PA toArrPRepr #-}
toArrPRepr (PTuple7 as bs cs ds es fs gs)
= PTuple7 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es) (PWrap fs) (PWrap gs)
{-# INLINE_PA fromArrPRepr #-}
fromArrPRepr (PTuple7 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es) (PWrap fs) (PWrap gs))
= PTuple7 as bs cs ds es fs gs
{-# INLINE_PA toArrPReprs #-}
toArrPReprs (PTuple7s as bs cs ds es fs gs)
= PTuple7s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es) (PWraps fs) (PWraps gs)
{-# INLINE_PA fromArrPReprs #-}
fromArrPReprs (PTuple7s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es) (PWraps fs) (PWraps gs))
= PTuple7s as bs cs ds es fs gs
| null | https://raw.githubusercontent.com/ghc/packages-dph/64eca669f13f4d216af9024474a3fc73ce101793/dph-lifted-vseg/Data/Array/Parallel/PArray/PRepr/Tuple.hs | haskell | # OPTIONS_HADDOCK hide #
| PRepr instance for tuples
and PD wrappers for other functions defined in D.A.P.PArray.PData.Tuple.
Tuple2 --------------------------------------------------------------------
# INLINE_PA toArrPRepr #
# INLINE_PA fromArrPRepr #
# INLINE_PA toArrPReprs #
# INLINE_PA fromArrPReprs #
| Lifted zip on PData arrays.
TODO: can we use the flat version here?
Tuple3 --------------------------------------------------------------------
# INLINE_PA toArrPRepr #
# INLINE_PA fromArrPRepr #
# INLINE_PA toArrPReprs #
# INLINE_PA fromArrPReprs #
Tuple4 --------------------------------------------------------------------
# INLINE_PA toArrPRepr #
# INLINE_PA fromArrPRepr #
# INLINE_PA toArrPReprs #
# INLINE_PA fromArrPReprs #
Tuple5 --------------------------------------------------------------------
# INLINE_PA toArrPRepr #
# INLINE_PA fromArrPRepr #
# INLINE_PA toArrPReprs #
# INLINE_PA fromArrPReprs #
Tuple6 --------------------------------------------------------------------
# INLINE_PA toArrPRepr #
# INLINE_PA fromArrPRepr #
# INLINE_PA toArrPReprs #
# INLINE_PA fromArrPReprs #
Tuple7 --------------------------------------------------------------------
# INLINE_PA toArrPRepr #
# INLINE_PA fromArrPRepr #
# INLINE_PA toArrPReprs #
# INLINE_PA fromArrPReprs # | # LANGUAGE CPP #
#include "fusion-phases.h"
module Data.Array.Parallel.PArray.PRepr.Tuple
( PRepr
, ziplPA)
where
import Data.Array.Parallel.PArray.Types
import Data.Array.Parallel.PArray.PRepr.Base
import Data.Array.Parallel.PArray.PData.Base
import Data.Array.Parallel.PArray.PData.Tuple2
import Data.Array.Parallel.PArray.PData.Tuple3
import Data.Array.Parallel.PArray.PData.Tuple4
import Data.Array.Parallel.PArray.PData.Tuple5
import Data.Array.Parallel.PArray.PData.Tuple6
import Data.Array.Parallel.PArray.PData.Tuple7
import Data.Array.Parallel.PArray.PData.Nested
import Data.Array.Parallel.PArray.PData.Wrap
type instance PRepr (a, b)
= (Wrap a, Wrap b)
instance (PA a, PA b) => PA (a, b) where
# INLINE_PA toPRepr #
toPRepr (a, b)
= (Wrap a, Wrap b)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b)
= (a, b)
toArrPRepr (PTuple2 as bs)
= PTuple2 (PWrap as) (PWrap bs)
fromArrPRepr (PTuple2 (PWrap as) (PWrap bs))
= PTuple2 as bs
toArrPReprs (PTuple2s as bs)
= PTuple2s (PWraps as) (PWraps bs)
fromArrPReprs (PTuple2s (PWraps as) (PWraps bs))
= PTuple2s as bs
ziplPA :: (PA a, PA b)
=> PData (PArray a) -> PData (PArray b) -> PData (PArray (a, b))
ziplPA xs ys
= let
PNested vsegd (PTuple2s xs' ys') segd _
= ziplPR (toNestedArrPRepr xs) (toNestedArrPRepr ys)
pdatas = PTuple2s (fromArrPReprs xs') (fromArrPReprs ys')
flat = fromArrPRepr $ extractvs_delay (toArrPReprs pdatas) vsegd
in PNested vsegd pdatas segd flat
type instance PRepr (a, b, c)
= (Wrap a, Wrap b, Wrap c)
instance (PA a, PA b, PA c) => PA (a, b, c) where
# INLINE_PA toPRepr #
toPRepr (a, b, c)
= (Wrap a, Wrap b, Wrap c)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b, Wrap c)
= (a, b, c)
toArrPRepr (PTuple3 as bs cs)
= PTuple3 (PWrap as) (PWrap bs) (PWrap cs)
fromArrPRepr (PTuple3 (PWrap as) (PWrap bs) (PWrap cs))
= PTuple3 as bs cs
toArrPReprs (PTuple3s as bs cs)
= PTuple3s (PWraps as) (PWraps bs) (PWraps cs)
fromArrPReprs (PTuple3s (PWraps as) (PWraps bs) (PWraps cs))
= PTuple3s as bs cs
type instance PRepr (a, b, c, d)
= (Wrap a, Wrap b, Wrap c, Wrap d)
instance (PA a, PA b, PA c, PA d) => PA (a, b, c, d) where
# INLINE_PA toPRepr #
toPRepr (a, b, c, d)
= (Wrap a, Wrap b, Wrap c, Wrap d)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b, Wrap c, Wrap d)
= (a, b, c, d)
toArrPRepr (PTuple4 as bs cs ds)
= PTuple4 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds)
fromArrPRepr (PTuple4 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds))
= PTuple4 as bs cs ds
toArrPReprs (PTuple4s as bs cs ds)
= PTuple4s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds)
fromArrPReprs (PTuple4s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds))
= PTuple4s as bs cs ds
type instance PRepr (a, b, c, d, e)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e)
instance (PA a, PA b, PA c, PA d, PA e) => PA (a, b, c, d, e) where
# INLINE_PA toPRepr #
toPRepr (a, b, c, d, e)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e)
= (a, b, c, d, e)
toArrPRepr (PTuple5 as bs cs ds es)
= PTuple5 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es)
fromArrPRepr (PTuple5 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es))
= PTuple5 as bs cs ds es
toArrPReprs (PTuple5s as bs cs ds es)
= PTuple5s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es)
fromArrPReprs (PTuple5s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es))
= PTuple5s as bs cs ds es
type instance PRepr (a, b, c, d, e, f)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f)
instance (PA a, PA b, PA c, PA d, PA e, PA f) => PA (a, b, c, d, e, f) where
# INLINE_PA toPRepr #
toPRepr (a, b, c, d, e, f)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f)
= (a, b, c, d, e, f)
toArrPRepr (PTuple6 as bs cs ds es fs)
= PTuple6 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es) (PWrap fs)
fromArrPRepr (PTuple6 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es) (PWrap fs))
= PTuple6 as bs cs ds es fs
toArrPReprs (PTuple6s as bs cs ds es fs)
= PTuple6s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es) (PWraps fs)
fromArrPReprs (PTuple6s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es) (PWraps fs))
= PTuple6s as bs cs ds es fs
type instance PRepr (a, b, c, d, e, f, g)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f, Wrap g)
instance (PA a, PA b, PA c, PA d, PA e, PA f, PA g) => PA (a, b, c, d, e, f, g) where
# INLINE_PA toPRepr #
toPRepr (a, b, c, d, e, f, g)
= (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f, Wrap g)
# INLINE_PA fromPRepr #
fromPRepr (Wrap a, Wrap b, Wrap c, Wrap d, Wrap e, Wrap f, Wrap g)
= (a, b, c, d, e, f, g)
toArrPRepr (PTuple7 as bs cs ds es fs gs)
= PTuple7 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es) (PWrap fs) (PWrap gs)
fromArrPRepr (PTuple7 (PWrap as) (PWrap bs) (PWrap cs) (PWrap ds) (PWrap es) (PWrap fs) (PWrap gs))
= PTuple7 as bs cs ds es fs gs
toArrPReprs (PTuple7s as bs cs ds es fs gs)
= PTuple7s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es) (PWraps fs) (PWraps gs)
fromArrPReprs (PTuple7s (PWraps as) (PWraps bs) (PWraps cs) (PWraps ds) (PWraps es) (PWraps fs) (PWraps gs))
= PTuple7s as bs cs ds es fs gs
|
7842fc1abc2419119008580e3df808d9e0666fd32cb44387bf3dd6dd2a6a950a | mejgun/haskell-tdlib | EncryptedCredentials.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Data.EncryptedCredentials where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
-- |
-- NOTE(review): the @data EncryptedCredentials =@ keyword line was eaten
-- into the haddock comment by extraction; restored so the declaration
-- parses.  Field docs recovered from the original haddock text.
data EncryptedCredentials = -- | Contains encrypted Telegram Passport data credentials
  EncryptedCredentials
  { -- | Secret for data decryption, encrypted with the service's public key
    secret :: Maybe String,
    -- | The decrypted data hash
    hash :: Maybe String,
    -- | The encrypted credentials
    _data :: Maybe String
  }
  deriving (Eq)
-- Hand-rolled Show: renders the constructor name followed by each field
-- via the U.cc / U.p helpers (project-local pretty-printing utilities).
instance Show EncryptedCredentials where
  show
    EncryptedCredentials
      { secret = secret_,
        hash = hash_,
        _data = _data_
      } =
      "EncryptedCredentials"
        ++ U.cc
          [ U.p "secret" secret_,
            U.p "hash" hash_,
            U.p "_data" _data_
          ]
-- Parses the TDLib JSON form: dispatches on the "@type" tag and accepts
-- only "encryptedCredentials"; all fields are optional (A..:?).
instance T.FromJSON EncryptedCredentials where
  parseJSON v@(T.Object obj) = do
    t <- obj A..: "@type" :: T.Parser String
    case t of
      "encryptedCredentials" -> parseEncryptedCredentials v
      -- Unknown tag: fail the parse (mempty is the failing Parser).
      _ -> mempty
    where
      parseEncryptedCredentials :: A.Value -> T.Parser EncryptedCredentials
      parseEncryptedCredentials = A.withObject "EncryptedCredentials" $ \o -> do
        secret_ <- o A..:? "secret"
        hash_ <- o A..:? "hash"
        _data_ <- o A..:? "data"
        return $ EncryptedCredentials {secret = secret_, hash = hash_, _data = _data_}
  -- Non-object input: fail the parse.
  parseJSON _ = mempty
-- Serialises back to the TDLib JSON form; note the record field @_data@
-- maps to the JSON key "data", and the "@type" tag is always emitted.
instance T.ToJSON EncryptedCredentials where
  toJSON EncryptedCredentials {secret = s, hash = h, _data = d} =
    A.object
      [ "@type" A..= T.String "encryptedCredentials",
        "secret" A..= s,
        "hash" A..= h,
        "data" A..= d
      ]
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/beb6635177d7626b70fd909b1d89f2156a992cd2/src/TD/Data/EncryptedCredentials.hs | haskell | # LANGUAGE OverloadedStrings #
|
|
|
|
| |
module TD.Data.EncryptedCredentials where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
| Contains encrypted Telegram Passport data credentials @data The encrypted credentials @hash The decrypted data hash @secret Secret for data decryption , encrypted with the service 's public key
EncryptedCredentials
secret :: Maybe String,
hash :: Maybe String,
_data :: Maybe String
}
deriving (Eq)
instance Show EncryptedCredentials where
show
EncryptedCredentials
{ secret = secret_,
hash = hash_,
_data = _data_
} =
"EncryptedCredentials"
++ U.cc
[ U.p "secret" secret_,
U.p "hash" hash_,
U.p "_data" _data_
]
instance T.FromJSON EncryptedCredentials where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"encryptedCredentials" -> parseEncryptedCredentials v
_ -> mempty
where
parseEncryptedCredentials :: A.Value -> T.Parser EncryptedCredentials
parseEncryptedCredentials = A.withObject "EncryptedCredentials" $ \o -> do
secret_ <- o A..:? "secret"
hash_ <- o A..:? "hash"
_data_ <- o A..:? "data"
return $ EncryptedCredentials {secret = secret_, hash = hash_, _data = _data_}
parseJSON _ = mempty
instance T.ToJSON EncryptedCredentials where
toJSON
EncryptedCredentials
{ secret = secret_,
hash = hash_,
_data = _data_
} =
A.object
[ "@type" A..= T.String "encryptedCredentials",
"secret" A..= secret_,
"hash" A..= hash_,
"data" A..= _data_
]
|
64b8e0714e760592d290e583b3a0d71bc71308b826ba292a7f2f1616179097f3 | Helium4Haskell/helium | MaximalMunch2.hs | ---- This is comment
-- This too
-- NOTE(review): this file is a parser test fixture — `----` above must lex
-- as a comment and `-->` below as an operator (maximal munch).  The token
-- spellings are the thing under test; do not "clean them up".
(-->) x y = x + y -- but this is an operator
main = 3
| null | https://raw.githubusercontent.com/Helium4Haskell/helium/5928bff479e6f151b4ceb6c69bbc15d71e29eb47/test/simple/parser/MaximalMunch2.hs | haskell | -- This is comment
This too
>) x y = x + y -- but this is an operator |
main = 3
|
a219a1749dabb83d7eab75f8a842703bdf45059ec9a67fb90a4f7a4c7e303809 | ddmcdonald/sparser | 1st-loader.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:SPARSER -*-
copyright ( c ) 2014 - 2022 -- all rights reserved
;;;
File : " 1st - loader "
Module : " : kinds : "
version : June 2022
;; Initiated 3/28/14 to allow the upper-model file to be decomposed
;; into more managable pieces.
4/14/14 moved in files from ISR .
Added time 2/1/16 .
(in-package :sparser)

;; Load the "kinds" grammar modules.
;; NOTE(review): load order is presumably significant (general-rules last,
;; after the categories it refers to) -- confirm before reordering.
(gload "kinds;processes")
(gload "kinds;things")
(gload "kinds;predicate")
(gload "kinds;attribution")
(gload "kinds;space")
(gload "kinds;time")
(gload "kinds;eci-categories")
(gload "kinds;movement")
(gload "kinds;general-rules")
| null | https://raw.githubusercontent.com/ddmcdonald/sparser/8505847d9b5607b76876abede0f969ccc5dfddd1/Sparser/code/s/grammar/model/core/kinds/1st-loader.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:SPARSER -*-
Initiated 3/28/14 to allow the upper-model file to be decomposed
into more managable pieces. | copyright ( c ) 2014 - 2022 -- all rights reserved
File : " 1st - loader "
Module : " : kinds : "
version : June 2022
4/14/14 moved in files from ISR .
Added time 2/1/16 .
(in-package :sparser)
(gload "kinds;processes")
(gload "kinds;things")
(gload "kinds;predicate")
(gload "kinds;attribution")
(gload "kinds;space")
(gload "kinds;time")
(gload "kinds;eci-categories")
(gload "kinds;movement")
(gload "kinds;general-rules")
|
6bf2d1267189d4ae295884035e50f95282b62d7158962245e8048340176b4a33 | khibino/haskell-relational-record | Birthday.hs | # LANGUAGE TemplateHaskell , MultiParamTypeClasses , FlexibleInstances #
-- | Table-mapping module for the @EXAMPLE.birthday@ table.
module Birthday where
import DataSource (definePgConTable)
-- Template Haskell splice: 'definePgConTable' generates the record type
-- and relational bindings for schema "EXAMPLE", table "birthday",
-- deriving the listed classes for the generated type.
$(definePgConTable "EXAMPLE" "birthday"
  [''Eq, ''Show])
| null | https://raw.githubusercontent.com/khibino/haskell-relational-record/759b3d7cea207e64d2bd1cf195125182f73d2a52/doc/slide/Haskell-Day-201609/Birthday.hs | haskell | # LANGUAGE TemplateHaskell , MultiParamTypeClasses , FlexibleInstances #
module Birthday where
import DataSource (definePgConTable)
$(definePgConTable "EXAMPLE" "birthday"
[''Eq, ''Show])
| |
4e734c46ca2ab3b35e1ac66cbaaaf0447b8dae83a48c0ea89ec043008a6b9667 | HJianBo/sserl | sserl_tabv.erl | %%-----------------------------------------------------------------------
%% @doc for review saved data
%%
%% @end
%%-----------------------------------------------------------------------
-module(sserl_tabv).
-include("sserl.hrl").
-export([traffic_counter/0, traffic_counter/1,
traffic/0, traffic/1]).
%% @doc Dump every traffic_counter4day record from Mnesia: prints the
%% record count followed by the records themselves (dirty read).
traffic_counter() ->
    TCs = mnesia:dirty_match_object(traffic_counter4day, #traffic_counter4day{_='_'}),
    io:format("traffic_counter4day, ~p records~n", [length(TCs)]),
    io:format("~p~n", [TCs]).

%% @doc Same as traffic_counter/0 but restricted to one Port.
traffic_counter(Port) ->
    TCs = mnesia:dirty_match_object(traffic_counter4day, #traffic_counter4day{port=Port, _='_'}),
    io:format("traffic_counter4day for ~p, ~p records~n", [Port, length(TCs)]),
    io:format("~p~n", [TCs]).

%% @doc Print only the number of traffic records.
%% NOTE(review): unlike traffic_counter/0 this does not print the records
%% themselves -- presumably because the traffic table is large; confirm
%% whether that asymmetry is intentional.
traffic() ->
    Traffics = mnesia:dirty_match_object(traffic, #traffic{_='_'}),
    io:format("traffic all data, ~p records~n", [length(Traffics)]).

%% @doc Print the number of traffic records for one Port.
traffic(Port) ->
    Traffics = mnesia:dirty_match_object(traffic, #traffic{port=Port, _='_'}),
    io:format("traffic for ~p, ~p records~n", [Port, length(Traffics)]).
| null | https://raw.githubusercontent.com/HJianBo/sserl/9e42930caf8bfe90ae9ed2edac2e0672f7e5e55e/apps/sserl/src/sserl_tabv.erl | erlang | -----------------------------------------------------------------------
@doc for review saved data
@end
----------------------------------------------------------------------- | -module(sserl_tabv).
-include("sserl.hrl").
-export([traffic_counter/0, traffic_counter/1,
traffic/0, traffic/1]).
traffic_counter() ->
TCs = mnesia:dirty_match_object(traffic_counter4day, #traffic_counter4day{_='_'}),
io:format("traffic_counter4day, ~p records~n", [length(TCs)]),
io:format("~p~n", [TCs]).
traffic_counter(Port) ->
TCs = mnesia:dirty_match_object(traffic_counter4day, #traffic_counter4day{port=Port, _='_'}),
io:format("traffic_counter4day for ~p, ~p records~n", [Port, length(TCs)]),
io:format("~p~n", [TCs]).
traffic() ->
Traffics = mnesia:dirty_match_object(traffic, #traffic{_='_'}),
io:format("traffic all data, ~p records~n", [length(Traffics)]).
traffic(Port) ->
Traffics = mnesia:dirty_match_object(traffic, #traffic{port=Port, _='_'}),
io:format("traffic for ~p, ~p records~n", [Port, length(Traffics)]).
|
883dc1ca4b34b95bf7e9842198ee899961085cf42081c9586f236655e4877479 | neilprosser/mr-maestro | validators_test.clj | (ns maestro.validators-test
(:require [bouncer.core :as b]
[maestro
[environments :as environments]
[validators :refer :all]]
[midje.sweet :refer :all])
(:import clojure.lang.ExceptionInfo))
(fact "that allowed-instances throws up if we provide an unknown virtualisation type"
(allowed-instances "whatever")
=> (throws ExceptionInfo "Unknown virtualisation type 'whatever'."))
(fact "that asking for paravirtual allowed instances gives the right set"
(allowed-instances "para")
=> para-instance-types)
(fact "that asking for HVM allowed instances gives the right set"
(allowed-instances "hvm")
=> hvm-instance-types)
(fact "that `zero-or-more?` is happy with nil input"
(zero-or-more? nil)
=> truthy)
(fact "that `zero-or-more?` is happy with zero"
(zero-or-more? 0)
=> truthy)
(fact "that `zero-or-more?` is happy with zero string"
(zero-or-more? "0")
=> truthy)
(fact "that `zero-or-more?` is happy with postitive numeric string input"
(zero-or-more? "23")
=> truthy)
(fact "that `zero-or-more?` is happy with positive numeric input"
(zero-or-more? 23)
=> truthy)
(fact "that `zero-or-more?` is unhappy with negative numeric string input"
(zero-or-more? "-1")
=> falsey)
(fact "that `zero-or-more?` is unhappy with negative numeric input"
(zero-or-more? -1)
=> falsey)
(fact "that `positive?` is happy with nil input"
(positive? nil)
=> truthy)
(fact "that `positive?` is unhappy with zero"
(positive? 0)
=> falsey)
(fact "that `positive?` is unhappy with zero string"
(positive? "0")
=> falsey)
(fact "that `positive?` is happy with postitive numeric string input"
(positive? "23")
=> truthy)
(fact "that `positive?` is happy with positive numeric input"
(positive? 23)
=> truthy)
(fact "that `positive?` is unhappy with negative numeric string input"
(positive? "-1")
=> falsey)
(fact "that `positive?` is unhappy with negative numeric input"
(positive? -1)
=> falsey)
(fact "that `positive?` is unhappy with letters"
(positive? "a")
=> falsey)
(fact "that `valid-ami?` is happy with nil"
(valid-ami? nil)
=> truthy)
(fact "that `valid-ami?` is happy with a short AMI"
(valid-ami? "ami-12345678")
=> truthy)
(fact "that `valid-ami?` is happy with a long AMI"
(valid-ami? "ami-12345678901234567890")
=> truthy)
(fact "that `valid-ami?` is unhappy with garbage"
(valid-ami? "blaghblagasdasdkjsald")
=> falsey)
(fact "that `valid-application?` is happy with nil"
(valid-application? nil)
=> truthy)
(fact "that `valid-application? is happy with all letters"
(valid-application? "application")
=> truthy)
(fact "that `valid-application?` is unhappy about something with characters which aren't letters"
(valid-application? "hello-world")
=> falsey)
(fact "that `valid-date?` is happy with nil input"
(valid-date? nil)
=> truthy)
(fact "that `valid-date?` is happy with valid date"
(valid-date? "2013-01-01")
=> truthy)
(fact "that `valid-date?` is unhappy with invalid date"
(valid-date? "not a date")
=> falsey)
(fact "that `valid-boolean?` is happy with true"
(valid-boolean? "true")
=> truthy)
(fact "that `valid-boolean?` is happy with false"
(valid-boolean? "false")
=> truthy)
(fact "that `valid-boolean?` is happy with nil"
(valid-boolean? nil)
=> truthy)
(fact "that `valid-boolean?` is unhappy with garbage"
(valid-boolean? "tfaafse")
=> falsey)
(fact "that `valid-hash?` is happy with nil"
(valid-hash? nil)
=> truthy)
(fact "that `valid-hash?` is unhappy with something invalid"
(valid-hash? "not a hash")
=> falsey)
(fact "that `valid-hash?` is happy with a valid hash"
(valid-hash? "db0adbdcf61e4237e1d116834e185aa06cb682ff")
=> truthy)
(fact "that `valid-uuid?` is happy with nil"
(valid-uuid? nil)
=> truthy)
(fact "that `valid-uuid?` is unhappy with something invalid"
(valid-uuid? "hello")
=> falsey)
(fact "that `valid-uuid?` is happy with a valid UUID"
(valid-uuid? "a7ceb675-dd1c-4e71-bde9-0bc44df714bf")
=> truthy)
(fact "that `valid-healthcheck-type?` is happy with `EC2`"
(valid-healthcheck-type? "EC2")
=> truthy)
(fact "that `valid-healthcheck-type?` is happy with `ELB`"
(valid-healthcheck-type? "ELB")
=> truthy)
(fact "that `valid-healthcheck-type?` is happy with `nil`"
(valid-healthcheck-type? nil)
=> truthy)
(fact "that `valid-healthcheck-type?` is unhappy with garbage"
(valid-healthcheck-type? "dsjksdjk")
=> falsey)
(fact "that `valid-instance-type?` is happy with a known instance type"
(valid-instance-type? "m1.small")
=> truthy)
(fact "that `valid-instance-type?` is unhappy with `nil`"
(valid-instance-type? nil)
=> falsey)
(fact "that `valid-instance-type?` is unhappy with garbage"
(valid-instance-type? "adkjlasd")
=> falsey)
(fact "that `valid-availability-zone?` is happy with a known availability zone"
(valid-availability-zone? "a")
=> truthy)
(fact "that `valid-availability-zone?` is happy with `nil`"
(valid-availability-zone? nil)
=> truthy)
(fact "that `valid-availability-zone?` is unhappy with garbage"
(valid-availability-zone? "dasdasds")
=> falsey)
(fact "that `valid-availability-zones?` is happy with `nil`"
(valid-availability-zones? nil)
=> truthy)
(fact "that `valid-availbility-zones?` is happy with a single valid zone"
(valid-availability-zones? ["a"])
=> truthy)
(fact "that `valid-availability-zones?` is happy with multiple valid zones"
(valid-availability-zones? ["a" "b"])
=> truthy)
(fact "that `valid-availability-zones?` is unhappy with a single invalid zone"
(valid-availability-zones? "daskd")
=> falsey)
(fact "that `valid-availability-zones?` is unhappy with an invalid zone alongside a valid one"
(valid-availability-zones? ["a" "fkajdks"])
=> falsey)
(fact "that `valid-region?` is happy with `nil`"
(valid-region? nil)
=> truthy)
(fact "that `valid-region?` is unhappy about an unknown region"
(valid-region? "unknown")
=> falsey)
(fact "that `valid-region?` is happy with a known region"
(valid-region? "eu-west-1")
=> truthy)
(fact "that `valid-subnet-purpose?` is happy with `nil`"
(valid-subnet-purpose? nil)
=> truthy)
(fact "that `valid-subnet-purpose?` is happy with a valid subnet purpose"
(valid-subnet-purpose? "mgmt")
=> truthy)
(fact "that `valid-subnet-purpose?` is unhappy with garbage"
(valid-subnet-purpose? "akdjskdasjdkas")
=> falsey)
(fact "that `valid-termination-policy?` is happy with `nil`"
(valid-termination-policy? nil)
=> truthy)
(fact "that `valid-termination-policy?` is happy with a valid termination policy"
(valid-termination-policy? "ClosestToNextInstanceHour")
=> truthy)
(fact "that `valid-termination-policy?` is unhappy with garbage"
(valid-termination-policy? "askjlkasjdks")
=> falsey)
(fact "that `known-environment?` is happy with nil"
(known-environment? nil)
=> truthy)
(fact "that `known-environment?` is unhappy with something unknown"
(known-environment? "unknown")
=> falsey
(provided
(environments/environments) => {}))
(fact "that `known-environment?` is happy with something known"
(known-environment? "known")
=> truthy
(provided
(environments/environments) => {:known {}}))
(fact "that `known-status?` is happy with nil"
(known-status? nil)
=> truthy)
(fact "that `known-status?` is unhappy with something unknown"
(known-status? "unknown")
=> falsey)
(fact "that `known-status?` is happy with something known"
(known-status? "running")
=> truthy)
(fact "that `valid-scheduled-actions?` is happy with a single good scheduled action"
(valid-scheduled-actions? {:action-1 {:cron "hello"
:desired-capacity 1
:max 1
:min 1}})
=> truthy)
(fact "that `valid-scheduled-actions?` is happy with multiple good scheduled actions"
(valid-scheduled-actions? {:action-1 {:cron "1 2 3 4 5"
:desired-capacity 1
:max 1
:min 1}
:action-2 {:cron "world"
:desired-capacity 1
:max 1
:min 1}})
=> truthy)
(fact "that `valid-scheduled-actions?` is happy with multiple good scheduled actions"
(valid-scheduled-actions? {:action-1 {:cron "30 4 * * *"
:desired-capacity 1
:max 1
:min 1}
:action-2 {:cron "world"
:desired-capacity 1
:max 1
:min 1}})
=> truthy)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action is missing cron"
(valid-scheduled-actions? {:action-1 {:desired-capacity 1
:max 1
:min 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action is missing desired-capacity"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:max 1
:min 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action has a non-numeric desired-capacity"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:desired-capacity "a"
:max 1
:min 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action is missing max"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:desired-capacity 1
:min 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action has a non-numeric max"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:desired-capacity 1
:max "dasd"
:min 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action is missing min"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:desired-capacity 1
:max 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action has a non-numeric min"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:desired-capacity 1
:max 1
:min "asdasda"}})
=> falsey)
(def deployment-request
{:ami "ami-cdea1270"
:application "application"
:environment "environment"
:message "message"
:user "user"})
(fact "that our valid deployment request passes validation"
(first (b/validate deployment-request deployment-request-validators)) => falsey
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment request spots that we need an AMI"
(first (b/validate (dissoc deployment-request :ami) deployment-request-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment request spots that we need an application"
(first (b/validate (dissoc deployment-request :application) deployment-request-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment request spots that we need an environment"
(first (b/validate (dissoc deployment-request :environment) deployment-request-validators)) => truthy)
(fact "that validating a deployment request spots that we're using an unknown environment"
(first (b/validate deployment-request deployment-request-validators)) => truthy
(provided
(environments/environments) => {}))
(fact "that validating a deployment request spots that we need a message"
(first (b/validate (dissoc deployment-request :message) deployment-request-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment request spots that we need a user"
(first (b/validate (dissoc deployment-request :user) deployment-request-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(def deployment
{:application "application"
:environment "environment"
:id "something"
:message "Some message"
:new-state {:image-details {:id "ami-012aefc3"}}
:region "region"
:user "user"})
(fact "that our valid deployment passes validation"
(first (b/validate deployment deployment-validators)) => falsey
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need an application"
(first (b/validate (dissoc deployment :application) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need an environment"
(first (b/validate (dissoc deployment :environment) deployment-validators)) => truthy)
(fact "that validating a deployment spots that we're using an unknown environment"
(first (b/validate deployment deployment-validators)) => truthy
(provided
(environments/environments) => {}))
(fact "that validating a deployment spots that we need an ID"
(first (b/validate (dissoc deployment :id) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need a message"
(first (b/validate (dissoc deployment :message) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need an image ID"
(first (b/validate (assoc-in deployment [:new-state :image-details :id] nil) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need a valid image ID"
(first (b/validate (assoc-in deployment [:new-state :image-details :id] "ami-whatever") deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need a region"
(first (b/validate (dissoc deployment :region) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need a user"
(first (b/validate (dissoc deployment :user) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(def resize
{:desired-capacity 2
:max 3
:min 1})
(fact "that validating a resize request spots that we need a desired-capacity"
(first (b/validate (dissoc resize :desired-capacity) resize-request-validators)) => truthy)
(fact "that validating a resize request spots when desired-capacity isn't a number"
(first (b/validate (assoc resize :desired-capacity "a") resize-request-validators)) => truthy)
(fact "that validating a resize request spots when desired-capacity is negative"
(first (b/validate (assoc resize :desired-capacity -1) resize-request-validators)) => truthy)
(fact "that validating a resize request lets desired-capacity have a value of 0"
(first (b/validate (assoc resize :desired-capacity 0) resize-request-validators)) => falsey)
(fact "that validating a resize request spots that we need a max"
(first (b/validate (dissoc resize :max) resize-request-validators)) => truthy)
(fact "that validating a resize request spots when max isn't a number"
(first (b/validate (assoc resize :max "a") resize-request-validators)) => truthy)
(fact "that validating a resize request spots when max is negative"
(first (b/validate (assoc resize :max -1) resize-request-validators)) => truthy)
(fact "that validating a resize request lets max have a value of 0"
(first (b/validate (assoc resize :max 0) resize-request-validators)) => falsey)
(fact "that validating a resize request spots that we need a min"
(first (b/validate (dissoc resize :min) resize-request-validators)) => truthy)
(fact "that validating a resize request spots when min isn't a number"
(first (b/validate (assoc resize :min "a") resize-request-validators)) => truthy)
(fact "that validating a resize request spots when min is negative"
(first (b/validate (assoc resize :min -1) resize-request-validators)) => truthy)
(fact "that validating a resize request lets min have a value of 0"
(first (b/validate (assoc resize :min 0) resize-request-validators)) => falsey)
(fact "that validating a valid resize request is all good"
(first (b/validate resize resize-request-validators)) => falsey)
| null | https://raw.githubusercontent.com/neilprosser/mr-maestro/469790fd712262016729c1d83d4b4e11869237a2/test/maestro/validators_test.clj | clojure | (ns maestro.validators-test
(:require [bouncer.core :as b]
[maestro
[environments :as environments]
[validators :refer :all]]
[midje.sweet :refer :all])
(:import clojure.lang.ExceptionInfo))
(fact "that allowed-instances throws up if we provide an unknown virtualisation type"
(allowed-instances "whatever")
=> (throws ExceptionInfo "Unknown virtualisation type 'whatever'."))
(fact "that asking for paravirtual allowed instances gives the right set"
(allowed-instances "para")
=> para-instance-types)
(fact "that asking for HVM allowed instances gives the right set"
(allowed-instances "hvm")
=> hvm-instance-types)
(fact "that `zero-or-more?` is happy with nil input"
(zero-or-more? nil)
=> truthy)
(fact "that `zero-or-more?` is happy with zero"
(zero-or-more? 0)
=> truthy)
(fact "that `zero-or-more?` is happy with zero string"
(zero-or-more? "0")
=> truthy)
(fact "that `zero-or-more?` is happy with postitive numeric string input"
(zero-or-more? "23")
=> truthy)
(fact "that `zero-or-more?` is happy with positive numeric input"
(zero-or-more? 23)
=> truthy)
(fact "that `zero-or-more?` is unhappy with negative numeric string input"
(zero-or-more? "-1")
=> falsey)
(fact "that `zero-or-more?` is unhappy with negative numeric input"
(zero-or-more? -1)
=> falsey)
(fact "that `positive?` is happy with nil input"
(positive? nil)
=> truthy)
(fact "that `positive?` is unhappy with zero"
(positive? 0)
=> falsey)
(fact "that `positive?` is unhappy with zero string"
(positive? "0")
=> falsey)
(fact "that `positive?` is happy with postitive numeric string input"
(positive? "23")
=> truthy)
(fact "that `positive?` is happy with positive numeric input"
(positive? 23)
=> truthy)
(fact "that `positive?` is unhappy with negative numeric string input"
(positive? "-1")
=> falsey)
(fact "that `positive?` is unhappy with negative numeric input"
(positive? -1)
=> falsey)
(fact "that `positive?` is unhappy with letters"
(positive? "a")
=> falsey)
(fact "that `valid-ami?` is happy with nil"
(valid-ami? nil)
=> truthy)
(fact "that `valid-ami?` is happy with a short AMI"
(valid-ami? "ami-12345678")
=> truthy)
(fact "that `valid-ami?` is happy with a long AMI"
(valid-ami? "ami-12345678901234567890")
=> truthy)
(fact "that `valid-ami?` is unhappy with garbage"
(valid-ami? "blaghblagasdasdkjsald")
=> falsey)
(fact "that `valid-application?` is happy with nil"
(valid-application? nil)
=> truthy)
(fact "that `valid-application? is happy with all letters"
(valid-application? "application")
=> truthy)
(fact "that `valid-application?` is unhappy about something with characters which aren't letters"
(valid-application? "hello-world")
=> falsey)
(fact "that `valid-date?` is happy with nil input"
(valid-date? nil)
=> truthy)
(fact "that `valid-date?` is happy with valid date"
(valid-date? "2013-01-01")
=> truthy)
(fact "that `valid-date?` is unhappy with invalid date"
(valid-date? "not a date")
=> falsey)
(fact "that `valid-boolean?` is happy with true"
(valid-boolean? "true")
=> truthy)
(fact "that `valid-boolean?` is happy with false"
(valid-boolean? "false")
=> truthy)
(fact "that `valid-boolean?` is happy with nil"
(valid-boolean? nil)
=> truthy)
(fact "that `valid-boolean?` is unhappy with garbage"
(valid-boolean? "tfaafse")
=> falsey)
(fact "that `valid-hash?` is happy with nil"
(valid-hash? nil)
=> truthy)
(fact "that `valid-hash?` is unhappy with something invalid"
(valid-hash? "not a hash")
=> falsey)
(fact "that `valid-hash?` is happy with a valid hash"
(valid-hash? "db0adbdcf61e4237e1d116834e185aa06cb682ff")
=> truthy)
(fact "that `valid-uuid?` is happy with nil"
(valid-uuid? nil)
=> truthy)
(fact "that `valid-uuid?` is unhappy with something invalid"
(valid-uuid? "hello")
=> falsey)
(fact "that `valid-uuid?` is happy with a valid UUID"
(valid-uuid? "a7ceb675-dd1c-4e71-bde9-0bc44df714bf")
=> truthy)
(fact "that `valid-healthcheck-type?` is happy with `EC2`"
(valid-healthcheck-type? "EC2")
=> truthy)
(fact "that `valid-healthcheck-type?` is happy with `ELB`"
(valid-healthcheck-type? "ELB")
=> truthy)
(fact "that `valid-healthcheck-type?` is happy with `nil`"
(valid-healthcheck-type? nil)
=> truthy)
(fact "that `valid-healthcheck-type?` is unhappy with garbage"
(valid-healthcheck-type? "dsjksdjk")
=> falsey)
(fact "that `valid-instance-type?` is happy with a known instance type"
(valid-instance-type? "m1.small")
=> truthy)
(fact "that `valid-instance-type?` is unhappy with `nil`"
(valid-instance-type? nil)
=> falsey)
(fact "that `valid-instance-type?` is unhappy with garbage"
(valid-instance-type? "adkjlasd")
=> falsey)
(fact "that `valid-availability-zone?` is happy with a known availability zone"
(valid-availability-zone? "a")
=> truthy)
(fact "that `valid-availability-zone?` is happy with `nil`"
(valid-availability-zone? nil)
=> truthy)
(fact "that `valid-availability-zone?` is unhappy with garbage"
(valid-availability-zone? "dasdasds")
=> falsey)
(fact "that `valid-availability-zones?` is happy with `nil`"
(valid-availability-zones? nil)
=> truthy)
(fact "that `valid-availbility-zones?` is happy with a single valid zone"
(valid-availability-zones? ["a"])
=> truthy)
(fact "that `valid-availability-zones?` is happy with multiple valid zones"
(valid-availability-zones? ["a" "b"])
=> truthy)
(fact "that `valid-availability-zones?` is unhappy with a single invalid zone"
(valid-availability-zones? "daskd")
=> falsey)
(fact "that `valid-availability-zones?` is unhappy with an invalid zone alongside a valid one"
(valid-availability-zones? ["a" "fkajdks"])
=> falsey)
(fact "that `valid-region?` is happy with `nil`"
(valid-region? nil)
=> truthy)
(fact "that `valid-region?` is unhappy about an unknown region"
(valid-region? "unknown")
=> falsey)
(fact "that `valid-region?` is happy with a known region"
(valid-region? "eu-west-1")
=> truthy)
(fact "that `valid-subnet-purpose?` is happy with `nil`"
(valid-subnet-purpose? nil)
=> truthy)
(fact "that `valid-subnet-purpose?` is happy with a valid subnet purpose"
(valid-subnet-purpose? "mgmt")
=> truthy)
(fact "that `valid-subnet-purpose?` is unhappy with garbage"
(valid-subnet-purpose? "akdjskdasjdkas")
=> falsey)
(fact "that `valid-termination-policy?` is happy with `nil`"
(valid-termination-policy? nil)
=> truthy)
(fact "that `valid-termination-policy?` is happy with a valid termination policy"
(valid-termination-policy? "ClosestToNextInstanceHour")
=> truthy)
(fact "that `valid-termination-policy?` is unhappy with garbage"
(valid-termination-policy? "askjlkasjdks")
=> falsey)
(fact "that `known-environment?` is happy with nil"
(known-environment? nil)
=> truthy)
(fact "that `known-environment?` is unhappy with something unknown"
(known-environment? "unknown")
=> falsey
(provided
(environments/environments) => {}))
(fact "that `known-environment?` is happy with something known"
(known-environment? "known")
=> truthy
(provided
(environments/environments) => {:known {}}))
(fact "that `known-status?` is happy with nil"
(known-status? nil)
=> truthy)
(fact "that `known-status?` is unhappy with something unknown"
(known-status? "unknown")
=> falsey)
(fact "that `known-status?` is happy with something known"
(known-status? "running")
=> truthy)
(fact "that `valid-scheduled-actions?` is happy with a single good scheduled action"
(valid-scheduled-actions? {:action-1 {:cron "hello"
:desired-capacity 1
:max 1
:min 1}})
=> truthy)
(fact "that `valid-scheduled-actions?` is happy with multiple good scheduled actions"
(valid-scheduled-actions? {:action-1 {:cron "1 2 3 4 5"
:desired-capacity 1
:max 1
:min 1}
:action-2 {:cron "world"
:desired-capacity 1
:max 1
:min 1}})
=> truthy)
(fact "that `valid-scheduled-actions?` is happy with multiple good scheduled actions"
(valid-scheduled-actions? {:action-1 {:cron "30 4 * * *"
:desired-capacity 1
:max 1
:min 1}
:action-2 {:cron "world"
:desired-capacity 1
:max 1
:min 1}})
=> truthy)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action is missing cron"
(valid-scheduled-actions? {:action-1 {:desired-capacity 1
:max 1
:min 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action is missing desired-capacity"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:max 1
:min 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action has a non-numeric desired-capacity"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:desired-capacity "a"
:max 1
:min 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action is missing max"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:desired-capacity 1
:min 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action has a non-numeric max"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:desired-capacity 1
:max "dasd"
:min 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action is missing min"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:desired-capacity 1
:max 1}})
=> falsey)
(fact "that `valid-scheduled-actions?` is unhappy when a scheduled action has a non-numeric min"
(valid-scheduled-actions? {:action-1 {:cron "* * * * *"
:desired-capacity 1
:max 1
:min "asdasda"}})
=> falsey)
(def deployment-request
{:ami "ami-cdea1270"
:application "application"
:environment "environment"
:message "message"
:user "user"})
(fact "that our valid deployment request passes validation"
(first (b/validate deployment-request deployment-request-validators)) => falsey
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment request spots that we need an AMI"
(first (b/validate (dissoc deployment-request :ami) deployment-request-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment request spots that we need an application"
(first (b/validate (dissoc deployment-request :application) deployment-request-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment request spots that we need an environment"
(first (b/validate (dissoc deployment-request :environment) deployment-request-validators)) => truthy)
(fact "that validating a deployment request spots that we're using an unknown environment"
(first (b/validate deployment-request deployment-request-validators)) => truthy
(provided
(environments/environments) => {}))
(fact "that validating a deployment request spots that we need a message"
(first (b/validate (dissoc deployment-request :message) deployment-request-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment request spots that we need a user"
(first (b/validate (dissoc deployment-request :user) deployment-request-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(def deployment
{:application "application"
:environment "environment"
:id "something"
:message "Some message"
:new-state {:image-details {:id "ami-012aefc3"}}
:region "region"
:user "user"})
(fact "that our valid deployment passes validation"
(first (b/validate deployment deployment-validators)) => falsey
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need an application"
(first (b/validate (dissoc deployment :application) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need an environment"
(first (b/validate (dissoc deployment :environment) deployment-validators)) => truthy)
(fact "that validating a deployment spots that we're using an unknown environment"
(first (b/validate deployment deployment-validators)) => truthy
(provided
(environments/environments) => {}))
(fact "that validating a deployment spots that we need an ID"
(first (b/validate (dissoc deployment :id) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need a message"
(first (b/validate (dissoc deployment :message) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need an image ID"
(first (b/validate (assoc-in deployment [:new-state :image-details :id] nil) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need a valid image ID"
(first (b/validate (assoc-in deployment [:new-state :image-details :id] "ami-whatever") deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need a region"
(first (b/validate (dissoc deployment :region) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(fact "that validating a deployment spots that we need a user"
(first (b/validate (dissoc deployment :user) deployment-validators)) => truthy
(provided
(environments/environments) => {:environment {}}))
(def resize
{:desired-capacity 2
:max 3
:min 1})
(fact "that validating a resize request spots that we need a desired-capacity"
(first (b/validate (dissoc resize :desired-capacity) resize-request-validators)) => truthy)
(fact "that validating a resize request spots when desired-capacity isn't a number"
(first (b/validate (assoc resize :desired-capacity "a") resize-request-validators)) => truthy)
(fact "that validating a resize request spots when desired-capacity is negative"
(first (b/validate (assoc resize :desired-capacity -1) resize-request-validators)) => truthy)
(fact "that validating a resize request lets desired-capacity have a value of 0"
(first (b/validate (assoc resize :desired-capacity 0) resize-request-validators)) => falsey)
(fact "that validating a resize request spots that we need a max"
(first (b/validate (dissoc resize :max) resize-request-validators)) => truthy)
(fact "that validating a resize request spots when max isn't a number"
(first (b/validate (assoc resize :max "a") resize-request-validators)) => truthy)
(fact "that validating a resize request spots when max is negative"
(first (b/validate (assoc resize :max -1) resize-request-validators)) => truthy)
(fact "that validating a resize request lets max have a value of 0"
(first (b/validate (assoc resize :max 0) resize-request-validators)) => falsey)
(fact "that validating a resize request spots that we need a min"
(first (b/validate (dissoc resize :min) resize-request-validators)) => truthy)
(fact "that validating a resize request spots when min isn't a number"
(first (b/validate (assoc resize :min "a") resize-request-validators)) => truthy)
(fact "that validating a resize request spots when min is negative"
(first (b/validate (assoc resize :min -1) resize-request-validators)) => truthy)
(fact "that validating a resize request lets min have a value of 0"
(first (b/validate (assoc resize :min 0) resize-request-validators)) => falsey)
(fact "that validating a valid resize request is all good"
(first (b/validate resize resize-request-validators)) => falsey)
| |
aed5c9595173aeda81ac620808ffa1d437b776078f3d53523786ba7b8db2eb75 | babashka/babashka | test_utils.clj | (ns babashka.test-utils
(:require
[babashka.fs :as fs]
[babashka.impl.classpath :as cp]
[babashka.impl.common :as common]
[babashka.main :as main]
[babashka.process :as p]
[clojure.string :as str]
[clojure.test :as test :refer [*report-counters*]]
[clojure.tools.reader.reader-types :as r]
[sci.core :as sci]
[sci.impl.vars :as vars]))
(set! *warn-on-reflection* true)
(def windows? main/windows?)
(def normalize
(if windows?
(fn [s] (if (string? s)
(str/replace s "\r\n" "\n")
s))
identity))
(def escape-file-paths
(if windows?
(fn [s] (if (string? s)
(str/replace s "\\" "\\\\")
s))
identity))
(def ^:dynamic *bb-edn-path* nil)
(defmethod clojure.test/report :begin-test-var [m]
(println "===" (-> m :var meta :name))
(println))
(defmethod clojure.test/report :end-test-var [_m]
(when-let [rc *report-counters*]
(let [{:keys [:fail :error]} @rc]
(when (and (= "true" (System/getenv "BABASHKA_FAIL_FAST"))
(or (pos? fail) (pos? error)))
(println "=== Failing fast")
(System/exit 1)))))
(defn bb-jvm [input-or-opts & args]
(alter-var-root #'cp/the-url-loader (constantly (delay (cp/new-loader []))))
(reset! main/env {})
(vreset! common/bb-edn nil)
(System/clearProperty "babashka.config")
(let [args (cond-> args *bb-edn-path*
(->> (list* "--config" *bb-edn-path* "--deps-root" ".")))
os (java.io.StringWriter.)
es (if-let [err (:err input-or-opts)]
err (java.io.StringWriter.))
in (if (string? input-or-opts)
input-or-opts (:in input-or-opts))
is (when in
(r/indexing-push-back-reader
(r/push-back-reader (java.io.StringReader. in))))
bindings-map (cond-> {sci/out os
sci/err es}
is (assoc sci/in is))]
(try
(when (string? input-or-opts) (vars/bindRoot sci/in is))
(vars/bindRoot sci/out os)
(vars/bindRoot sci/err es)
(sci/with-bindings bindings-map
(let [res (binding [*out* os
*err* es]
(if (string? input-or-opts)
(with-in-str input-or-opts (apply main/main args))
(apply main/main args)))]
(if (zero? res)
(do
(let [err (str es)]
(when-not (str/blank? err)
(println err))) ;; flush stderr
(normalize (str os)))
(do
(println (str os))
(throw (ex-info (str es)
{:stdout (str os)
:stderr (str es)}))))))
(finally
(when (string? input-or-opts) (vars/bindRoot sci/in *in*))
(vars/bindRoot sci/out *out*)
(vars/bindRoot sci/err *err*)))))
(defn bb-native [input & args]
(let [args (cond-> args *bb-edn-path*
(->> (list* "--config" *bb-edn-path* "--deps-root" ".")))
res (p/process (into ["./bb"] args)
{:in input
:out :string
:err :string})
res (deref res)
exit (:exit res)
error? (pos? exit)]
(if error? (throw (ex-info (or (:err res) "") {}))
(normalize (:out res)))))
(def bb
(case (System/getenv "BABASHKA_TEST_ENV")
"jvm" #'bb-jvm
"native" #'bb-native
#'bb-jvm))
(def jvm? (= bb #'bb-jvm))
(def native? (not jvm?))
(if jvm?
(println "==== Testing JVM version")
(println "==== Testing native version"))
(defn socket-loop [^java.net.ServerSocket server]
(with-open [listener server]
(loop []
(with-open [socket (.accept listener)]
(let [input-stream (.getInputStream socket)]
(print (slurp input-stream))
(flush)))
(recur))))
(defn start-server! [port]
(let [server (java.net.ServerSocket. port)]
(future (socket-loop server))
server))
(defn stop-server! [^java.net.ServerSocket server]
(.close server))
(defmacro with-config [cfg & body]
`(let [temp-dir# (fs/create-temp-dir)
bb-edn-file# (fs/file temp-dir# "bb.edn")]
(binding [*print-meta* true]
(spit bb-edn-file# ~cfg))
(binding [*bb-edn-path* (str bb-edn-file#)]
~@body)))
| null | https://raw.githubusercontent.com/babashka/babashka/72dbf53df715c8561f1315d0d55fcd4d7e992bdd/test/babashka/test_utils.clj | clojure | flush stderr | (ns babashka.test-utils
(:require
[babashka.fs :as fs]
[babashka.impl.classpath :as cp]
[babashka.impl.common :as common]
[babashka.main :as main]
[babashka.process :as p]
[clojure.string :as str]
[clojure.test :as test :refer [*report-counters*]]
[clojure.tools.reader.reader-types :as r]
[sci.core :as sci]
[sci.impl.vars :as vars]))
(set! *warn-on-reflection* true)
(def windows? main/windows?)
(def normalize
(if windows?
(fn [s] (if (string? s)
(str/replace s "\r\n" "\n")
s))
identity))
(def escape-file-paths
(if windows?
(fn [s] (if (string? s)
(str/replace s "\\" "\\\\")
s))
identity))
(def ^:dynamic *bb-edn-path* nil)
(defmethod clojure.test/report :begin-test-var [m]
(println "===" (-> m :var meta :name))
(println))
(defmethod clojure.test/report :end-test-var [_m]
(when-let [rc *report-counters*]
(let [{:keys [:fail :error]} @rc]
(when (and (= "true" (System/getenv "BABASHKA_FAIL_FAST"))
(or (pos? fail) (pos? error)))
(println "=== Failing fast")
(System/exit 1)))))
(defn bb-jvm [input-or-opts & args]
(alter-var-root #'cp/the-url-loader (constantly (delay (cp/new-loader []))))
(reset! main/env {})
(vreset! common/bb-edn nil)
(System/clearProperty "babashka.config")
(let [args (cond-> args *bb-edn-path*
(->> (list* "--config" *bb-edn-path* "--deps-root" ".")))
os (java.io.StringWriter.)
es (if-let [err (:err input-or-opts)]
err (java.io.StringWriter.))
in (if (string? input-or-opts)
input-or-opts (:in input-or-opts))
is (when in
(r/indexing-push-back-reader
(r/push-back-reader (java.io.StringReader. in))))
bindings-map (cond-> {sci/out os
sci/err es}
is (assoc sci/in is))]
(try
(when (string? input-or-opts) (vars/bindRoot sci/in is))
(vars/bindRoot sci/out os)
(vars/bindRoot sci/err es)
(sci/with-bindings bindings-map
(let [res (binding [*out* os
*err* es]
(if (string? input-or-opts)
(with-in-str input-or-opts (apply main/main args))
(apply main/main args)))]
(if (zero? res)
(do
(let [err (str es)]
(when-not (str/blank? err)
(normalize (str os)))
(do
(println (str os))
(throw (ex-info (str es)
{:stdout (str os)
:stderr (str es)}))))))
(finally
(when (string? input-or-opts) (vars/bindRoot sci/in *in*))
(vars/bindRoot sci/out *out*)
(vars/bindRoot sci/err *err*)))))
(defn bb-native [input & args]
(let [args (cond-> args *bb-edn-path*
(->> (list* "--config" *bb-edn-path* "--deps-root" ".")))
res (p/process (into ["./bb"] args)
{:in input
:out :string
:err :string})
res (deref res)
exit (:exit res)
error? (pos? exit)]
(if error? (throw (ex-info (or (:err res) "") {}))
(normalize (:out res)))))
(def bb
(case (System/getenv "BABASHKA_TEST_ENV")
"jvm" #'bb-jvm
"native" #'bb-native
#'bb-jvm))
(def jvm? (= bb #'bb-jvm))
(def native? (not jvm?))
(if jvm?
(println "==== Testing JVM version")
(println "==== Testing native version"))
(defn socket-loop [^java.net.ServerSocket server]
(with-open [listener server]
(loop []
(with-open [socket (.accept listener)]
(let [input-stream (.getInputStream socket)]
(print (slurp input-stream))
(flush)))
(recur))))
(defn start-server! [port]
(let [server (java.net.ServerSocket. port)]
(future (socket-loop server))
server))
(defn stop-server! [^java.net.ServerSocket server]
(.close server))
(defmacro with-config [cfg & body]
`(let [temp-dir# (fs/create-temp-dir)
bb-edn-file# (fs/file temp-dir# "bb.edn")]
(binding [*print-meta* true]
(spit bb-edn-file# ~cfg))
(binding [*bb-edn-path* (str bb-edn-file#)]
~@body)))
|
9644b6dd7497166df0201c43121a5327ae87fd58d3761ce41c8455bc784c378e | alexdong/ansi-common-lisp-exercises | henley.lisp | (defparameter *words* (make-hash-table :size 10000))
(defconstant maxword 100)
(defun read-text (pathname)
(with-open-file (s pathname :direction :input)
(let ((buffer (make-string maxword))
(pos 0))
(do ((c (read-char s nil :eof)
(read-char s nil :eof)))
((eql c :eof))
(if (or (alpha-char-p c) (char= c #\'))
(progn
(setf (aref buffer pos) c)
(incf pos))
(progn
(unless (zerop pos)
(see (intern (string-downcase
(subseq buffer 0 pos))))
(setf pos 0))
(let ((p (punc c)))
(if p (see p)))))))))
(defun punc (c)
(case c
(#\. '|.|)
(#\, '|,|)
' )
(#\! '|!|)
(#\? '|?|)))
(let ((prev '|.|))
(defun see (symb)
(let ((pair (assoc symb (gethash prev *words*))))
(if (null pair)
(push (cons symb 1) (gethash prev *words*))
(incf (cdr pair))))
(setf prev symb)))
(defun generate-text (n &optional (prev '|.|))
(if (zerop n)
(terpri)
(let ((next (random-next prev)))
(format t "~A " next)
(generate-text (1- n) next ))))
(defun random-next (prev)
(let* ((choices (gethash prev *words*))
(i (random (reduce #'+ choices
:key #'cdr))))
(dolist (pair choices)
(if (minusp (decf i (cdr pair)))
(return (car pair)))))) | null | https://raw.githubusercontent.com/alexdong/ansi-common-lisp-exercises/03b05a23027d62e8724177b2c56dd58065c1f18f/henley.lisp | lisp | (defparameter *words* (make-hash-table :size 10000))
(defconstant maxword 100)
(defun read-text (pathname)
(with-open-file (s pathname :direction :input)
(let ((buffer (make-string maxword))
(pos 0))
(do ((c (read-char s nil :eof)
(read-char s nil :eof)))
((eql c :eof))
(if (or (alpha-char-p c) (char= c #\'))
(progn
(setf (aref buffer pos) c)
(incf pos))
(progn
(unless (zerop pos)
(see (intern (string-downcase
(subseq buffer 0 pos))))
(setf pos 0))
(let ((p (punc c)))
(if p (see p)))))))))
(defun punc (c)
(case c
(#\. '|.|)
(#\, '|,|)
' )
(#\! '|!|)
(#\? '|?|)))
(let ((prev '|.|))
(defun see (symb)
(let ((pair (assoc symb (gethash prev *words*))))
(if (null pair)
(push (cons symb 1) (gethash prev *words*))
(incf (cdr pair))))
(setf prev symb)))
(defun generate-text (n &optional (prev '|.|))
(if (zerop n)
(terpri)
(let ((next (random-next prev)))
(format t "~A " next)
(generate-text (1- n) next ))))
(defun random-next (prev)
(let* ((choices (gethash prev *words*))
(i (random (reduce #'+ choices
:key #'cdr))))
(dolist (pair choices)
(if (minusp (decf i (cdr pair)))
(return (car pair)))))) | |
cba3ebbfbe9e28bb92eba9914ea16c4e215d4dca62889ddae089b0031221f7e0 | roelvandijk/numerals | TestData.hs | |
[ @ISO639 - 1@ ] -
[ @ISO639 - 2@ ] -
[ @ISO639 - 3@ ] one
[ @Native name@ ] : ka
[ @English name@ ] Oneida
[@ISO639-1@] -
[@ISO639-2@] -
[@ISO639-3@] one
[@Native name@] Onʌyotaʔa:ka
[@English name@] Oneida
-}
module Text.Numeral.Language.ONE.TestData (cardinals) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "numerals" Text.Numeral.Grammar ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
--------------------------------------------------------------------------------
-- Test data
--------------------------------------------------------------------------------
{-
Sources:
-to-count-in-oneida/en/one/
-}
cardinals :: (Num i) => TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (1, "úskah")
, (2, "téken")
, (3, "áhsʌ")
, (4, "kayé")
, (5, "wisk")
, (6, "yá·yahk")
, (7, "tsya·ták")
, (8, "tékluˀ")
, (9, "wá·tlu")
, (10, "oye·lí")
, (11, "úskah yawʌ·lé")
, (12, "tékni yawʌ·lé")
, (13, "áhsʌ yawʌ·lé")
, (14, "kayé yawʌ·lé")
, (15, "wisk yawʌ·lé")
, (16, "yá·yahk yawʌ·lé")
, (17, "tsya·ták yawʌ·lé")
, (18, "tékluˀ yawʌ·lé")
, (19, "wá·tlu yawʌ·lé")
, (20, "tewáshʌ")
, (21, "tewáshʌ úskah")
, (22, "tewáshʌ téken")
, (23, "tewáshʌ áhsʌ")
, (24, "tewáshʌ kayé")
, (25, "tewáshʌ wisk")
, (26, "tewáshʌ yá·yahk")
, (27, "tewáshʌ tsya·ták")
, (28, "tewáshʌ tékluˀ")
, (29, "tewáshʌ wá·tlu")
, (30, "áhsʌ niwáshʌ")
, (31, "áhsʌ niwáshʌ úskah")
, (32, "áhsʌ niwáshʌ téken")
, (33, "áhsʌ niwáshʌ áhsʌ")
, (34, "áhsʌ niwáshʌ kayé")
, (35, "áhsʌ niwáshʌ wisk")
, (36, "áhsʌ niwáshʌ yá·yahk")
, (37, "áhsʌ niwáshʌ tsya·ták")
, (38, "áhsʌ niwáshʌ tékluˀ")
, (39, "áhsʌ niwáshʌ wá·tlu")
, (40, "kayé niwáshʌ")
, (41, "kayé niwáshʌ úskah")
, (42, "kayé niwáshʌ téken")
, (43, "kayé niwáshʌ áhsʌ")
, (44, "kayé niwáshʌ kayé")
, (45, "kayé niwáshʌ wisk")
, (46, "kayé niwáshʌ yá·yahk")
, (47, "kayé niwáshʌ tsya·ták")
, (48, "kayé niwáshʌ tékluˀ")
, (49, "kayé niwáshʌ wá·tlu")
, (50, "wisk niwáshʌ")
, (51, "wisk niwáshʌ úskah")
, (52, "wisk niwáshʌ téken")
, (53, "wisk niwáshʌ áhsʌ")
, (54, "wisk niwáshʌ kayé")
, (55, "wisk niwáshʌ wisk")
, (56, "wisk niwáshʌ yá·yahk")
, (57, "wisk niwáshʌ tsya·ták")
, (58, "wisk niwáshʌ tékluˀ")
, (59, "wisk niwáshʌ wá·tlu")
, (60, "yá·yahk niwáshʌ")
, (61, "yá·yahk niwáshʌ úskah")
, (62, "yá·yahk niwáshʌ téken")
, (63, "yá·yahk niwáshʌ áhsʌ")
, (64, "yá·yahk niwáshʌ kayé")
, (65, "yá·yahk niwáshʌ wisk")
, (66, "yá·yahk niwáshʌ yá·yahk")
, (67, "yá·yahk niwáshʌ tsya·ták")
, (68, "yá·yahk niwáshʌ tékluˀ")
, (69, "yá·yahk niwáshʌ wá·tlu")
, (70, "tsya·ták niwáshʌ")
, (71, "tsya·ták niwáshʌ úskah")
, (72, "tsya·ták niwáshʌ téken")
, (73, "tsya·ták niwáshʌ áhsʌ")
, (74, "tsya·ták niwáshʌ kayé")
, (75, "tsya·ták niwáshʌ wisk")
, (76, "tsya·ták niwáshʌ yá·yahk")
, (77, "tsya·ták niwáshʌ tsya·ták")
, (78, "tsya·ták niwáshʌ tékluˀ")
, (79, "tsya·ták niwáshʌ wá·tlu")
, (80, "tékluˀ niwáshʌ")
, (81, "tékluˀ niwáshʌ úskah")
, (82, "tékluˀ niwáshʌ téken")
, (83, "tékluˀ niwáshʌ áhsʌ")
, (84, "tékluˀ niwáshʌ kayé")
, (85, "tékluˀ niwáshʌ wisk")
, (86, "tékluˀ niwáshʌ yá·yahk")
, (87, "tékluˀ niwáshʌ tsya·ták")
, (88, "tékluˀ niwáshʌ tékluˀ")
, (89, "tékluˀ niwáshʌ wá·tlu")
, (90, "wá·tlu niwáshʌ")
, (91, "wá·tlu niwáshʌ úskah")
, (92, "wá·tlu niwáshʌ téken")
, (93, "wá·tlu niwáshʌ áhsʌ")
, (94, "wá·tlu niwáshʌ kayé")
, (95, "wá·tlu niwáshʌ wisk")
, (96, "wá·tlu niwáshʌ yá·yahk")
, (97, "wá·tlu niwáshʌ tsya·ták")
, (98, "wá·tlu niwáshʌ tékluˀ")
, (99, "wá·tlu niwáshʌ wá·tlu")
, (100, "tewʌˀnyáwelu")
, (101, "tewʌˀnyáwelu ok úskah")
, (102, "tewʌˀnyáwelu ok téken")
, (103, "tewʌˀnyáwelu ok áhsʌ")
, (104, "tewʌˀnyáwelu ok kayé")
, (105, "tewʌˀnyáwelu ok wisk")
, (106, "tewʌˀnyáwelu ok yá·yahk")
, (107, "tewʌˀnyáwelu ok tsya·ták")
, (108, "tewʌˀnyáwelu ok tékluˀ")
, (109, "tewʌˀnyáwelu ok wá·tlu")
, (110, "tewʌˀnyáwelu ok oye·lí")
, (123, "tewʌˀnyáwelu ok tewáshʌ áhsʌ")
, (200, "tékni tewʌˀnyáwelu")
, (300, "áhsʌ tewʌˀnyáwelu")
, (321, "áhsʌ tewʌˀnyáwelu ok tewáshʌ úskah")
, (400, "kayé tewʌˀnyáwelu")
, (500, "wisk tewʌˀnyáwelu")
, (600, "yá·yahk tewʌˀnyáwelu")
, (700, "tsya·ták tewʌˀnyáwelu")
, (800, "tékluˀ tewʌˀnyáwelu")
, (900, "wá·tlu tewʌˀnyáwelu")
, (909, "wá·tlu tewʌˀnyáwelu ok wá·tlu")
, (990, "wá·tlu tewʌˀnyáwelu ok wá·tlu niwáshʌ")
, (999, "wá·tlu tewʌˀnyáwelu ok wá·tlu niwáshʌ wá·tlu")
, (1000, "skanutó·tslat")
]
)
]
| null | https://raw.githubusercontent.com/roelvandijk/numerals/b1e4121e0824ac0646a3230bd311818e159ec127/src-test/Text/Numeral/Language/ONE/TestData.hs | haskell | ------------------------------------------------------------------------------
Imports
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Test data
------------------------------------------------------------------------------
Sources:
-to-count-in-oneida/en/one/
| |
[ @ISO639 - 1@ ] -
[ @ISO639 - 2@ ] -
[ @ISO639 - 3@ ] one
[ @Native name@ ] : ka
[ @English name@ ] Oneida
[@ISO639-1@] -
[@ISO639-2@] -
[@ISO639-3@] one
[@Native name@] Onʌyotaʔa:ka
[@English name@] Oneida
-}
module Text.Numeral.Language.ONE.TestData (cardinals) where
import "numerals" Text.Numeral.Grammar ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
cardinals :: (Num i) => TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (1, "úskah")
, (2, "téken")
, (3, "áhsʌ")
, (4, "kayé")
, (5, "wisk")
, (6, "yá·yahk")
, (7, "tsya·ták")
, (8, "tékluˀ")
, (9, "wá·tlu")
, (10, "oye·lí")
, (11, "úskah yawʌ·lé")
, (12, "tékni yawʌ·lé")
, (13, "áhsʌ yawʌ·lé")
, (14, "kayé yawʌ·lé")
, (15, "wisk yawʌ·lé")
, (16, "yá·yahk yawʌ·lé")
, (17, "tsya·ták yawʌ·lé")
, (18, "tékluˀ yawʌ·lé")
, (19, "wá·tlu yawʌ·lé")
, (20, "tewáshʌ")
, (21, "tewáshʌ úskah")
, (22, "tewáshʌ téken")
, (23, "tewáshʌ áhsʌ")
, (24, "tewáshʌ kayé")
, (25, "tewáshʌ wisk")
, (26, "tewáshʌ yá·yahk")
, (27, "tewáshʌ tsya·ták")
, (28, "tewáshʌ tékluˀ")
, (29, "tewáshʌ wá·tlu")
, (30, "áhsʌ niwáshʌ")
, (31, "áhsʌ niwáshʌ úskah")
, (32, "áhsʌ niwáshʌ téken")
, (33, "áhsʌ niwáshʌ áhsʌ")
, (34, "áhsʌ niwáshʌ kayé")
, (35, "áhsʌ niwáshʌ wisk")
, (36, "áhsʌ niwáshʌ yá·yahk")
, (37, "áhsʌ niwáshʌ tsya·ták")
, (38, "áhsʌ niwáshʌ tékluˀ")
, (39, "áhsʌ niwáshʌ wá·tlu")
, (40, "kayé niwáshʌ")
, (41, "kayé niwáshʌ úskah")
, (42, "kayé niwáshʌ téken")
, (43, "kayé niwáshʌ áhsʌ")
, (44, "kayé niwáshʌ kayé")
, (45, "kayé niwáshʌ wisk")
, (46, "kayé niwáshʌ yá·yahk")
, (47, "kayé niwáshʌ tsya·ták")
, (48, "kayé niwáshʌ tékluˀ")
, (49, "kayé niwáshʌ wá·tlu")
, (50, "wisk niwáshʌ")
, (51, "wisk niwáshʌ úskah")
, (52, "wisk niwáshʌ téken")
, (53, "wisk niwáshʌ áhsʌ")
, (54, "wisk niwáshʌ kayé")
, (55, "wisk niwáshʌ wisk")
, (56, "wisk niwáshʌ yá·yahk")
, (57, "wisk niwáshʌ tsya·ták")
, (58, "wisk niwáshʌ tékluˀ")
, (59, "wisk niwáshʌ wá·tlu")
, (60, "yá·yahk niwáshʌ")
, (61, "yá·yahk niwáshʌ úskah")
, (62, "yá·yahk niwáshʌ téken")
, (63, "yá·yahk niwáshʌ áhsʌ")
, (64, "yá·yahk niwáshʌ kayé")
, (65, "yá·yahk niwáshʌ wisk")
, (66, "yá·yahk niwáshʌ yá·yahk")
, (67, "yá·yahk niwáshʌ tsya·ták")
, (68, "yá·yahk niwáshʌ tékluˀ")
, (69, "yá·yahk niwáshʌ wá·tlu")
, (70, "tsya·ták niwáshʌ")
, (71, "tsya·ták niwáshʌ úskah")
, (72, "tsya·ták niwáshʌ téken")
, (73, "tsya·ták niwáshʌ áhsʌ")
, (74, "tsya·ták niwáshʌ kayé")
, (75, "tsya·ták niwáshʌ wisk")
, (76, "tsya·ták niwáshʌ yá·yahk")
, (77, "tsya·ták niwáshʌ tsya·ták")
, (78, "tsya·ták niwáshʌ tékluˀ")
, (79, "tsya·ták niwáshʌ wá·tlu")
, (80, "tékluˀ niwáshʌ")
, (81, "tékluˀ niwáshʌ úskah")
, (82, "tékluˀ niwáshʌ téken")
, (83, "tékluˀ niwáshʌ áhsʌ")
, (84, "tékluˀ niwáshʌ kayé")
, (85, "tékluˀ niwáshʌ wisk")
, (86, "tékluˀ niwáshʌ yá·yahk")
, (87, "tékluˀ niwáshʌ tsya·ták")
, (88, "tékluˀ niwáshʌ tékluˀ")
, (89, "tékluˀ niwáshʌ wá·tlu")
, (90, "wá·tlu niwáshʌ")
, (91, "wá·tlu niwáshʌ úskah")
, (92, "wá·tlu niwáshʌ téken")
, (93, "wá·tlu niwáshʌ áhsʌ")
, (94, "wá·tlu niwáshʌ kayé")
, (95, "wá·tlu niwáshʌ wisk")
, (96, "wá·tlu niwáshʌ yá·yahk")
, (97, "wá·tlu niwáshʌ tsya·ták")
, (98, "wá·tlu niwáshʌ tékluˀ")
, (99, "wá·tlu niwáshʌ wá·tlu")
, (100, "tewʌˀnyáwelu")
, (101, "tewʌˀnyáwelu ok úskah")
, (102, "tewʌˀnyáwelu ok téken")
, (103, "tewʌˀnyáwelu ok áhsʌ")
, (104, "tewʌˀnyáwelu ok kayé")
, (105, "tewʌˀnyáwelu ok wisk")
, (106, "tewʌˀnyáwelu ok yá·yahk")
, (107, "tewʌˀnyáwelu ok tsya·ták")
, (108, "tewʌˀnyáwelu ok tékluˀ")
, (109, "tewʌˀnyáwelu ok wá·tlu")
, (110, "tewʌˀnyáwelu ok oye·lí")
, (123, "tewʌˀnyáwelu ok tewáshʌ áhsʌ")
, (200, "tékni tewʌˀnyáwelu")
, (300, "áhsʌ tewʌˀnyáwelu")
, (321, "áhsʌ tewʌˀnyáwelu ok tewáshʌ úskah")
, (400, "kayé tewʌˀnyáwelu")
, (500, "wisk tewʌˀnyáwelu")
, (600, "yá·yahk tewʌˀnyáwelu")
, (700, "tsya·ták tewʌˀnyáwelu")
, (800, "tékluˀ tewʌˀnyáwelu")
, (900, "wá·tlu tewʌˀnyáwelu")
, (909, "wá·tlu tewʌˀnyáwelu ok wá·tlu")
, (990, "wá·tlu tewʌˀnyáwelu ok wá·tlu niwáshʌ")
, (999, "wá·tlu tewʌˀnyáwelu ok wá·tlu niwáshʌ wá·tlu")
, (1000, "skanutó·tslat")
]
)
]
|
6e6f148d3440eaf82996317839ad881af0d28b6d0ec6ca6518cfd623e03da840 | Twinside/Rasterific | Arbitrary.hs | # OPTIONS_GHC -fno - warn - orphans #
module Arbitrary( randomTests ) where
import Control.DeepSeq
import Test.QuickCheck
import Codec.Picture
import Graphics.Rasterific
import Graphics.Rasterific.Texture
instance Arbitrary a => Arbitrary (V2 a) where
arbitrary = V2 <$> arbitrary <*> arbitrary
instance Arbitrary PathCommand where
arbitrary = oneof
[ PathLineTo <$> arbitrary
, PathQuadraticBezierCurveTo <$> arbitrary <*> arbitrary
, PathCubicBezierCurveTo <$> arbitrary <*> arbitrary <*> arbitrary
]
instance Arbitrary Path where
arbitrary = Path <$> arbitrary <*> pure True <*> arbitrary
instance Arbitrary SamplerRepeat where
arbitrary = oneof $ map pure [toEnum 0 ..]
instance Arbitrary FillMethod where
arbitrary = oneof $ map pure [toEnum 0 ..]
instance Arbitrary Join where
arbitrary = oneof [pure JoinRound, JoinMiter <$> arbitrary]
instance Arbitrary Cap where
arbitrary = oneof [pure CapRound, CapStraight <$> arbitrary]
newtype StrokeTest = StrokeTest (Drawing PixelRGBA8 ())
instance Show StrokeTest where
show (StrokeTest sub) =
"StrokeTest " ++ dumpDrawing sub
instance Arbitrary StrokeTest where
arbitrary = StrokeTest <$>
(stroke <$> (getPositive <$> arbitrary)
<*> arbitrary
<*> arbitrary
<*> (pathToPrimitives <$> arbitrary))
newtype DashedStrokeTest = DashedStrokeTest (Drawing PixelRGBA8 ())
instance Show DashedStrokeTest where
show (DashedStrokeTest sub) =
"StrokeTest " ++ dumpDrawing sub
instance Arbitrary DashedStrokeTest where
arbitrary = DashedStrokeTest <$>
(dashedStroke <$> (fmap getPositive <$> arbitrary)
<*> (getPositive <$> arbitrary)
<*> arbitrary <*> arbitrary
<*> (pathToPrimitives <$> arbitrary))
backgroundColor :: PixelRGBA8
backgroundColor = PixelRGBA8 255 255 255 255
frontTexture :: Texture PixelRGBA8
frontTexture = uniformTexture $ PixelRGBA8 0 0x86 0xc1 255
fillTest :: Path -> Bool
fillTest path = deepseq img True
where img = renderDrawing 200 200 backgroundColor $
withTexture frontTexture $
fill $ pathToPrimitives path
strokeTest :: StrokeTest -> Bool
strokeTest (StrokeTest test) = deepseq img True
where img = renderDrawing 200 200 backgroundColor $
withTexture frontTexture test
dashedStrokeTest :: DashedStrokeTest -> Bool
dashedStrokeTest (DashedStrokeTest test) = deepseq img True
where img = renderDrawing 200 200 backgroundColor $
withTexture frontTexture test
randomTests :: IO ()
randomTests = do
quickCheck fillTest
quickCheck strokeTest
quickCheck dashedStrokeTest
| null | https://raw.githubusercontent.com/Twinside/Rasterific/709e2828d6378aa82f133a881665f2fa951e0204/exec-src/Arbitrary.hs | haskell | # OPTIONS_GHC -fno - warn - orphans #
module Arbitrary( randomTests ) where
import Control.DeepSeq
import Test.QuickCheck
import Codec.Picture
import Graphics.Rasterific
import Graphics.Rasterific.Texture
instance Arbitrary a => Arbitrary (V2 a) where
arbitrary = V2 <$> arbitrary <*> arbitrary
instance Arbitrary PathCommand where
arbitrary = oneof
[ PathLineTo <$> arbitrary
, PathQuadraticBezierCurveTo <$> arbitrary <*> arbitrary
, PathCubicBezierCurveTo <$> arbitrary <*> arbitrary <*> arbitrary
]
instance Arbitrary Path where
arbitrary = Path <$> arbitrary <*> pure True <*> arbitrary
instance Arbitrary SamplerRepeat where
arbitrary = oneof $ map pure [toEnum 0 ..]
instance Arbitrary FillMethod where
arbitrary = oneof $ map pure [toEnum 0 ..]
instance Arbitrary Join where
arbitrary = oneof [pure JoinRound, JoinMiter <$> arbitrary]
instance Arbitrary Cap where
arbitrary = oneof [pure CapRound, CapStraight <$> arbitrary]
newtype StrokeTest = StrokeTest (Drawing PixelRGBA8 ())
instance Show StrokeTest where
show (StrokeTest sub) =
"StrokeTest " ++ dumpDrawing sub
instance Arbitrary StrokeTest where
arbitrary = StrokeTest <$>
(stroke <$> (getPositive <$> arbitrary)
<*> arbitrary
<*> arbitrary
<*> (pathToPrimitives <$> arbitrary))
newtype DashedStrokeTest = DashedStrokeTest (Drawing PixelRGBA8 ())
instance Show DashedStrokeTest where
show (DashedStrokeTest sub) =
"StrokeTest " ++ dumpDrawing sub
instance Arbitrary DashedStrokeTest where
arbitrary = DashedStrokeTest <$>
(dashedStroke <$> (fmap getPositive <$> arbitrary)
<*> (getPositive <$> arbitrary)
<*> arbitrary <*> arbitrary
<*> (pathToPrimitives <$> arbitrary))
backgroundColor :: PixelRGBA8
backgroundColor = PixelRGBA8 255 255 255 255
frontTexture :: Texture PixelRGBA8
frontTexture = uniformTexture $ PixelRGBA8 0 0x86 0xc1 255
fillTest :: Path -> Bool
fillTest path = deepseq img True
where img = renderDrawing 200 200 backgroundColor $
withTexture frontTexture $
fill $ pathToPrimitives path
strokeTest :: StrokeTest -> Bool
strokeTest (StrokeTest test) = deepseq img True
where img = renderDrawing 200 200 backgroundColor $
withTexture frontTexture test
dashedStrokeTest :: DashedStrokeTest -> Bool
dashedStrokeTest (DashedStrokeTest test) = deepseq img True
where img = renderDrawing 200 200 backgroundColor $
withTexture frontTexture test
randomTests :: IO ()
randomTests = do
quickCheck fillTest
quickCheck strokeTest
quickCheck dashedStrokeTest
| |
2faeb7d54b6e3f096c82f14886422fe75c1df96b87efa3b71c63cfaa7cb71df4 | bhauman/advent-of-clojure | day01.clj | (ns advent-2017.day01
(:require [clojure.java.io :as io]))
(def data (mapv
(comp read-string str)
(slurp (io/resource "2017/day01"))))
(defn count-em [d]
(->> (partition 2 1 (conj (vec d) (first d)))
(filter (partial apply =))
(map first)
(apply +)))
(comment
(count-em [1 1 2 2])
(count-em [1 1 1 1])
(count-em [1 2 3 4])
(count-em [9 1 2 1 2 1 2 9]))
part 1
#_(count-em data)
(defn count-em2 [d]
(->> (map
vector
d
(drop (/ (clojure.core/count d) 2) (cycle d)))
(filter (partial apply =))
(map first)
(apply +)))
(comment
(count-em2 [1 2 1 2])
(count-em2 [1 2 2 1])
(count-em2 [1 2 3 4 2 5])
(count-em2 [1 2 3 1 2 3])
(count-em2 [1 2 1 3 1 4 1 5]))
part 2
#_(count-em2 data)
| null | https://raw.githubusercontent.com/bhauman/advent-of-clojure/856763baf45bf7bf452ffd304dc1b89f9bc879a6/src/advent-2017/day01.clj | clojure | (ns advent-2017.day01
(:require [clojure.java.io :as io]))
(def data (mapv
(comp read-string str)
(slurp (io/resource "2017/day01"))))
(defn count-em [d]
(->> (partition 2 1 (conj (vec d) (first d)))
(filter (partial apply =))
(map first)
(apply +)))
(comment
(count-em [1 1 2 2])
(count-em [1 1 1 1])
(count-em [1 2 3 4])
(count-em [9 1 2 1 2 1 2 9]))
part 1
#_(count-em data)
(defn count-em2 [d]
(->> (map
vector
d
(drop (/ (clojure.core/count d) 2) (cycle d)))
(filter (partial apply =))
(map first)
(apply +)))
(comment
(count-em2 [1 2 1 2])
(count-em2 [1 2 2 1])
(count-em2 [1 2 3 4 2 5])
(count-em2 [1 2 3 1 2 3])
(count-em2 [1 2 1 3 1 4 1 5]))
part 2
#_(count-em2 data)
| |
d03e61d9d35218367f7865b637f7b53a4597e4a3b6257657a15af291e4523537 | fpco/ide-backend | GHC.hs | module Distribution.Simple.Program.GHC (
GhcOptions(..),
GhcMode(..),
GhcOptimisation(..),
GhcDynLinkMode(..),
ghcInvocation,
renderGhcOptions,
runGHC,
) where
import Distribution.Simple.GHC.ImplInfo ( getImplInfo, GhcImplInfo(..) )
import Distribution.Package
import Distribution.PackageDescription hiding (Flag)
import Distribution.ModuleName
import Distribution.Simple.Compiler hiding (Flag)
import Distribution.Simple.Setup ( Flag(..), flagToMaybe, fromFlagOrDefault,
flagToList )
import Distribution.Simple.Program.Types
import Distribution.Simple.Program.Run
import Distribution.Text
import Distribution.Verbosity
import Distribution.Utils.NubList ( NubListR, fromNubListR )
import Language.Haskell.Extension ( Language(..), Extension(..) )
import qualified Data.Map as M
import Data.Monoid
import Data.List ( intercalate )
| A structured set of GHC options / flags
--
data GhcOptions = GhcOptions {
| The major mode for the ghc invocation .
ghcOptMode :: Flag GhcMode,
| Any extra options to pass directly to ghc . These go at the end and hence
-- override other stuff.
ghcOptExtra :: NubListR String,
| Extra default flags to pass directly to ghc . These go at the beginning
-- and so can be overridden by other stuff.
ghcOptExtraDefault :: NubListR String,
-----------------------
-- Inputs and outputs
-- | The main input files; could be .hs, .hi, .c, .o, depending on mode.
ghcOptInputFiles :: NubListR FilePath,
| The names of input modules , mainly for @--make@ mode .
ghcOptInputModules :: NubListR ModuleName,
| Location for output file ; the @ghc -o@ flag .
ghcOptOutputFile :: Flag FilePath,
-- | Location for dynamic output file in 'GhcStaticAndDynamic' mode;
-- the @ghc -dyno@ flag.
ghcOptOutputDynFile :: Flag FilePath,
| Start with an empty search path for source files ;
-- the @ghc -i@ flag (@-i@ on it's own with no path argument).
ghcOptSourcePathClear :: Flag Bool,
| Search path for source files ; the @ghc -i@ flag .
ghcOptSourcePath :: NubListR FilePath,
-------------
-- Packages
| The package key the modules will belong to ; the @ghc -this - package - key@
-- flag.
ghcOptPackageKey :: Flag PackageKey,
| GHC package databases to use , the @ghc -package - conf@ flag .
ghcOptPackageDBs :: PackageDBStack,
| The GHC packages to use . For compatability with old and new ghc , this
-- requires both the short and long form of the package id;
-- the @ghc -package@ or @ghc -package-id@ flags.
ghcOptPackages ::
NubListR (InstalledPackageId, PackageId, ModuleRenaming),
-- | Start with a clean package set; the @ghc -hide-all-packages@ flag
ghcOptHideAllPackages :: Flag Bool,
| Do n't automatically link in Haskell98 etc ; the @ghc
-- -no-auto-link-packages@ flag.
ghcOptNoAutoLinkPackages :: Flag Bool,
-- | What packages are implementing the signatures
ghcOptSigOf :: [(ModuleName, (PackageKey, ModuleName))],
-----------------
-- Linker stuff
-- | Names of libraries to link in; the @ghc -l@ flag.
ghcOptLinkLibs :: NubListR FilePath,
-- | Search path for libraries to link in; the @ghc -L@ flag.
ghcOptLinkLibPath :: NubListR FilePath,
-- | Options to pass through to the linker; the @ghc -optl@ flag.
ghcOptLinkOptions :: NubListR String,
| OSX only : frameworks to link in ; the @ghc -framework@ flag .
ghcOptLinkFrameworks :: NubListR String,
-- | Don't do the link step, useful in make mode; the @ghc -no-link@ flag.
ghcOptNoLink :: Flag Bool,
-- | Don't link in the normal RTS @main@ entry point; the @ghc -no-hs-main@
-- flag.
ghcOptLinkNoHsMain :: Flag Bool,
--------------------
C and CPP stuff
-- | Options to pass through to the C compiler; the @ghc -optc@ flag.
ghcOptCcOptions :: NubListR String,
| Options to pass through to CPP ; the @ghc -optP@ flag .
ghcOptCppOptions :: NubListR String,
| Search path for CPP includes like header files ; the @ghc -I@ flag .
ghcOptCppIncludePath :: NubListR FilePath,
| Extra header files to include at CPP stage ; the @ghc -optP - include@ flag .
ghcOptCppIncludes :: NubListR FilePath,
| Extra header files to include for old - style FFI ; the @ghc -#include@ flag .
ghcOptFfiIncludes :: NubListR FilePath,
----------------------------
-- Language and extensions
| The base language ; the @ghc -XHaskell98@ or @-XHaskell2010@ flag .
ghcOptLanguage :: Flag Language,
-- | The language extensions; the @ghc -X@ flag.
ghcOptExtensions :: NubListR Extension,
| A GHC version - dependent mapping of extensions to flags . This must be
-- set to be able to make use of the 'ghcOptExtensions'.
ghcOptExtensionMap :: M.Map Extension String,
----------------
-- Compilation
-- | What optimisation level to use; the @ghc -O@ flag.
ghcOptOptimisation :: Flag GhcOptimisation,
-- | Emit debug info; the @ghc -g@ flag.
ghcOptDebugInfo :: Flag Bool,
-- | Compile in profiling mode; the @ghc -prof@ flag.
ghcOptProfilingMode :: Flag Bool,
| Use the object files\ " feature ; the @ghc -split - objs@ flag .
ghcOptSplitObjs :: Flag Bool,
-- | Run N jobs simultaneously (if possible).
ghcOptNumJobs :: Flag (Maybe Int),
-- | Enable coverage analysis; the @ghc -fhpc -hpcdir@ flags.
ghcOptHPCDir :: Flag FilePath,
----------------
-- GHCi
-- | Extra GHCi startup scripts; the @-ghci-script@ flag
ghcOptGHCiScripts :: NubListR FilePath,
------------------------
-- Redirecting outputs
ghcOptHiSuffix :: Flag String,
ghcOptObjSuffix :: Flag String,
ghcOptDynHiSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode
ghcOptDynObjSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode
ghcOptHiDir :: Flag FilePath,
ghcOptObjDir :: Flag FilePath,
ghcOptOutputDir :: Flag FilePath,
ghcOptStubDir :: Flag FilePath,
--------------------
-- Dynamic linking
ghcOptDynLinkMode :: Flag GhcDynLinkMode,
ghcOptShared :: Flag Bool,
ghcOptFPic :: Flag Bool,
ghcOptDylibName :: Flag String,
ghcOptRPaths :: NubListR FilePath,
---------------
-- Misc flags
| Get GHC to be quiet or verbose with what it 's doing ; the @ghc -v@ flag .
ghcOptVerbosity :: Flag Verbosity,
| Let GHC know that it is that 's calling it .
Modifies some of the GHC error messages .
ghcOptCabal :: Flag Bool
} deriving Show
data GhcMode = GhcModeCompile -- ^ @ghc -c@
^ @ghc@
| GhcModeMake -- ^ @ghc --make@
^ @ghci@ \/ @ghc --interactive@
| GhcModeAbiHash -- ^ @ghc --abi-hash@
-- | GhcModeDepAnalysis -- ^ @ghc -M@
-- | GhcModeEvaluate -- ^ @ghc -e@
deriving (Show, Eq)
data GhcOptimisation = GhcNoOptimisation -- ^ @-O0@
| GhcNormalOptimisation -- ^ @-O@
| GhcMaximumOptimisation -- ^ @-O2@
^ e.g. @-Odph@
deriving (Show, Eq)
data GhcDynLinkMode = GhcStaticOnly -- ^ @-static@
| GhcDynamicOnly -- ^ @-dynamic@
| GhcStaticAndDynamic -- ^ @-static -dynamic-too@
deriving (Show, Eq)
runGHC :: Verbosity -> ConfiguredProgram -> Compiler -> GhcOptions -> IO ()
runGHC verbosity ghcProg comp opts = do
runProgramInvocation verbosity (ghcInvocation ghcProg comp opts)
ghcInvocation :: ConfiguredProgram -> Compiler -> GhcOptions -> ProgramInvocation
ghcInvocation prog comp opts =
programInvocation prog (renderGhcOptions comp opts)
renderGhcOptions :: Compiler -> GhcOptions -> [String]
renderGhcOptions comp opts
| compilerFlavor comp `notElem` [GHC, GHCJS] =
error $ "Distribution.Simple.Program.GHC.renderGhcOptions: "
++ "compiler flavor must be 'GHC' or 'GHCJS'!"
| otherwise =
concat
[ case flagToMaybe (ghcOptMode opts) of
Nothing -> []
Just GhcModeCompile -> ["-c"]
Just GhcModeLink -> []
Just GhcModeMake -> ["--make"]
Just GhcModeInteractive -> ["--interactive"]
Just GhcModeAbiHash -> ["--abi-hash"]
-- Just GhcModeDepAnalysis -> ["-M"]
-- Just GhcModeEvaluate -> ["-e", expr]
, flags ghcOptExtraDefault
, [ "-no-link" | flagBool ghcOptNoLink ]
---------------
-- Misc flags
, maybe [] verbosityOpts (flagToMaybe (ghcOptVerbosity opts))
, [ "-fbuilding-cabal-package" | flagBool ghcOptCabal
, flagBuildingCabalPkg implInfo ]
----------------
-- Compilation
, case flagToMaybe (ghcOptOptimisation opts) of
Nothing -> []
Just GhcNoOptimisation -> ["-O0"]
Just GhcNormalOptimisation -> ["-O"]
Just GhcMaximumOptimisation -> ["-O2"]
Just (GhcSpecialOptimisation s) -> ["-O" ++ s] -- eg -Odph
, [ "-g" | flagDebugInfo implInfo && flagBool ghcOptDebugInfo ]
, [ "-prof" | flagBool ghcOptProfilingMode ]
, [ "-split-objs" | flagBool ghcOptSplitObjs ]
, case flagToMaybe (ghcOptHPCDir opts) of
Nothing -> []
Just hpcdir -> ["-fhpc", "-hpcdir", hpcdir]
, if parmakeSupported comp
then case ghcOptNumJobs opts of
NoFlag -> []
Flag n -> ["-j" ++ maybe "" show n]
else []
--------------------
-- Dynamic linking
, [ "-shared" | flagBool ghcOptShared ]
, case flagToMaybe (ghcOptDynLinkMode opts) of
Nothing -> []
Just GhcStaticOnly -> ["-static"]
Just GhcDynamicOnly -> ["-dynamic"]
Just GhcStaticAndDynamic -> ["-static", "-dynamic-too"]
, [ "-fPIC" | flagBool ghcOptFPic ]
, concat [ ["-dylib-install-name", libname] | libname <- flag ghcOptDylibName ]
------------------------
-- Redirecting outputs
, concat [ ["-osuf", suf] | suf <- flag ghcOptObjSuffix ]
, concat [ ["-hisuf", suf] | suf <- flag ghcOptHiSuffix ]
, concat [ ["-dynosuf", suf] | suf <- flag ghcOptDynObjSuffix ]
, concat [ ["-dynhisuf",suf] | suf <- flag ghcOptDynHiSuffix ]
, concat [ ["-outputdir", dir] | dir <- flag ghcOptOutputDir
, flagOutputDir implInfo ]
, concat [ ["-odir", dir] | dir <- flag ghcOptObjDir ]
, concat [ ["-hidir", dir] | dir <- flag ghcOptHiDir ]
, concat [ ["-stubdir", dir] | dir <- flag ghcOptStubDir
, flagStubdir implInfo ]
-----------------------
-- Source search path
, [ "-i" | flagBool ghcOptSourcePathClear ]
, [ "-i" ++ dir | dir <- flags ghcOptSourcePath ]
--------------------
C and CPP stuff
, [ "-I" ++ dir | dir <- flags ghcOptCppIncludePath ]
, [ "-optP" ++ opt | opt <- flags ghcOptCppOptions ]
, concat [ [ "-optP-include", "-optP" ++ inc]
| inc <- flags ghcOptCppIncludes ]
, [ "-#include \"" ++ inc ++ "\""
| inc <- flags ghcOptFfiIncludes, flagFfiIncludes implInfo ]
, [ "-optc" ++ opt | opt <- flags ghcOptCcOptions ]
-----------------
-- Linker stuff
, [ "-optl" ++ opt | opt <- flags ghcOptLinkOptions ]
, ["-l" ++ lib | lib <- flags ghcOptLinkLibs ]
, ["-L" ++ dir | dir <- flags ghcOptLinkLibPath ]
, concat [ ["-framework", fmwk] | fmwk <- flags ghcOptLinkFrameworks ]
, [ "-no-hs-main" | flagBool ghcOptLinkNoHsMain ]
, [ "-dynload deploy" | not (null (flags ghcOptRPaths)) ]
, concat [ [ "-optl-Wl,-rpath," ++ dir]
| dir <- flags ghcOptRPaths ]
-------------
-- Packages
, concat [ [if packageKeySupported comp
then "-this-package-key"
else "-package-name", display pkgid]
| pkgid <- flag ghcOptPackageKey ]
, [ "-hide-all-packages" | flagBool ghcOptHideAllPackages ]
, [ "-no-auto-link-packages" | flagBool ghcOptNoAutoLinkPackages ]
, packageDbArgs implInfo (ghcOptPackageDBs opts)
, if null (ghcOptSigOf opts)
then []
else "-sig-of"
: intercalate "," (map (\(n,(p,m)) -> display n ++ " is "
++ display p ++ ":"
++ display m)
(ghcOptSigOf opts))
: []
, concat $ if flagPackageId implInfo
then let space "" = ""
space xs = ' ' : xs
in [ ["-package-id", display ipkgid ++ space (display rns)]
| (ipkgid,_,rns) <- flags ghcOptPackages ]
else [ ["-package", display pkgid]
| (_,pkgid,_) <- flags ghcOptPackages ]
----------------------------
-- Language and extensions
, if supportsHaskell2010 implInfo
then [ "-X" ++ display lang | lang <- flag ghcOptLanguage ]
else []
, [ case M.lookup ext (ghcOptExtensionMap opts) of
Just arg -> arg
Nothing -> error $ "Distribution.Simple.Program.GHC.renderGhcOptions: "
++ display ext ++ " not present in ghcOptExtensionMap."
| ext <- flags ghcOptExtensions ]
----------------
-- GHCi
, concat [ [ "-ghci-script", script ] | script <- flags ghcOptGHCiScripts
, flagGhciScript implInfo ]
---------------
-- Inputs
, [ display modu | modu <- flags ghcOptInputModules ]
, flags ghcOptInputFiles
, concat [ [ "-o", out] | out <- flag ghcOptOutputFile ]
, concat [ [ "-dyno", out] | out <- flag ghcOptOutputDynFile ]
---------------
Extra
, flags ghcOptExtra
]
where
implInfo = getImplInfo comp
flag flg = flagToList (flg opts)
flags flg = fromNubListR . flg $ opts
flagBool flg = fromFlagOrDefault False (flg opts)
verbosityOpts :: Verbosity -> [String]
verbosityOpts verbosity
| verbosity >= deafening = ["-v"]
| verbosity >= normal = []
| otherwise = ["-w", "-v0"]
packageDbArgs :: GhcImplInfo -> PackageDBStack -> [String]
packageDbArgs implInfo dbstack = case dbstack of
(GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs
(GlobalPackageDB:dbs) -> ("-no-user-" ++ packageDbFlag)
: concatMap specific dbs
_ -> ierror
where
specific (SpecificPackageDB db) = [ '-':packageDbFlag , db ]
specific _ = ierror
ierror = error $ "internal error: unexpected package db stack: "
++ show dbstack
packageDbFlag
| flagPackageConf implInfo
= "package-conf"
| otherwise
= "package-db"
-- -----------------------------------------------------------------------------
Boilerplate Monoid instance for GhcOptions
instance Monoid GhcOptions where
mempty = GhcOptions {
ghcOptMode = mempty,
ghcOptExtra = mempty,
ghcOptExtraDefault = mempty,
ghcOptInputFiles = mempty,
ghcOptInputModules = mempty,
ghcOptOutputFile = mempty,
ghcOptOutputDynFile = mempty,
ghcOptSourcePathClear = mempty,
ghcOptSourcePath = mempty,
ghcOptPackageKey = mempty,
ghcOptPackageDBs = mempty,
ghcOptPackages = mempty,
ghcOptHideAllPackages = mempty,
ghcOptNoAutoLinkPackages = mempty,
ghcOptSigOf = mempty,
ghcOptLinkLibs = mempty,
ghcOptLinkLibPath = mempty,
ghcOptLinkOptions = mempty,
ghcOptLinkFrameworks = mempty,
ghcOptNoLink = mempty,
ghcOptLinkNoHsMain = mempty,
ghcOptCcOptions = mempty,
ghcOptCppOptions = mempty,
ghcOptCppIncludePath = mempty,
ghcOptCppIncludes = mempty,
ghcOptFfiIncludes = mempty,
ghcOptLanguage = mempty,
ghcOptExtensions = mempty,
ghcOptExtensionMap = mempty,
ghcOptOptimisation = mempty,
ghcOptDebugInfo = mempty,
ghcOptProfilingMode = mempty,
ghcOptSplitObjs = mempty,
ghcOptNumJobs = mempty,
ghcOptHPCDir = mempty,
ghcOptGHCiScripts = mempty,
ghcOptHiSuffix = mempty,
ghcOptObjSuffix = mempty,
ghcOptDynHiSuffix = mempty,
ghcOptDynObjSuffix = mempty,
ghcOptHiDir = mempty,
ghcOptObjDir = mempty,
ghcOptOutputDir = mempty,
ghcOptStubDir = mempty,
ghcOptDynLinkMode = mempty,
ghcOptShared = mempty,
ghcOptFPic = mempty,
ghcOptDylibName = mempty,
ghcOptRPaths = mempty,
ghcOptVerbosity = mempty,
ghcOptCabal = mempty
}
mappend a b = GhcOptions {
ghcOptMode = combine ghcOptMode,
ghcOptExtra = combine ghcOptExtra,
ghcOptExtraDefault = combine ghcOptExtraDefault,
ghcOptInputFiles = combine ghcOptInputFiles,
ghcOptInputModules = combine ghcOptInputModules,
ghcOptOutputFile = combine ghcOptOutputFile,
ghcOptOutputDynFile = combine ghcOptOutputDynFile,
ghcOptSourcePathClear = combine ghcOptSourcePathClear,
ghcOptSourcePath = combine ghcOptSourcePath,
ghcOptPackageKey = combine ghcOptPackageKey,
ghcOptPackageDBs = combine ghcOptPackageDBs,
ghcOptPackages = combine ghcOptPackages,
ghcOptHideAllPackages = combine ghcOptHideAllPackages,
ghcOptNoAutoLinkPackages = combine ghcOptNoAutoLinkPackages,
ghcOptSigOf = combine ghcOptSigOf,
ghcOptLinkLibs = combine ghcOptLinkLibs,
ghcOptLinkLibPath = combine ghcOptLinkLibPath,
ghcOptLinkOptions = combine ghcOptLinkOptions,
ghcOptLinkFrameworks = combine ghcOptLinkFrameworks,
ghcOptNoLink = combine ghcOptNoLink,
ghcOptLinkNoHsMain = combine ghcOptLinkNoHsMain,
ghcOptCcOptions = combine ghcOptCcOptions,
ghcOptCppOptions = combine ghcOptCppOptions,
ghcOptCppIncludePath = combine ghcOptCppIncludePath,
ghcOptCppIncludes = combine ghcOptCppIncludes,
ghcOptFfiIncludes = combine ghcOptFfiIncludes,
ghcOptLanguage = combine ghcOptLanguage,
ghcOptExtensions = combine ghcOptExtensions,
ghcOptExtensionMap = combine ghcOptExtensionMap,
ghcOptOptimisation = combine ghcOptOptimisation,
ghcOptDebugInfo = combine ghcOptDebugInfo,
ghcOptProfilingMode = combine ghcOptProfilingMode,
ghcOptSplitObjs = combine ghcOptSplitObjs,
ghcOptNumJobs = combine ghcOptNumJobs,
ghcOptHPCDir = combine ghcOptHPCDir,
ghcOptGHCiScripts = combine ghcOptGHCiScripts,
ghcOptHiSuffix = combine ghcOptHiSuffix,
ghcOptObjSuffix = combine ghcOptObjSuffix,
ghcOptDynHiSuffix = combine ghcOptDynHiSuffix,
ghcOptDynObjSuffix = combine ghcOptDynObjSuffix,
ghcOptHiDir = combine ghcOptHiDir,
ghcOptObjDir = combine ghcOptObjDir,
ghcOptOutputDir = combine ghcOptOutputDir,
ghcOptStubDir = combine ghcOptStubDir,
ghcOptDynLinkMode = combine ghcOptDynLinkMode,
ghcOptShared = combine ghcOptShared,
ghcOptFPic = combine ghcOptFPic,
ghcOptDylibName = combine ghcOptDylibName,
ghcOptRPaths = combine ghcOptRPaths,
ghcOptVerbosity = combine ghcOptVerbosity,
ghcOptCabal = combine ghcOptCabal
}
where
combine field = field a `mappend` field b
| null | https://raw.githubusercontent.com/fpco/ide-backend/860636f2d0e872e9481569236bce690637e0016e/ide-backend/TestSuite/inputs/Cabal-1.22.0.0/Distribution/Simple/Program/GHC.hs | haskell |
override other stuff.
and so can be overridden by other stuff.
---------------------
Inputs and outputs
| The main input files; could be .hs, .hi, .c, .o, depending on mode.
make@ mode .
| Location for dynamic output file in 'GhcStaticAndDynamic' mode;
the @ghc -dyno@ flag.
the @ghc -i@ flag (@-i@ on it's own with no path argument).
-----------
Packages
flag.
requires both the short and long form of the package id;
the @ghc -package@ or @ghc -package-id@ flags.
| Start with a clean package set; the @ghc -hide-all-packages@ flag
-no-auto-link-packages@ flag.
| What packages are implementing the signatures
---------------
Linker stuff
| Names of libraries to link in; the @ghc -l@ flag.
| Search path for libraries to link in; the @ghc -L@ flag.
| Options to pass through to the linker; the @ghc -optl@ flag.
| Don't do the link step, useful in make mode; the @ghc -no-link@ flag.
| Don't link in the normal RTS @main@ entry point; the @ghc -no-hs-main@
flag.
------------------
| Options to pass through to the C compiler; the @ghc -optc@ flag.
--------------------------
Language and extensions
| The language extensions; the @ghc -X@ flag.
set to be able to make use of the 'ghcOptExtensions'.
--------------
Compilation
| What optimisation level to use; the @ghc -O@ flag.
| Emit debug info; the @ghc -g@ flag.
| Compile in profiling mode; the @ghc -prof@ flag.
| Run N jobs simultaneously (if possible).
| Enable coverage analysis; the @ghc -fhpc -hpcdir@ flags.
--------------
GHCi
| Extra GHCi startup scripts; the @-ghci-script@ flag
----------------------
Redirecting outputs
^ only in 'GhcStaticAndDynamic' mode
^ only in 'GhcStaticAndDynamic' mode
------------------
Dynamic linking
-------------
Misc flags
^ @ghc -c@
^ @ghc --make@
interactive@
^ @ghc --abi-hash@
| GhcModeDepAnalysis -- ^ @ghc -M@
| GhcModeEvaluate -- ^ @ghc -e@
^ @-O0@
^ @-O@
^ @-O2@
^ @-static@
^ @-dynamic@
^ @-static -dynamic-too@
Just GhcModeDepAnalysis -> ["-M"]
Just GhcModeEvaluate -> ["-e", expr]
-------------
Misc flags
--------------
Compilation
eg -Odph
------------------
Dynamic linking
----------------------
Redirecting outputs
---------------------
Source search path
------------------
---------------
Linker stuff
-----------
Packages
--------------------------
Language and extensions
--------------
GHCi
-------------
Inputs
-------------
----------------------------------------------------------------------------- | module Distribution.Simple.Program.GHC (
GhcOptions(..),
GhcMode(..),
GhcOptimisation(..),
GhcDynLinkMode(..),
ghcInvocation,
renderGhcOptions,
runGHC,
) where
import Distribution.Simple.GHC.ImplInfo ( getImplInfo, GhcImplInfo(..) )
import Distribution.Package
import Distribution.PackageDescription hiding (Flag)
import Distribution.ModuleName
import Distribution.Simple.Compiler hiding (Flag)
import Distribution.Simple.Setup ( Flag(..), flagToMaybe, fromFlagOrDefault,
flagToList )
import Distribution.Simple.Program.Types
import Distribution.Simple.Program.Run
import Distribution.Text
import Distribution.Verbosity
import Distribution.Utils.NubList ( NubListR, fromNubListR )
import Language.Haskell.Extension ( Language(..), Extension(..) )
import qualified Data.Map as M
import Data.Monoid
import Data.List ( intercalate )
| A structured set of GHC options / flags
data GhcOptions = GhcOptions {
| The major mode for the ghc invocation .
ghcOptMode :: Flag GhcMode,
| Any extra options to pass directly to ghc . These go at the end and hence
ghcOptExtra :: NubListR String,
| Extra default flags to pass directly to ghc . These go at the beginning
ghcOptExtraDefault :: NubListR String,
ghcOptInputFiles :: NubListR FilePath,
ghcOptInputModules :: NubListR ModuleName,
| Location for output file ; the @ghc -o@ flag .
ghcOptOutputFile :: Flag FilePath,
ghcOptOutputDynFile :: Flag FilePath,
| Start with an empty search path for source files ;
ghcOptSourcePathClear :: Flag Bool,
| Search path for source files ; the @ghc -i@ flag .
ghcOptSourcePath :: NubListR FilePath,
| The package key the modules will belong to ; the @ghc -this - package - key@
ghcOptPackageKey :: Flag PackageKey,
| GHC package databases to use , the @ghc -package - conf@ flag .
ghcOptPackageDBs :: PackageDBStack,
| The GHC packages to use . For compatability with old and new ghc , this
ghcOptPackages ::
NubListR (InstalledPackageId, PackageId, ModuleRenaming),
ghcOptHideAllPackages :: Flag Bool,
| Do n't automatically link in Haskell98 etc ; the @ghc
ghcOptNoAutoLinkPackages :: Flag Bool,
ghcOptSigOf :: [(ModuleName, (PackageKey, ModuleName))],
ghcOptLinkLibs :: NubListR FilePath,
ghcOptLinkLibPath :: NubListR FilePath,
ghcOptLinkOptions :: NubListR String,
| OSX only : frameworks to link in ; the @ghc -framework@ flag .
ghcOptLinkFrameworks :: NubListR String,
ghcOptNoLink :: Flag Bool,
ghcOptLinkNoHsMain :: Flag Bool,
C and CPP stuff
ghcOptCcOptions :: NubListR String,
| Options to pass through to CPP ; the @ghc -optP@ flag .
ghcOptCppOptions :: NubListR String,
| Search path for CPP includes like header files ; the @ghc -I@ flag .
ghcOptCppIncludePath :: NubListR FilePath,
| Extra header files to include at CPP stage ; the @ghc -optP - include@ flag .
ghcOptCppIncludes :: NubListR FilePath,
| Extra header files to include for old - style FFI ; the @ghc -#include@ flag .
ghcOptFfiIncludes :: NubListR FilePath,
| The base language ; the @ghc -XHaskell98@ or @-XHaskell2010@ flag .
ghcOptLanguage :: Flag Language,
ghcOptExtensions :: NubListR Extension,
| A GHC version - dependent mapping of extensions to flags . This must be
ghcOptExtensionMap :: M.Map Extension String,
ghcOptOptimisation :: Flag GhcOptimisation,
ghcOptDebugInfo :: Flag Bool,
ghcOptProfilingMode :: Flag Bool,
| Use the object files\ " feature ; the @ghc -split - objs@ flag .
ghcOptSplitObjs :: Flag Bool,
ghcOptNumJobs :: Flag (Maybe Int),
ghcOptHPCDir :: Flag FilePath,
ghcOptGHCiScripts :: NubListR FilePath,
ghcOptHiSuffix :: Flag String,
ghcOptObjSuffix :: Flag String,
ghcOptHiDir :: Flag FilePath,
ghcOptObjDir :: Flag FilePath,
ghcOptOutputDir :: Flag FilePath,
ghcOptStubDir :: Flag FilePath,
ghcOptDynLinkMode :: Flag GhcDynLinkMode,
ghcOptShared :: Flag Bool,
ghcOptFPic :: Flag Bool,
ghcOptDylibName :: Flag String,
ghcOptRPaths :: NubListR FilePath,
| Get GHC to be quiet or verbose with what it 's doing ; the @ghc -v@ flag .
ghcOptVerbosity :: Flag Verbosity,
| Let GHC know that it is that 's calling it .
Modifies some of the GHC error messages .
ghcOptCabal :: Flag Bool
} deriving Show
^ @ghc@
deriving (Show, Eq)
^ e.g. @-Odph@
deriving (Show, Eq)
deriving (Show, Eq)
runGHC :: Verbosity -> ConfiguredProgram -> Compiler -> GhcOptions -> IO ()
runGHC verbosity ghcProg comp opts = do
runProgramInvocation verbosity (ghcInvocation ghcProg comp opts)
ghcInvocation :: ConfiguredProgram -> Compiler -> GhcOptions -> ProgramInvocation
ghcInvocation prog comp opts =
programInvocation prog (renderGhcOptions comp opts)
renderGhcOptions :: Compiler -> GhcOptions -> [String]
renderGhcOptions comp opts
| compilerFlavor comp `notElem` [GHC, GHCJS] =
error $ "Distribution.Simple.Program.GHC.renderGhcOptions: "
++ "compiler flavor must be 'GHC' or 'GHCJS'!"
| otherwise =
concat
[ case flagToMaybe (ghcOptMode opts) of
Nothing -> []
Just GhcModeCompile -> ["-c"]
Just GhcModeLink -> []
Just GhcModeMake -> ["--make"]
Just GhcModeInteractive -> ["--interactive"]
Just GhcModeAbiHash -> ["--abi-hash"]
, flags ghcOptExtraDefault
, [ "-no-link" | flagBool ghcOptNoLink ]
, maybe [] verbosityOpts (flagToMaybe (ghcOptVerbosity opts))
, [ "-fbuilding-cabal-package" | flagBool ghcOptCabal
, flagBuildingCabalPkg implInfo ]
, case flagToMaybe (ghcOptOptimisation opts) of
Nothing -> []
Just GhcNoOptimisation -> ["-O0"]
Just GhcNormalOptimisation -> ["-O"]
Just GhcMaximumOptimisation -> ["-O2"]
, [ "-g" | flagDebugInfo implInfo && flagBool ghcOptDebugInfo ]
, [ "-prof" | flagBool ghcOptProfilingMode ]
, [ "-split-objs" | flagBool ghcOptSplitObjs ]
, case flagToMaybe (ghcOptHPCDir opts) of
Nothing -> []
Just hpcdir -> ["-fhpc", "-hpcdir", hpcdir]
, if parmakeSupported comp
then case ghcOptNumJobs opts of
NoFlag -> []
Flag n -> ["-j" ++ maybe "" show n]
else []
, [ "-shared" | flagBool ghcOptShared ]
, case flagToMaybe (ghcOptDynLinkMode opts) of
Nothing -> []
Just GhcStaticOnly -> ["-static"]
Just GhcDynamicOnly -> ["-dynamic"]
Just GhcStaticAndDynamic -> ["-static", "-dynamic-too"]
, [ "-fPIC" | flagBool ghcOptFPic ]
, concat [ ["-dylib-install-name", libname] | libname <- flag ghcOptDylibName ]
, concat [ ["-osuf", suf] | suf <- flag ghcOptObjSuffix ]
, concat [ ["-hisuf", suf] | suf <- flag ghcOptHiSuffix ]
, concat [ ["-dynosuf", suf] | suf <- flag ghcOptDynObjSuffix ]
, concat [ ["-dynhisuf",suf] | suf <- flag ghcOptDynHiSuffix ]
, concat [ ["-outputdir", dir] | dir <- flag ghcOptOutputDir
, flagOutputDir implInfo ]
, concat [ ["-odir", dir] | dir <- flag ghcOptObjDir ]
, concat [ ["-hidir", dir] | dir <- flag ghcOptHiDir ]
, concat [ ["-stubdir", dir] | dir <- flag ghcOptStubDir
, flagStubdir implInfo ]
, [ "-i" | flagBool ghcOptSourcePathClear ]
, [ "-i" ++ dir | dir <- flags ghcOptSourcePath ]
C and CPP stuff
, [ "-I" ++ dir | dir <- flags ghcOptCppIncludePath ]
, [ "-optP" ++ opt | opt <- flags ghcOptCppOptions ]
, concat [ [ "-optP-include", "-optP" ++ inc]
| inc <- flags ghcOptCppIncludes ]
, [ "-#include \"" ++ inc ++ "\""
| inc <- flags ghcOptFfiIncludes, flagFfiIncludes implInfo ]
, [ "-optc" ++ opt | opt <- flags ghcOptCcOptions ]
, [ "-optl" ++ opt | opt <- flags ghcOptLinkOptions ]
, ["-l" ++ lib | lib <- flags ghcOptLinkLibs ]
, ["-L" ++ dir | dir <- flags ghcOptLinkLibPath ]
, concat [ ["-framework", fmwk] | fmwk <- flags ghcOptLinkFrameworks ]
, [ "-no-hs-main" | flagBool ghcOptLinkNoHsMain ]
, [ "-dynload deploy" | not (null (flags ghcOptRPaths)) ]
, concat [ [ "-optl-Wl,-rpath," ++ dir]
| dir <- flags ghcOptRPaths ]
, concat [ [if packageKeySupported comp
then "-this-package-key"
else "-package-name", display pkgid]
| pkgid <- flag ghcOptPackageKey ]
, [ "-hide-all-packages" | flagBool ghcOptHideAllPackages ]
, [ "-no-auto-link-packages" | flagBool ghcOptNoAutoLinkPackages ]
, packageDbArgs implInfo (ghcOptPackageDBs opts)
, if null (ghcOptSigOf opts)
then []
else "-sig-of"
: intercalate "," (map (\(n,(p,m)) -> display n ++ " is "
++ display p ++ ":"
++ display m)
(ghcOptSigOf opts))
: []
, concat $ if flagPackageId implInfo
then let space "" = ""
space xs = ' ' : xs
in [ ["-package-id", display ipkgid ++ space (display rns)]
| (ipkgid,_,rns) <- flags ghcOptPackages ]
else [ ["-package", display pkgid]
| (_,pkgid,_) <- flags ghcOptPackages ]
, if supportsHaskell2010 implInfo
then [ "-X" ++ display lang | lang <- flag ghcOptLanguage ]
else []
, [ case M.lookup ext (ghcOptExtensionMap opts) of
Just arg -> arg
Nothing -> error $ "Distribution.Simple.Program.GHC.renderGhcOptions: "
++ display ext ++ " not present in ghcOptExtensionMap."
| ext <- flags ghcOptExtensions ]
, concat [ [ "-ghci-script", script ] | script <- flags ghcOptGHCiScripts
, flagGhciScript implInfo ]
, [ display modu | modu <- flags ghcOptInputModules ]
, flags ghcOptInputFiles
, concat [ [ "-o", out] | out <- flag ghcOptOutputFile ]
, concat [ [ "-dyno", out] | out <- flag ghcOptOutputDynFile ]
Extra
, flags ghcOptExtra
]
where
implInfo = getImplInfo comp
flag flg = flagToList (flg opts)
flags flg = fromNubListR . flg $ opts
flagBool flg = fromFlagOrDefault False (flg opts)
verbosityOpts :: Verbosity -> [String]
verbosityOpts verbosity
| verbosity >= deafening = ["-v"]
| verbosity >= normal = []
| otherwise = ["-w", "-v0"]
packageDbArgs :: GhcImplInfo -> PackageDBStack -> [String]
packageDbArgs implInfo dbstack = case dbstack of
(GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs
(GlobalPackageDB:dbs) -> ("-no-user-" ++ packageDbFlag)
: concatMap specific dbs
_ -> ierror
where
specific (SpecificPackageDB db) = [ '-':packageDbFlag , db ]
specific _ = ierror
ierror = error $ "internal error: unexpected package db stack: "
++ show dbstack
packageDbFlag
| flagPackageConf implInfo
= "package-conf"
| otherwise
= "package-db"
Boilerplate Monoid instance for GhcOptions
instance Monoid GhcOptions where
mempty = GhcOptions {
ghcOptMode = mempty,
ghcOptExtra = mempty,
ghcOptExtraDefault = mempty,
ghcOptInputFiles = mempty,
ghcOptInputModules = mempty,
ghcOptOutputFile = mempty,
ghcOptOutputDynFile = mempty,
ghcOptSourcePathClear = mempty,
ghcOptSourcePath = mempty,
ghcOptPackageKey = mempty,
ghcOptPackageDBs = mempty,
ghcOptPackages = mempty,
ghcOptHideAllPackages = mempty,
ghcOptNoAutoLinkPackages = mempty,
ghcOptSigOf = mempty,
ghcOptLinkLibs = mempty,
ghcOptLinkLibPath = mempty,
ghcOptLinkOptions = mempty,
ghcOptLinkFrameworks = mempty,
ghcOptNoLink = mempty,
ghcOptLinkNoHsMain = mempty,
ghcOptCcOptions = mempty,
ghcOptCppOptions = mempty,
ghcOptCppIncludePath = mempty,
ghcOptCppIncludes = mempty,
ghcOptFfiIncludes = mempty,
ghcOptLanguage = mempty,
ghcOptExtensions = mempty,
ghcOptExtensionMap = mempty,
ghcOptOptimisation = mempty,
ghcOptDebugInfo = mempty,
ghcOptProfilingMode = mempty,
ghcOptSplitObjs = mempty,
ghcOptNumJobs = mempty,
ghcOptHPCDir = mempty,
ghcOptGHCiScripts = mempty,
ghcOptHiSuffix = mempty,
ghcOptObjSuffix = mempty,
ghcOptDynHiSuffix = mempty,
ghcOptDynObjSuffix = mempty,
ghcOptHiDir = mempty,
ghcOptObjDir = mempty,
ghcOptOutputDir = mempty,
ghcOptStubDir = mempty,
ghcOptDynLinkMode = mempty,
ghcOptShared = mempty,
ghcOptFPic = mempty,
ghcOptDylibName = mempty,
ghcOptRPaths = mempty,
ghcOptVerbosity = mempty,
ghcOptCabal = mempty
}
mappend a b = GhcOptions {
ghcOptMode = combine ghcOptMode,
ghcOptExtra = combine ghcOptExtra,
ghcOptExtraDefault = combine ghcOptExtraDefault,
ghcOptInputFiles = combine ghcOptInputFiles,
ghcOptInputModules = combine ghcOptInputModules,
ghcOptOutputFile = combine ghcOptOutputFile,
ghcOptOutputDynFile = combine ghcOptOutputDynFile,
ghcOptSourcePathClear = combine ghcOptSourcePathClear,
ghcOptSourcePath = combine ghcOptSourcePath,
ghcOptPackageKey = combine ghcOptPackageKey,
ghcOptPackageDBs = combine ghcOptPackageDBs,
ghcOptPackages = combine ghcOptPackages,
ghcOptHideAllPackages = combine ghcOptHideAllPackages,
ghcOptNoAutoLinkPackages = combine ghcOptNoAutoLinkPackages,
ghcOptSigOf = combine ghcOptSigOf,
ghcOptLinkLibs = combine ghcOptLinkLibs,
ghcOptLinkLibPath = combine ghcOptLinkLibPath,
ghcOptLinkOptions = combine ghcOptLinkOptions,
ghcOptLinkFrameworks = combine ghcOptLinkFrameworks,
ghcOptNoLink = combine ghcOptNoLink,
ghcOptLinkNoHsMain = combine ghcOptLinkNoHsMain,
ghcOptCcOptions = combine ghcOptCcOptions,
ghcOptCppOptions = combine ghcOptCppOptions,
ghcOptCppIncludePath = combine ghcOptCppIncludePath,
ghcOptCppIncludes = combine ghcOptCppIncludes,
ghcOptFfiIncludes = combine ghcOptFfiIncludes,
ghcOptLanguage = combine ghcOptLanguage,
ghcOptExtensions = combine ghcOptExtensions,
ghcOptExtensionMap = combine ghcOptExtensionMap,
ghcOptOptimisation = combine ghcOptOptimisation,
ghcOptDebugInfo = combine ghcOptDebugInfo,
ghcOptProfilingMode = combine ghcOptProfilingMode,
ghcOptSplitObjs = combine ghcOptSplitObjs,
ghcOptNumJobs = combine ghcOptNumJobs,
ghcOptHPCDir = combine ghcOptHPCDir,
ghcOptGHCiScripts = combine ghcOptGHCiScripts,
ghcOptHiSuffix = combine ghcOptHiSuffix,
ghcOptObjSuffix = combine ghcOptObjSuffix,
ghcOptDynHiSuffix = combine ghcOptDynHiSuffix,
ghcOptDynObjSuffix = combine ghcOptDynObjSuffix,
ghcOptHiDir = combine ghcOptHiDir,
ghcOptObjDir = combine ghcOptObjDir,
ghcOptOutputDir = combine ghcOptOutputDir,
ghcOptStubDir = combine ghcOptStubDir,
ghcOptDynLinkMode = combine ghcOptDynLinkMode,
ghcOptShared = combine ghcOptShared,
ghcOptFPic = combine ghcOptFPic,
ghcOptDylibName = combine ghcOptDylibName,
ghcOptRPaths = combine ghcOptRPaths,
ghcOptVerbosity = combine ghcOptVerbosity,
ghcOptCabal = combine ghcOptCabal
}
where
combine field = field a `mappend` field b
|
56ba93f7dfefad8ae9a2196dec0f9ce67f991a6650533964b6f7661571facf2d | bgamari/bayes-stack | Gibbs.hs | # LANGUAGE TypeFamilies , FlexibleInstances , FlexibleContexts ,
ExistentialQuantification , GADTs , CPP #
ExistentialQuantification, GADTs, CPP #-}
module BayesStack.Gibbs ( UpdateUnit(..)
, WrappedUpdateUnit(..)
) where
import Control.DeepSeq
import Data.Random
class (Show (Setting uu), Show uu) => UpdateUnit uu where
type ModelState uu
type Setting uu
fetchSetting :: uu -> ModelState uu -> Setting uu
evolveSetting :: ModelState uu -> uu -> RVar (Setting uu)
updateSetting :: uu -> Setting uu -> Setting uu -> ModelState uu -> ModelState uu
data WrappedUpdateUnit ms = forall uu. (UpdateUnit uu, ModelState uu ~ ms,
NFData (Setting uu), Eq (Setting uu))
=> WrappedUU uu
| null | https://raw.githubusercontent.com/bgamari/bayes-stack/020df7bb7263104fdea254e57d6c7daf7806da3e/BayesStack/Gibbs.hs | haskell | # LANGUAGE TypeFamilies , FlexibleInstances , FlexibleContexts ,
ExistentialQuantification , GADTs , CPP #
ExistentialQuantification, GADTs, CPP #-}
module BayesStack.Gibbs ( UpdateUnit(..)
, WrappedUpdateUnit(..)
) where
import Control.DeepSeq
import Data.Random
class (Show (Setting uu), Show uu) => UpdateUnit uu where
type ModelState uu
type Setting uu
fetchSetting :: uu -> ModelState uu -> Setting uu
evolveSetting :: ModelState uu -> uu -> RVar (Setting uu)
updateSetting :: uu -> Setting uu -> Setting uu -> ModelState uu -> ModelState uu
data WrappedUpdateUnit ms = forall uu. (UpdateUnit uu, ModelState uu ~ ms,
NFData (Setting uu), Eq (Setting uu))
=> WrappedUU uu
| |
bbeaae6cb9050a146641e4a68e71bec372257c5b26242c56c619b68da9d45ca3 | copton/ocram | Main.hs | module Main where
import Reactive.Banana
import Control.Concurrent (threadDelay)
import Control.Monad (when)
main :: IO ()
main = do
(ah, fire) <- newAddHandler
let
createNetwork = do
eCommand <- fromAddHandler ah
reactimate $ handle <$> eCommand
handle n = print n >> when (n == 1) (fire 3)
loop = threadDelay (10^6) >> loop
network <- compile $ createNetwork
actuate network
fire 1
fire 2
loop
| null | https://raw.githubusercontent.com/copton/ocram/c7166eab0187868a52a61017c6d3687e5a1a6162/try/frp/time/Main.hs | haskell | module Main where
import Reactive.Banana
import Control.Concurrent (threadDelay)
import Control.Monad (when)
main :: IO ()
main = do
(ah, fire) <- newAddHandler
let
createNetwork = do
eCommand <- fromAddHandler ah
reactimate $ handle <$> eCommand
handle n = print n >> when (n == 1) (fire 3)
loop = threadDelay (10^6) >> loop
network <- compile $ createNetwork
actuate network
fire 1
fire 2
loop
| |
ad9d78470d5c67ab7c17a8baea4f454c40d1337475a4cf76f282fe97ea7f3a5c | marick/suchwow | metadata.clj | (ns such.metadata
"Convenience functions for working with metadata. Intended to be used with
`(:require [such.metadata :as meta])`."
(:refer-clojure :exclude [merge assoc get contains?]))
(defn get
"Equivalent to `(get (meta o) k)` or `(get (meta o) k default)`."
([o k default]
(clojure.core/get (meta o) k default))
([o k]
(get o k nil)))
(defn merge
"Merge the maps onto the metadata of `o`, creating a new object
equal to `o` but with the merged metadata.
(meta/merge o {:author \"Brian\" :lang :en-ca})
"
[o & maps]
(let [all (apply clojure.core/merge maps)]
(vary-meta o clojure.core/merge all)))
(defn assoc
"`assoc` the key-value pairs onto the metadata of `o`, creating a
new object equal to `o` but with the new metadata.
(meta/assoc o :author \"Brian\" :lang :en-ca)
"
[o & kvs]
(let [all (apply hash-map kvs)]
(merge o all)))
(defn contains?
"Answer whether the metadata contains the given key. A key with
a `nil` value counts as being contained.
(meta/contains? o :author) => true
"
[o key]
(clojure.core/contains? (meta o) key))
| null | https://raw.githubusercontent.com/marick/suchwow/111cd4aa21ee23552742701bfe52e593b65fb0f8/src/such/metadata.clj | clojure | (ns such.metadata
"Convenience functions for working with metadata. Intended to be used with
`(:require [such.metadata :as meta])`."
(:refer-clojure :exclude [merge assoc get contains?]))
(defn get
"Equivalent to `(get (meta o) k)` or `(get (meta o) k default)`."
([o k default]
(clojure.core/get (meta o) k default))
([o k]
(get o k nil)))
(defn merge
"Merge the maps onto the metadata of `o`, creating a new object
equal to `o` but with the merged metadata.
(meta/merge o {:author \"Brian\" :lang :en-ca})
"
[o & maps]
(let [all (apply clojure.core/merge maps)]
(vary-meta o clojure.core/merge all)))
(defn assoc
"`assoc` the key-value pairs onto the metadata of `o`, creating a
new object equal to `o` but with the new metadata.
(meta/assoc o :author \"Brian\" :lang :en-ca)
"
[o & kvs]
(let [all (apply hash-map kvs)]
(merge o all)))
(defn contains?
"Answer whether the metadata contains the given key. A key with
a `nil` value counts as being contained.
(meta/contains? o :author) => true
"
[o key]
(clojure.core/contains? (meta o) key))
| |
85868129e4e95725b2f3b7b0d72c451c38231a4c036dcfdd3409e13442fc2c61 | matijapretnar/eff | value.ml | open Utils
open Language
type value =
| Const of Const.t
| Tuple of value list
| Record of value Type.Field.Map.t
| Variant of Type.Label.t * value option
| Closure of closure
| TypeCoercionClosure of (Type.ct_ty -> value)
| DirtCoercionClosure of (Type.ct_dirt -> value)
| Handler of (result -> result)
and result = Value of value | Call of Effect.t * value * closure
and closure = value -> result
let unit_value = Tuple []
let unit_result = Value unit_value
let to_bool = function
| Const (Const.Boolean b) -> b
| _ -> Error.runtime "A boolean value expected."
let to_int = function
| Const (Const.Integer n) -> n
| _ -> Error.runtime "An integer value expected."
let to_float = function
| Const (Const.Float f) -> f
| _ -> Error.runtime "A floating-point value expected."
let to_str = function
| Const (Const.String s) -> s
| _ -> Error.runtime "A string value expected."
let to_handler = function
| Handler h -> h
| _ -> Error.runtime "A handler expected."
let print_effect eff ppf = Format.fprintf ppf "%t" (Effect.print eff)
let rec print_value ?max_level v ppf =
let print ?at_level = Print.print ?max_level ?at_level ppf in
match to_list v with
| Some vs -> print "[@[%t@]]" (Print.sequence "; " print_value vs)
| None -> (
match v with
| Const c -> Const.print c ppf
| Tuple lst -> Print.tuple print_value lst ppf
| Record assoc ->
Print.record Type.Field.print print_value
(Type.Field.Map.bindings assoc)
ppf
| Variant (lbl, None) -> print ~at_level:1 "%t" (Type.Label.print lbl)
| Variant (lbl, Some v) ->
print ~at_level:1 "%t @[<hov>%t@]" (Type.Label.print lbl)
(print_value v)
| Closure _ -> print "<fun>"
| Handler _ -> print "<handler>"
| TypeCoercionClosure _ -> print "<ty_coercion>"
| DirtCoercionClosure _ -> print "<dir_coercion>")
and to_list = function
| Variant (lbl, None) when lbl = Type.nil -> Some []
| Variant (lbl, Some (Tuple [ hd; tl ])) when lbl = Type.cons ->
Option.bind (to_list tl) (fun vs -> Some (hd :: vs))
| _ -> None
let print_result r ppf =
match r with
| Value v -> print_value v ppf
| Call (eff, v, _) ->
Format.fprintf ppf "Call %t %t" (print_effect eff) (print_value v)
| null | https://raw.githubusercontent.com/matijapretnar/eff/0b0ec7a83e7db4d040ed8fdfc1ac6e3c0f344be1/src/05-backends/runtime/value.ml | ocaml | open Utils
open Language
type value =
| Const of Const.t
| Tuple of value list
| Record of value Type.Field.Map.t
| Variant of Type.Label.t * value option
| Closure of closure
| TypeCoercionClosure of (Type.ct_ty -> value)
| DirtCoercionClosure of (Type.ct_dirt -> value)
| Handler of (result -> result)
and result = Value of value | Call of Effect.t * value * closure
and closure = value -> result
let unit_value = Tuple []
let unit_result = Value unit_value
let to_bool = function
| Const (Const.Boolean b) -> b
| _ -> Error.runtime "A boolean value expected."
let to_int = function
| Const (Const.Integer n) -> n
| _ -> Error.runtime "An integer value expected."
let to_float = function
| Const (Const.Float f) -> f
| _ -> Error.runtime "A floating-point value expected."
let to_str = function
| Const (Const.String s) -> s
| _ -> Error.runtime "A string value expected."
let to_handler = function
| Handler h -> h
| _ -> Error.runtime "A handler expected."
let print_effect eff ppf = Format.fprintf ppf "%t" (Effect.print eff)
let rec print_value ?max_level v ppf =
let print ?at_level = Print.print ?max_level ?at_level ppf in
match to_list v with
| Some vs -> print "[@[%t@]]" (Print.sequence "; " print_value vs)
| None -> (
match v with
| Const c -> Const.print c ppf
| Tuple lst -> Print.tuple print_value lst ppf
| Record assoc ->
Print.record Type.Field.print print_value
(Type.Field.Map.bindings assoc)
ppf
| Variant (lbl, None) -> print ~at_level:1 "%t" (Type.Label.print lbl)
| Variant (lbl, Some v) ->
print ~at_level:1 "%t @[<hov>%t@]" (Type.Label.print lbl)
(print_value v)
| Closure _ -> print "<fun>"
| Handler _ -> print "<handler>"
| TypeCoercionClosure _ -> print "<ty_coercion>"
| DirtCoercionClosure _ -> print "<dir_coercion>")
and to_list = function
| Variant (lbl, None) when lbl = Type.nil -> Some []
| Variant (lbl, Some (Tuple [ hd; tl ])) when lbl = Type.cons ->
Option.bind (to_list tl) (fun vs -> Some (hd :: vs))
| _ -> None
let print_result r ppf =
match r with
| Value v -> print_value v ppf
| Call (eff, v, _) ->
Format.fprintf ppf "Call %t %t" (print_effect eff) (print_value v)
| |
263d32a36dabccfda43731a0f6f5ece769e14349573a1e87bff0f1bd0b75c7f3 | jgm/HeX | Standard.hs | {-# LANGUAGE OverloadedStrings, ScopedTypeVariables, PackageImports #-}
module Text.HeX.Standard ( defaults ) where
import Text.HeX
import qualified Text.HeX.Standard.Generic as Generic
import qualified Text.HeX.Standard.Html as Html
import qualified Text.HeX.Standard.LaTeX as LaTeX
import qualified Text.HeX.Math.MathML as MathML
import qualified Text.HeX.Math.LaTeX as LaTeXMath
defaults :: HeX ()
defaults = do
setVar "secnum" ([] :: [Int])
forFormat "html" $ Html.defaults >> MathML.defaults
forFormat "latex" $ LaTeX.defaults >> LaTeXMath.defaults
Generic.defaults
| null | https://raw.githubusercontent.com/jgm/HeX/5bab503606e01c453555545493c43c00398ca408/Text/HeX/Standard.hs | haskell | # LANGUAGE OverloadedStrings, ScopedTypeVariables, PackageImports # | module Text.HeX.Standard ( defaults ) where
import Text.HeX
import qualified Text.HeX.Standard.Generic as Generic
import qualified Text.HeX.Standard.Html as Html
import qualified Text.HeX.Standard.LaTeX as LaTeX
import qualified Text.HeX.Math.MathML as MathML
import qualified Text.HeX.Math.LaTeX as LaTeXMath
defaults :: HeX ()
defaults = do
setVar "secnum" ([] :: [Int])
forFormat "html" $ Html.defaults >> MathML.defaults
forFormat "latex" $ LaTeX.defaults >> LaTeXMath.defaults
Generic.defaults
|
fb08d56bb431d54dca3cde959583648a9556e7a59e629ece756b4f48f50598b9 | ujamjar/hardcaml-llvmsim | test_fifo.ml | open HardCaml
module Seq = Signal.Make_seq(struct
let reg_spec = Signal.Seq.r_sync
let ram_spec = Signal.Seq.r_none
end)
module Fifo = struct
open Signal.Comb
let bits = 4
module I = struct
type 'a t = { clear : 'a[@bits 1]; wr : 'a[@bits 1]; d : 'a[@bits bits]; rd : 'a[@bits 1]; }[@@deriving hardcaml]
end
module O = struct
type 'a t = { q : 'a[@bits bits]; }[@@deriving hardcaml]
end
let f i =
let open I in
let size = 1 lsl bits in
let wa = Seq.reg_fb ~c:i.clear ~e:i.wr ~w:bits (fun d -> d +:. 1) -- "fifo_wa" in
let ra = Seq.reg_fb ~c:i.clear ~e:i.rd ~w:bits (fun d -> d +:. 1) -- "fifo_ra" in
let q = Seq.ram_rbw size ~we:i.wr ~wa ~d:i.d ~re:i.rd ~ra in
O.{ q }
end
module B = Bits.Ext.Comb.IntbitsList
module S = Cyclesim.Api
module Cl = HardCamlLlvmsim.Sim.Gen(B)(Fifo.I)(Fifo.O)
module Cs = HardCaml.Interface.Gen(B)(Fifo.I)(Fifo.O)
module Cs' = HardCaml.Cyclesim.Make(B)
module Waveterm_waves = HardCamlWaveTerm.Wave.Make(HardCamlWaveTerm.Wave.Bits(B))
module Waveterm_sim = HardCamlWaveTerm.Sim.Make(B)(Waveterm_waves)
module Waveterm_ui = HardCamlWaveTerm.Ui.Make(B)(Waveterm_waves)
let test =
let circuit,sim,i,o = Cl.make "test_fifo_llvm" Fifo.f in
HardCamlLlvmsim.Sim.write "" circuit;
let _,sim',_,_,_ = Cs.make "test_fifo_cs" Fifo.f in
let sim = Cs'.combine_strict sim sim' in
let sim, waves = Waveterm_sim.wrap sim in
let open Fifo.I in
let open Fifo.O in
S.reset sim;
let cycle () = try S.cycle sim with _ -> () in
for j=0 to 3 do
i.d := B.consti Fifo.bits j;
i.wr := B.vdd;
cycle ();
done;
i.d := B.consti Fifo.bits 0;
i.wr := B.gnd;
for j=0 to 3 do
i.rd := B.vdd;
cycle ();
done;
i.rd := B.gnd;
cycle ();
cycle ();
Lwt_main.run (Waveterm_ui.run Waveterm_waves.({ cfg=default; waves }))
| null | https://raw.githubusercontent.com/ujamjar/hardcaml-llvmsim/dc0e1a67bdf18a7ccf05035c042ec46fa7f4296b/test/test_fifo.ml | ocaml | open HardCaml
module Seq = Signal.Make_seq(struct
let reg_spec = Signal.Seq.r_sync
let ram_spec = Signal.Seq.r_none
end)
module Fifo = struct
open Signal.Comb
let bits = 4
module I = struct
type 'a t = { clear : 'a[@bits 1]; wr : 'a[@bits 1]; d : 'a[@bits bits]; rd : 'a[@bits 1]; }[@@deriving hardcaml]
end
module O = struct
type 'a t = { q : 'a[@bits bits]; }[@@deriving hardcaml]
end
let f i =
let open I in
let size = 1 lsl bits in
let wa = Seq.reg_fb ~c:i.clear ~e:i.wr ~w:bits (fun d -> d +:. 1) -- "fifo_wa" in
let ra = Seq.reg_fb ~c:i.clear ~e:i.rd ~w:bits (fun d -> d +:. 1) -- "fifo_ra" in
let q = Seq.ram_rbw size ~we:i.wr ~wa ~d:i.d ~re:i.rd ~ra in
O.{ q }
end
module B = Bits.Ext.Comb.IntbitsList
module S = Cyclesim.Api
module Cl = HardCamlLlvmsim.Sim.Gen(B)(Fifo.I)(Fifo.O)
module Cs = HardCaml.Interface.Gen(B)(Fifo.I)(Fifo.O)
module Cs' = HardCaml.Cyclesim.Make(B)
module Waveterm_waves = HardCamlWaveTerm.Wave.Make(HardCamlWaveTerm.Wave.Bits(B))
module Waveterm_sim = HardCamlWaveTerm.Sim.Make(B)(Waveterm_waves)
module Waveterm_ui = HardCamlWaveTerm.Ui.Make(B)(Waveterm_waves)
let test =
let circuit,sim,i,o = Cl.make "test_fifo_llvm" Fifo.f in
HardCamlLlvmsim.Sim.write "" circuit;
let _,sim',_,_,_ = Cs.make "test_fifo_cs" Fifo.f in
let sim = Cs'.combine_strict sim sim' in
let sim, waves = Waveterm_sim.wrap sim in
let open Fifo.I in
let open Fifo.O in
S.reset sim;
let cycle () = try S.cycle sim with _ -> () in
for j=0 to 3 do
i.d := B.consti Fifo.bits j;
i.wr := B.vdd;
cycle ();
done;
i.d := B.consti Fifo.bits 0;
i.wr := B.gnd;
for j=0 to 3 do
i.rd := B.vdd;
cycle ();
done;
i.rd := B.gnd;
cycle ();
cycle ();
Lwt_main.run (Waveterm_ui.run Waveterm_waves.({ cfg=default; waves }))
| |
0370bd76c9c5c7c30bd87e72646f1983d1f5105921e69848c4403ea16bb7f9c3 | esl/esl-rabbitmq-client | esl_rabbitmq_client_msg_handler.erl | -module(esl_rabbitmq_client_msg_handler).
-behaviour(gen_server).
-include_lib("amqp_client/include/amqp_client.hrl").
%% API
-export([start_link/1]).
%% `gen_server' behaviour callbacks
-export([ init/1
, handle_call/3
, handle_info/2
, handle_cast/2
, code_change/3
, terminate/2
]).
-type state() :: #{client_msgs_handler := pid()}.
%% =============================================================================
%% API
%% =============================================================================
-spec start_link(MsgsHandlerPid::pid()) ->
{ok, pid()} | {error, {already_started, pid()} | term()} | ignore.
start_link(MsgsHandlerPid) ->
gen_server:start_link(?MODULE, [MsgsHandlerPid], []).
%% =============================================================================
%% `gen_server' behaviour callbacks
%% =============================================================================
-spec init(Args::term()) ->
{ok, state()}.
init([MsgsHandlerPid]) ->
{ok, #{client_msgs_handler => MsgsHandlerPid}}.
-spec handle_call(Request::term(), From::tuple(), State::state()) ->
{noreply, state()}.
handle_call(Request, From, State) ->
error_logger:info_msg(
"Unknown request -> ~p from -> ~p when state was -> ~p~n"
, [Request, From, State]
),
{noreply, State}.
-spec handle_info(Info::timeout | term(), State::state()) ->
{noreply, state()}.
handle_info( BasicConsumeOK = #'basic.consume_ok'{}
, State = #{client_msgs_handler := MsgsHandlerPid}
) ->
NewMsg = esl_rabbitmq_client_amqp:from_basic_consume_ok(BasicConsumeOK),
MsgsHandlerPid ! NewMsg,
{noreply, State};
handle_info( {BasicDeliver = #'basic.deliver'{}, AMQPMsg = #amqp_msg{}}
, State = #{client_msgs_handler := MsgsHandlerPid}
) ->
NewMsg = esl_rabbitmq_client_amqp:from_basic_deliver(BasicDeliver, AMQPMsg),
MsgsHandlerPid ! NewMsg,
{noreply, State};
handle_info( BasicCancel = #'basic.cancel'{}
, State = #{client_msgs_handler := MsgsHandlerPid}
) ->
NewMsg = esl_rabbitmq_client_amqp:from_basic_cancel(BasicCancel),
MsgsHandlerPid ! NewMsg,
{noreply, State};
handle_info(Info, State) ->
error_logger:info_msg( "Unknown Info -> ~p when state was -> ~p~n"
, [Info, State]
),
{noreply, State}.
-spec handle_cast(Request::term(), State::state()) ->
{noreply, state()}.
handle_cast(Request, State) ->
error_logger:info_msg( "Unknown Request -> ~p when state was -> ~p~n"
, [Request, State]
),
{noreply, State}.
-spec code_change(OldVsn::term(), State::state(), Extra::term()) ->
{ok, state()}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
-spec terminate(Reason::term(), State::state()) ->
ok.
terminate(Reason, State) ->
error_logger:info_msg( "Terminating with reason -> ~p when state was -> ~p~n"
, [Reason, State]
),
ok.
| null | https://raw.githubusercontent.com/esl/esl-rabbitmq-client/0bcd0ff7ca4cf1a982c7a9d2d1fdf29f0a82ac8f/src/esl_rabbitmq_client_msg_handler.erl | erlang | API
`gen_server' behaviour callbacks
=============================================================================
API
=============================================================================
=============================================================================
`gen_server' behaviour callbacks
============================================================================= | -module(esl_rabbitmq_client_msg_handler).
-behaviour(gen_server).
-include_lib("amqp_client/include/amqp_client.hrl").
-export([start_link/1]).
-export([ init/1
, handle_call/3
, handle_info/2
, handle_cast/2
, code_change/3
, terminate/2
]).
-type state() :: #{client_msgs_handler := pid()}.
-spec start_link(MsgsHandlerPid::pid()) ->
{ok, pid()} | {error, {already_started, pid()} | term()} | ignore.
start_link(MsgsHandlerPid) ->
gen_server:start_link(?MODULE, [MsgsHandlerPid], []).
-spec init(Args::term()) ->
{ok, state()}.
init([MsgsHandlerPid]) ->
{ok, #{client_msgs_handler => MsgsHandlerPid}}.
-spec handle_call(Request::term(), From::tuple(), State::state()) ->
{noreply, state()}.
handle_call(Request, From, State) ->
error_logger:info_msg(
"Unknown request -> ~p from -> ~p when state was -> ~p~n"
, [Request, From, State]
),
{noreply, State}.
-spec handle_info(Info::timeout | term(), State::state()) ->
{noreply, state()}.
handle_info( BasicConsumeOK = #'basic.consume_ok'{}
, State = #{client_msgs_handler := MsgsHandlerPid}
) ->
NewMsg = esl_rabbitmq_client_amqp:from_basic_consume_ok(BasicConsumeOK),
MsgsHandlerPid ! NewMsg,
{noreply, State};
handle_info( {BasicDeliver = #'basic.deliver'{}, AMQPMsg = #amqp_msg{}}
, State = #{client_msgs_handler := MsgsHandlerPid}
) ->
NewMsg = esl_rabbitmq_client_amqp:from_basic_deliver(BasicDeliver, AMQPMsg),
MsgsHandlerPid ! NewMsg,
{noreply, State};
handle_info( BasicCancel = #'basic.cancel'{}
, State = #{client_msgs_handler := MsgsHandlerPid}
) ->
NewMsg = esl_rabbitmq_client_amqp:from_basic_cancel(BasicCancel),
MsgsHandlerPid ! NewMsg,
{noreply, State};
handle_info(Info, State) ->
error_logger:info_msg( "Unknown Info -> ~p when state was -> ~p~n"
, [Info, State]
),
{noreply, State}.
-spec handle_cast(Request::term(), State::state()) ->
{noreply, state()}.
handle_cast(Request, State) ->
error_logger:info_msg( "Unknown Request -> ~p when state was -> ~p~n"
, [Request, State]
),
{noreply, State}.
-spec code_change(OldVsn::term(), State::state(), Extra::term()) ->
{ok, state()}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
-spec terminate(Reason::term(), State::state()) ->
ok.
terminate(Reason, State) ->
error_logger:info_msg( "Terminating with reason -> ~p when state was -> ~p~n"
, [Reason, State]
),
ok.
|
7c4b7b32cff8c7967b8fccf02a67478a594ed69594457967267ea4f3a5a7ced5 | spechub/Hets | ProverTools.hs | |
Module : ./Common / ProverTools.hs
Description : Check for availability of provers
Copyright : ( c ) , and Uni Bremen 2008
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
check for provers
Module : ./Common/ProverTools.hs
Description : Check for availability of provers
Copyright : (c) Dminik Luecke, and Uni Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
check for provers
-}
module Common.ProverTools where
import Common.Utils
import System.Directory
import System.FilePath
missingExecutableInPath :: String -> IO Bool
missingExecutableInPath name = do
mp <- findExecutable name
case mp of
Nothing -> return True
Just name' -> do
p1 <- check4File (takeFileName name') "PATH" ()
p2 <- check4File (takeFileName name') "Path" ()
return . null $ p1 ++ p2
check4FileAux :: String -- ^ file name
-> String -- ^ Environment Variable
-> IO [String]
check4FileAux name env = do
pPath <- getEnvDef env ""
let path = "" : splitPaths pPath
exIT <- mapM (doesFileExist . (</> name)) path
return $ map fst $ filter snd $ zip path exIT
-- | Checks if a file exists in PATH
checkBinary :: String -> IO (Maybe String)
checkBinary name =
fmap
( \l ->
if null l
then Just $ "missing binary in $PATH: " ++ name
else Nothing
)
$ check4FileAux name "PATH"
-- | Checks if a file exists
check4File :: String -- ^ file name
-> String -- ^ Environment Variable
-> a
-> IO [a]
check4File name env a = do
ex <- check4FileAux name env
return [a | not $ null ex ]
| check for java and the jar file in the directory of the variable
check4jarFile :: String -- ^ environment Variable
-> String -- ^ jar file name
-> IO (Bool, FilePath)
check4jarFile = check4jarFileWithDefault ""
check4jarFileWithDefault
:: String -- ^ default path
-> String -- ^ environment Variable
-> String -- ^ jar file name
-> IO (Bool, FilePath)
check4jarFileWithDefault def var jar = do
pPath <- getEnvDef var def
hasJar <- doesFileExist $ pPath </> jar
return (hasJar, pPath)
-- | environment variable for HETS_OWL_TOOLS
hetsOWLenv :: String
hetsOWLenv = "HETS_OWL_TOOLS"
-- | check for the jar file under HETS_OWL_TOOLS
check4HetsOWLjar :: String -- ^ jar file name
-> IO (Bool, FilePath)
check4HetsOWLjar = check4jarFileWithDefault "OWL2" hetsOWLenv
checkOWLjar :: String -> IO (Maybe String)
checkOWLjar name =
fmap (\ (b, p) -> if b then Nothing else
Just $ "missing jar ($" ++ hetsOWLenv ++ "): " ++ (p </> name))
$ check4HetsOWLjar name
| null | https://raw.githubusercontent.com/spechub/Hets/65413446b056dfc965a75e9076eaf78b2ec550e7/Common/ProverTools.hs | haskell | ^ file name
^ Environment Variable
| Checks if a file exists in PATH
| Checks if a file exists
^ file name
^ Environment Variable
^ environment Variable
^ jar file name
^ default path
^ environment Variable
^ jar file name
| environment variable for HETS_OWL_TOOLS
| check for the jar file under HETS_OWL_TOOLS
^ jar file name | |
Module : ./Common / ProverTools.hs
Description : Check for availability of provers
Copyright : ( c ) , and Uni Bremen 2008
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
check for provers
Module : ./Common/ProverTools.hs
Description : Check for availability of provers
Copyright : (c) Dminik Luecke, and Uni Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
check for provers
-}
module Common.ProverTools where
import Common.Utils
import System.Directory
import System.FilePath
missingExecutableInPath :: String -> IO Bool
missingExecutableInPath name = do
mp <- findExecutable name
case mp of
Nothing -> return True
Just name' -> do
p1 <- check4File (takeFileName name') "PATH" ()
p2 <- check4File (takeFileName name') "Path" ()
return . null $ p1 ++ p2
-> IO [String]
check4FileAux name env = do
pPath <- getEnvDef env ""
let path = "" : splitPaths pPath
exIT <- mapM (doesFileExist . (</> name)) path
return $ map fst $ filter snd $ zip path exIT
checkBinary :: String -> IO (Maybe String)
checkBinary name =
fmap
( \l ->
if null l
then Just $ "missing binary in $PATH: " ++ name
else Nothing
)
$ check4FileAux name "PATH"
-> a
-> IO [a]
check4File name env a = do
ex <- check4FileAux name env
return [a | not $ null ex ]
| check for java and the jar file in the directory of the variable
-> IO (Bool, FilePath)
check4jarFile = check4jarFileWithDefault ""
check4jarFileWithDefault
-> IO (Bool, FilePath)
check4jarFileWithDefault def var jar = do
pPath <- getEnvDef var def
hasJar <- doesFileExist $ pPath </> jar
return (hasJar, pPath)
hetsOWLenv :: String
hetsOWLenv = "HETS_OWL_TOOLS"
-> IO (Bool, FilePath)
check4HetsOWLjar = check4jarFileWithDefault "OWL2" hetsOWLenv
checkOWLjar :: String -> IO (Maybe String)
checkOWLjar name =
fmap (\ (b, p) -> if b then Nothing else
Just $ "missing jar ($" ++ hetsOWLenv ++ "): " ++ (p </> name))
$ check4HetsOWLjar name
|
91e4bf69b2bfe8940e4269a8db25bae485bab9020424495773b15632d14fc353 | haskell-opengl/OpenGLRaw | FramebufferBlit.hs | # LANGUAGE PatternSynonyms #
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.EXT.FramebufferBlit
Copyright : ( c ) 2019
-- License : BSD3
--
Maintainer : < >
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.EXT.FramebufferBlit (
-- * Extension Support
glGetEXTFramebufferBlit,
gl_EXT_framebuffer_blit,
-- * Enums
pattern GL_DRAW_FRAMEBUFFER_BINDING_EXT,
pattern GL_DRAW_FRAMEBUFFER_EXT,
pattern GL_READ_FRAMEBUFFER_BINDING_EXT,
pattern GL_READ_FRAMEBUFFER_EXT,
-- * Functions
glBlitFramebufferEXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| null | https://raw.githubusercontent.com/haskell-opengl/OpenGLRaw/57e50c9d28dfa62d6a87ae9b561af28f64ce32a0/src/Graphics/GL/EXT/FramebufferBlit.hs | haskell | ------------------------------------------------------------------------------
|
Module : Graphics.GL.EXT.FramebufferBlit
License : BSD3
Stability : stable
Portability : portable
------------------------------------------------------------------------------
* Extension Support
* Enums
* Functions | # LANGUAGE PatternSynonyms #
Copyright : ( c ) 2019
Maintainer : < >
module Graphics.GL.EXT.FramebufferBlit (
glGetEXTFramebufferBlit,
gl_EXT_framebuffer_blit,
pattern GL_DRAW_FRAMEBUFFER_BINDING_EXT,
pattern GL_DRAW_FRAMEBUFFER_EXT,
pattern GL_READ_FRAMEBUFFER_BINDING_EXT,
pattern GL_READ_FRAMEBUFFER_EXT,
glBlitFramebufferEXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
|
5cbd6037401fd53bb20c2787aa3c1219b508e181a39bb2d381b129d5f6c9127d | dhall-lang/dhall-to-cabal | Util.hs | # language ViewPatterns #
module DhallToCabal.Util
( relativeTo
)
where
import System.FilePath
( dropTrailingPathSeparator, joinPath, normalise, splitDirectories, takeDirectory )
-- | Like 'System.FilePath.makeRelative', but will introduce @..@
-- segments (and hence will misbehave in the presence of symlinks).
--
-- If the path being relativised is identical to the root path, then
-- this will return the empty string.
relativeTo
:: FilePath
-- ^ The path to be relative to. Note that the final file-name is
ignored : @foo / bar@ is relative to @foo/@ , even if @foo / bar@ is
-- a directory.
-> FilePath
-- ^ The path to relativise.
-> FilePath
relativeTo =
\ ( splitDirectories . dropTrailingPathSeparator . takeDirectory . normalise -> base ) ->
\ ( splitDirectories . normalise -> path ) ->
joinPath ( go base path )
where
@normalise " ."@ is @" . "@ , so we have to take care here with dots .
go ( a : as ) ( b : bs )
| a == b = go as bs
| a == "." = go as ( b : bs )
| b == "." = go (a : as) bs
| otherwise = ( ".." <$ ( a : as ) ) ++ ( b : bs )
go [] bs = bs
go as [] = ".." <$ as
| null | https://raw.githubusercontent.com/dhall-lang/dhall-to-cabal/d16c510b87e966c3031e07b1810d152b051bc0d8/lib/DhallToCabal/Util.hs | haskell | | Like 'System.FilePath.makeRelative', but will introduce @..@
segments (and hence will misbehave in the presence of symlinks).
If the path being relativised is identical to the root path, then
this will return the empty string.
^ The path to be relative to. Note that the final file-name is
a directory.
^ The path to relativise. | # language ViewPatterns #
module DhallToCabal.Util
( relativeTo
)
where
import System.FilePath
( dropTrailingPathSeparator, joinPath, normalise, splitDirectories, takeDirectory )
relativeTo
:: FilePath
ignored : @foo / bar@ is relative to @foo/@ , even if @foo / bar@ is
-> FilePath
-> FilePath
relativeTo =
\ ( splitDirectories . dropTrailingPathSeparator . takeDirectory . normalise -> base ) ->
\ ( splitDirectories . normalise -> path ) ->
joinPath ( go base path )
where
@normalise " ."@ is @" . "@ , so we have to take care here with dots .
go ( a : as ) ( b : bs )
| a == b = go as bs
| a == "." = go as ( b : bs )
| b == "." = go (a : as) bs
| otherwise = ( ".." <$ ( a : as ) ) ++ ( b : bs )
go [] bs = bs
go as [] = ".." <$ as
|
90ab819276692c93c63e738e810fb9551a23cddef6b9c52d5a8a03881eb1d178 | JHU-PL-Lab/jaylang | McCarthy9102.ml |
let rec bot _ = bot ()
let fail _ = assert false
let rec mc91_1030 mc91_without_checking_1058 prev_set_flag_mc91_1049 s_prev_mc91_n_1048 n_1031 =
let u = if prev_set_flag_mc91_1049 then
if (111 * 1) + (-s_prev_mc91_n_1048) > (111 * 1) + (-n_1031) &&
(111 * 1) + (-n_1031) >= 0 then
()
else
let u_2195 = fail ()
in
bot()
else () in
mc91_without_checking_1058 prev_set_flag_mc91_1049
s_prev_mc91_n_1048 n_1031
let rec mc91_without_checking_1058 set_flag_mc91_1050 s_mc91_n_1047 n_1031 =
let set_flag_mc91_1050 = true
in
let s_mc91_n_1047 = n_1031
in
if n_1031 > 100 then
n_1031 - 10
else
mc91_without_checking_1058 set_flag_mc91_1050 s_mc91_n_1047
(mc91_1030 mc91_without_checking_1058 set_flag_mc91_1050 s_mc91_n_1047 (n_1031 + 11))
let main_1032 r =
let set_flag_mc91_1050 = false in
let s_mc91_n_1047 = 0 in
mc91_without_checking_1058 set_flag_mc91_1050 s_mc91_n_1047 r
| null | https://raw.githubusercontent.com/JHU-PL-Lab/jaylang/484b3876986a515fb57b11768a1b3b50418cde0c/benchmark/cases/mochi_origin/termination/McCarthy9102.ml | ocaml |
let rec bot _ = bot ()
let fail _ = assert false
let rec mc91_1030 mc91_without_checking_1058 prev_set_flag_mc91_1049 s_prev_mc91_n_1048 n_1031 =
let u = if prev_set_flag_mc91_1049 then
if (111 * 1) + (-s_prev_mc91_n_1048) > (111 * 1) + (-n_1031) &&
(111 * 1) + (-n_1031) >= 0 then
()
else
let u_2195 = fail ()
in
bot()
else () in
mc91_without_checking_1058 prev_set_flag_mc91_1049
s_prev_mc91_n_1048 n_1031
let rec mc91_without_checking_1058 set_flag_mc91_1050 s_mc91_n_1047 n_1031 =
let set_flag_mc91_1050 = true
in
let s_mc91_n_1047 = n_1031
in
if n_1031 > 100 then
n_1031 - 10
else
mc91_without_checking_1058 set_flag_mc91_1050 s_mc91_n_1047
(mc91_1030 mc91_without_checking_1058 set_flag_mc91_1050 s_mc91_n_1047 (n_1031 + 11))
let main_1032 r =
let set_flag_mc91_1050 = false in
let s_mc91_n_1047 = 0 in
mc91_without_checking_1058 set_flag_mc91_1050 s_mc91_n_1047 r
| |
da7a7a25dde104b33b3ca74877cef42791b6a1e38dbc64395d26146f4b528bc3 | Mathieu-Desrochers/Schemings | http-client.scm | (import srfi-1)
(import srfi-4)
(import srfi-13)
(import (chicken condition))
(import (chicken format))
(declare (unit http-client))
(declare (uses curl))
(declare (uses http-client-intern))
;; encapsulates an http client request
(define-typed-record http-client-request
(method string)
(url string)
(username (or string false))
(password (or string false))
(headers (or (list-of string) false))
(body (or string false)))
;; encapsulates an http client response
(define-typed-record http-client-response
(status-code fixnum)
(body string))
;; initializes the http client unit
(: http-client-init (-> noreturn))
(define (http-client-init)
(curl-global-init curl-global-all))
;; url encodes a string
(: http-client-url-encode (string -> string))
(define (http-client-url-encode string)
(with-curl*
(lambda (curl*)
(curl-easy-escape curl* string 0))))
;; performs an http client request
(: http-client-perform ((struct http-client-request) -> (struct http-client-response)))
(define (http-client-perform http-client-request)
(with-curl*
(lambda (curl*)
;; set the method options
(cond
((equal? (http-client-request-method http-client-request) "PUT")
(unless
(eq? curle-ok
(curl-easy-setopt-string curl* curlopt-customrequest "PUT"))
(abort "failed to set CURLOPT_CUSTOMREQUEST to PUT")))
((equal? (http-client-request-method http-client-request) "DELETE")
(unless
(eq? curle-ok
(curl-easy-setopt-string curl* curlopt-customrequest "DELETE"))
(abort "failed to set CURLOPT_CUSTOMREQUEST to DELETE"))))
;; set the url
(unless
(eq? curle-ok
(curl-easy-setopt-string curl* curlopt-url
(http-client-request-url http-client-request)))
(abort
(format "failed to set CURLOPT_URL to ~A"
(http-client-request-url http-client-request))))
;; set the username and password
(if (and (http-client-request-username http-client-request)
(http-client-request-password http-client-request))
(unless
(eq? curle-ok
(curl-easy-setopt-string curl* curlopt-userpwd
(string-append
(http-client-request-username http-client-request) ":"
(http-client-request-password http-client-request))))
(abort "failed to set CURLOPT_USERPWD to *****:*****")))
(with-curl-slist**
(lambda (headers-curl-slist**)
;; set the headers
(if (http-client-request-headers http-client-request)
(begin
(for-each
(lambda (header)
(curl-slist-append headers-curl-slist** header))
(http-client-request-headers http-client-request))
(unless
(eq? curle-ok
(curl-easy-setopt-strings curl* curlopt-httpheader headers-curl-slist**))
(abort
(format "failed to set CURLOPT_HTTPHEADER to ~A"
(http-client-request-headers http-client-request))))))
;; set the body
(if (http-client-request-body http-client-request)
(unless
(eq? curle-ok
(curl-easy-setopt-string curl* curlopt-copypostfields
(http-client-request-body http-client-request)))
(abort
(format
"failed to set CURLOPT_COPYPOSTFIELDS to ~A..."
(substring/shared
(http-client-request-body http-client-request)
0 256)))))
;; perform the request
(let* ((error-code-vector (make-s32vector 1 0))
(response-body (curl-easy-perform curl* error-code-vector))
(error-code (s32vector-ref error-code-vector 0)))
(unless (eq? error-code 0)
(abort
(format "failed to perform http request ~A ~A with error code ~A"
(http-client-request-method http-client-request)
(http-client-request-url http-client-request)
error-code)))
;; get the response code
(let ((response-code-vector (make-s64vector 1 0)))
(unless
(eq? curle-ok
(curl-easy-getinfo-long curl* curlinfo-response-code response-code-vector))
(abort "failed to get CURLINFO_RESPONSE_CODE"))
;; make the http client response
(let ((response-code (s64vector-ref response-code-vector 0)))
(make-http-client-response
response-code
response-body)))))))))
;; cleans up the http client unit
(: http-client-cleanup (-> noreturn))
(define (http-client-cleanup)
(curl-global-cleanup))
| null | https://raw.githubusercontent.com/Mathieu-Desrochers/Schemings/a7c322ee37bf9f43b696c52fc290488aa2dcc238/sources/units/http-client.scm | scheme | encapsulates an http client request
encapsulates an http client response
initializes the http client unit
url encodes a string
performs an http client request
set the method options
set the url
set the username and password
set the headers
set the body
perform the request
get the response code
make the http client response
cleans up the http client unit | (import srfi-1)
(import srfi-4)
(import srfi-13)
(import (chicken condition))
(import (chicken format))
(declare (unit http-client))
(declare (uses curl))
(declare (uses http-client-intern))
(define-typed-record http-client-request
(method string)
(url string)
(username (or string false))
(password (or string false))
(headers (or (list-of string) false))
(body (or string false)))
(define-typed-record http-client-response
(status-code fixnum)
(body string))
(: http-client-init (-> noreturn))
(define (http-client-init)
(curl-global-init curl-global-all))
(: http-client-url-encode (string -> string))
(define (http-client-url-encode string)
(with-curl*
(lambda (curl*)
(curl-easy-escape curl* string 0))))
(: http-client-perform ((struct http-client-request) -> (struct http-client-response)))
(define (http-client-perform http-client-request)
(with-curl*
(lambda (curl*)
(cond
((equal? (http-client-request-method http-client-request) "PUT")
(unless
(eq? curle-ok
(curl-easy-setopt-string curl* curlopt-customrequest "PUT"))
(abort "failed to set CURLOPT_CUSTOMREQUEST to PUT")))
((equal? (http-client-request-method http-client-request) "DELETE")
(unless
(eq? curle-ok
(curl-easy-setopt-string curl* curlopt-customrequest "DELETE"))
(abort "failed to set CURLOPT_CUSTOMREQUEST to DELETE"))))
(unless
(eq? curle-ok
(curl-easy-setopt-string curl* curlopt-url
(http-client-request-url http-client-request)))
(abort
(format "failed to set CURLOPT_URL to ~A"
(http-client-request-url http-client-request))))
(if (and (http-client-request-username http-client-request)
(http-client-request-password http-client-request))
(unless
(eq? curle-ok
(curl-easy-setopt-string curl* curlopt-userpwd
(string-append
(http-client-request-username http-client-request) ":"
(http-client-request-password http-client-request))))
(abort "failed to set CURLOPT_USERPWD to *****:*****")))
(with-curl-slist**
(lambda (headers-curl-slist**)
(if (http-client-request-headers http-client-request)
(begin
(for-each
(lambda (header)
(curl-slist-append headers-curl-slist** header))
(http-client-request-headers http-client-request))
(unless
(eq? curle-ok
(curl-easy-setopt-strings curl* curlopt-httpheader headers-curl-slist**))
(abort
(format "failed to set CURLOPT_HTTPHEADER to ~A"
(http-client-request-headers http-client-request))))))
(if (http-client-request-body http-client-request)
(unless
(eq? curle-ok
(curl-easy-setopt-string curl* curlopt-copypostfields
(http-client-request-body http-client-request)))
(abort
(format
"failed to set CURLOPT_COPYPOSTFIELDS to ~A..."
(substring/shared
(http-client-request-body http-client-request)
0 256)))))
(let* ((error-code-vector (make-s32vector 1 0))
(response-body (curl-easy-perform curl* error-code-vector))
(error-code (s32vector-ref error-code-vector 0)))
(unless (eq? error-code 0)
(abort
(format "failed to perform http request ~A ~A with error code ~A"
(http-client-request-method http-client-request)
(http-client-request-url http-client-request)
error-code)))
(let ((response-code-vector (make-s64vector 1 0)))
(unless
(eq? curle-ok
(curl-easy-getinfo-long curl* curlinfo-response-code response-code-vector))
(abort "failed to get CURLINFO_RESPONSE_CODE"))
(let ((response-code (s64vector-ref response-code-vector 0)))
(make-http-client-response
response-code
response-body)))))))))
(: http-client-cleanup (-> noreturn))
(define (http-client-cleanup)
(curl-global-cleanup))
|
c065e17973658fca68fb3ea195afc41e0f71ce620d1d93a97e9d42fef5f676a3 | MATRIXKOO/test_for_sicp | e1_15.scm.rkt | #lang sicp
(define (cube x) (* x x x))
(define (p x) (- (* 3 x) (* 4 (cube x))))
(define (sine angle)
(if (not (> (abs angle) 0.1))
angle
(p (sine (/ angle 3.0)))))
5times
; n(log(3)a) | null | https://raw.githubusercontent.com/MATRIXKOO/test_for_sicp/9772d67fd43ee919c4ba85c1d584d9875745f8ba/Chapter1/e1_15.scm.rkt | racket | n(log(3)a) | #lang sicp
(define (cube x) (* x x x))
(define (p x) (- (* 3 x) (* 4 (cube x))))
(define (sine angle)
(if (not (> (abs angle) 0.1))
angle
(p (sine (/ angle 3.0)))))
5times
|
8b53672f41140f6822829bbabc6fb89f158bc5c8740a5bd9ad0ebd608ac57f08 | nasa/Common-Metadata-Repository | routes.clj | (ns cmr.bootstrap.api.routes
"Defines the HTTP URL routes for the application."
(:require
[cmr.acl.core :as acl]
[cmr.bootstrap.api.bulk-index :as bulk-index]
[cmr.bootstrap.api.bulk-migration :as bulk-migration]
[cmr.bootstrap.api.fingerprint :as fingerprint]
[cmr.bootstrap.api.rebalancing :as rebalancing]
[cmr.bootstrap.api.virtual-products :as virtual-products]
[cmr.bootstrap.services.health-service :as hs]
[cmr.common-app.api.health :as common-health]
[cmr.common-app.api.routes :as common-routes]
[cmr.common.api.context :as context]
[cmr.common.api.errors :as errors]
[cmr.common.jobs :as jobs]
[cmr.common.log :refer [info]]
[cmr.common.generics :as common-generic]
[compojure.core :refer :all]
[compojure.route :as route]
[drift.execute :as drift]
[inflections.core :as inf]
[ring.middleware.json :as ring-json]
[ring.middleware.keyword-params :as keyword-params]
[ring.middleware.nested-params :as nested-params]
[ring.middleware.params :as params]))
(defn- build-routes [system]
(routes
(context (:relative-root-url system) []
for NGAP deployment health check
(GET "/" {} {:status 200})
(context "/bulk_migration" []
(POST "/providers" {:keys [request-context body params]}
(bulk-migration/migrate-provider request-context body params))
(POST "/collections" {:keys [request-context body params]}
(bulk-migration/migrate-collection request-context body params)))
(context "/bulk_index" []
(POST "/providers" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-provider request-context body params))
(POST "/providers/all" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-all-providers request-context params))
(POST "/collections" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-collection request-context body params))
(POST "/after_date_time" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/data-later-than-date-time request-context body params))
(POST "/system_concepts" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-system-concepts request-context params))
(POST "/variables" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-variables request-context params))
(POST "/variables/:provider-id" [provider-id :as {:keys [request-context params]}]
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-variables request-context params provider-id))
(POST "/services" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-services request-context params))
(POST "/services/:provider-id" [provider-id :as {:keys [request-context params]}]
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-services request-context params provider-id))
(POST "/tools" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-tools request-context params))
(POST "/tools/:provider-id" [provider-id :as {:keys [request-context params]}]
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-tools request-context params provider-id))
(POST "/subscriptions" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-subscriptions request-context params))
(POST "/subscriptions/:provider-id" [provider-id :as {:keys [request-context params]}]
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-subscriptions request-context params provider-id))
(POST "/concepts" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-concepts-by-id request-context body params))
(DELETE "/concepts" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/delete-concepts-by-id request-context body params))
;; generating pluralized endpoints for each generic document type & converting to singular in call
(context ["/:concept-type" :concept-type
(re-pattern common-generic/plural-generic-concept-types-reg-ex)] [concept-type]
(POST "/" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-generics request-context params (inf/singular concept-type)))
(POST "/:provider-id" [provider-id :as {:keys [request-context params]}]
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-generics request-context params (inf/singular concept-type) provider-id))))
(context "/rebalancing_collections/:concept-id" [concept-id]
;; Start rebalancing
(POST "/start" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(rebalancing/start-collection request-context concept-id params))
;; Get counts of rebalancing data
(GET "/status" {:keys [request-context]}
(acl/verify-ingest-management-permission request-context :update)
(rebalancing/get-status request-context concept-id))
;; Complete reindexing
(POST "/finalize" {:keys [request-context]}
(acl/verify-ingest-management-permission request-context :update)
(rebalancing/finalize-collection request-context concept-id)))
(context "/virtual_products" []
(POST "/" {:keys [request-context params]}
(virtual-products/bootstrap request-context params)))
(context "/fingerprint" []
(POST "/variables" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(fingerprint/fingerprint-variables request-context body params))
(POST "/variables/:concept-id" [concept-id :as {:keys [request-context]}]
(acl/verify-ingest-management-permission request-context :update)
(fingerprint/fingerprint-by-id request-context concept-id)))
;; Add routes for accessing caches
common-routes/cache-api-routes
;; db migration route
(POST "/db-migrate" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(let [migrate-args (if-let [version (:version params)]
["migrate" "-version" version]
["migrate"])]
(info "Running db migration:" migrate-args)
(drift/run (conj migrate-args "-c" "config.bootstrap-migrate-config/app-migrate-config")))
{:status 204})
;; Add routes for checking health of the application
(common-health/health-api-routes hs/health))
(route/not-found "Not Found")))
(defn make-api [system]
(-> (build-routes system)
acl/add-authentication-handler
errors/invalid-url-encoding-handler
errors/exception-handler
common-routes/add-request-id-response-handler
(context/build-request-context-handler system)
keyword-params/wrap-keyword-params
nested-params/wrap-nested-params
ring-json/wrap-json-body
common-routes/pretty-print-response-handler
params/wrap-params))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/5bed3637580e2ad6e0b0ecda4c90845171ffeb8a/bootstrap-app/src/cmr/bootstrap/api/routes.clj | clojure | generating pluralized endpoints for each generic document type & converting to singular in call
Start rebalancing
Get counts of rebalancing data
Complete reindexing
Add routes for accessing caches
db migration route
Add routes for checking health of the application | (ns cmr.bootstrap.api.routes
"Defines the HTTP URL routes for the application."
(:require
[cmr.acl.core :as acl]
[cmr.bootstrap.api.bulk-index :as bulk-index]
[cmr.bootstrap.api.bulk-migration :as bulk-migration]
[cmr.bootstrap.api.fingerprint :as fingerprint]
[cmr.bootstrap.api.rebalancing :as rebalancing]
[cmr.bootstrap.api.virtual-products :as virtual-products]
[cmr.bootstrap.services.health-service :as hs]
[cmr.common-app.api.health :as common-health]
[cmr.common-app.api.routes :as common-routes]
[cmr.common.api.context :as context]
[cmr.common.api.errors :as errors]
[cmr.common.jobs :as jobs]
[cmr.common.log :refer [info]]
[cmr.common.generics :as common-generic]
[compojure.core :refer :all]
[compojure.route :as route]
[drift.execute :as drift]
[inflections.core :as inf]
[ring.middleware.json :as ring-json]
[ring.middleware.keyword-params :as keyword-params]
[ring.middleware.nested-params :as nested-params]
[ring.middleware.params :as params]))
(defn- build-routes [system]
(routes
(context (:relative-root-url system) []
for NGAP deployment health check
(GET "/" {} {:status 200})
(context "/bulk_migration" []
(POST "/providers" {:keys [request-context body params]}
(bulk-migration/migrate-provider request-context body params))
(POST "/collections" {:keys [request-context body params]}
(bulk-migration/migrate-collection request-context body params)))
(context "/bulk_index" []
(POST "/providers" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-provider request-context body params))
(POST "/providers/all" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-all-providers request-context params))
(POST "/collections" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-collection request-context body params))
(POST "/after_date_time" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/data-later-than-date-time request-context body params))
(POST "/system_concepts" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-system-concepts request-context params))
(POST "/variables" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-variables request-context params))
(POST "/variables/:provider-id" [provider-id :as {:keys [request-context params]}]
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-variables request-context params provider-id))
(POST "/services" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-services request-context params))
(POST "/services/:provider-id" [provider-id :as {:keys [request-context params]}]
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-services request-context params provider-id))
(POST "/tools" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-tools request-context params))
(POST "/tools/:provider-id" [provider-id :as {:keys [request-context params]}]
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-tools request-context params provider-id))
(POST "/subscriptions" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-subscriptions request-context params))
(POST "/subscriptions/:provider-id" [provider-id :as {:keys [request-context params]}]
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-subscriptions request-context params provider-id))
(POST "/concepts" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-concepts-by-id request-context body params))
(DELETE "/concepts" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/delete-concepts-by-id request-context body params))
(context ["/:concept-type" :concept-type
(re-pattern common-generic/plural-generic-concept-types-reg-ex)] [concept-type]
(POST "/" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-generics request-context params (inf/singular concept-type)))
(POST "/:provider-id" [provider-id :as {:keys [request-context params]}]
(acl/verify-ingest-management-permission request-context :update)
(bulk-index/index-generics request-context params (inf/singular concept-type) provider-id))))
(context "/rebalancing_collections/:concept-id" [concept-id]
(POST "/start" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(rebalancing/start-collection request-context concept-id params))
(GET "/status" {:keys [request-context]}
(acl/verify-ingest-management-permission request-context :update)
(rebalancing/get-status request-context concept-id))
(POST "/finalize" {:keys [request-context]}
(acl/verify-ingest-management-permission request-context :update)
(rebalancing/finalize-collection request-context concept-id)))
(context "/virtual_products" []
(POST "/" {:keys [request-context params]}
(virtual-products/bootstrap request-context params)))
(context "/fingerprint" []
(POST "/variables" {:keys [request-context body params]}
(acl/verify-ingest-management-permission request-context :update)
(fingerprint/fingerprint-variables request-context body params))
(POST "/variables/:concept-id" [concept-id :as {:keys [request-context]}]
(acl/verify-ingest-management-permission request-context :update)
(fingerprint/fingerprint-by-id request-context concept-id)))
common-routes/cache-api-routes
(POST "/db-migrate" {:keys [request-context params]}
(acl/verify-ingest-management-permission request-context :update)
(let [migrate-args (if-let [version (:version params)]
["migrate" "-version" version]
["migrate"])]
(info "Running db migration:" migrate-args)
(drift/run (conj migrate-args "-c" "config.bootstrap-migrate-config/app-migrate-config")))
{:status 204})
(common-health/health-api-routes hs/health))
(route/not-found "Not Found")))
(defn make-api [system]
(-> (build-routes system)
acl/add-authentication-handler
errors/invalid-url-encoding-handler
errors/exception-handler
common-routes/add-request-id-response-handler
(context/build-request-context-handler system)
keyword-params/wrap-keyword-params
nested-params/wrap-nested-params
ring-json/wrap-json-body
common-routes/pretty-print-response-handler
params/wrap-params))
|
0ff53e6da764422fccf592c3ce123d2e94dea2a87f4f09c068ed8ee7df6e0043 | astrada/rxocaml | rxObserver.ml | Internal module ( see Rx . Observer )
*
* Implementation based on :
* #Rx.NET/Source/System.Reactive.Core/Observer.Extensions.cs
*
* Implementation based on:
* #Rx.NET/Source/System.Reactive.Core/Observer.Extensions.cs
*)
let create
?(on_completed = fun () -> ())
?(on_error = fun e -> raise e)
on_next =
(on_completed, on_error, on_next)
module type ObserverState = sig
type 'a state
val initial_state : unit -> 'a state
val on_completed : 'a state -> 'a state
val on_error : exn -> 'a state -> 'a state
val on_next : 'a -> 'a state -> 'a state
end
module MakeObserverWithState
(O : ObserverState)
(D : RxCore.MutableData) = struct
let create () =
let state = D.create @@ O.initial_state () in
let update f =
let s = D.get state in
let s' = f s in
D.set s' state
in
let on_completed () = update O.on_completed in
let on_error e = update @@ O.on_error e in
let on_next v = update @@ O.on_next v in
let observer = create ~on_completed ~on_error on_next in
(observer, state)
end
module ObserverBase = struct
(* Original implementation:
* #Rx.NET/Source/System.Reactive.Core/Reactive/ObserverBase.cs
*)
let create (on_completed, on_error, on_next) =
let is_stopped = RxAtomicData.create false in
let stop () =
RxAtomicData.compare_and_set false true is_stopped in
let on_completed' () =
let was_stopped = stop () in
if not was_stopped then on_completed ()
in
let on_error' e =
let was_stopped = stop () in
if not was_stopped then on_error e
in
let on_next' x =
if not (RxAtomicData.unsafe_get is_stopped) then on_next x
in
(on_completed', on_error', on_next')
end
module CheckedObserver = struct
In the original implementation , synchronization for the observer state
* is obtained through CAS ( compare - and - swap ) primitives , but in OCaml we
* do n't have a standard / portable CAS primitive , so I 'm using a mutex .
* ( see #Rx.NET/Source/System.Reactive.Core/Reactive/Internal/CheckedObserver.cs )
* is obtained through CAS (compare-and-swap) primitives, but in OCaml we
* don't have a standard/portable CAS primitive, so I'm using a mutex.
* (see #Rx.NET/Source/System.Reactive.Core/Reactive/Internal/CheckedObserver.cs)
*)
type state = Idle | Busy | Done
let create (on_completed, on_error, on_next) =
let state = RxAtomicData.create Idle in
let check_access () =
RxAtomicData.update
(fun s ->
match s with
| Idle ->
Busy
| Busy ->
failwith "Reentrancy has been detected."
| Done ->
failwith "Observer has already terminated."
) state
in
let wrap_action thunk new_state =
check_access ();
Utils.try_finally
thunk
(fun () -> RxAtomicData.set new_state state)
in
let on_completed' () = wrap_action (fun () -> on_completed ()) Done in
let on_error' e = wrap_action (fun () -> on_error e) Done in
let on_next' x = wrap_action (fun () -> on_next x) Idle in
(on_completed', on_error', on_next')
end
let checked observer = CheckedObserver.create observer
module SynchronizedObserver = struct
(* Original implementation:
* #Rx.NET/Source/System.Reactive.Core/Reactive/Internal/SynchronizedObserver.cs
*)
let create (on_completed, on_error, on_next) =
let lock = BatRMutex.create () in
let with_lock f a = BatRMutex.synchronize ~lock f a in
let on_completed' () = with_lock on_completed () in
let on_error' e = with_lock on_error e in
let on_next' x = with_lock on_next x in
(on_completed', on_error', on_next')
end
let synchronize observer = SynchronizedObserver.create observer
module AsyncLockObserver = struct
(* Original implementation:
* #Rx.NET/Source/System.Reactive.Core/Reactive/Internal/AsyncLockObserver.cs
*)
let create (on_completed, on_error, on_next) =
let async_lock = RxAsyncLock.create () in
let with_lock thunk = RxAsyncLock.wait async_lock thunk in
let on_completed' () = with_lock (fun () -> on_completed ()) in
let on_error' e = with_lock (fun () -> on_error e) in
let on_next' x = with_lock (fun () -> on_next x) in
ObserverBase.create (on_completed', on_error', on_next')
end
let synchronize_async_lock observer = AsyncLockObserver.create observer
| null | https://raw.githubusercontent.com/astrada/rxocaml/6a4ea758108a07a8da129ba1555a260604f79091/src/rxObserver.ml | ocaml | Original implementation:
* #Rx.NET/Source/System.Reactive.Core/Reactive/ObserverBase.cs
Original implementation:
* #Rx.NET/Source/System.Reactive.Core/Reactive/Internal/SynchronizedObserver.cs
Original implementation:
* #Rx.NET/Source/System.Reactive.Core/Reactive/Internal/AsyncLockObserver.cs
| Internal module ( see Rx . Observer )
*
* Implementation based on :
* #Rx.NET/Source/System.Reactive.Core/Observer.Extensions.cs
*
* Implementation based on:
* #Rx.NET/Source/System.Reactive.Core/Observer.Extensions.cs
*)
let create
?(on_completed = fun () -> ())
?(on_error = fun e -> raise e)
on_next =
(on_completed, on_error, on_next)
module type ObserverState = sig
type 'a state
val initial_state : unit -> 'a state
val on_completed : 'a state -> 'a state
val on_error : exn -> 'a state -> 'a state
val on_next : 'a -> 'a state -> 'a state
end
module MakeObserverWithState
(O : ObserverState)
(D : RxCore.MutableData) = struct
let create () =
let state = D.create @@ O.initial_state () in
let update f =
let s = D.get state in
let s' = f s in
D.set s' state
in
let on_completed () = update O.on_completed in
let on_error e = update @@ O.on_error e in
let on_next v = update @@ O.on_next v in
let observer = create ~on_completed ~on_error on_next in
(observer, state)
end
module ObserverBase = struct
let create (on_completed, on_error, on_next) =
let is_stopped = RxAtomicData.create false in
let stop () =
RxAtomicData.compare_and_set false true is_stopped in
let on_completed' () =
let was_stopped = stop () in
if not was_stopped then on_completed ()
in
let on_error' e =
let was_stopped = stop () in
if not was_stopped then on_error e
in
let on_next' x =
if not (RxAtomicData.unsafe_get is_stopped) then on_next x
in
(on_completed', on_error', on_next')
end
module CheckedObserver = struct
In the original implementation , synchronization for the observer state
* is obtained through CAS ( compare - and - swap ) primitives , but in OCaml we
* do n't have a standard / portable CAS primitive , so I 'm using a mutex .
* ( see #Rx.NET/Source/System.Reactive.Core/Reactive/Internal/CheckedObserver.cs )
* is obtained through CAS (compare-and-swap) primitives, but in OCaml we
* don't have a standard/portable CAS primitive, so I'm using a mutex.
* (see #Rx.NET/Source/System.Reactive.Core/Reactive/Internal/CheckedObserver.cs)
*)
type state = Idle | Busy | Done
let create (on_completed, on_error, on_next) =
let state = RxAtomicData.create Idle in
let check_access () =
RxAtomicData.update
(fun s ->
match s with
| Idle ->
Busy
| Busy ->
failwith "Reentrancy has been detected."
| Done ->
failwith "Observer has already terminated."
) state
in
let wrap_action thunk new_state =
check_access ();
Utils.try_finally
thunk
(fun () -> RxAtomicData.set new_state state)
in
let on_completed' () = wrap_action (fun () -> on_completed ()) Done in
let on_error' e = wrap_action (fun () -> on_error e) Done in
let on_next' x = wrap_action (fun () -> on_next x) Idle in
(on_completed', on_error', on_next')
end
let checked observer = CheckedObserver.create observer
module SynchronizedObserver = struct
let create (on_completed, on_error, on_next) =
let lock = BatRMutex.create () in
let with_lock f a = BatRMutex.synchronize ~lock f a in
let on_completed' () = with_lock on_completed () in
let on_error' e = with_lock on_error e in
let on_next' x = with_lock on_next x in
(on_completed', on_error', on_next')
end
let synchronize observer = SynchronizedObserver.create observer
module AsyncLockObserver = struct
let create (on_completed, on_error, on_next) =
let async_lock = RxAsyncLock.create () in
let with_lock thunk = RxAsyncLock.wait async_lock thunk in
let on_completed' () = with_lock (fun () -> on_completed ()) in
let on_error' e = with_lock (fun () -> on_error e) in
let on_next' x = with_lock (fun () -> on_next x) in
ObserverBase.create (on_completed', on_error', on_next')
end
let synchronize_async_lock observer = AsyncLockObserver.create observer
|
13ca8f524140d6edd568c0afdb4aef92e490cf5b4b1185bea392bd84ca1fe344 | mejgun/haskell-tdlib | StickerType.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Data.StickerType where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
-- | Describes type of a sticker
data StickerType
  = -- | The sticker is a regular sticker
    StickerTypeRegular
  | -- | The sticker is a mask in WEBP format to be placed on photos or videos
    StickerTypeMask
  | -- | The sticker is a custom emoji to be used inside message text and caption
    StickerTypeCustomEmoji
  deriving (Eq)
-- Renders the constructor name followed by U.cc applied to an empty
-- argument list, mirroring the other TDLib object Show instances.
instance Show StickerType where
  show s = name ++ U.cc []
    where
      name = case s of
        StickerTypeRegular -> "StickerTypeRegular"
        StickerTypeMask -> "StickerTypeMask"
        StickerTypeCustomEmoji -> "StickerTypeCustomEmoji"
-- Dispatches on the TDLib "@type" tag; any other value fails the parser.
instance T.FromJSON StickerType where
  parseJSON v@(T.Object obj) = do
    t <- obj A..: "@type" :: T.Parser String
    case t of
      "stickerTypeRegular" -> nullary "StickerTypeRegular" StickerTypeRegular
      "stickerTypeMask" -> nullary "StickerTypeMask" StickerTypeMask
      "stickerTypeCustomEmoji" -> nullary "StickerTypeCustomEmoji" StickerTypeCustomEmoji
      _ -> mempty
    where
      -- All constructors are nullary: check the value is an object (always
      -- true on this code path) and yield the constructor.
      nullary :: String -> StickerType -> T.Parser StickerType
      nullary name c = A.withObject name (const (return c)) v
  parseJSON _ = mempty
-- Serializes as an object carrying only the TDLib "@type" tag.
instance T.ToJSON StickerType where
  toJSON st = A.object ["@type" A..= T.String (typeTag st)]
    where
      typeTag StickerTypeRegular = "stickerTypeRegular"
      typeTag StickerTypeMask = "stickerTypeMask"
      typeTag StickerTypeCustomEmoji = "stickerTypeCustomEmoji"
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/cf563ece2c2270b2079e233c73cbc7dfd2f70281/src/TD/Data/StickerType.hs | haskell | # LANGUAGE OverloadedStrings #
|
| Describes type of a sticker
| The sticker is a regular sticker
| The sticker is a custom emoji to be used inside message text and caption |
module TD.Data.StickerType where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
data StickerType
StickerTypeRegular
| The sticker is a mask in WEBP format to be placed on photos or videos
StickerTypeMask
StickerTypeCustomEmoji
deriving (Eq)
instance Show StickerType where
show StickerTypeRegular =
"StickerTypeRegular"
++ U.cc
[]
show StickerTypeMask =
"StickerTypeMask"
++ U.cc
[]
show StickerTypeCustomEmoji =
"StickerTypeCustomEmoji"
++ U.cc
[]
instance T.FromJSON StickerType where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"stickerTypeRegular" -> parseStickerTypeRegular v
"stickerTypeMask" -> parseStickerTypeMask v
"stickerTypeCustomEmoji" -> parseStickerTypeCustomEmoji v
_ -> mempty
where
parseStickerTypeRegular :: A.Value -> T.Parser StickerType
parseStickerTypeRegular = A.withObject "StickerTypeRegular" $ \_ -> return StickerTypeRegular
parseStickerTypeMask :: A.Value -> T.Parser StickerType
parseStickerTypeMask = A.withObject "StickerTypeMask" $ \_ -> return StickerTypeMask
parseStickerTypeCustomEmoji :: A.Value -> T.Parser StickerType
parseStickerTypeCustomEmoji = A.withObject "StickerTypeCustomEmoji" $ \_ -> return StickerTypeCustomEmoji
parseJSON _ = mempty
instance T.ToJSON StickerType where
toJSON StickerTypeRegular =
A.object
[ "@type" A..= T.String "stickerTypeRegular"
]
toJSON StickerTypeMask =
A.object
[ "@type" A..= T.String "stickerTypeMask"
]
toJSON StickerTypeCustomEmoji =
A.object
[ "@type" A..= T.String "stickerTypeCustomEmoji"
]
|
dde1e0210223503c9b2101d8486e1e58cf691bc0a8dc84a41b19e31209837bf5 | ygmpkk/house | Reader.hs | -----------------------------------------------------------------------------
-- |
-- Module      :  Control.Monad.Reader
-- Copyright   :  (c) Andy Gill 2001,
--                (c) Oregon Graduate Institute of Science and Technology, 2001
-- License     :  BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer  :  libraries@haskell.org
-- Stability   :  experimental
-- Portability :  non-portable (multi-param classes, functional dependencies)
--
-- Declaration of the MonadReader class, and the Reader and ReaderT monads,
-- with instances for functions, 'Reader' and 'ReaderT'.
--
-- Inspired by the paper
-- /Functional Programming with Overloading and
-- Higher-Order Polymorphism/,
-- Mark P Jones (<http://www.cse.ogi.edu/~mpj/>)
-- Advanced School of Functional Programming, 1995.
-----------------------------------------------------------------------------
module Control.Monad.Reader (
MonadReader(..),
asks,
Reader(..),
mapReader,
withReader,
ReaderT(..),
mapReaderT,
withReaderT,
module Control.Monad,
module Control.Monad.Fix,
module Control.Monad.Trans,
) where
import Prelude
import Control.Monad
import Control.Monad.Fix
import Control.Monad.Trans
-- ----------------------------------------------------------------------------
-- class MonadReader
-- asks for the internal (non-mutable) state.
-- | Monads that carry a read-only environment of type @r@.
class (Monad m) => MonadReader r m | m -> r where
    -- | Retrieve the environment.
    ask :: m r
    -- | Run a computation in a locally modified environment.
    local :: (r -> r) -> m a -> m a

-- This allows you to provide a projection function.
-- | Retrieve a projection of the environment.
asks :: (MonadReader r m) => (r -> a) -> m a
asks f = do
    r <- ask
    return (f r)
-- ----------------------------------------------------------------------------
-- The partially applied function type is a simple reader monad
-- | The partially applied function type @(->) r@ is itself a reader
-- monad: the environment is the function argument.
instance Functor ((->) r) where
    fmap = (.)

instance Monad ((->) r) where
    return = const
    m >>= k = \r -> k (m r) r

-- | Value recursion: the result is fed back to itself while the
-- environment is shared (lazy knot-tying).
instance MonadFix ((->) r) where
    mfix f = \r -> let a = f a r in a

instance MonadReader r ((->) r) where
    ask = id
    local f m = m . f
-- ---------------------------------------------------------------------------
-- Our parameterizable reader monad
-- | A pure computation that can read a shared environment @r@.
newtype Reader r a = Reader { runReader :: r -> a }

instance Functor (Reader r) where
    fmap f m = Reader $ \r -> f (runReader m r)

instance Monad (Reader r) where
    return a = Reader $ \_ -> a
    m >>= k = Reader $ \r -> runReader (k (runReader m r)) r

-- | Value recursion under a shared environment (lazy knot-tying).
instance MonadFix (Reader r) where
    mfix f = Reader $ \r -> let a = runReader (f a) r in a

instance MonadReader r (Reader r) where
    ask = Reader id
    local f m = Reader $ runReader m . f
-- | Transform the result of a 'Reader' computation.
mapReader :: (a -> b) -> Reader r a -> Reader r b
mapReader f m = Reader $ \r -> f (runReader m r)
-- | A more general version of 'local': run a computation under a
-- translated environment.
withReader :: (r' -> r) -> Reader r a -> Reader r' a
withReader f m = Reader $ \r' -> runReader m (f r')
-- ---------------------------------------------------------------------------
-- Our parameterizable reader monad, with an inner monad
-- | A computation in an inner monad @m@ that can read an environment @r@.
newtype ReaderT r m a = ReaderT { runReaderT :: r -> m a }

instance (Monad m) => Functor (ReaderT r m) where
    fmap f m = ReaderT $ \r -> do
        a <- runReaderT m r
        return (f a)

instance (Monad m) => Monad (ReaderT r m) where
    return a = ReaderT $ \_ -> return a
    m >>= k = ReaderT $ \r -> do
        a <- runReaderT m r
        runReaderT (k a) r
    fail msg = ReaderT $ \_ -> fail msg

-- | Failure and choice are lifted pointwise from the inner monad.
instance (MonadPlus m) => MonadPlus (ReaderT r m) where
    mzero = ReaderT $ \_ -> mzero
    m `mplus` n = ReaderT $ \r -> runReaderT m r `mplus` runReaderT n r

instance (MonadFix m) => MonadFix (ReaderT r m) where
    mfix f = ReaderT $ \r -> mfix $ \a -> runReaderT (f a) r
instance (Monad m) => MonadReader r (ReaderT r m) where
    ask = ReaderT return
    local f m = ReaderT $ \r -> runReaderT m (f r)

-- | Lifted computations ignore the environment.
instance MonadTrans (ReaderT r) where
    lift m = ReaderT $ \_ -> m

instance (MonadIO m) => MonadIO (ReaderT r m) where
    liftIO = lift . liftIO
-- | Transform the inner-monad action of a 'ReaderT' computation.
mapReaderT :: (m a -> n b) -> ReaderT w m a -> ReaderT w n b
mapReaderT f m = ReaderT $ \w -> f (runReaderT m w)
-- | Run a 'ReaderT' computation under a translated environment.
withReaderT :: (r' -> r) -> ReaderT r m a -> ReaderT r' m a
withReaderT f m = ReaderT $ \r' -> runReaderT m (f r')
| null | https://raw.githubusercontent.com/ygmpkk/house/1ed0eed82139869e85e3c5532f2b579cf2566fa2/ghc-6.2/libraries/base/Control/Monad/Reader.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable (multi-param classes, functional dependencies)
Declaration of the Monoid class,and instances for list and functions
Inspired by the paper
/Functional Programming with Overloading and
Higher-Order Polymorphism/,
---------------------------------------------------------------------------
----------------------------------------------------------------------------
class MonadReader
asks for the internal (non-mutable) state.
This allows you to provide a projection function.
----------------------------------------------------------------------------
The partially applied function type is a simple reader monad
---------------------------------------------------------------------------
Our parameterizable reader monad
This is a more general version of local.
---------------------------------------------------------------------------
Our parameterizable reader monad, with an inner monad | Module : Control . . Reader
Copyright : ( c ) 2001 ,
( c ) Oregon Graduate Institute of Science and Technology , 2001
( < /~mpj/ > )
Advanced School of Functional Programming , 1995 .
module Control.Monad.Reader (
MonadReader(..),
asks,
Reader(..),
mapReader,
withReader,
ReaderT(..),
mapReaderT,
withReaderT,
module Control.Monad,
module Control.Monad.Fix,
module Control.Monad.Trans,
) where
import Prelude
import Control.Monad
import Control.Monad.Fix
import Control.Monad.Trans
class (Monad m) => MonadReader r m | m -> r where
ask :: m r
local :: (r -> r) -> m a -> m a
asks :: (MonadReader r m) => (r -> a) -> m a
asks f = do
r <- ask
return (f r)
instance Functor ((->) r) where
fmap = (.)
instance Monad ((->) r) where
return = const
m >>= k = \r -> k (m r) r
instance MonadFix ((->) r) where
mfix f = \r -> let a = f a r in a
instance MonadReader r ((->) r) where
ask = id
local f m = m . f
newtype Reader r a = Reader { runReader :: r -> a }
instance Functor (Reader r) where
fmap f m = Reader $ \r -> f (runReader m r)
instance Monad (Reader r) where
return a = Reader $ \_ -> a
m >>= k = Reader $ \r -> runReader (k (runReader m r)) r
instance MonadFix (Reader r) where
mfix f = Reader $ \r -> let a = runReader (f a) r in a
instance MonadReader r (Reader r) where
ask = Reader id
local f m = Reader $ runReader m . f
mapReader :: (a -> b) -> Reader r a -> Reader r b
mapReader f m = Reader $ f . runReader m
withReader :: (r' -> r) -> Reader r a -> Reader r' a
withReader f m = Reader $ runReader m . f
newtype ReaderT r m a = ReaderT { runReaderT :: r -> m a }
instance (Monad m) => Functor (ReaderT r m) where
fmap f m = ReaderT $ \r -> do
a <- runReaderT m r
return (f a)
instance (Monad m) => Monad (ReaderT r m) where
return a = ReaderT $ \_ -> return a
m >>= k = ReaderT $ \r -> do
a <- runReaderT m r
runReaderT (k a) r
fail msg = ReaderT $ \_ -> fail msg
instance (MonadPlus m) => MonadPlus (ReaderT r m) where
mzero = ReaderT $ \_ -> mzero
m `mplus` n = ReaderT $ \r -> runReaderT m r `mplus` runReaderT n r
instance (MonadFix m) => MonadFix (ReaderT r m) where
mfix f = ReaderT $ \r -> mfix $ \a -> runReaderT (f a) r
instance (Monad m) => MonadReader r (ReaderT r m) where
ask = ReaderT return
local f m = ReaderT $ \r -> runReaderT m (f r)
instance MonadTrans (ReaderT r) where
lift m = ReaderT $ \_ -> m
instance (MonadIO m) => MonadIO (ReaderT r m) where
liftIO = lift . liftIO
mapReaderT :: (m a -> n b) -> ReaderT w m a -> ReaderT w n b
mapReaderT f m = ReaderT $ f . runReaderT m
withReaderT :: (r' -> r) -> ReaderT r m a -> ReaderT r' m a
withReaderT f m = ReaderT $ runReaderT m . f
|
bd0aa594507445d0631276073a0ead87d1c0f4310bbbe9d4c46b6dc8942f0c8e | symbiont-io/detsys-testkit | Storage.hs | # LANGUAGE DeriveGeneric #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
module Ldfi.Storage where
import Control.Arrow (second)
import Control.Exception
import Data.Aeson (decode)
import qualified Data.Binary.Builder as BB
import Data.Hashable (Hashable)
import Data.List (groupBy, intercalate)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.Encoding as TextE
import Database.SQLite.Simple
import GHC.Generics (Generic)
import qualified Ldfi.Marshal.Faults as MF
import Ldfi.Traces
import System.Environment
import System.FilePath
import System.IO.Error
------------------------------------------------------------------------
-- | Database id of a test.
type TestId = Int

-- | Database id of one run of a test.
type RunId = Int

-- | An injectable fault: crash a node at a time, or drop the message on
-- an edge at a time. Node/Edge/Time presumably come from "Ldfi.Traces"
-- (imported unqualified) — confirm.
data Fault = Crash Node Time | Omission Edge Time
  deriving (Eq, Ord, Read, Show, Generic)

instance Hashable Fault

-- | Fault information gathered for a test.
data Failures = Failures
  { -- | The fault sets of the runs marked as failed.
    fFaultsFromFailedRuns :: [[Fault]],
    -- | The faults of every run, keyed by run id.
    fFaultsPerRun :: Map RunId [Fault]
  }

-- | Result of an ldfi computation, to be persisted by 'store'.
data LdfiEvent = LdfiEvent
  { leTestId :: TestId,
    leRunIds :: [RunId],
    leFaults :: [String], -- XXX: Fault?
    leVersion :: String,
    leStatistics :: String
  }

-- | Which test to load and which of its runs failed.
data TestInformation = TestInformation
  { tiTestId :: TestId,
    tiFailedRuns :: [RunId]
  }

-- | Storage backend interface; see 'mockStorage' and 'sqliteStorage'.
data Storage m = Storage
  { load :: TestInformation -> m (Map RunId Trace),
    loadFailures :: TestInformation -> m Failures,
    store :: LdfiEvent -> m ()
  }
-- | A 'Failures' value with no failed runs and no per-run faults.
emptyFailures :: Failures
emptyFailures =
  Failures {fFaultsFromFailedRuns = [], fFaultsPerRun = Map.empty}
-- | An in-memory 'Storage' over the given traces: runs are numbered from
-- 0, failures are always empty and 'store' is a no-op.
mockStorage :: Monad m => [Trace] -> Storage m
mockStorage traces =
  Storage
    { load = \_ -> return (Map.fromList (zip [0 ..] traces)),
      loadFailures = \_ -> return emptyFailures,
      store = \_ -> return ()
    }
-- | Resolve the SQLite database path: the @DETSYS_DB@ environment
-- variable if set, otherwise @$HOME/.detsys.db@. Only the
-- does-not-exist error from 'getEnv' is handled; anything else is
-- re-thrown.
--
-- Fix: the pattern annotation used the (lowercase) function name
-- @catchIOError@, which 'ScopedTypeVariables' silently reads as a fresh
-- type variable; annotate with the intended concrete type 'IOError'.
getDbPath :: IO String
getDbPath = do
  getEnv "DETSYS_DB"
    `catchIOError` \(e :: IOError) ->
      if isDoesNotExistError e
        then do
          home <- getEnv "HOME"
          return (home </> ".detsys.db")
        else throwIO e
-- | One row of the @network_trace@ table: a message between two nodes
-- with its logical send and receive times.
data NetworkTraceEvent = NetworkTraceEvent
  { nteRunId :: Int,
    nteSender :: String,
    nteReceiver :: String,
    nteRecvLogicalTime :: Int,
    nteSentLogicalTime :: Int
  }

-- Field order must match the column order of the SELECT in 'sqliteLoad'.
instance FromRow NetworkTraceEvent where
  fromRow = NetworkTraceEvent <$> field <*> field <*> field <*> field <*> field
-- | Render a list of ints as a SQL-style tuple, e.g. @(1, 2, 3)@.
sqliteShowSequence :: [Int] -> String
sqliteShowSequence xs = concat ["(", intercalate ", " (map show xs), ")"]
-- | Load the network traces of a test from the database, one 'Trace' per
-- run. The query drops timer events, dropped messages and client
-- traffic; runs listed in 'tiFailedRuns' are filtered out afterwards.
-- NOTE(review): the connection is opened but never closed — confirm this
-- is intended for this short-lived tool.
sqliteLoad :: TestInformation -> IO (Map RunId Trace)
sqliteLoad testInformation = do
  path <- getDbPath
  conn <- open path
  let testId = tiTestId testInformation
      failedRuns = Set.fromList $ tiFailedRuns testInformation
  r <-
    queryNamed
      conn
      "SELECT run_id,sender,receiver,recv_logical_time,sent_logical_time FROM network_trace \
      \ WHERE test_id = :testId \
      \ AND kind <> 'timer' \
      \ AND NOT dropped \
      \ AND NOT (sender LIKE 'client:%') \
      \ AND NOT (receiver LIKE 'client:%') \
      \ ORDER BY run_id ASC"
      [":testId" := testId] ::
      IO [NetworkTraceEvent]
  -- groupBy relies on the ORDER BY above keeping each run's rows
  -- contiguous.
  return (Map.fromList . map prepareToMap . groupBy (\e1 e2 -> nteRunId e1 == nteRunId e2) . filter (not . flip Set.member failedRuns . nteRunId) $ r)
  where
    -- Turn one run's (non-empty) group of rows into a (run id, trace) pair.
    prepareToMap :: [NetworkTraceEvent] -> (RunId, Trace)
    prepareToMap [] = error "impossible"
    prepareToMap xs@(h : _) = (nteRunId h, historyToTrace xs)
    historyToTrace :: [NetworkTraceEvent] -> Trace
    historyToTrace = map go
      where
        go :: NetworkTraceEvent -> Event
        go (NetworkTraceEvent _runId sender receiver recvAt sentAt) =
          Event sender (toEnum sentAt) receiver (toEnum recvAt)
-- | Load the faults of every run of a test from the @run_info@ table.
-- The faults column is a JSON text decoded via "Ldfi.Marshal.Faults";
-- a malformed value aborts with 'error'.
sqliteLoadFailure :: TestInformation -> IO Failures
sqliteLoadFailure testInformation = do
  path <- getDbPath
  conn <- open path
  let testId = tiTestId testInformation
      failedRuns = Set.fromList $ tiFailedRuns testInformation
  r <-
    queryNamed
      conn
      "SELECT run_id, faults FROM run_info WHERE test_id = :testId"
      [":testId" := testId] ::
      IO [(RunId, Text)]
  return . toFailures failedRuns . map (second parse) $ r
  where
    -- Split the per-run faults into the overall table and the fault sets
    -- of the runs marked as failed.
    toFailures :: Set RunId -> [(Int, [Fault])] -> Failures
    toFailures failedRuns xs =
      Failures
        { fFaultsFromFailedRuns = map snd . filter (flip Set.member failedRuns . fst) $ xs,
          fFaultsPerRun = Map.fromList xs
        }
    -- Decode the JSON faults column (Text -> lazy bytes -> Aeson).
    parse :: Text -> [Fault]
    parse s = case decode (BB.toLazyByteString $ TextE.encodeUtf8Builder s) of
      Nothing -> error $ "Unable to parse faults: " ++ Text.unpack s
      Just x -> map convert x
    -- Marshalled fault -> internal 'Fault'.
    convert :: MF.Fault -> Fault
    convert (MF.Omission f t a) = Omission (f, t) a
    convert (MF.Crash f a) = Crash f a
-- TODO(stevan): What exactly do we need to store? Previous faults are no longer
-- interesting.
-- | Persisting 'LdfiEvent's is not implemented yet; fail loudly with a
-- recognizable message instead of an anonymous 'undefined'.
sqliteStore :: LdfiEvent -> IO ()
sqliteStore _ = error "Ldfi.Storage.sqliteStore: not implemented"
-- | The production 'Storage', backed by the SQLite database located via
-- 'getDbPath'. Note that 'store' is still unimplemented.
sqliteStorage :: Storage IO
sqliteStorage =
  Storage
    { load = sqliteLoad,
      loadFailures = sqliteLoadFailure,
      store = sqliteStore
    }
| null | https://raw.githubusercontent.com/symbiont-io/detsys-testkit/29a3a0140730420e4c5cc8db23df6fdb03f9302c/src/ldfi/src/Ldfi/Storage.hs | haskell | # LANGUAGE OverloadedStrings #
----------------------------------------------------------------------
XXX: Fault?
TODO(stevan): What exactly do we need to store? Previous faults are no longer
interesting. | # LANGUAGE DeriveGeneric #
# LANGUAGE ScopedTypeVariables #
module Ldfi.Storage where
import Control.Arrow (second)
import Control.Exception
import Data.Aeson (decode)
import qualified Data.Binary.Builder as BB
import Data.Hashable (Hashable)
import Data.List (groupBy, intercalate)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.Encoding as TextE
import Database.SQLite.Simple
import GHC.Generics (Generic)
import qualified Ldfi.Marshal.Faults as MF
import Ldfi.Traces
import System.Environment
import System.FilePath
import System.IO.Error
type TestId = Int
type RunId = Int
data Fault = Crash Node Time | Omission Edge Time
deriving (Eq, Ord, Read, Show, Generic)
instance Hashable Fault
data Failures = Failures
{ fFaultsFromFailedRuns :: [[Fault]],
fFaultsPerRun :: Map RunId [Fault]
}
data LdfiEvent = LdfiEvent
{ leTestId :: TestId,
leRunIds :: [RunId],
leVersion :: String,
leStatistics :: String
}
data TestInformation = TestInformation
{ tiTestId :: TestId,
tiFailedRuns :: [RunId]
}
data Storage m = Storage
{ load :: TestInformation -> m (Map RunId Trace),
loadFailures :: TestInformation -> m Failures,
store :: LdfiEvent -> m ()
}
emptyFailures :: Failures
emptyFailures =
Failures
{ fFaultsFromFailedRuns = [],
fFaultsPerRun = Map.empty
}
mockStorage :: Monad m => [Trace] -> Storage m
mockStorage ts =
Storage
{ load = const (return $ Map.fromList $ zip [0 ..] ts),
loadFailures = const (return emptyFailures),
store = const (return ())
}
getDbPath :: IO String
getDbPath = do
getEnv "DETSYS_DB"
`catchIOError` \(e :: catchIOError) ->
if isDoesNotExistError e
then do
home <- getEnv "HOME"
return (home </> ".detsys.db")
else throwIO e
data NetworkTraceEvent = NetworkTraceEvent
{ nteRunId :: Int,
nteSender :: String,
nteReceiver :: String,
nteRecvLogicalTime :: Int,
nteSentLogicalTime :: Int
}
instance FromRow NetworkTraceEvent where
fromRow = NetworkTraceEvent <$> field <*> field <*> field <*> field <*> field
sqliteShowSequence :: [Int] -> String
sqliteShowSequence xs = "(" ++ intercalate ", " (map show xs) ++ ")"
sqliteLoad :: TestInformation -> IO (Map RunId Trace)
sqliteLoad testInformation = do
path <- getDbPath
conn <- open path
let testId = tiTestId testInformation
failedRuns = Set.fromList $ tiFailedRuns testInformation
r <-
queryNamed
conn
"SELECT run_id,sender,receiver,recv_logical_time,sent_logical_time FROM network_trace \
\ WHERE test_id = :testId \
\ AND kind <> 'timer' \
\ AND NOT dropped \
\ AND NOT (sender LIKE 'client:%') \
\ AND NOT (receiver LIKE 'client:%') \
\ ORDER BY run_id ASC"
[":testId" := testId] ::
IO [NetworkTraceEvent]
return (Map.fromList . map prepareToMap . groupBy (\e1 e2 -> nteRunId e1 == nteRunId e2) . filter (not . flip Set.member failedRuns . nteRunId) $ r)
where
prepareToMap :: [NetworkTraceEvent] -> (RunId, Trace)
prepareToMap [] = error "impossible"
prepareToMap xs@(h : _) = (nteRunId h, historyToTrace xs)
historyToTrace :: [NetworkTraceEvent] -> Trace
historyToTrace = map go
where
go :: NetworkTraceEvent -> Event
go (NetworkTraceEvent _runId sender receiver recvAt sentAt) =
Event sender (toEnum sentAt) receiver (toEnum recvAt)
sqliteLoadFailure :: TestInformation -> IO Failures
sqliteLoadFailure testInformation = do
path <- getDbPath
conn <- open path
let testId = tiTestId testInformation
failedRuns = Set.fromList $ tiFailedRuns testInformation
r <-
queryNamed
conn
"SELECT run_id, faults FROM run_info WHERE test_id = :testId"
[":testId" := testId] ::
IO [(RunId, Text)]
return . toFailures failedRuns . map (second parse) $ r
where
toFailures :: Set RunId -> [(Int, [Fault])] -> Failures
toFailures failedRuns xs =
Failures
{ fFaultsFromFailedRuns = map snd . filter (flip Set.member failedRuns . fst) $ xs,
fFaultsPerRun = Map.fromList xs
}
parse :: Text -> [Fault]
parse s = case decode (BB.toLazyByteString $ TextE.encodeUtf8Builder s) of
Nothing -> error $ "Unable to parse faults: " ++ Text.unpack s
Just x -> map convert x
convert :: MF.Fault -> Fault
convert (MF.Omission f t a) = Omission (f, t) a
convert (MF.Crash f a) = Crash f a
sqliteStore :: LdfiEvent -> IO ()
sqliteStore = undefined
sqliteStorage :: Storage IO
sqliteStorage =
Storage
{ load = sqliteLoad,
loadFailures = sqliteLoadFailure,
store = sqliteStore
}
|
d93befce75a36c8b1fe0652b45b360e1069f8cd2aa3141f73efd2d0f429af343 | inaka/elvis_core | pass_max_function_length_elvis_attr.erl | -module(pass_max_function_length_elvis_attr).
-export([f5/1, f10/1, f15/1]).
-elvis([{elvis_style, max_function_length, #{ max_length => 15,
count_comments => true,
count_whitespace => true }}]).
1
2
3
5
1
2
3
4
6
7
9
10
1
2
3
4
6
7
8
9
11
12
14
15
| null | https://raw.githubusercontent.com/inaka/elvis_core/468bd3498f1782fd74ef3d8eb1b36217b0b76c11/test/examples/pass_max_function_length_elvis_attr.erl | erlang | -module(pass_max_function_length_elvis_attr).
-export([f5/1, f10/1, f15/1]).
-elvis([{elvis_style, max_function_length, #{ max_length => 15,
count_comments => true,
count_whitespace => true }}]).
1
2
3
5
1
2
3
4
6
7
9
10
1
2
3
4
6
7
8
9
11
12
14
15
| |
71e245d02507fd2e469d458f85b74c4ac28b2ab93f529038b8479cff6af31b8a | dbuenzli/uuseg | test.ml | ---------------------------------------------------------------------------
   Copyright (c) 2014 The uuseg programmers. All rights reserved.
   Distributed under the ISC license, see terms at the end of the file.
  ---------------------------------------------------------------------------*)

(* Checks that Uuseg passes Unicode's Segmentation and Line break conformance
   tests and also performs other tests. *)
(* [str] formats to a string; [log] prints immediately to stderr. *)
let str = Format.asprintf
let log f = Format.eprintf (f ^^ "@?")
(* Format the arguments, then abort with the formatted message. This
   binding is shadowed further down by the [fail] failure counter. *)
let fail fmt =
  let fail _ = failwith (Format.flush_str_formatter ()) in
  Format.kfprintf fail Format.str_formatter fmt
(* Split [s] on [sep], scanning from the right and dropping empty fields. *)
let split_string s sep =
  let rec gather acc stop =
    let start = try String.rindex_from s stop sep with Not_found -> -1 in
    if start = -1 then
      let piece = String.sub s 0 (stop + 1) in
      if piece = "" then acc else piece :: acc
    else
      let piece = String.sub s (start + 1) (stop - start) in
      let acc = if piece = "" then acc else piece :: acc in
      gather acc (start - 1)
  in
  gather [] (String.length s - 1)
(* Render the caller's location as "file:line: " by textually picking the
   second frame out of a raw backtrace. Fragile by design: it depends on
   the exact wording of [Printexc.raw_backtrace_to_string]; any parse
   problem degrades to "??:". *)
let stack_to_loc stack = (* Grrrrr. *)
  let stack = Printexc.raw_backtrace_to_string stack in
  try
    (* Skip the first frame (this file), then extract the quoted file name. *)
    let start = String.index stack '\n' in
    let fstart = String.index_from stack start '\"' + 1 in
    let fend = String.rindex stack '\"' - 1 in
    let file = String.sub stack fstart (fend - fstart + 1) in
    (* The offset 9 skips the separator between the closing quote and the
       line number — tied to the backtrace text format. *)
    let lstart = fend + 9 in
    let lend = String.rindex stack ',' - 1 in
    let line = String.sub stack lstart (lend - lstart + 1) in
    str "%s:%d: " file (int_of_string line)
  with
  | Not_found | Failure _ -> "??:"
(* Print a spec: "÷ " for boundaries, "%04X " for uchars, with "× "
   between two adjacent uchars (no break). *)
let rec pp_spec ppf = function
| [] -> ()
| `B :: rest -> Format.fprintf ppf "÷ "; pp_spec ppf rest
| `U u :: rest ->
    Format.fprintf ppf "%04X " (Uchar.to_int u);
    begin match rest with
    | `U _ :: _ -> Format.fprintf ppf "× "
    | _ -> ()
    end;
    pp_spec ppf rest
(* Print the human readable name of a boundary kind. *)
let pp_boundary ppf b =
  let name = match b with
  | `Grapheme_cluster -> "grapheme cluster:"
  | `Word -> "word:"
  | `Sentence -> "sentence:"
  | `Line_break -> "line break:"
  in
  Format.pp_print_string ppf name
(* Global counters: number of test cases run and number of failures.
   Note: this [fail] shadows the formatting helper defined above. *)
let test_case = ref 0
let fail = ref 0
(* Run one segmentation case: feed [src] to a fresh [seg] segmenter,
   collect the emitted uchars and boundaries, and compare the sequence
   against [spec]. Mismatches are counted in [fail] and reported with the
   caller's source location. *)
let test seg src spec =
  incr test_case;
  let loc = Printexc.get_callstack 2 in
  let n = Uuseg.create seg in
  let ended = ref false in
  (* Feed [v], then keep feeding `Await until the segmenter asks for more
     input (`Await) or reports the end (`End), accumulating its output. *)
  let rec add acc v = match Uuseg.add n v with
  | `Uchar u -> add ((`U u) :: acc) `Await
  | `Boundary -> add (`B :: acc) `Await
  | `Await -> ended := false; acc
  | `End -> ended := true; acc
  in
  let add_uchar acc u = add acc (`Uchar u) in
  (* Feed every uchar of [src], then signal the end of input. *)
  let nseq = List.rev (add (List.fold_left add_uchar [] src) `End) in
  if not (!ended) then begin
    incr fail;
    Format.printf "@.%s%a did not finish with `End."
      (stack_to_loc loc) pp_boundary seg
  end else begin
    if nseq = spec then () else
    begin
      incr fail;
      Format.printf "@.%s%a mismatch:@.impl: %a@.spec: %a@."
        (stack_to_loc loc) pp_boundary seg pp_spec nseq pp_spec spec
    end
  end
(* Compare a list of string segments against its expected value,
   recording and reporting a failure on mismatch. *)
let test_string_list l spec =
  let pp_list ppf items =
    List.iter (fun s -> Format.fprintf ppf "%S;@ " s) items
  in
  let loc = Printexc.get_callstack 2 in
  incr test_case;
  if l <> spec then begin
    incr fail;
    Format.printf "@.%s mismatch:@.impl: @[[%a]@]@.spec: @[[%a]@]@."
      (stack_to_loc loc) pp_list l pp_list spec
  end
(* Conformance data decoding *)
(* Parse an uppercase hexadecimal code point value; [failwith v] on any
   other character. *)
let cp_of_string v =
  let digit c =
    let n = Char.code c in
    if 0x30 <= n && n <= 0x39 then n - 48 else
    if 0x41 <= n && n <= 0x46 then n - 55 else
    failwith v
  in
  let acc = ref 0 in
  String.iter (fun c -> acc := !acc * 16 + digit c) v;
  !acc
(* Decode the test specs of a Unicode segmentation data file. Each test
   line looks like "÷ XXXX × XXXX ÷  # comment"; the '×'/'÷' markers are
   matched below as their raw UTF-8 byte sequences. Specs listed in
   [ignores] (with a reason) are skipped, as are lines containing
   surrogates (not scalar values). *)
let decode_conformance_specs ignores ic =
  let rec loop specs =
    match try Some (input_line ic) with End_of_file -> None with
    | None -> List.rev specs
    | Some l ->
        (* Comment-only line. *)
        if String.length l > 0 && l.[0] = '#' then loop specs else
        try begin match split_string l '#' with
        | [comment] -> loop specs
        | test :: comment ->
            let spec = split_string test ' ' in
            begin try
              (* "÷" records a boundary, "×" records nothing, anything
                 else must be a hex scalar value. *)
              let rec to_spec acc = function
              | ( "\xC3\x97" (* × *) | "\xC3\x97\t" ) :: rest ->
                  to_spec acc rest
              | ( "\xC3\xB7" (* ÷ *) | "\xC3\xB7\t") :: rest ->
                  to_spec (`B :: acc) rest
              | uchar :: rest ->
                  let u = cp_of_string uchar in
                  if not (Uchar.is_valid u) then raise Exit else
                  to_spec (`U (Uchar.of_int u) :: acc) rest
              | [] ->
                  List.rev acc
              in
              let spec = to_spec [] spec in
              if ignores = [] then loop (spec :: specs) else
              try
                let reason = List.assoc spec ignores in
                log "Skip test (%s): %s\n" reason test;
                loop (specs)
              with
              | Not_found -> loop (spec :: specs)
            with Exit ->
              log "Skip test (surrogate not a scalar value): %s\n" test;
              loop specs
            end
        | [] -> failwith ""
        end
        with Failure f ->
          log "FAILURE: `%s'" f;
          log "Unable to parse line:\n`%s'\n" l; incr fail;
          loop specs
  in
  loop []
let line_break_ignores =
(* Conformance tests of line breaking algorithm implement a tailoring
that we don't implement. Here are the tests break according
to that tailoring. *)
let u u = `U (Uchar.of_int u) in
[[u 0x007D; `B; u 0x0025; `B], "tailoring, violates LB25.1";
[u 0x007D; u 0x0308; `B; u 0x0025; `B], "tailoring, violates LB25.1";
[u 0x007D; `B; u 0x0024; `B], "tailoring, violates LB25.3";
[u 0x007D; u 0x0308; `B; u 0x0024; `B], "tailoring, violates LB24.3";
(* *)
[u 0x0029; `B; u 0x0025; `B], "tailoring, violates LB25.2";
[u 0x0029; u 0x0308; `B; u 0x0025; `B], "tailoring, violates LB25.2";
[u 0x0029; `B; u 0x0024; `B], "tailoring, violates LB25.4";
[u 0x0029; u 0x0308; `B; u 0x0024; `B], "tailoring, violates LB24.4";
(* *)
[u 0x002C; `B; u 0x0030; `B], "tailoring, violates LB25.12";
[u 0x002C; u 0x0308; `B; u 0x0030; `B], "tailoring, violates LB25.12";
(* *)
[u 0x0025; `B; u 0x0028; `B], "tailoring, violates LB25.7";
[u 0x0025; u 0x0308; `B; u 0x0028; `B], "tailoring, violates LB25.7";
(* *)
[u 0x0024; `B; u 0x0028; `B], "tailoring, violates LB25.9";
[u 0x0024; u 0x0308; `B; u 0x0028; `B], "tailoring, violates LB25.9";
(* *)
[u 0x002F; `B; u 0x0030; `B], "tailoring, violates LB25.14";
[u 0x002F; u 0x0308; `B; u 0x0030; `B], "tailoring, violates LB25.14";
(* *)
[ u 0x0065; u 0x0071; u 0x0075; u 0x0061; u 0x006C; u 0x0073;
u 0x0020; u 0x002E; `B; u 0x0033; u 0x0035; u 0x0020; `B; u 0x0063;
u 0x0065; u 0x006E; u 0x0074; u 0x0073; `B ],
"tailoring, violates LB25.12";
[ u 0x0063; u 0x006F; u 0x0064; u 0x0065; `B; u 0x005C; `B;
u 0x0028; u 0x0073; `B; u 0x005C; u 0x0029; `B ],
"tailoring, violates LB25.9";
[ u 0x0063; u 0x006F; u 0x0064; u 0x0065; `B; u 0x005C; `B; u 0x007B;
u 0x0073; `B; u 0x005C; u 0x007D; `B ],
"tailoring, violates LB25.9";
(* *)
[ u 0x0061; u 0x002E; `B; u 0x0032; u 0x0020; `B ],
"tailoring, violates LB25.12";
[ u 0x0061; u 0x002E; `B; u 0x0032; u 0x0020; `B; u 0x672C; `B ],
"tailoring, violates LB25.12";
[ u 0x0061; u 0x002E; `B; u 0x0032; u 0x0020; `B; u 0x0915; `B ],
"tailoring, violates LB25.12";
[ u 0x0061; u 0x002E; `B; u 0x0032; u 0x0020; `B; u 0xBABB; `B ],
"tailoring, violates LB25.12";
[ u 0x0061; u 0x002E; `B; u 0x0032; u 0x3000; `B; u 0x672C; `B ],
"tailoring, violates LB25.12";
[ u 0x0061; u 0x002E; `B; u 0x0032; u 0x3000; `B; u 0x307E; `B ],
"tailoring, violates LB25.12";
[ u 0x0061; u 0x002E; `B; u 0x0032; u 0x3000; `B; u 0x0033; `B ],
"tailoring, violates LB25.12";
[ u 0x0041; u 0x002E; `B; u 0x0031; u 0x0020; `B; u 0xBABB; `B ],
"tailoring, violates LB25.12";
[ u 0xBD24; `B; u 0xC5B4; u 0x002E; u 0x0020; `B; u 0x0041; u 0x002E;
`B; u 0x0032; u 0x0020; `B; u 0xBCFC; `B ],
"tailoring, violates LB25.12";
[ u 0xBD10; `B; u 0xC694; u 0x002E; u 0x0020; `B; u 0x0041; u 0x002E;
`B; u 0x0033; u 0x0020; `B; u 0xBABB; `B ],
"tailoring, violates LB25.12";
[ u 0xC694; u 0x002E; u 0x0020; `B; u 0x0041; u 0x002E; `B; u 0x0034;
u 0x0020; `B; u 0xBABB; `B ],
"tailoring, violates LB25.12";
[ u 0x0061; u 0x002E; `B; u 0x0032; u 0x3000; `B; u 0x300C; `B ],
"tailoring, violates LB25.12";
[ u 0x0063; u 0x006F; u 0x0064; u 0x0065; u 0x005C; `B; u 0x0028;
u 0x0073; u 0x005C; u 0x0029; `B ],
"tailoring, violates PR × OP of LB25";
[ u 0x0063; u 0x006F; u 0x0064; u 0x0065; u 0x005C; `B; u 0x007B;
u 0x0073; u 0x005C; u 0x007D; `B],
"tailoring, violates PR × OP of LB25";
[ u 0x0025; `B; u 0x2329; `B],
"tailoring, violates PO × OP of LB25";
[ u 0x0024; `B; u 0x2329; `B],
"tailoring, violates PR × OP of LB25";
[ u 0x0025; u 0x0308; `B; u 0x2329; `B],
"tailoring, violates PO × OP of LB25";
[ u 0x0024; u 0x0308; `B; u 0x2329; `B],
"tailoring, violates PR × OP of LB25";
]
(* Extract the uchars of a spec, dropping the boundary markers. *)
let rec seq_of_spec acc = function
| [] -> List.rev acc
| `B :: rest -> seq_of_spec acc rest
| `U u :: rest -> seq_of_spec (u :: acc) rest
(* Run every spec decoded from the Unicode data file [inf] against the
   [seg] segmenter. A missing file only logs the error and counts one
   failure. *)
let test_conformance seg name ignores inf =
  try
    log "Testing conformance of %s\n" name;
    let ic = open_in inf in
    let specs = decode_conformance_specs ignores ic in
    (* The test input is the spec with its boundary markers removed. *)
    let test spec = test seg (seq_of_spec [] spec) spec in
    List.iter test specs;
    close_in ic
  with Sys_error e -> log "%s\n" e; incr fail
let uchar = Uchar.of_int
(* A few hand-written segmentation cases beyond the Unicode data files,
   including the empty input for every boundary kind. *)
let test_others () =
  let g = `Grapheme_cluster in
  test g [] [];
  test g [uchar 0x0020] [`B; `U (uchar 0x0020); `B;];
  (* U+0065 U+0301 (e + combining acute) must stay one grapheme cluster. *)
  test g (* éa *) [uchar 0x0065; uchar 0x0301; uchar 0x0061;]
    [`B; `U (uchar 0x0065); `U (uchar 0x0301); `B; `U (uchar 0x0061); `B;];
  let w = `Word in
  test w [] [];
  let s = `Sentence in
  test s [] [];
  let l = `Line_break in
  test l [] [];
  ()
(* Exercise the high-level Uuseg_string API on UTF-8 strings. *)
let test_uuseg_string () =
  (* Fold a segmenter over [s], collecting the segments in order. *)
  let fold8 seg s =
    List.rev (Uuseg_string.fold_utf_8 seg (fun acc s -> s :: acc) [] s)
  in
  test_string_list (fold8 `Grapheme_cluster "") [];
  test_string_list (fold8 `Grapheme_cluster "ab cd") ["a"; "b"; " "; "c"; "d"];
  test_string_list (fold8 `Word "") [];
  test_string_list (fold8 `Word "ab cd") ["ab"; " "; "cd"];
  test_string_list (fold8 `Sentence "") [];
  test_string_list (fold8 `Sentence "ab cd") ["ab cd"];
  test_string_list (fold8 `Line_break "") [];
  test_string_list (fold8 `Line_break "ab cd") ["ab "; "cd"];
  ()
(* The line break implementation relies on every unassigned (Cn) extended
   pictographic code point having line break class ID (cf. UAX #14
   rule LB30b); scan the whole scalar value range and report any
   counter-example. *)
let test_LB30b_assumption () =
  let rec loop u =
    let c = Uucp.Emoji.is_extended_pictographic u &&
            Uucp.Gc.general_category u = `Cn
    in
    if c then begin
      if Uucp.Break.line u = `ID then () else
      (* U+ notation is hexadecimal: %04X, not the original %04d. *)
      (log "LB30b assumption failure for U+%04X" (Uchar.to_int u))
    end;
    if Uchar.equal u Uchar.max then log " PASS!\n" else loop (Uchar.succ u)
  in
  loop Uchar.min
(* This is needed by our implementation of LB30b. *)
(* Run the whole suite: the LB30b data assumption check, the four
   Unicode conformance files, the hand-written cases and the
   Uuseg_string tests.  A Sys_error aborts with exit code 1. *)
let test g_file w_file s_file l_file =
  try
    (* Terminate the log line like the other messages do. *)
    log "Testing LB30b's data assumption.\n";
    test_LB30b_assumption ();
    test_conformance `Grapheme_cluster "grapheme cluster boundary" [] g_file;
    test_conformance `Word "word boundary" [] w_file;
    test_conformance `Sentence "sentence boundary" [] s_file;
    test_conformance `Line_break "line break boundary" line_break_ignores
      l_file;
    log "Making other tests.\n";
    test_others ();
    (* The function under test is Uuseg_string, not Uutf_string. *)
    log "Testing Uuseg_string.\n";
    test_uuseg_string ();
    if !fail > 0
    then log "There were %d FAILURES out of %d tests.\n" !fail !test_case
    else log "Success on %d tests!\n" !test_case
  with Sys_error e -> log "%s\n" e; exit 1
(* Command line entry point.  The -g/-w/-s/-l options override the
   default locations of the Unicode break test data files; positional
   arguments are rejected. *)
let main () =
  let usage = Printf.sprintf
    "Usage: %s [INFILE]\n\
    \ Runs the Unicode segmentation conformance test.\n\
    Options:" (Filename.basename Sys.executable_name)
  in
  let err _ = raise (Arg.Bad "no positional argument supported") in
  let g_file = ref "test/GraphemeBreakTest.txt" in
  let w_file = ref "test/WordBreakTest.txt" in
  let s_file = ref "test/SentenceBreakTest.txt" in
  let l_file = ref "test/LineBreakTest.txt" in
  let options =
    [ "-g", Arg.String (fun f -> g_file := f),
      "Specifies the GraphemeBreakTest.txt file";
      "-w", Arg.String (fun f -> w_file := f),
      "Specifies the WordBreakTest.txt file";
      "-s", Arg.String (fun f -> s_file := f),
      "Specifies the SentenceBreakTest.txt file";
      "-l", Arg.String (fun f -> l_file := f),
      "Specifies the LineBreakTest.txt file"; ]
  in
  Arg.parse (Arg.align options) err usage;
  test !g_file !w_file !s_file !l_file
let () = if (not !Sys.interactive) then main ()
---------------------------------------------------------------------------
Copyright (c) 2014 The uuseg programmers

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------
Copyright (c) 2014 The uuseg programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/uuseg/fc4530bce6873a28a7e2deea9bd34ec415d496b3/test/test.ml | ocaml | Grrrr.
Grrrrr.
Conformance data decoding
parses a code point value.
×
÷
Conformance tests of line breaking algorithm implement a tailoring
that we don't implement. Here are the tests break according
to that tailoring.
éa | ---------------------------------------------------------------------------
Copyright ( c ) 2014 The uuseg programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2014 The uuseg programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
Checks that Uuseg passes Unicode 's Segmentation and Line break conformance
tests and also performs other tests .
tests and also performs other tests. *)
(* [str] formats to a string; [log] prints to stderr and flushes. *)
let str = Format.asprintf
let log f = Format.eprintf (f ^^ "@?")
(* Format a message and raise it as a Failure. *)
let fail fmt =
  let fail _ = failwith (Format.flush_str_formatter ()) in
  Format.kfprintf fail Format.str_formatter fmt
(* Split [s] on character [sep], dropping empty fields.
   NOTE(review): the enclosing definition's header line was missing
   from this chunk; restored as [split_string s sep], which matches the
   free occurrences of [s] and [sep] and the callers below. *)
let split_string s sep =
  let rec split accum j =
    let i = try (String.rindex_from s j sep) with Not_found -> -1 in
    if (i = -1) then
      (* No separator left: everything up to [j] is the first field. *)
      let p = String.sub s 0 (j + 1) in
      if p <> "" then p :: accum else accum
    else
      let p = String.sub s (i + 1) (j - i) in
      let accum' = if p <> "" then p :: accum else accum in
      split accum' (i - 1)
  in
  split [] (String.length s - 1)
(* Render a "file:line: " prefix from a raw backtrace, used for test
   diagnostics.  Falls back to "??:" when the backtrace string cannot
   be parsed (e.g. backtraces disabled).
   NOTE(review): the definition's header line was missing from this
   chunk; restored as [stack_to_loc stack], matching the callers which
   pass the result of [Printexc.get_callstack]. *)
let stack_to_loc stack =
  let stack = Printexc.raw_backtrace_to_string stack in
  try
    (* Skip the first frame (this very call) and pull the quoted file
       name and the line number out of the second frame. *)
    let start = String.index stack '\n' in
    let fstart = String.index_from stack start '\"' + 1 in
    let fend = String.rindex stack '\"' - 1 in
    let file = String.sub stack fstart (fend - fstart + 1) in
    let lstart = fend + 9 in
    let lend = String.rindex stack ',' - 1 in
    let line = String.sub stack lstart (lend - lstart + 1) in
    str "%s:%d: " file (int_of_string line)
  with
  | Not_found | Failure _ -> "??:"
(* Pretty print a spec in the notation of the Unicode test files:
   ÷ marks a boundary, × is printed between adjacent uchars that have
   no boundary between them. *)
let rec pp_spec ppf = function
| [] -> ()
| `B :: spec -> Format.fprintf ppf "÷ "; pp_spec ppf spec
| `U u :: spec ->
    Format.fprintf ppf "%04X " (Uchar.to_int u);
    (match spec with (`U _) :: _ -> Format.fprintf ppf "× " | _ -> ());
    pp_spec ppf spec

(* Human readable label for a boundary kind, used in failure reports. *)
let pp_boundary ppf = function
| `Grapheme_cluster -> Format.fprintf ppf "grapheme cluster:"
| `Word -> Format.fprintf ppf "word:"
| `Sentence -> Format.fprintf ppf "sentence:"
| `Line_break -> Format.fprintf ppf "line break:"
(* Global counters: total cases run and number of failures. *)
let test_case = ref 0
let fail = ref 0

(* Run one segmentation case: feed the uchars of [src] to a fresh
   segmenter for [seg], collect the produced boundaries/uchars and
   compare them against the expected [spec].  Failures are reported
   with the caller's source location and counted in [fail]. *)
let test seg src spec =
  incr test_case;
  let loc = Printexc.get_callstack 2 in
  let n = Uuseg.create seg in
  let ended = ref false in
  (* Drain the segmenter after feeding [v], accumulating its output in
     reverse order; [ended] records whether `End was reached. *)
  let rec add acc v = match Uuseg.add n v with
  | `Uchar u -> add ((`U u) :: acc) `Await
  | `Boundary -> add (`B :: acc) `Await
  | `Await -> ended := false; acc
  | `End -> ended := true; acc
  in
  let add_uchar acc u = add acc (`Uchar u) in
  let nseq = List.rev (add (List.fold_left add_uchar [] src) `End) in
  if not (!ended) then begin
    incr fail;
    Format.printf "@.%s%a did not finish with `End."
      (stack_to_loc loc) pp_boundary seg
  end else begin
    if nseq = spec then () else
    begin
      incr fail;
      Format.printf "@.%s%a mismatch:@.impl: %a@.spec: %a@."
        (stack_to_loc loc) pp_boundary seg pp_spec nseq pp_spec spec
    end
  end
(* Compare a computed list of string segments [l] against the expected
   [spec]; report a located mismatch and count it in [fail]. *)
let test_string_list l spec =
  let rec pp_list ppf = function
  | [] -> ()
  | s :: ss -> Format.fprintf ppf "%S;@ " s; pp_list ppf ss
  in
  let loc = Printexc.get_callstack 2 in
  incr test_case;
  if l = spec then () else
  begin
    incr fail;
    Format.printf "@.%s mismatch:@.impl: @[[%a]@]@.spec: @[[%a]@]@."
      (stack_to_loc loc) pp_list l pp_list spec
  end
(* Parse an uppercase hexadecimal code point value (as found in the
   Unicode test files).  Raises Failure with the offending token on any
   non-hex character.
   NOTE(review): the definition's header line was missing from this
   chunk; restored as [cp_of_string v], which matches the free uses of
   [v] and the caller in decode_conformance_specs. *)
let cp_of_string v =
  (* Accepts 0-9 and uppercase A-F only. *)
  let is_hex c = (0x30 <= c && c <= 0x39) || (0x41 <= c && c <= 0x46) in
  let cp = ref 0 in
  for k = 0 to (String.length v) - 1 do
    let c = Char.code v.[k] in
    if not (is_hex c) then (failwith v) else
    cp := !cp * 16 + (if c <= 0x39 then c - 48 else c - 55)
  done;
  !cp
(* Decode a Unicode break test file into a list of specs.  A data line
   looks like "÷ 0041 × 0308 ÷ # comment": ÷ marks a break, × no break,
   and the tokens in between are hex scalar values.  Lines starting
   with '#' are comments.  Tests whose spec appears in [ignores] (an
   assoc list spec -> reason) are skipped, as are tests that mention
   surrogates (not scalar values).
   NOTE(review): the two match arms on the "×"/"÷" marker tokens were
   missing from this chunk and have been restored. *)
let decode_conformance_specs ignores ic =
  let rec loop specs =
    match try Some (input_line ic) with End_of_file -> None with
    | None -> List.rev specs
    | Some l ->
        if String.length l > 0 && l.[0] = '#' then loop specs else
        try begin match split_string l '#' with
        | [comment] -> loop specs
        | test :: comment ->
            let spec = split_string test ' ' in
            begin try
              let rec to_spec acc = function
              | "×" :: rest -> (* no break here *)
                  to_spec acc rest
              | "÷" :: rest -> (* break here *)
                  to_spec (`B :: acc) rest
              | uchar :: rest ->
                  let u = cp_of_string uchar in
                  if not (Uchar.is_valid u) then raise Exit else
                  to_spec (`U (Uchar.of_int u) :: acc) rest
              | [] ->
                  List.rev acc
              in
              let spec = to_spec [] spec in
              if ignores = [] then loop (spec :: specs) else
              try
                let reason = List.assoc spec ignores in
                log "Skip test (%s): %s\n" reason test;
                loop (specs)
              with
              | Not_found -> loop (spec :: specs)
            with Exit ->
              log "Skip test (surrogate not a scalar value): %s\n" test;
              loop specs
            end
        | [] -> failwith ""
        end
        with Failure f ->
          log "FAILURE: `%s'" f;
          log "Unable to parse line:\n`%s'\n" l; incr fail;
          loop specs
  in
  loop []
(* Specs from LineBreakTest.txt that exercise an optional tailoring of
   LB25 which this implementation does not follow.  Each entry pairs a
   spec with the reason it is skipped; consumed by
   decode_conformance_specs via test_conformance. *)
let line_break_ignores =
  let u u = `U (Uchar.of_int u) in
  [[u 0x007D; `B; u 0x0025; `B], "tailoring, violates LB25.1";
   [u 0x007D; u 0x0308; `B; u 0x0025; `B], "tailoring, violates LB25.1";
   [u 0x007D; `B; u 0x0024; `B], "tailoring, violates LB25.3";
   [u 0x007D; u 0x0308; `B; u 0x0024; `B], "tailoring, violates LB24.3";
   [u 0x0029; `B; u 0x0025; `B], "tailoring, violates LB25.2";
   [u 0x0029; u 0x0308; `B; u 0x0025; `B], "tailoring, violates LB25.2";
   [u 0x0029; `B; u 0x0024; `B], "tailoring, violates LB25.4";
   [u 0x0029; u 0x0308; `B; u 0x0024; `B], "tailoring, violates LB24.4";
   [u 0x002C; `B; u 0x0030; `B], "tailoring, violates LB25.12";
   [u 0x002C; u 0x0308; `B; u 0x0030; `B], "tailoring, violates LB25.12";
   [u 0x0025; `B; u 0x0028; `B], "tailoring, violates LB25.7";
   [u 0x0025; u 0x0308; `B; u 0x0028; `B], "tailoring, violates LB25.7";
   [u 0x0024; `B; u 0x0028; `B], "tailoring, violates LB25.9";
   [u 0x0024; u 0x0308; `B; u 0x0028; `B], "tailoring, violates LB25.9";
   [u 0x002F; `B; u 0x0030; `B], "tailoring, violates LB25.14";
   [u 0x002F; u 0x0308; `B; u 0x0030; `B], "tailoring, violates LB25.14";
   [ u 0x0065; u 0x0071; u 0x0075; u 0x0061; u 0x006C; u 0x0073;
     u 0x0020; u 0x002E; `B; u 0x0033; u 0x0035; u 0x0020; `B; u 0x0063;
     u 0x0065; u 0x006E; u 0x0074; u 0x0073; `B ],
   "tailoring, violates LB25.12";
   [ u 0x0063; u 0x006F; u 0x0064; u 0x0065; `B; u 0x005C; `B;
     u 0x0028; u 0x0073; `B; u 0x005C; u 0x0029; `B ],
   "tailoring, violates LB25.9";
   [ u 0x0063; u 0x006F; u 0x0064; u 0x0065; `B; u 0x005C; `B; u 0x007B;
     u 0x0073; `B; u 0x005C; u 0x007D; `B ],
   "tailoring, violates LB25.9";
   [ u 0x0061; u 0x002E; `B; u 0x0032; u 0x0020; `B ],
   "tailoring, violates LB25.12";
   [ u 0x0061; u 0x002E; `B; u 0x0032; u 0x0020; `B; u 0x672C; `B ],
   "tailoring, violates LB25.12";
   [ u 0x0061; u 0x002E; `B; u 0x0032; u 0x0020; `B; u 0x0915; `B ],
   "tailoring, violates LB25.12";
   [ u 0x0061; u 0x002E; `B; u 0x0032; u 0x0020; `B; u 0xBABB; `B ],
   "tailoring, violates LB25.12";
   [ u 0x0061; u 0x002E; `B; u 0x0032; u 0x3000; `B; u 0x672C; `B ],
   "tailoring, violates LB25.12";
   [ u 0x0061; u 0x002E; `B; u 0x0032; u 0x3000; `B; u 0x307E; `B ],
   "tailoring, violates LB25.12";
   [ u 0x0061; u 0x002E; `B; u 0x0032; u 0x3000; `B; u 0x0033; `B ],
   "tailoring, violates LB25.12";
   [ u 0x0041; u 0x002E; `B; u 0x0031; u 0x0020; `B; u 0xBABB; `B ],
   "tailoring, violates LB25.12";
   [ u 0xBD24; `B; u 0xC5B4; u 0x002E; u 0x0020; `B; u 0x0041; u 0x002E;
     `B; u 0x0032; u 0x0020; `B; u 0xBCFC; `B ],
   "tailoring, violates LB25.12";
   [ u 0xBD10; `B; u 0xC694; u 0x002E; u 0x0020; `B; u 0x0041; u 0x002E;
     `B; u 0x0033; u 0x0020; `B; u 0xBABB; `B ],
   "tailoring, violates LB25.12";
   [ u 0xC694; u 0x002E; u 0x0020; `B; u 0x0041; u 0x002E; `B; u 0x0034;
     u 0x0020; `B; u 0xBABB; `B ],
   "tailoring, violates LB25.12";
   [ u 0x0061; u 0x002E; `B; u 0x0032; u 0x3000; `B; u 0x300C; `B ],
   "tailoring, violates LB25.12";
   [ u 0x0063; u 0x006F; u 0x0064; u 0x0065; u 0x005C; `B; u 0x0028;
     u 0x0073; u 0x005C; u 0x0029; `B ],
   "tailoring, violates PR × OP of LB25";
   [ u 0x0063; u 0x006F; u 0x0064; u 0x0065; u 0x005C; `B; u 0x007B;
     u 0x0073; u 0x005C; u 0x007D; `B],
   "tailoring, violates PR × OP of LB25";
   [ u 0x0025; `B; u 0x2329; `B],
   "tailoring, violates PO × OP of LB25";
   [ u 0x0024; `B; u 0x2329; `B],
   "tailoring, violates PR × OP of LB25";
   [ u 0x0025; u 0x0308; `B; u 0x2329; `B],
   "tailoring, violates PO × OP of LB25";
   [ u 0x0024; u 0x0308; `B; u 0x2329; `B],
   "tailoring, violates PR × OP of LB25";
  ]
let rec seq_of_spec acc = function
| `U u :: rest -> seq_of_spec (u :: acc) rest
| `B :: rest -> seq_of_spec acc rest
| [] -> List.rev acc
let test_conformance seg name ignores inf =
try
log "Testing conformance of %s\n" name;
let ic = open_in inf in
let specs = decode_conformance_specs ignores ic in
let test spec = test seg (seq_of_spec [] spec) spec in
List.iter test specs;
close_in ic
with Sys_error e -> log "%s\n" e; incr fail
let uchar = Uchar.of_int
let test_others () =
let g = `Grapheme_cluster in
test g [] [];
test g [uchar 0x0020] [`B; `U (uchar 0x0020); `B;];
[`B; `U (uchar 0x0065); `U (uchar 0x0301); `B; `U (uchar 0x0061); `B;];
let w = `Word in
test w [] [];
let s = `Sentence in
test s [] [];
let l = `Line_break in
test l [] [];
()
let test_uuseg_string () =
let fold8 seg s =
List.rev (Uuseg_string.fold_utf_8 seg (fun acc s -> s :: acc) [] s)
in
test_string_list (fold8 `Grapheme_cluster "") [];
test_string_list (fold8 `Grapheme_cluster "ab cd") ["a"; "b"; " "; "c"; "d"];
test_string_list (fold8 `Word "") [];
test_string_list (fold8 `Word "ab cd") ["ab"; " "; "cd"];
test_string_list (fold8 `Sentence "") [];
test_string_list (fold8 `Sentence "ab cd") ["ab cd"];
test_string_list (fold8 `Line_break "") [];
test_string_list (fold8 `Line_break "ab cd") ["ab "; "cd"];
()
(* The LB30b implementation assumes that every unassigned (`Cn`) code
   point that is Extended_Pictographic has line break class ID.  Scan
   the whole scalar value range and log any counter-example. *)
let test_LB30b_assumption () =
  let rec loop u =
    let c = Uucp.Emoji.is_extended_pictographic u &&
            Uucp.Gc.general_category u = `Cn
    in
    if c then begin
      if Uucp.Break.line u = `ID then () else
      (* U+ notation is hexadecimal: %04X, not %04d; also terminate the
         log line. *)
      (log "LB30b assumption failure for U+%04X\n" (Uchar.to_int u))
    end;
    if Uchar.equal u Uchar.max then log " PASS!\n" else loop (Uchar.succ u)
  in
  loop Uchar.min
(* This is needed by our implementation of LB30b. *)
(* Run the whole suite: the LB30b data assumption check, the four
   Unicode conformance files, the hand-written cases and the
   Uuseg_string tests.  A Sys_error aborts with exit code 1. *)
let test g_file w_file s_file l_file =
  try
    (* Terminate the log line like the other messages do. *)
    log "Testing LB30b's data assumption.\n";
    test_LB30b_assumption ();
    test_conformance `Grapheme_cluster "grapheme cluster boundary" [] g_file;
    test_conformance `Word "word boundary" [] w_file;
    test_conformance `Sentence "sentence boundary" [] s_file;
    test_conformance `Line_break "line break boundary" line_break_ignores
      l_file;
    log "Making other tests.\n";
    test_others ();
    (* The function under test is Uuseg_string, not Uutf_string. *)
    log "Testing Uuseg_string.\n";
    test_uuseg_string ();
    if !fail > 0
    then log "There were %d FAILURES out of %d tests.\n" !fail !test_case
    else log "Success on %d tests!\n" !test_case
  with Sys_error e -> log "%s\n" e; exit 1
let main () =
let usage = Printf.sprintf
"Usage: %s [INFILE]\n\
\ Runs the Unicode segmentation conformance test.\n\
Options:" (Filename.basename Sys.executable_name)
in
let err _ = raise (Arg.Bad "no positional argument supported") in
let g_file = ref "test/GraphemeBreakTest.txt" in
let w_file = ref "test/WordBreakTest.txt" in
let s_file = ref "test/SentenceBreakTest.txt" in
let l_file = ref "test/LineBreakTest.txt" in
let options =
[ "-g", Arg.String (fun f -> g_file := f),
"Specifies the GraphemeBreakTest.txt file";
"-w", Arg.String (fun f -> w_file := f),
"Specifies the WordBreakTest.txt file";
"-s", Arg.String (fun f -> s_file := f),
"Specifies the SentenceBreakTest.txt file";
"-l", Arg.String (fun f -> l_file := f),
"Specifies the LineBreakTest.txt file"; ]
in
Arg.parse (Arg.align options) err usage;
test !g_file !w_file !s_file !l_file
let () = if (not !Sys.interactive) then main ()
---------------------------------------------------------------------------
Copyright ( c ) 2014 The uuseg programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2014 The uuseg programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
19eb08198201d736dde19ce5fdfa99925da4fb46ff19c0557038a3147001cdc0 | zenspider/schemers | exercise.3.71.scm | #lang racket/base
;; Exercise 3.71
;;
;; Numbers that can be expressed as the sum of two cubes in more than
;; one way are sometimes called "Ramanujan numbers", in honor of the
;; mathematician Ramanujan.
;;
;; Ordered streams of pairs provide an elegant solution to the
;; problem of computing these numbers. To find a number that can be
;; written as the sum of two cubes in two different ways, we need
;; only generate the stream of pairs of integers (i,j) weighted
;; according to the sum i^3 + j^3 (see Exercise 3.70), then
;; search the stream for two consecutive pairs with the same weight.
;; Write a procedure to generate the Ramanujan numbers. The first
;; such number is 1,729. What are the next five?
| null | https://raw.githubusercontent.com/zenspider/schemers/2939ca553ac79013a4c3aaaec812c1bad3933b16/sicp/ch_3/exercise.3.71.scm | scheme | Ordered streams of pairs provide an elegant solution to the
problem of computing these numbers. To find a number that can be
only generate the stream of pairs of integers (i,j) weighted | #lang racket/base
Exercise 3.71
Numbers that can be expressed as the sum of two
cubes in more than one way are sometimes called " Ramanujan
numbers " , in honor of the mathematician Ramanujan.(6 )
written as the sum of two cubes in two different ways , we need
according to the sum i^3 + j^3 ( see * Note Exercise 3 - 70 : :) , then
search the stream for two consecutive pairs with the same weight .
Write a procedure to generate the Ramanujan numbers . The first
such number is 1,729 . What are the next five ?
|
ce330f967842d5995f6086b5fcd28dfddf02d02ecd13f06220c5d1515292868f | w3ntao/sicp-solution | 2-37.rkt | #lang racket
;; SICP-style alias for the empty list.
(define nil '())

;; Right fold: (accumulate op init (list a b c)) => (op a (op b (op c init))).
(define (accumulate op initial sequence)
  (cond ((null? sequence) initial)
        (else (op (car sequence)
                  (accumulate op initial (cdr sequence))))))
;; Dot product: sum of the pairwise products of v and w.
(define (dot-product v w) ; both v and w are "vectors", represented as flat lists
  (accumulate + 0 (map * v w)))
;; Multiply matrix m (a list of rows) by vector v: the i-th entry of
;; the result is the dot product of row i with v.
(define (matrix-*-vector m v)
  (map (lambda (x)
         (dot-product x v))
       m))
;; Like accumulate, but over a list of sequences: combines the k-th
;; elements of every sequence into the k-th element of the result.
;; Assumes all sequences have the same length.
(define (accumulate-n op init seqs)
  (if (null? (car seqs))
      nil
      (cons (accumulate op
                        init
                        (get-first-row seqs))
            (accumulate-n op init (get-remain-row seqs)))))
;; The head of every sub-list: one "column" worth of elements.
(define (get-first-row rows)
  (if (null? rows)
      '()
      (cons (car (car rows))
            (get-first-row (cdr rows)))))

;; Every sub-list with its head removed: the remaining columns.
(define (get-remain-row rows)
  (if (null? rows)
      '()
      (cons (cdr (car rows))
            (get-remain-row (cdr rows)))))
;; Transpose a matrix: rows become columns, by consing up the k-th
;; column from the k-th entries of every row via accumulate-n.
(define (transpose mat)
  (accumulate-n cons nil mat))
;; Matrix product: transpose n once up front, then each result row is
;; the given row of m dotted against every column of n.
(define (matrix-*-matrix m n)
  (let ((cols (transpose n)))
    (map (lambda (x)
           (matrix-*-vector cols x))
         m)))
;; Ad-hoc fixtures; only the matrix product below is evaluated when the
;; module runs — the other checks are left commented out.
(define test-matrix (list (list 1 2 3 4) (list 4 5 6 6) (list 6 7 8 9)))
(define matrix-2 (list (list 1 2 3) (list 4 5 6) (list 7 8 9)))
(define test-vec (list 1 2 3 4))
;(dot-product test-vec test-vec)
;(matrix-*-vector test-matrix test-vec)
;(transpose matrix-2)
(matrix-*-matrix matrix-2 matrix-2)
;(map * (list 1 2 3) (list 1 2 3))
(dot-product test-vec test-vec)
(matrix-*-vector test-matrix test-vec)
(transpose matrix-2)
(map * (list 1 2 3) (list 1 2 3)) | #lang racket
(define nil '())
(define (accumulate op initial sequence)
(if (null? sequence)
initial
(op (car sequence)
(accumulate op initial (cdr sequence)))))
(accumulate + 0 (map * v w)))
(define (matrix-*-vector m v)
(map (lambda (x)
(dot-product x v))
m))
(define (accumulate-n op init seqs)
(if (null? (car seqs))
nil
(cons (accumulate op
init
(get-first-row seqs))
(accumulate-n op init (get-remain-row seqs)))))
(define (get-first-row seq)
(if (null? seq)
nil
(cons (car (car seq))
(get-first-row (cdr seq)))))
(define (get-remain-row seq)
(if (null? seq)
nil
(cons (cdr (car seq))
(get-remain-row (cdr seq)))))
(define (transpose mat)
(accumulate-n cons nil mat))
(define (matrix-*-matrix m n)
(let ((cols (transpose n)))
(map (lambda (x)
(matrix-*-vector cols x))
m)))
(define test-matrix (list (list 1 2 3 4) (list 4 5 6 6) (list 6 7 8 9)))
(define matrix-2 (list (list 1 2 3) (list 4 5 6) (list 7 8 9)))
(define test-vec (list 1 2 3 4))
(matrix-*-matrix matrix-2 matrix-2)
|
51a4dc4057edbfbcf448d4ec7e13f1c9d8546e0a4bc3b932594d84c56d428f85 | melisgl/mgl | optimize.lisp | (in-package :mgl-opt)
(defsection @mgl-opt (:title "Gradient Based Optimization")
"We have a real valued, differentiable function F and the task is to
find the parameters that minimize its value. Optimization starts
from a single point in the parameter space of F, and this single
point is updated iteratively based on the gradient and value of F at
or around the current point.
Note that while the stated problem is that of global optimization,
for non-convex functions, most algorithms will tend to converge to a
local optimum.
Currently, there are two optimization algorithms:
MGL-GD:@MGL-GD (with several variants) and MGL-CG:@MGL-CG both of
which are first order methods (they do not need second order
gradients) but more can be added with the @MGL-OPT-EXTENSION-API."
(minimize function)
(@mgl-opt-iterative-optimizer section)
(@mgl-opt-cost section)
(mgl-gd:@mgl-gd section)
(mgl-cg:@mgl-cg section)
(@mgl-opt-extension-api section))
(defun minimize (optimizer gradient-source
                 &key (weights (list-segments gradient-source))
                 (dataset *infinitely-empty-dataset*))
  "Minimize the value of the real valued function represented by
  GRADIENT-SOURCE by updating some of its parameters in WEIGHTS (a MAT
  or a sequence of MATs). Return WEIGHTS. DATASET (see
  MGL:@MGL-DATASET) is a set of unoptimized parameters of the same
  function. For example, WEIGHTS may be the weights of a neural
  network while DATASET is the training set consisting of inputs
  suitable for SET-INPUT. The default
  DATASET, (*INFINITELY-EMPTY-DATASET*) is suitable for when all
  parameters are optimized, so there is nothing left to come from the
  environment.

  Optimization terminates if DATASET is a sampler and it runs out or
  when some other condition met (see TERMINATION, for example). If
  DATASET is a SEQUENCE, then it is reused over and over again.

  Examples for various optimizers are provided in MGL-GD:@MGL-GD and
  MGL-CG:@MGL-CG."
  ;; Normalize WEIGHTS to a sequence, run both initialization hooks,
  ;; then hand off to the optimizer-specific MINIMIZE* method.
  (let ((weights (ensure-seq weights)))
    (initialize-optimizer* optimizer gradient-source weights dataset)
    (initialize-gradient-source* optimizer gradient-source weights dataset)
    (minimize* optimizer gradient-source weights dataset))
  ;; Return the (possibly wrapped) original WEIGHTS argument, not the
  ;; normalized sequence.
  weights)
(defun ensure-seq (obj)
  "Coerce OBJ to a sequence: sequences pass through unchanged, anything
else is wrapped in a singleton list."
  (if (typep obj 'sequence) obj (list obj)))
;; The optimizer-specific work horse; MINIMIZE delegates here after the
;; two initialization steps have run.
(defgeneric minimize* (optimizer gradient-source weights dataset)
  (:documentation "Called by MINIMIZE after INITIALIZE-OPTIMIZER* and
  INITIALIZE-GRADIENT-SOURCE*, this generic function is the main
  extension point for writing optimizers."))
(defsection @mgl-opt-iterative-optimizer (:title "Iterative Optimizer")
(iterative-optimizer class)
(n-instances (reader iterative-optimizer))
(termination (accessor iterative-optimizer))
(on-optimization-started (accessor iterative-optimizer))
(on-optimization-finished (accessor iterative-optimizer))
(on-n-instances-changed (accessor iterative-optimizer))
"Now let's discuss a few handy utilities."
(monitor-optimization-periodically function)
(reset-optimization-monitors generic-function)
(reset-optimization-monitors (method () (iterative-optimizer t)))
(report-optimization-parameters generic-function))
(defclass iterative-optimizer ()
((n-instances
:initform 0 :initarg :n-instances :reader n-instances
:documentation "The number of instances this optimizer has seen so
far. Incremented automatically during optimization.")
(termination
:initform nil
:initarg :termination
:accessor termination
:documentation "If a number, it's the number of instances to train
on in the sense of N-INSTANCES. If N-INSTANCES is equal or greater
than this value optimization stops. If TERMINATION is NIL, then
optimization will continue. If it is T, then optimization will
stop. If it is a function of no arguments, then its return value
is processed as if it was returned by TERMINATION.")
(on-optimization-started
:initform ()
:initarg :on-optimization-started
:accessor on-optimization-started
:documentation "An event hook with parameters `(OPTIMIZER
GRADIENT-SOURCE N-INSTANCES)`. Called after initializations are
performed (INITIALIZE-OPTIMIZER*, INITIALIZE-GRADIENT-SOURCE*) but
before optimization is started.")
(on-optimization-finished
:initform ()
:initarg :on-optimization-finished
:accessor on-optimization-finished
:documentation "An event hook with parameters `(OPTIMIZER
GRADIENT-SOURCE N-INSTANCES)`. Called when optimization has
finished.")
(on-n-instances-changed
:initform ()
:initarg :on-n-instances-changed
:accessor on-n-instances-changed
:documentation "An event hook with parameters `(OPTIMIZER
GRADIENT-SOURCE N-INSTANCES)`. Called when optimization of a batch
of instances is done and N-INSTANCES is incremented."))
(:documentation "An abstract base class of MGL-GD:@MGL-GD and
MGL-CG:@MGL-CG based optimizers that iterate over instances until a
termination condition is met."))
(defmethod minimize* :around ((optimizer iterative-optimizer) gradient-source
weights dataset)
(apply-monitors (on-optimization-started optimizer)
optimizer gradient-source (n-instances optimizer))
(multiple-value-prog1
(call-next-method)
(apply-monitors (on-optimization-finished optimizer)
optimizer gradient-source (n-instances optimizer))))
(defmethod monitors ((optimizer iterative-optimizer))
())
(defun monitor-optimization-periodically (optimizer periodic-fns)
"For each periodic function in the list of PERIODIC-FNS, add a
monitor to OPTIMIZER's ON-OPTIMIZATION-STARTED,
ON-OPTIMIZATION-FINISHED and ON-N-INSTANCES-CHANGED hooks. The
monitors are simple functions that just call each periodic function
with the event parameters (OPTIMIZER GRADIENT-SOURCE N-INSTANCES).
Return OPTIMIZER.
To log and reset the monitors of the gradient source after every
1000 instances seen by OPTIMIZER:
(monitor-optimization-periodically optimizer
'((:fn log-my-test-error
:period 2000)
(:fn reset-optimization-monitors
:period 1000
:last-eval 0)))
Note how we don't pass it's allowed to just pass the initargs for a
PERIODIC-FN instead of PERIODIC-FN itself. The :LAST-EVAL 0 bit
prevents RESET-OPTIMIZATION-MONITORS from being called at the start
of the optimization when the monitors are empty anyway."
(dolist (periodic-fn periodic-fns)
(monitor-optimization-periodically* optimizer periodic-fn))
optimizer)
(defun monitor-optimization-periodically* (optimizer periodic-fn)
(check-type periodic-fn (or periodic-fn list))
(let ((periodic-fn (if (listp periodic-fn)
(apply #'make-instance 'periodic-fn
periodic-fn)
periodic-fn)))
(push (lambda (optimizer gradient-source n-instances)
(call-periodic-fn! n-instances periodic-fn
optimizer gradient-source))
(on-optimization-started optimizer))
(push (lambda (optimizer gradient-source n-instances)
(call-periodic-fn n-instances periodic-fn
optimizer gradient-source))
(on-n-instances-changed optimizer))
(push (lambda (optimizer gradient-source n-instances)
(call-periodic-fn! n-instances periodic-fn
optimizer gradient-source))
(on-optimization-finished optimizer))))
(defgeneric reset-optimization-monitors (optimizer gradient-source)
(:documentation "Report the state of [MONITORS][generic-function] of
OPTIMIZER and GRADIENT-SOURCE and reset their counters. See
MONITOR-OPTIMIZATION-PERIODICALLY for an example of how this is
used."))
(defmethod reset-optimization-monitors ((optimizer iterative-optimizer)
                                        gradient-source)
  "Log the counters of the monitors of OPTIMIZER and GRADIENT-SOURCE
  and reset them."
  (log-msg "training at n-instances: ~S~%" (n-instances optimizer))
  ;; Some monitors may have no counter (COUNTER returns NIL); drop
  ;; those before logging and resetting.
  (let ((counters (remove nil (mapcar #'counter
                                      (append (monitors optimizer)
                                              (monitors gradient-source))))))
    (log-padded counters)
    (map nil #'reset-counter counters)))
(defgeneric report-optimization-parameters (optimizer gradient-source)
(:documentation "A utility that's often called at the start of
optimization (from ON-OPTIMIZATION-STARTED). The default
implementation logs the description of GRADIENT-SOURCE (as in
DESCRIBE) and OPTIMIZER and calls LOG-MAT-ROOM.")
(:method (optimizer gradient-source)
(let ((*print-level* nil))
(with-logging-entry (stream)
(format stream "Describing gradient source:~%")
(describe gradient-source stream))
(with-logging-entry (stream)
(format stream "Describing optimizer:~%")
(describe optimizer stream)))
(log-mat-room)))
(defsection @mgl-opt-cost (:title "Cost Function")
"The function being minimized is often called the _cost_ or the
_loss_ function."
(cost generic-function)
(make-cost-monitors function)
(make-cost-monitors* generic-function))
(defgeneric cost (model)
(:documentation "Return the value of the cost function being
minimized. Calling this only makes sense in the context of an
ongoing optimization (see MINIMIZE). The cost is that of a batch of
instances."))
FIXME / FIXDOC : composite models may produce many monitors ( i.e. one
per clump in an FNN ) , or one ( such as in an RNN ) where the time
;;; steps make it difficult to go the other way easily.
;; Thin keyword-argument wrapper over the extensible generic below.
(defun make-cost-monitors (model &key operation-mode attributes)
  "Return a list of MONITOR objects, each associated with one
  BASIC-COUNTER with attribute :TYPE \"cost\". Implemented in terms of
  MAKE-COST-MONITORS*."
  (make-cost-monitors* model operation-mode attributes))
(defgeneric make-cost-monitors* (model operation-mode attributes)
(:documentation "Identical to MAKE-COST-MONITORS bar the keywords
arguments. Specialize this to add to support for new model types.")
(:method (object operation-mode attributes)
(when (applies-to-p #'cost object)
(list
(make-instance
'monitor
:measurer (lambda (instances result)
(declare (ignore instances result))
(cost object))
:counter (make-instance
'basic-counter
:prepend-attributes
(append attributes
'(:type "cost")
(if (uninterned-symbol-p (name object))
()
`(:component ,(name object))))))))))
(defsection @mgl-opt-extension-api (:title "Extension API")
(@mgl-opt-optimizer section)
(@mgl-opt-gradient-source section)
(@mgl-opt-gradient-sink section))
(defsection @mgl-opt-optimizer (:title "Implementing Optimizers")
"The following generic functions must be specialized for new
optimizer types."
(minimize* generic-function)
(initialize-optimizer* generic-function)
(segments generic-function)
"The rest are just useful for utilities for implementing
optimizers."
(terminate-optimization-p function)
(set-n-instances function)
(segment-set class)
(segments (reader segment-set))
(size (reader segment-set))
(do-segment-set macro)
(segment-set<-mat function)
(segment-set->mat function))
(defgeneric initialize-optimizer* (optimizer gradient-source weights dataset)
(:documentation "Called automatically before training starts, this
function sets up OPTIMIZER to be suitable for optimizing
GRADIENT-SOURCE. It typically creates appropriately sized
accumulators for the gradients."))
(defgeneric segments (optimizer)
(:documentation "Several weight matrices known as *segments* can be
optimized by a single optimizer. This function returns them as a
list."))
(defun terminate-optimization-p (n-instances termination)
"Utility function for subclasses of ITERATIVE-OPTIMIZER. It returns
whether optimization is to be terminated based on N-INSTANCES and
TERMINATION that are values of the respective accessors of
ITERATIVE-OPTIMIZER."
(cond ((numberp termination)
(<= termination n-instances))
((member termination '(nil t))
termination)
(t
(terminate-optimization-p n-instances (funcall termination)))))
(defun set-n-instances (optimizer gradient-source n-instances)
"Set [N-INSTANCES][(reader iterative-optimizer)] of OPTIMIZER and
fire ON-N-INSTANCES-CHANGED. ITERATIVE-OPTIMIZER subclasses must
call this to increment [N-INSTANCES][(reader iterative-optimizer)]."
(setf (slot-value optimizer 'n-instances) n-instances)
(apply-monitors (on-n-instances-changed optimizer)
optimizer gradient-source n-instances)
n-instances)
(defclass segment-set ()
((segments
:initarg :segments :reader segments
:documentation "A list of weight matrices.")
(start-indices :reader start-indices)
(size
:reader size
:documentation "The sum of the sizes of the weight matrices of
SEGMENTS."))
(:documentation "This is a utility class for optimizers that have a
list of SEGMENTS and (the weights being optimized) is able to copy
back and forth between those segments and a single MAT (the
accumulator)."))
(defmethod print-object ((set segment-set) stream)
(pprint-logical-block (stream ())
(print-unreadable-object (set stream :type t :identity t)
(format stream "~A" (segments set))))
set)
(defmethod initialize-instance :after ((segment-set segment-set)
&key &allow-other-keys)
(let ((n 0)
(start-indices '()))
(dolist (segment (segments segment-set))
(push n start-indices)
(incf n (mat-size (segment-weights segment))))
(setf (slot-value segment-set 'start-indices) (reverse start-indices)
(slot-value segment-set 'size) n)))
(defmacro do-segment-set ((segment &optional start) segment-set
&body body)
"Iterate over SEGMENTS in SEGMENT-SET. If START is specified, the it
is bound to the start index of SEGMENT within SEGMENT-SET. The start
index is the sum of the sizes of previous segments."
(alexandria:with-gensyms (%segment-set %start-index)
`(let* ((,%segment-set ,segment-set))
(loop for ,segment in (segments ,%segment-set)
,@(when start
(list 'for %start-index 'in
(list 'start-indices %segment-set)))
do (let (,@(when start
(list (list start %start-index))))
,@(when start
`((declare (type index ,start))))
,@body)))))
(defun segment-set<-mat (segment-set mat)
"Copy the values of MAT to the weight matrices of SEGMENT-SET as if
they were concatenated into a single MAT."
(map-concat (lambda (m mat) (copy! mat m))
(segments segment-set) mat :key #'segment-weights))
(defun segment-set->mat (segment-set mat)
"Copy the values of SEGMENT-SET to MAT as if they were concatenated
into a single MAT."
(map-concat #'copy! (segments segment-set) mat :key #'segment-weights))
(defsection @mgl-opt-gradient-source (:title "Implementing Gradient Sources")
"Weights can be stored in a multitude of ways. Optimizers need to
update weights, so it is assumed that weights are stored in any
number of MAT objects called segments.
The generic functions in this section must all be specialized for
new gradient sources except where noted."
(map-segments generic-function)
(map-segment-runs generic-function)
(segment-weights generic-function)
(segment-weights (method () (mat)))
(segment-derivatives generic-function)
(list-segments function)
(initialize-gradient-source* generic-function)
(initialize-gradient-source* (method () (t t t t)))
(accumulate-gradients* generic-function))
(defgeneric map-segments (fn gradient-source)
(:documentation "Apply FN to each segment of GRADIENT-SOURCE.")
(:method (fn (segment-list list))
(mapc fn segment-list)))
(defgeneric segment-weights (segment)
(:documentation "Return the weight matrix of SEGMENT. A segment
doesn't need to be a MAT object itself. For example, it may be a
MGL-BM:CHUNK of a [MGL-BM:BM][CLASS] or a MGL-BP:LUMP of a
[MGL-BP:BPN][CLASS] whose NODES slot holds the weights.")
(:method ((mat mat))
"When the segment is really a MAT, then just return it."
mat))
(defgeneric segment-derivatives (segment)
(:documentation "Return the derivatives matrix of SEGMENT. A segment
doesn't need to be a MAT object itself. For example, it may be a
MGL-BM:CHUNK of a [MGL-BM:BM][CLASS] or a MGL-BP:LUMP of a
[MGL-BP:BPN][CLASS] whose DERIVATIVES slot holds the gradient."))
(defgeneric map-segment-runs (fn segment)
(:documentation "Call FN with start and end of intervals of
consecutive indices that are not missing in SEGMENT. Called by
optimizers that support partial updates. The default implementation
assumes that all weights are present. This only needs to be
specialized if one plans to use an optimizer that knows how to deal
unused/missing weights such as MGL-GD:NORMALIZED-BATCH-GD-OPTIMIZER
and OPTIMIZER MGL-GD:PER-WEIGHT-BATCH-GD-OPTIMIZER.")
(:method (fn segment)
(let ((mat (segment-weights segment)))
(funcall fn mat 0 (mat-size mat)))))
(defun list-segments (gradient-source)
"A utility function that returns the list of segments from
MAP-SEGMENTS on GRADIENT-SOURCE."
(let ((segments ()))
(map-segments (lambda (segment)
(push segment segments))
gradient-source)
(reverse segments)))
(defgeneric initialize-gradient-source* (optimizer gradient-source weights
dataset)
(:documentation "Called automatically before MINIMIZE* is called,
this function may be specialized if GRADIENT-SOURCE needs some kind
of setup.")
(:method (optimizer gradient-source weights dataset)
"The default method does nothing."
nil))
(defgeneric accumulate-gradients* (gradient-source sink batch multiplier valuep)
(:documentation "Add MULTIPLIER times the sum of first-order
gradients to accumulators of SINK (normally accessed with
DO-GRADIENT-SINK) and if VALUEP, return the sum of values of the
function being optimized for a BATCH of instances. GRADIENT-SOURCE
is the object representing the function being optimized, SINK is
gradient sink.
Note the number of instances in BATCH may be larger than what
GRADIENT-SOURCE process in one go (in the sense of say,
MAX-N-STRIPES), so DO-BATCHES-FOR-MODEL or something like (GROUP
BATCH MAX-N-STRIPES) can be handy."))
(defsection @mgl-opt-gradient-sink (:title "Implementing Gradient Sinks")
"Optimizers call ACCUMULATE-GRADIENTS* on gradient sources. One
parameter of ACCUMULATE-GRADIENTS* is the SINK. A gradient sink
knows what accumulator matrix (if any) belongs to a segment. Sinks
are defined entirely by MAP-GRADIENT-SINK."
(map-gradient-sink generic-function)
(do-gradient-sink macro))
(defgeneric map-gradient-sink (fn sink)
(:documentation "Call FN of lambda list (SEGMENT ACCUMULATOR) on
each segment and their corresponding accumulator MAT in SINK."))
(defmacro do-gradient-sink (((segment accumulator) sink)
&body body)
"A convenience macro on top of MAP-GRADIENT-SINK."
`(map-gradient-sink (lambda (,segment ,accumulator)
,@body)
,sink))
| null | https://raw.githubusercontent.com/melisgl/mgl/27a2552632a6a9330c1a133e519e676d9c6ca714/src/optimize.lisp | lisp | steps make it difficult to go the other way easily. | (in-package :mgl-opt)
(defsection @mgl-opt (:title "Gradient Based Optimization")
"We have a real valued, differentiable function F and the task is to
find the parameters that minimize its value. Optimization starts
from a single point in the parameter space of F, and this single
point is updated iteratively based on the gradient and value of F at
or around the current point.
Note that while the stated problem is that of global optimization,
for non-convex functions, most algorithms will tend to converge to a
local optimum.
Currently, there are two optimization algorithms:
MGL-GD:@MGL-GD (with several variants) and MGL-CG:@MGL-CG both of
which are first order methods (they do not need second order
gradients) but more can be added with the @MGL-OPT-EXTENSION-API."
(minimize function)
(@mgl-opt-iterative-optimizer section)
(@mgl-opt-cost section)
(mgl-gd:@mgl-gd section)
(mgl-cg:@mgl-cg section)
(@mgl-opt-extension-api section))
(defun minimize (optimizer gradient-source
&key (weights (list-segments gradient-source))
(dataset *infinitely-empty-dataset*))
"Minimize the value of the real valued function represented by
GRADIENT-SOURCE by updating some of its parameters in WEIGHTS (a MAT
or a sequence of MATs). Return WEIGHTS. DATASET (see
MGL:@MGL-DATASET) is a set of unoptimized parameters of the same
function. For example, WEIGHTS may be the weights of a neural
network while DATASET is the training set consisting of inputs
suitable for SET-INPUT. The default
DATASET, (*INFINITELY-EMPTY-DATASET*) is suitable for when all
parameters are optimized, so there is nothing left to come from the
environment.
Optimization terminates if DATASET is a sampler and it runs out or
when some other condition met (see TERMINATION, for example). If
DATASET is a SEQUENCE, then it is reused over and over again.
Examples for various optimizers are provided in MGL-GD:@MGL-GD and
MGL-CG:@MGL-CG."
(let ((weights (ensure-seq weights)))
(initialize-optimizer* optimizer gradient-source weights dataset)
(initialize-gradient-source* optimizer gradient-source weights dataset)
(minimize* optimizer gradient-source weights dataset))
weights)
(defun ensure-seq (obj)
(if (typep obj 'sequence)
obj
(list obj)))
(defgeneric minimize* (optimizer gradient-source weights dataset)
(:documentation "Called by MINIMIZE after INITIALIZE-OPTIMIZER* and
INITIALIZE-GRADIENT-SOURCE*, this generic function is the main
extension point for writing optimizers."))
(defsection @mgl-opt-iterative-optimizer (:title "Iterative Optimizer")
(iterative-optimizer class)
(n-instances (reader iterative-optimizer))
(termination (accessor iterative-optimizer))
(on-optimization-started (accessor iterative-optimizer))
(on-optimization-finished (accessor iterative-optimizer))
(on-n-instances-changed (accessor iterative-optimizer))
"Now let's discuss a few handy utilities."
(monitor-optimization-periodically function)
(reset-optimization-monitors generic-function)
(reset-optimization-monitors (method () (iterative-optimizer t)))
(report-optimization-parameters generic-function))
(defclass iterative-optimizer ()
((n-instances
:initform 0 :initarg :n-instances :reader n-instances
:documentation "The number of instances this optimizer has seen so
far. Incremented automatically during optimization.")
(termination
:initform nil
:initarg :termination
:accessor termination
:documentation "If a number, it's the number of instances to train
on in the sense of N-INSTANCES. If N-INSTANCES is equal or greater
than this value optimization stops. If TERMINATION is NIL, then
optimization will continue. If it is T, then optimization will
stop. If it is a function of no arguments, then its return value
is processed as if it was returned by TERMINATION.")
(on-optimization-started
:initform ()
:initarg :on-optimization-started
:accessor on-optimization-started
:documentation "An event hook with parameters `(OPTIMIZER
GRADIENT-SOURCE N-INSTANCES)`. Called after initializations are
performed (INITIALIZE-OPTIMIZER*, INITIALIZE-GRADIENT-SOURCE*) but
before optimization is started.")
(on-optimization-finished
:initform ()
:initarg :on-optimization-finished
:accessor on-optimization-finished
:documentation "An event hook with parameters `(OPTIMIZER
GRADIENT-SOURCE N-INSTANCES)`. Called when optimization has
finished.")
(on-n-instances-changed
:initform ()
:initarg :on-n-instances-changed
:accessor on-n-instances-changed
:documentation "An event hook with parameters `(OPTIMIZER
GRADIENT-SOURCE N-INSTANCES)`. Called when optimization of a batch
of instances is done and N-INSTANCES is incremented."))
(:documentation "An abstract base class of MGL-GD:@MGL-GD and
MGL-CG:@MGL-CG based optimizers that iterate over instances until a
termination condition is met."))
(defmethod minimize* :around ((optimizer iterative-optimizer) gradient-source
weights dataset)
(apply-monitors (on-optimization-started optimizer)
optimizer gradient-source (n-instances optimizer))
(multiple-value-prog1
(call-next-method)
(apply-monitors (on-optimization-finished optimizer)
optimizer gradient-source (n-instances optimizer))))
(defmethod monitors ((optimizer iterative-optimizer))
())
(defun monitor-optimization-periodically (optimizer periodic-fns)
"For each periodic function in the list of PERIODIC-FNS, add a
monitor to OPTIMIZER's ON-OPTIMIZATION-STARTED,
ON-OPTIMIZATION-FINISHED and ON-N-INSTANCES-CHANGED hooks. The
monitors are simple functions that just call each periodic function
with the event parameters (OPTIMIZER GRADIENT-SOURCE N-INSTANCES).
Return OPTIMIZER.
To log and reset the monitors of the gradient source after every
1000 instances seen by OPTIMIZER:
(monitor-optimization-periodically optimizer
'((:fn log-my-test-error
:period 2000)
(:fn reset-optimization-monitors
:period 1000
:last-eval 0)))
Note how we don't pass it's allowed to just pass the initargs for a
PERIODIC-FN instead of PERIODIC-FN itself. The :LAST-EVAL 0 bit
prevents RESET-OPTIMIZATION-MONITORS from being called at the start
of the optimization when the monitors are empty anyway."
(dolist (periodic-fn periodic-fns)
(monitor-optimization-periodically* optimizer periodic-fn))
optimizer)
(defun monitor-optimization-periodically* (optimizer periodic-fn)
(check-type periodic-fn (or periodic-fn list))
(let ((periodic-fn (if (listp periodic-fn)
(apply #'make-instance 'periodic-fn
periodic-fn)
periodic-fn)))
(push (lambda (optimizer gradient-source n-instances)
(call-periodic-fn! n-instances periodic-fn
optimizer gradient-source))
(on-optimization-started optimizer))
(push (lambda (optimizer gradient-source n-instances)
(call-periodic-fn n-instances periodic-fn
optimizer gradient-source))
(on-n-instances-changed optimizer))
(push (lambda (optimizer gradient-source n-instances)
(call-periodic-fn! n-instances periodic-fn
optimizer gradient-source))
(on-optimization-finished optimizer))))
(defgeneric reset-optimization-monitors (optimizer gradient-source)
(:documentation "Report the state of [MONITORS][generic-function] of
OPTIMIZER and GRADIENT-SOURCE and reset their counters. See
MONITOR-OPTIMIZATION-PERIODICALLY for an example of how this is
used."))
(defmethod reset-optimization-monitors ((optimizer iterative-optimizer)
gradient-source)
"Log the counters of the monitors of OPTIMIZER and GRADIENT-SOURCE
and reset them."
(log-msg "training at n-instances: ~S~%" (n-instances optimizer))
(let ((counters (remove nil (mapcar #'counter
(append (monitors optimizer)
(monitors gradient-source))))))
(log-padded counters)
(map nil #'reset-counter counters)))
(defgeneric report-optimization-parameters (optimizer gradient-source)
(:documentation "A utility that's often called at the start of
optimization (from ON-OPTIMIZATION-STARTED). The default
implementation logs the description of GRADIENT-SOURCE (as in
DESCRIBE) and OPTIMIZER and calls LOG-MAT-ROOM.")
(:method (optimizer gradient-source)
(let ((*print-level* nil))
(with-logging-entry (stream)
(format stream "Describing gradient source:~%")
(describe gradient-source stream))
(with-logging-entry (stream)
(format stream "Describing optimizer:~%")
(describe optimizer stream)))
(log-mat-room)))
(defsection @mgl-opt-cost (:title "Cost Function")
"The function being minimized is often called the _cost_ or the
_loss_ function."
(cost generic-function)
(make-cost-monitors function)
(make-cost-monitors* generic-function))
(defgeneric cost (model)
(:documentation "Return the value of the cost function being
minimized. Calling this only makes sense in the context of an
ongoing optimization (see MINIMIZE). The cost is that of a batch of
instances."))
FIXME / FIXDOC : composite models may produce many monitors ( i.e. one
per clump in an FNN ) , or one ( such as in an RNN ) where the time
(defun make-cost-monitors (model &key operation-mode attributes)
"Return a list of MONITOR objects, each associated with one
BASIC-COUNTER with attribute :TYPE \"cost\". Implemented in terms of
MAKE-COST-MONITORS*."
(make-cost-monitors* model operation-mode attributes))
(defgeneric make-cost-monitors* (model operation-mode attributes)
(:documentation "Identical to MAKE-COST-MONITORS bar the keywords
arguments. Specialize this to add to support for new model types.")
(:method (object operation-mode attributes)
(when (applies-to-p #'cost object)
(list
(make-instance
'monitor
:measurer (lambda (instances result)
(declare (ignore instances result))
(cost object))
:counter (make-instance
'basic-counter
:prepend-attributes
(append attributes
'(:type "cost")
(if (uninterned-symbol-p (name object))
()
`(:component ,(name object))))))))))
(defsection @mgl-opt-extension-api (:title "Extension API")
(@mgl-opt-optimizer section)
(@mgl-opt-gradient-source section)
(@mgl-opt-gradient-sink section))
(defsection @mgl-opt-optimizer (:title "Implementing Optimizers")
"The following generic functions must be specialized for new
optimizer types."
(minimize* generic-function)
(initialize-optimizer* generic-function)
(segments generic-function)
"The rest are just useful for utilities for implementing
optimizers."
(terminate-optimization-p function)
(set-n-instances function)
(segment-set class)
(segments (reader segment-set))
(size (reader segment-set))
(do-segment-set macro)
(segment-set<-mat function)
(segment-set->mat function))
(defgeneric initialize-optimizer* (optimizer gradient-source weights dataset)
(:documentation "Called automatically before training starts, this
function sets up OPTIMIZER to be suitable for optimizing
GRADIENT-SOURCE. It typically creates appropriately sized
accumulators for the gradients."))
(defgeneric segments (optimizer)
(:documentation "Several weight matrices known as *segments* can be
optimized by a single optimizer. This function returns them as a
list."))
(defun terminate-optimization-p (n-instances termination)
"Utility function for subclasses of ITERATIVE-OPTIMIZER. It returns
whether optimization is to be terminated based on N-INSTANCES and
TERMINATION that are values of the respective accessors of
ITERATIVE-OPTIMIZER."
(cond ((numberp termination)
(<= termination n-instances))
((member termination '(nil t))
termination)
(t
(terminate-optimization-p n-instances (funcall termination)))))
(defun set-n-instances (optimizer gradient-source n-instances)
"Set [N-INSTANCES][(reader iterative-optimizer)] of OPTIMIZER and
fire ON-N-INSTANCES-CHANGED. ITERATIVE-OPTIMIZER subclasses must
call this to increment [N-INSTANCES][(reader iterative-optimizer)]."
(setf (slot-value optimizer 'n-instances) n-instances)
(apply-monitors (on-n-instances-changed optimizer)
optimizer gradient-source n-instances)
n-instances)
(defclass segment-set ()
((segments
:initarg :segments :reader segments
:documentation "A list of weight matrices.")
(start-indices :reader start-indices)
(size
:reader size
:documentation "The sum of the sizes of the weight matrices of
SEGMENTS."))
(:documentation "This is a utility class for optimizers that have a
list of SEGMENTS and (the weights being optimized) is able to copy
back and forth between those segments and a single MAT (the
accumulator)."))
(defmethod print-object ((set segment-set) stream)
(pprint-logical-block (stream ())
(print-unreadable-object (set stream :type t :identity t)
(format stream "~A" (segments set))))
set)
(defmethod initialize-instance :after ((segment-set segment-set)
&key &allow-other-keys)
(let ((n 0)
(start-indices '()))
(dolist (segment (segments segment-set))
(push n start-indices)
(incf n (mat-size (segment-weights segment))))
(setf (slot-value segment-set 'start-indices) (reverse start-indices)
(slot-value segment-set 'size) n)))
(defmacro do-segment-set ((segment &optional start) segment-set
&body body)
"Iterate over SEGMENTS in SEGMENT-SET. If START is specified, the it
is bound to the start index of SEGMENT within SEGMENT-SET. The start
index is the sum of the sizes of previous segments."
(alexandria:with-gensyms (%segment-set %start-index)
`(let* ((,%segment-set ,segment-set))
(loop for ,segment in (segments ,%segment-set)
,@(when start
(list 'for %start-index 'in
(list 'start-indices %segment-set)))
do (let (,@(when start
(list (list start %start-index))))
,@(when start
`((declare (type index ,start))))
,@body)))))
(defun segment-set<-mat (segment-set mat)
"Copy the values of MAT to the weight matrices of SEGMENT-SET as if
they were concatenated into a single MAT."
(map-concat (lambda (m mat) (copy! mat m))
(segments segment-set) mat :key #'segment-weights))
(defun segment-set->mat (segment-set mat)
"Copy the values of SEGMENT-SET to MAT as if they were concatenated
into a single MAT."
(map-concat #'copy! (segments segment-set) mat :key #'segment-weights))
(defsection @mgl-opt-gradient-source (:title "Implementing Gradient Sources")
"Weights can be stored in a multitude of ways. Optimizers need to
update weights, so it is assumed that weights are stored in any
number of MAT objects called segments.
The generic functions in this section must all be specialized for
new gradient sources except where noted."
(map-segments generic-function)
(map-segment-runs generic-function)
(segment-weights generic-function)
(segment-weights (method () (mat)))
(segment-derivatives generic-function)
(list-segments function)
(initialize-gradient-source* generic-function)
(initialize-gradient-source* (method () (t t t t)))
(accumulate-gradients* generic-function))
(defgeneric map-segments (fn gradient-source)
(:documentation "Apply FN to each segment of GRADIENT-SOURCE.")
(:method (fn (segment-list list))
(mapc fn segment-list)))
(defgeneric segment-weights (segment)
(:documentation "Return the weight matrix of SEGMENT. A segment
doesn't need to be a MAT object itself. For example, it may be a
MGL-BM:CHUNK of a [MGL-BM:BM][CLASS] or a MGL-BP:LUMP of a
[MGL-BP:BPN][CLASS] whose NODES slot holds the weights.")
(:method ((mat mat))
"When the segment is really a MAT, then just return it."
mat))
(defgeneric segment-derivatives (segment)
(:documentation "Return the derivatives matrix of SEGMENT. A segment
doesn't need to be a MAT object itself. For example, it may be a
MGL-BM:CHUNK of a [MGL-BM:BM][CLASS] or a MGL-BP:LUMP of a
[MGL-BP:BPN][CLASS] whose DERIVATIVES slot holds the gradient."))
(defgeneric map-segment-runs (fn segment)
(:documentation "Call FN with start and end of intervals of
consecutive indices that are not missing in SEGMENT. Called by
optimizers that support partial updates. The default implementation
assumes that all weights are present. This only needs to be
specialized if one plans to use an optimizer that knows how to deal
unused/missing weights such as MGL-GD:NORMALIZED-BATCH-GD-OPTIMIZER
and OPTIMIZER MGL-GD:PER-WEIGHT-BATCH-GD-OPTIMIZER.")
(:method (fn segment)
(let ((mat (segment-weights segment)))
(funcall fn mat 0 (mat-size mat)))))
(defun list-segments (gradient-source)
"A utility function that returns the list of segments from
MAP-SEGMENTS on GRADIENT-SOURCE."
(let ((segments ()))
(map-segments (lambda (segment)
(push segment segments))
gradient-source)
(reverse segments)))
(defgeneric initialize-gradient-source* (optimizer gradient-source weights
dataset)
(:documentation "Called automatically before MINIMIZE* is called,
this function may be specialized if GRADIENT-SOURCE needs some kind
of setup.")
(:method (optimizer gradient-source weights dataset)
"The default method does nothing."
nil))
(defgeneric accumulate-gradients* (gradient-source sink batch multiplier valuep)
(:documentation "Add MULTIPLIER times the sum of first-order
gradients to accumulators of SINK (normally accessed with
DO-GRADIENT-SINK) and if VALUEP, return the sum of values of the
function being optimized for a BATCH of instances. GRADIENT-SOURCE
is the object representing the function being optimized, SINK is
gradient sink.
Note the number of instances in BATCH may be larger than what
GRADIENT-SOURCE process in one go (in the sense of say,
MAX-N-STRIPES), so DO-BATCHES-FOR-MODEL or something like (GROUP
BATCH MAX-N-STRIPES) can be handy."))
(defsection @mgl-opt-gradient-sink (:title "Implementing Gradient Sinks")
"Optimizers call ACCUMULATE-GRADIENTS* on gradient sources. One
parameter of ACCUMULATE-GRADIENTS* is the SINK. A gradient sink
knows what accumulator matrix (if any) belongs to a segment. Sinks
are defined entirely by MAP-GRADIENT-SINK."
(map-gradient-sink generic-function)
(do-gradient-sink macro))
(defgeneric map-gradient-sink (fn sink)
(:documentation "Call FN of lambda list (SEGMENT ACCUMULATOR) on
each segment and their corresponding accumulator MAT in SINK."))
(defmacro do-gradient-sink (((segment accumulator) sink)
&body body)
"A convenience macro on top of MAP-GRADIENT-SINK."
`(map-gradient-sink (lambda (,segment ,accumulator)
,@body)
,sink))
|
8ba15bc3350e1bc77960ac074d27b5dc781b8e5eaf0c1dac64757f52ba018564 | osteele/cl-spec | bdd.lisp | Author :
;;; Source:
ows 2008 - 01 - 05 -- fixed compiler errors and warnings
ows 2008 - 01 - 08 -- eval - > funcall , to capture lexical scope
(in-package #:cl-spec)
Utilities
(eval-when (:compile-toplevel :load-toplevel :execute)
(defmethod obj->string ((s string))
s)
(defmethod obj->string ((s symbol))
(string s))
(defun concat-symbol (&rest args)
(intern (apply #'concatenate 'string
(mapcar #'string-upcase (mapcar #'obj->string args)))))
)
(defun respond-to? (o method &rest args)
(restart-case
(handler-bind ((undefined-function #'(lambda (c)
(declare (ignore c))
(invoke-restart 'no)))
(simple-error #'(lambda (c)
(declare (ignore c))
(invoke-restart 'no))))
(let ((sf (symbol-function method))
(cpl (sb-pcl::class-precedence-list (class-of o))))
(typecase sf
(standard-generic-function
(find t
(mapcar #'(lambda (klass)
(not (null
(find-method sf '()
(cons klass
(mapcar #'(lambda (c) (declare (ignore c)) t) args)) nil))))
cpl)))
(function t))))
(no (&optional v)
(declare (ignore v))
nil)))
;; Conditions
(define-condition expectation-not-met ()
((expectation :initarg :expectation :reader expectation)
(form :initarg :form :reader expectation-form)))
Expectations
(defclass expectation ()
((value-fn :initarg :value-fn :reader value-fn-of)
(value :accessor value-of)
(args :initarg :args :reader args-of)))
(defclass should (expectation)
())
(defmethod evaluate ((e expectation))
(setf (value-of e) (funcall (value-fn-of e))))
(defgeneric fulfills? (expectation))
(defmethod fulfills? ((e should))
(flet ((match (matcher-class args)
(restart-case
(handler-bind ((simple-error #'(lambda (c)
(declare (ignore c))
(invoke-restart 'fun))))
(matches? (make-instance matcher-class :args args) e))
(fun (&optional v)
;; This happens when matcher-class doesn't actually name
a class ; e.g. (= > 1 should = 1 ) instead of
(= > 0 should be zero )
(declare (ignore v))
(apply matcher-class (append (list (evaluate e)) (mapcar #'eval args)))))))
(with-slots (args) e
(if (eq (car args) 'not)
(not (match (cadr args) (cddr args)))
(match (car args) (cdr args))))))
Matchers
(defclass matcher ()
((args :initarg :args :reader args-of)))
(defclass be (matcher)
())
(defmethod initialize-instance :after ((matcher be) &rest initargs)
(declare (ignore initargs))
(with-slots (args) matcher
(when (equal (car args) 'a)
(pop args))))
(defgeneric matches? (matcher expr))
(defmethod matches? ((matcher be) expr)
(with-slots (args) matcher
(let* ((value (evaluate expr))
(arguments (cdr args))
(message-forms (mapcar #'(lambda (suffix)
(concat-symbol (car args) suffix))
'("" "p" "-p" "?"))))
(when (equal (car arguments) 'of)
(pop arguments)) ;; am I crazy?
(setf arguments (mapcar #'eval arguments))
(some #'(lambda (form)
(and (respond-to? value form arguments)
(apply form value arguments)))
message-forms))))
(defclass raise (matcher)
())
(defmethod matches? ((matcher raise) e)
(with-slots (args) matcher
(let ((class-name (car args)))
(restart-case
(handler-bind ((t #'(lambda (c)
(setf (value-of e) c)
(if (typep c class-name)
(invoke-restart 'raises)
(invoke-restart 'donot)))))
(evaluate e)
nil)
(raises (&optional v)
(declare (ignore v))
t)
(donot (&optional v)
(declare (ignore v))
nil)))))
;;
(defmacro => (form &rest specification)
(let ((expectation-class (car specification))
(args (cdr specification)))
`(let ((expectation
(make-instance ',expectation-class
:value-fn #'(lambda () ,form)
:args ',args)))
(unless (fulfills? expectation)
(error (make-instance 'expectation-not-met
:expectation expectation
:form ',form)))
(value-of expectation))))
Grouping
(defmacro define-with-spec-grouping (name)
(let ((with-grouping (concat-symbol "with-" name ))
(spec-groupings (concat-symbol "*spec-" name "s*"))
(spec-grouping (concat-symbol "*spec-" name "*")))
`(defmacro ,with-grouping (grouping-name &body body)
`(progn
(unless (and (boundp ',',spec-groupings) (listp ,',spec-groupings))
(defvar ,',spec-groupings nil))
(let* ((,',spec-groupings (cons ,grouping-name ,',spec-groupings))
(,',spec-grouping (car ,',spec-groupings)))
,@body)))))
(define-with-spec-grouping context)
(define-with-spec-grouping aspect)
(defmacro specify (name &body body)
`(let ((*spec-specification* ,name))
,@body))
| null | https://raw.githubusercontent.com/osteele/cl-spec/d83b8a89d55771691e439e2fb910ce202dbd6abe/bdd.lisp | lisp | Source:
Conditions
This happens when matcher-class doesn't actually name
e.g. (= > 1 should = 1 ) instead of
am I crazy?
| Author :
ows 2008 - 01 - 05 -- fixed compiler errors and warnings
ows 2008 - 01 - 08 -- eval - > funcall , to capture lexical scope
(in-package #:cl-spec)
Utilities
(eval-when (:compile-toplevel :load-toplevel :execute)
(defmethod obj->string ((s string))
s)
(defmethod obj->string ((s symbol))
(string s))
(defun concat-symbol (&rest args)
(intern (apply #'concatenate 'string
(mapcar #'string-upcase (mapcar #'obj->string args)))))
)
(defun respond-to? (o method &rest args)
(restart-case
(handler-bind ((undefined-function #'(lambda (c)
(declare (ignore c))
(invoke-restart 'no)))
(simple-error #'(lambda (c)
(declare (ignore c))
(invoke-restart 'no))))
(let ((sf (symbol-function method))
(cpl (sb-pcl::class-precedence-list (class-of o))))
(typecase sf
(standard-generic-function
(find t
(mapcar #'(lambda (klass)
(not (null
(find-method sf '()
(cons klass
(mapcar #'(lambda (c) (declare (ignore c)) t) args)) nil))))
cpl)))
(function t))))
(no (&optional v)
(declare (ignore v))
nil)))
(define-condition expectation-not-met ()
((expectation :initarg :expectation :reader expectation)
(form :initarg :form :reader expectation-form)))
Expectations
(defclass expectation ()
((value-fn :initarg :value-fn :reader value-fn-of)
(value :accessor value-of)
(args :initarg :args :reader args-of)))
(defclass should (expectation)
())
(defmethod evaluate ((e expectation))
(setf (value-of e) (funcall (value-fn-of e))))
(defgeneric fulfills? (expectation))
(defmethod fulfills? ((e should))
(flet ((match (matcher-class args)
(restart-case
(handler-bind ((simple-error #'(lambda (c)
(declare (ignore c))
(invoke-restart 'fun))))
(matches? (make-instance matcher-class :args args) e))
(fun (&optional v)
(= > 0 should be zero )
(declare (ignore v))
(apply matcher-class (append (list (evaluate e)) (mapcar #'eval args)))))))
(with-slots (args) e
(if (eq (car args) 'not)
(not (match (cadr args) (cddr args)))
(match (car args) (cdr args))))))
Matchers
(defclass matcher ()
((args :initarg :args :reader args-of)))
(defclass be (matcher)
())
(defmethod initialize-instance :after ((matcher be) &rest initargs)
(declare (ignore initargs))
(with-slots (args) matcher
(when (equal (car args) 'a)
(pop args))))
(defgeneric matches? (matcher expr))
(defmethod matches? ((matcher be) expr)
(with-slots (args) matcher
(let* ((value (evaluate expr))
(arguments (cdr args))
(message-forms (mapcar #'(lambda (suffix)
(concat-symbol (car args) suffix))
'("" "p" "-p" "?"))))
(when (equal (car arguments) 'of)
(setf arguments (mapcar #'eval arguments))
(some #'(lambda (form)
(and (respond-to? value form arguments)
(apply form value arguments)))
message-forms))))
(defclass raise (matcher)
())
(defmethod matches? ((matcher raise) e)
(with-slots (args) matcher
(let ((class-name (car args)))
(restart-case
(handler-bind ((t #'(lambda (c)
(setf (value-of e) c)
(if (typep c class-name)
(invoke-restart 'raises)
(invoke-restart 'donot)))))
(evaluate e)
nil)
(raises (&optional v)
(declare (ignore v))
t)
(donot (&optional v)
(declare (ignore v))
nil)))))
(defmacro => (form &rest specification)
(let ((expectation-class (car specification))
(args (cdr specification)))
`(let ((expectation
(make-instance ',expectation-class
:value-fn #'(lambda () ,form)
:args ',args)))
(unless (fulfills? expectation)
(error (make-instance 'expectation-not-met
:expectation expectation
:form ',form)))
(value-of expectation))))
Grouping
(defmacro define-with-spec-grouping (name)
(let ((with-grouping (concat-symbol "with-" name ))
(spec-groupings (concat-symbol "*spec-" name "s*"))
(spec-grouping (concat-symbol "*spec-" name "*")))
`(defmacro ,with-grouping (grouping-name &body body)
`(progn
(unless (and (boundp ',',spec-groupings) (listp ,',spec-groupings))
(defvar ,',spec-groupings nil))
(let* ((,',spec-groupings (cons ,grouping-name ,',spec-groupings))
(,',spec-grouping (car ,',spec-groupings)))
,@body)))))
(define-with-spec-grouping context)
(define-with-spec-grouping aspect)
(defmacro specify (name &body body)
`(let ((*spec-specification* ,name))
,@body))
|
f02ab0547c3e3b5bf1f3265a0de3e69a0286d35afe25ee179d42998b00fa3374 | Plutonomicon/plutarch-plutus | Gen.hs | # OPTIONS_GHC -Wno - orphans #
-- | Generator helpers
module Plutarch.Test.Property.Gen (
genRational,
genInteger,
genList,
bsOfLength,
) where
import Control.Monad (MonadPlus, liftM2, mfilter)
import Data.List (nub, sortOn)
import Data.Ratio ((%))
import Hedgehog (MonadGen)
import Hedgehog.Gen qualified as Gen
import Hedgehog.Range qualified as Range
import Test.Tasty.QuickCheck (
Arbitrary,
Gen,
Negative (getNegative),
Positive (getPositive),
arbitrary,
choose,
elements,
listOf1,
oneof,
vectorOf,
)
import PlutusLedgerApi.V1
import Data.ByteString (ByteString)
import Data.ByteString qualified as BS
import Data.ByteString.Internal (c2w)
import Test.QuickCheck.Instances ()
import PlutusTx.AssocMap qualified as PlutusMap
-- | Generate an 'Integer' drawn from a linear range spanning @-1e9@ to @1e9@.
genInteger :: MonadGen g => g Integer
genInteger = Gen.integral bounds
  where
    bounds = Range.linear (-1_000_000_000) 1_000_000_000
-- | Generate a 'Rational' whose numerator and denominator both come from
-- 'genInteger'; candidate denominators equal to zero are filtered out (hence
-- the 'MonadPlus' constraint).
genRational :: (MonadPlus g, MonadGen g) => g Rational
genRational = do
  numerator <- genInteger
  denominator <- mfilter (/= 0) genInteger
  pure (numerator % denominator)
-- | Generate a list of up to 100 elements drawn from the given generator,
-- with the length taken from a linear range.
genList :: MonadGen g => g a -> g [a]
genList gen = Gen.list (Range.linear 0 100) gen
----------------- Arbitrary instances for several ApiTypes -----------------------
-- | Generate a 'ByteString' of exactly @n@ bytes.  Each byte comes from an
-- arbitrary 'Char' mapped through 'c2w'.  NOTE: 'c2w' keeps only the low
-- 8 bits of code points above 255.
bsOfLength :: Int -> Gen ByteString
bsOfLength n = do
  bytes <- vectorOf n (c2w <$> choose (minBound :: Char, maxBound))
  pure (BS.pack bytes)
-- | Defers to the 'ByteString' instance (provided by
-- "Test.QuickCheck.Instances") and converts the result with 'toBuiltin'.
instance Arbitrary BuiltinByteString where
  arbitrary = fmap (toBuiltin @ByteString) arbitrary
-- | The generated underlying byte string is either empty or exactly 28 bytes
-- long.
instance Arbitrary CurrencySymbol where
  arbitrary = do
    len <- elements [0, 28]
    bytes <- bsOfLength len
    pure (CurrencySymbol (toBuiltin @ByteString bytes))
-- | Generate a 'Value' as a sorted association list from currency symbols to
-- token maps.  'zip' pairs each generated symbol with a generated token map,
-- truncating to the shorter of the two lists.
instance Arbitrary Value where
  arbitrary = do
      symbols <- uniqueSymbols
      tokenMaps <- listOf1 tokenMapGen
      pure . Value . PlutusMap.fromList . sortOn fst $ zip symbols tokenMaps
    where
      -- Token names with duplicates removed.
      uniqueTokenNames = nub <$> listOf1 (arbitrary @TokenName)
      -- Currency symbols with duplicates removed.
      uniqueSymbols = nub <$> listOf1 (arbitrary @CurrencySymbol)
      -- Sorted map from token names to nonzero (strictly positive or
      -- strictly negative) integer quantities.
      tokenMapGen = do
        names <- uniqueTokenNames
        quantities <- listOf1 (oneof [getPositive @Integer <$> arbitrary, getNegative @Integer <$> arbitrary])
        pure . PlutusMap.fromList . sortOn fst $ zip names quantities
-- | Generate a 'TokenName' of 0-32 bytes, each byte an ASCII hexadecimal
-- digit drawn from @a-f@, @A-F@ or @0-9@.
instance Arbitrary TokenName where
  arbitrary = do
      len <- choose (0, 32)
      bytes <- vectorOf len hexDigitByte
      pure (TokenName (toBuiltin @ByteString (BS.pack bytes)))
    where
      -- One byte from one of the three hex-digit character ranges.
      hexDigitByte =
        c2w <$> oneof [choose ('a', 'f'), choose ('A', 'F'), choose ('0', '9')]
-- | Wraps an arbitrary 28-byte string.
instance Arbitrary PubKeyHash where
  arbitrary = PubKeyHash . toBuiltin @ByteString <$> bsOfLength 28
-- | Wraps an arbitrary 28-byte string.
instance Arbitrary ScriptHash where
  arbitrary = ScriptHash . toBuiltin @ByteString <$> bsOfLength 28
-- | Pick either constructor with equal probability, filling it with an
-- arbitrary payload.
instance Arbitrary Credential where
  arbitrary =
    oneof
      [ fmap PubKeyCredential arbitrary
      , fmap ScriptCredential arbitrary
      ]
-- | Pick, with equal probability, either a staking hash over an arbitrary
-- credential or a staking pointer built from three arbitrary components.
instance Arbitrary StakingCredential where
  arbitrary = oneof [hashGen, ptrGen]
    where
      hashGen = fmap StakingHash arbitrary
      ptrGen = do
        a <- arbitrary
        b <- arbitrary
        c <- arbitrary
        pure (StakingPtr a b c)
-- | Build an 'Address' from two independently generated components.
instance Arbitrary Address where
  arbitrary = do
    paymentPart <- arbitrary
    stakingPart <- arbitrary
    pure (Address paymentPart stakingPart)
| null | https://raw.githubusercontent.com/Plutonomicon/plutarch-plutus/9b83892057f2aaaed76e3af6193ad1ae242244cc/plutarch-test/Plutarch/Test/Property/Gen.hs | haskell | | Generator helpers
--------------- Arbitrary instances for several ApiTypes -----------------------
List of unique token names.
List of unique currency symbols. | # OPTIONS_GHC -Wno - orphans #
module Plutarch.Test.Property.Gen (
genRational,
genInteger,
genList,
bsOfLength,
) where
import Control.Monad (MonadPlus, liftM2, mfilter)
import Data.List (nub, sortOn)
import Data.Ratio ((%))
import Hedgehog (MonadGen)
import Hedgehog.Gen qualified as Gen
import Hedgehog.Range qualified as Range
import Test.Tasty.QuickCheck (
Arbitrary,
Gen,
Negative (getNegative),
Positive (getPositive),
arbitrary,
choose,
elements,
listOf1,
oneof,
vectorOf,
)
import PlutusLedgerApi.V1
import Data.ByteString (ByteString)
import Data.ByteString qualified as BS
import Data.ByteString.Internal (c2w)
import Test.QuickCheck.Instances ()
import PlutusTx.AssocMap qualified as PlutusMap
genInteger :: MonadGen g => g Integer
genInteger = Gen.integral (Range.linear (-1_000_000_000) 1_000_000_000)
genRational :: (MonadPlus g, MonadGen g) => g Rational
genRational = liftM2 (%) genInteger (mfilter (/= 0) genInteger)
genList :: MonadGen g => g a -> g [a]
genList = Gen.list (Range.linear 0 100)
bsOfLength :: Int -> Gen ByteString
bsOfLength n =
BS.pack <$> vectorOf n (c2w <$> choose (minBound :: Char, maxBound))
instance Arbitrary BuiltinByteString where
arbitrary = toBuiltin @ByteString <$> arbitrary
instance Arbitrary CurrencySymbol where
arbitrary =
let arbitrary' =
((fmap (toBuiltin @ByteString) . bsOfLength) =<< elements [0, 28])
in CurrencySymbol <$> arbitrary'
instance Arbitrary Value where
arbitrary =
(\a -> Value . PlutusMap.fromList . sortOn fst . zip a)
<$> currSyms
<*> listOf1 arbitraryTokMap
where
tokNames = nub <$> listOf1 (arbitrary @TokenName)
currSyms = nub <$> listOf1 (arbitrary @CurrencySymbol)
arbitraryTokMap =
(\a -> PlutusMap.fromList . sortOn fst . zip a)
<$> tokNames
<*> listOf1 (oneof [getPositive @Integer <$> arbitrary, getNegative @Integer <$> arbitrary])
instance Arbitrary TokenName where
arbitrary = do
ln <- choose (0, 32)
str <-
BS.pack
<$> vectorOf
ln
( oneof $
fmap
(fmap c2w)
[ choose ('a', 'f')
, choose ('A', 'F')
, choose ('0', '9')
]
)
pure . TokenName . toBuiltin @ByteString $ str
instance Arbitrary PubKeyHash where
arbitrary =
let arbitrary' =
toBuiltin @ByteString <$> bsOfLength 28
in PubKeyHash <$> arbitrary'
instance Arbitrary ScriptHash where
arbitrary =
let arbitrary' =
toBuiltin @ByteString <$> bsOfLength 28
in ScriptHash <$> arbitrary'
instance Arbitrary Credential where
arbitrary =
oneof
[ PubKeyCredential <$> arbitrary
, ScriptCredential <$> arbitrary
]
instance Arbitrary StakingCredential where
arbitrary =
oneof
[ StakingHash <$> arbitrary
, StakingPtr <$> arbitrary <*> arbitrary <*> arbitrary
]
instance Arbitrary Address where
arbitrary = Address <$> arbitrary <*> arbitrary
|
fe77471692aed8f93bb7401ad5088faed50f2d45a844fc92c01254cfd5aa66e8 | anton-k/sharc-timbre | Types.hs | module Types where
-- primitives
type Hz = Double
type Db = Double
type PitchName = String
data Pitch = Pitch
{ pitchFund :: Hz
, pitchKeyNum :: Int
, pitchName :: PitchName
} deriving (Show)
-- sharc db
type Sharc = [Instr]
data Instr = Instr
{ instrId :: String
, instrName :: String
, instrLegend :: Legend
, instrRange :: Range InstrRange
, instrNotes :: [Note]
} deriving (Show)
data HarmonicFreq = HarmonicFreq
{ harmonicFreqHarmNum :: Int
, harmonicFreqPitch :: Pitch
} deriving (Show)
data Amplitude = Amplitude
{ amplitudeFreq :: Hz
, amplitudeHarmonicFreq :: HarmonicFreq
, amplitudeValue :: Db
} deriving (Show)
data Note = Note
{ notePitch :: Pitch
, noteSeq :: Int
, noteRange :: Range NoteRange
, noteHarmonics :: [Harmonic]
} deriving (Show)
-- | Harmonic is repesented with number in the spectrum, phase (-pi, pi), and amplitude
-- (given in decibels relative to the amplitude of the loudest harmonic for that note)
data Harmonic = Harmonic
{ harmonicId :: Int
, harmonicPhase :: Double
, harmonicAmplitude :: Db
} deriving (Show)
---------------------------------------------------
-- ranges
data Range a = Range
{ rangeLowest :: a
, rangeHighest :: a
} deriving (Show)
data InstrRange = InstrRange
{ instrRangeHarmonicFreq :: HarmonicFreq
, instrRangePitch :: Pitch
, instrRangeAmplitude :: Amplitude
} deriving (Show)
data NoteRange = NoteRange
{ noteRangeAmplitude :: NoteRangeAmplitude
, noteRangeHarmonicFreq :: NoteRangeHarmonicFreq
} deriving (Show)
data NoteRangeAmplitude = NoteRangeAmplitude
{ noteRangeAmplitudeFreq :: Hz
, noteRangeAmplitudeHarmNum :: Int
, noteRangeAmplitudeValue :: Db
} deriving (Show)
data NoteRangeHarmonicFreq = NoteRangeHarmonicFreq
{ noteRangeHarmonicFreqHarmNum :: Int
, noteRangeHarmonicFreqValue :: Hz
} deriving (Show)
---------------------------------------------------
-- Track legend
data Legend = Legend
{ legendSource :: String
, legendCd :: String
, legendTrack :: String
} deriving (Show)
| null | https://raw.githubusercontent.com/anton-k/sharc-timbre/14be260021c02f31905b3e63269f582030a45c8d/tools/Types.hs | haskell | primitives
sharc db
| Harmonic is repesented with number in the spectrum, phase (-pi, pi), and amplitude
(given in decibels relative to the amplitude of the loudest harmonic for that note)
-------------------------------------------------
ranges
-------------------------------------------------
Track legend | module Types where
type Hz = Double
type Db = Double
type PitchName = String
data Pitch = Pitch
{ pitchFund :: Hz
, pitchKeyNum :: Int
, pitchName :: PitchName
} deriving (Show)
type Sharc = [Instr]
data Instr = Instr
{ instrId :: String
, instrName :: String
, instrLegend :: Legend
, instrRange :: Range InstrRange
, instrNotes :: [Note]
} deriving (Show)
data HarmonicFreq = HarmonicFreq
{ harmonicFreqHarmNum :: Int
, harmonicFreqPitch :: Pitch
} deriving (Show)
data Amplitude = Amplitude
{ amplitudeFreq :: Hz
, amplitudeHarmonicFreq :: HarmonicFreq
, amplitudeValue :: Db
} deriving (Show)
data Note = Note
{ notePitch :: Pitch
, noteSeq :: Int
, noteRange :: Range NoteRange
, noteHarmonics :: [Harmonic]
} deriving (Show)
data Harmonic = Harmonic
{ harmonicId :: Int
, harmonicPhase :: Double
, harmonicAmplitude :: Db
} deriving (Show)
data Range a = Range
{ rangeLowest :: a
, rangeHighest :: a
} deriving (Show)
data InstrRange = InstrRange
{ instrRangeHarmonicFreq :: HarmonicFreq
, instrRangePitch :: Pitch
, instrRangeAmplitude :: Amplitude
} deriving (Show)
data NoteRange = NoteRange
{ noteRangeAmplitude :: NoteRangeAmplitude
, noteRangeHarmonicFreq :: NoteRangeHarmonicFreq
} deriving (Show)
data NoteRangeAmplitude = NoteRangeAmplitude
{ noteRangeAmplitudeFreq :: Hz
, noteRangeAmplitudeHarmNum :: Int
, noteRangeAmplitudeValue :: Db
} deriving (Show)
data NoteRangeHarmonicFreq = NoteRangeHarmonicFreq
{ noteRangeHarmonicFreqHarmNum :: Int
, noteRangeHarmonicFreqValue :: Hz
} deriving (Show)
data Legend = Legend
{ legendSource :: String
, legendCd :: String
, legendTrack :: String
} deriving (Show)
|
423fda693072d4ac2b6213b1103921e215e8052eb57891833a5292ed19e9fa95 | grin-compiler/ghc-wpc-sample-programs | Native.hs | {-# LANGUAGE OverloadedStrings #-}
|
Module : Text . Pandoc . Readers . Native
Copyright : Copyright ( C ) 2011 - 2020
License : GNU GPL , version 2 or above
Maintainer : < >
Stability : alpha
Portability : portable
Conversion of a string representation of a pandoc type ( @Pandoc@ ,
@[Block]@ , @Block@ , @[Inline]@ , or @Inline@ ) to a @Pandoc@ document .
Module : Text.Pandoc.Readers.Native
Copyright : Copyright (C) 2011-2020 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <>
Stability : alpha
Portability : portable
Conversion of a string representation of a pandoc type (@Pandoc@,
@[Block]@, @Block@, @[Inline]@, or @Inline@) to a @Pandoc@ document.
-}
module Text.Pandoc.Readers.Native ( readNative ) where
import Text.Pandoc.Definition
import Text.Pandoc.Options (ReaderOptions)
import Text.Pandoc.Shared (safeRead)
import Control.Monad.Except (throwError)
import Data.Text (Text)
import Text.Pandoc.Class.PandocMonad (PandocMonad)
import Text.Pandoc.Error
| Read native formatted text and return a Pandoc document .
-- The input may be a full pandoc document, a block list, a block,
-- an inline list, or an inline. Thus, for example,
--
> " hi "
--
-- will be treated as if it were
--
-- > Pandoc nullMeta [Plain [Str "hi"]]
--
readNative :: PandocMonad m
=> ReaderOptions
-> Text -- ^ String to parse (assuming @'\n'@ line endings)
-> m Pandoc
readNative _ s =
case maybe (Pandoc nullMeta <$> readBlocks s) Right (safeRead s) of
Right doc -> return doc
Left _ -> throwError $ PandocParseError "couldn't read native"
readBlocks :: Text -> Either PandocError [Block]
readBlocks s = maybe ((:[]) <$> readBlock s) Right (safeRead s)
readBlock :: Text -> Either PandocError Block
readBlock s = maybe (Plain <$> readInlines s) Right (safeRead s)
readInlines :: Text -> Either PandocError [Inline]
readInlines s = maybe ((:[]) <$> readInline s) Right (safeRead s)
readInline :: Text -> Either PandocError Inline
readInline s = maybe (Left . PandocParseError $ "Could not read: " <> s) Right (safeRead s)
| null | https://raw.githubusercontent.com/grin-compiler/ghc-wpc-sample-programs/0e3a9b8b7cc3fa0da7c77fb7588dd4830fb087f7/pandoc-11df2a3c0f2b1b8e351ad8caaa7cdf583e1b3b2e/src/Text/Pandoc/Readers/Native.hs | haskell | # LANGUAGE OverloadedStrings #
The input may be a full pandoc document, a block list, a block,
an inline list, or an inline. Thus, for example,
will be treated as if it were
> Pandoc nullMeta [Plain [Str "hi"]]
^ String to parse (assuming @'\n'@ line endings) | |
Module : Text . Pandoc . Readers . Native
Copyright : Copyright ( C ) 2011 - 2020
License : GNU GPL , version 2 or above
Maintainer : < >
Stability : alpha
Portability : portable
Conversion of a string representation of a pandoc type ( @Pandoc@ ,
@[Block]@ , @Block@ , @[Inline]@ , or @Inline@ ) to a @Pandoc@ document .
Module : Text.Pandoc.Readers.Native
Copyright : Copyright (C) 2011-2020 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <>
Stability : alpha
Portability : portable
Conversion of a string representation of a pandoc type (@Pandoc@,
@[Block]@, @Block@, @[Inline]@, or @Inline@) to a @Pandoc@ document.
-}
module Text.Pandoc.Readers.Native ( readNative ) where
import Text.Pandoc.Definition
import Text.Pandoc.Options (ReaderOptions)
import Text.Pandoc.Shared (safeRead)
import Control.Monad.Except (throwError)
import Data.Text (Text)
import Text.Pandoc.Class.PandocMonad (PandocMonad)
import Text.Pandoc.Error
| Read native formatted text and return a Pandoc document .
> " hi "
readNative :: PandocMonad m
=> ReaderOptions
-> m Pandoc
readNative _ s =
case maybe (Pandoc nullMeta <$> readBlocks s) Right (safeRead s) of
Right doc -> return doc
Left _ -> throwError $ PandocParseError "couldn't read native"
readBlocks :: Text -> Either PandocError [Block]
readBlocks s = maybe ((:[]) <$> readBlock s) Right (safeRead s)
readBlock :: Text -> Either PandocError Block
readBlock s = maybe (Plain <$> readInlines s) Right (safeRead s)
readInlines :: Text -> Either PandocError [Inline]
readInlines s = maybe ((:[]) <$> readInline s) Right (safeRead s)
readInline :: Text -> Either PandocError Inline
readInline s = maybe (Left . PandocParseError $ "Could not read: " <> s) Right (safeRead s)
|
e39a8260edaa4f1b420bf3aba6c91c512c1c97ddd681b792dc4c6c4072481de3 | ChrisTitusTech/gimphelp | 210_sketch_sketch-drawing.scm | 210_sketch_sketch-drawing.scm
last modified / tested by [ gimphelp.org ]
05/11/2019 on GIMP 2.10.10
;==================================================
;
; Installation:
; This script should be placed in the user or system-wide script folder.
;
; Windows 7/10
C:\Program Files\GIMP 2\share\gimp\2.0\scripts
; or
C:\Users\YOUR - NAME\AppData\Roaming\GIMP\2.10\scripts
;
;
; Linux
; /home/yourname/.config/GIMP/2.10/scripts
; or
; Linux system-wide
; /usr/share/gimp/2.0/scripts
;
;==================================================
;
; 02/15/2014 - accommodated indexed images,
; added option to flatten upon completion (and made flatten default)
;
; LICENSE
;
; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
You should have received a copy of the GNU General Public License
; along with this program. If not, see </>.
;
;==============================================================
; Original information
;
Drawing script for GIMP 2.2
Copyright ( C ) 2007 < >
;==============================================================
(define (210-drawing
img
drawable
thickness
inFlatten
)
(gimp-image-undo-group-start img)
(define indexed (car (gimp-drawable-is-indexed drawable)))
(if (= indexed TRUE)(gimp-image-convert-rgb img))
(let* (
(width (car (gimp-drawable-width drawable)))
(height (car (gimp-drawable-height drawable)))
(old-selection (car (gimp-selection-save img)))
(thf (* height 0.005 thickness ))
(image-type (car (gimp-image-base-type img)))
(layer-type (car (gimp-drawable-type drawable)))
(layer-temp1 (car (gimp-layer-new img width height layer-type "temp1" 100 LAYER-MODE-NORMAL-LEGACY)))
(layer-temp2 (car (gimp-layer-new img width height layer-type "temp2" 100 LAYER-MODE-NORMAL-LEGACY)))
)
(if (eqv? (car (gimp-selection-is-empty img)) TRUE)
(gimp-drawable-fill old-selection 2)) ; so Empty and All are the same.
(gimp-selection-none img)
(gimp-image-insert-layer img layer-temp1 0 -1)
(gimp-image-insert-layer img layer-temp2 0 -1)
(gimp-edit-copy drawable)
(gimp-floating-sel-anchor (car (gimp-edit-paste layer-temp1 0)))
(if (eqv? (car (gimp-drawable-is-gray drawable)) FALSE)
(gimp-desaturate layer-temp1))
(gimp-edit-copy layer-temp1)
(gimp-floating-sel-anchor (car (gimp-edit-paste layer-temp2 0)))
(gimp-invert layer-temp2)
(plug-in-gauss 1 img layer-temp2 thf thf 0)
(gimp-layer-set-mode layer-temp2 16)
(gimp-image-merge-down img layer-temp2 0)
(set! layer-temp1 (car (gimp-image-get-active-layer img)))
(gimp-levels layer-temp1 0 215 235 1.0 0 255)
(gimp-image-select-item img CHANNEL-OP-REPLACE old-selection)
(gimp-selection-invert img)
(if (eqv? (car (gimp-selection-is-empty img)) FALSE) ; both Empty and All are denied
(begin
(gimp-edit-clear layer-temp1)
))
(gimp-item-set-name layer-temp1 "Drawing")
(gimp-image-select-item img CHANNEL-OP-REPLACE old-selection)
(gimp-image-remove-channel img old-selection)
(if (= inFlatten TRUE)(gimp-image-flatten img))
(gimp-image-undo-group-end img)
(gimp-displays-flush)
)
)
(script-fu-register
"210-drawing"
"Drawing"
"Creates a drawing.\n\nThis version lets you adjust the line thickness."
"Eddy Verlinden <>"
"Eddy Verlinden"
"2007, juli"
"*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
SF-ADJUSTMENT "thickness" '(2 1 5 1 1 0 0)
SF-TOGGLE "Flatten image when complete?" TRUE
)
(script-fu-menu-register "210-drawing" "<Image>/Script-Fu/Sketch")
| null | https://raw.githubusercontent.com/ChrisTitusTech/gimphelp/fdbc7e3671ce6bd74cefd83ecf7216e5ee0f1542/gimp_scripts-2.10/210_sketch_sketch-drawing.scm | scheme | ==================================================
Installation:
This script should be placed in the user or system-wide script folder.
Windows 7/10
or
Linux
/home/yourname/.config/GIMP/2.10/scripts
or
Linux system-wide
/usr/share/gimp/2.0/scripts
==================================================
02/15/2014 - accommodated indexed images,
added option to flatten upon completion (and made flatten default)
LICENSE
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
==============================================================
Original information
==============================================================
so Empty and All are the same.
both Empty and All are denied | 210_sketch_sketch-drawing.scm
last modified / tested by [ gimphelp.org ]
05/11/2019 on GIMP 2.10.10
C:\Program Files\GIMP 2\share\gimp\2.0\scripts
C:\Users\YOUR - NAME\AppData\Roaming\GIMP\2.10\scripts
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
Drawing script for GIMP 2.2
Copyright ( C ) 2007 < >
(define (210-drawing
img
drawable
thickness
inFlatten
)
(gimp-image-undo-group-start img)
(define indexed (car (gimp-drawable-is-indexed drawable)))
(if (= indexed TRUE)(gimp-image-convert-rgb img))
(let* (
(width (car (gimp-drawable-width drawable)))
(height (car (gimp-drawable-height drawable)))
(old-selection (car (gimp-selection-save img)))
(thf (* height 0.005 thickness ))
(image-type (car (gimp-image-base-type img)))
(layer-type (car (gimp-drawable-type drawable)))
(layer-temp1 (car (gimp-layer-new img width height layer-type "temp1" 100 LAYER-MODE-NORMAL-LEGACY)))
(layer-temp2 (car (gimp-layer-new img width height layer-type "temp2" 100 LAYER-MODE-NORMAL-LEGACY)))
)
(if (eqv? (car (gimp-selection-is-empty img)) TRUE)
(gimp-selection-none img)
(gimp-image-insert-layer img layer-temp1 0 -1)
(gimp-image-insert-layer img layer-temp2 0 -1)
(gimp-edit-copy drawable)
(gimp-floating-sel-anchor (car (gimp-edit-paste layer-temp1 0)))
(if (eqv? (car (gimp-drawable-is-gray drawable)) FALSE)
(gimp-desaturate layer-temp1))
(gimp-edit-copy layer-temp1)
(gimp-floating-sel-anchor (car (gimp-edit-paste layer-temp2 0)))
(gimp-invert layer-temp2)
(plug-in-gauss 1 img layer-temp2 thf thf 0)
(gimp-layer-set-mode layer-temp2 16)
(gimp-image-merge-down img layer-temp2 0)
(set! layer-temp1 (car (gimp-image-get-active-layer img)))
(gimp-levels layer-temp1 0 215 235 1.0 0 255)
(gimp-image-select-item img CHANNEL-OP-REPLACE old-selection)
(gimp-selection-invert img)
(begin
(gimp-edit-clear layer-temp1)
))
(gimp-item-set-name layer-temp1 "Drawing")
(gimp-image-select-item img CHANNEL-OP-REPLACE old-selection)
(gimp-image-remove-channel img old-selection)
(if (= inFlatten TRUE)(gimp-image-flatten img))
(gimp-image-undo-group-end img)
(gimp-displays-flush)
)
)
(script-fu-register
"210-drawing"
"Drawing"
"Creates a drawing.\n\nThis version lets you adjust the line thickness."
"Eddy Verlinden <>"
"Eddy Verlinden"
"2007, juli"
"*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
SF-ADJUSTMENT "thickness" '(2 1 5 1 1 0 0)
SF-TOGGLE "Flatten image when complete?" TRUE
)
(script-fu-menu-register "210-drawing" "<Image>/Script-Fu/Sketch")
|
a3173f4f732d481edc25f4d57f36c65d9f4aac09d5c71851867545b40ac3c0b2 | herbelin/coq-hh | safe_marshal.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
let uuencode s =
let norm_s = s ^ ( String.make ( String.length s mod 3 ) ' \000 ' ) in
let rec y f x = f ( y f ) x in
let chop rem = function
| " " - > [ ]
| s - > String.sub s 0 3 : : ( rem ( String.sub s 3 ( String.length ( s - 3 ) ) ) ) in
let chunks = y chop norm_s in
let uuencode s =
let norm_s = s ^ (String.make (String.length s mod 3) '\000') in
let rec y f x = f (y f) x in
let chop rem = function
| "" -> []
| s -> String.sub s 0 3 :: (rem (String.sub s 3 (String.length (s - 3)))) in
let chunks = y chop norm_s in
*)
let hobcnv = Array.init 256 (fun i -> Printf.sprintf "%.2x" i)
let bohcnv = Array.init 256 (fun i -> i -
(if 0x30 <= i then 0x30 else 0) -
(if 0x41 <= i then 0x7 else 0) -
(if 0x61 <= i then 0x20 else 0))
let hex_of_bin ch = hobcnv.(int_of_char ch)
let bin_of_hex s = char_of_int (bohcnv.(int_of_char s.[0]) * 16 + bohcnv.(int_of_char s.[1]))
let send cout expr =
let mshl_expr = Marshal.to_string expr [] in
let payload = Buffer.create (String.length mshl_expr * 2) in
String.iter (fun c -> Buffer.add_string payload (hex_of_bin c)) mshl_expr;
Buffer.add_char payload '\n';
output_string cout (Buffer.contents payload);
flush cout
let receive cin =
let payload = input_line cin in
let mshl_expr_len = String.length payload / 2 in
let mshl_expr = Buffer.create mshl_expr_len in
let buf = String.create 2 in
for i = 0 to mshl_expr_len - 1 do
String.blit payload (2*i) buf 0 2;
Buffer.add_char mshl_expr (bin_of_hex buf)
done;
Marshal.from_string (Buffer.contents mshl_expr) 0
| null | https://raw.githubusercontent.com/herbelin/coq-hh/296d03d5049fea661e8bdbaf305ed4bf6d2001d2/lib/safe_marshal.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
********************************************************************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
let uuencode s =
let norm_s = s ^ ( String.make ( String.length s mod 3 ) ' \000 ' ) in
let rec y f x = f ( y f ) x in
let chop rem = function
| " " - > [ ]
| s - > String.sub s 0 3 : : ( rem ( String.sub s 3 ( String.length ( s - 3 ) ) ) ) in
let chunks = y chop norm_s in
let uuencode s =
let norm_s = s ^ (String.make (String.length s mod 3) '\000') in
let rec y f x = f (y f) x in
let chop rem = function
| "" -> []
| s -> String.sub s 0 3 :: (rem (String.sub s 3 (String.length (s - 3)))) in
let chunks = y chop norm_s in
*)
let hobcnv = Array.init 256 (fun i -> Printf.sprintf "%.2x" i)
let bohcnv = Array.init 256 (fun i -> i -
(if 0x30 <= i then 0x30 else 0) -
(if 0x41 <= i then 0x7 else 0) -
(if 0x61 <= i then 0x20 else 0))
let hex_of_bin ch = hobcnv.(int_of_char ch)
let bin_of_hex s = char_of_int (bohcnv.(int_of_char s.[0]) * 16 + bohcnv.(int_of_char s.[1]))
let send cout expr =
let mshl_expr = Marshal.to_string expr [] in
let payload = Buffer.create (String.length mshl_expr * 2) in
String.iter (fun c -> Buffer.add_string payload (hex_of_bin c)) mshl_expr;
Buffer.add_char payload '\n';
output_string cout (Buffer.contents payload);
flush cout
let receive cin =
let payload = input_line cin in
let mshl_expr_len = String.length payload / 2 in
let mshl_expr = Buffer.create mshl_expr_len in
let buf = String.create 2 in
for i = 0 to mshl_expr_len - 1 do
String.blit payload (2*i) buf 0 2;
Buffer.add_char mshl_expr (bin_of_hex buf)
done;
Marshal.from_string (Buffer.contents mshl_expr) 0
|
a3c9ad4a1e7ecc6d239d87d16e1000a749306896fea9785f8ce163764e5d503e | awakesecurity/spectacle | Diehard.hs | # LANGUAGE OverloadedLabels #
module Specifications.Diehard where
import Language.Spectacle
( Action,
ActionType (ActionUF),
Fairness (Unfair),
Modality (Always),
Specification (Specification),
Temporal,
TemporalType (PropG),
interaction,
plain,
prime,
specInit,
specNext,
specProp,
(.=),
pattern ConF,
pattern NilF,
type (#),
)
-- -------------------------------------------------------------------------------------------------
interactDiehardSpec :: IO ()
interactDiehardSpec = interaction diehardSpec
-- -------------------------------------------------------------------------------------------------
type DiehardSpec =
Specification
DiehardVars
'[ "emptySmall" # 'Unfair
, "emptyBig" # 'Unfair
, "fillSmall" # 'Unfair
, "fillBig" # 'Unfair
, "smallToBig" # 'Unfair
, "bigToSmall" # 'Unfair
]
'[ "isSolved" # 'Always
]
type DiehardVars =
'[ "smallJug" # Int
, "bigJug" # Int
]
emptySmall :: Action DiehardVars Bool
emptySmall = do
#smallJug .= pure 0
return True
emptyBig :: Action DiehardVars Bool
emptyBig = do
#bigJug .= pure 0
pure True
fillSmall :: Action DiehardVars Bool
fillSmall = do
#smallJug .= pure 3
return True
fillBig :: Action DiehardVars Bool
fillBig = do
#bigJug .= pure 5
pure True
bigToSmall :: Action DiehardVars Bool
bigToSmall = do
bigJug <- plain #bigJug
smallJug <- plain #smallJug
#smallJug .= pure (min (bigJug + smallJug) 3)
#bigJug .= do
smallJug' <- prime #smallJug
pure (bigJug - (smallJug' - smallJug))
pure True
smallToBig :: Action DiehardVars Bool
smallToBig = do
bigJug <- plain #bigJug
smallJug <- plain #smallJug
#bigJug .= pure (min (bigJug + smallJug) 5)
#smallJug .= do
bigJug' <- prime #bigJug
pure (smallJug - (bigJug' - bigJug))
pure True
isSolved :: Temporal DiehardVars Bool
isSolved = do
bigJug <- plain #bigJug
pure (bigJug /= 4)
diehardSpec :: DiehardSpec
diehardSpec =
Specification
{ specInit =
ConF #smallJug (pure 0)
. ConF #bigJug (pure 0)
$ NilF
, specNext =
ConF #emptySmall (ActionUF emptySmall)
. ConF #emptyBig (ActionUF emptyBig)
. ConF #fillSmall (ActionUF fillSmall)
. ConF #fillBig (ActionUF fillBig)
. ConF #smallToBig (ActionUF smallToBig)
. ConF #bigToSmall (ActionUF bigToSmall)
$ NilF
, specProp =
ConF #isSolved (PropG isSolved) NilF
}
| null | https://raw.githubusercontent.com/awakesecurity/spectacle/70501d0dc8b7fbefe1b52afff405c65e663fbf4e/test/integration/Specifications/Diehard.hs | haskell | -------------------------------------------------------------------------------------------------
------------------------------------------------------------------------------------------------- | # LANGUAGE OverloadedLabels #
module Specifications.Diehard where
import Language.Spectacle
( Action,
ActionType (ActionUF),
Fairness (Unfair),
Modality (Always),
Specification (Specification),
Temporal,
TemporalType (PropG),
interaction,
plain,
prime,
specInit,
specNext,
specProp,
(.=),
pattern ConF,
pattern NilF,
type (#),
)
interactDiehardSpec :: IO ()
interactDiehardSpec = interaction diehardSpec
type DiehardSpec =
Specification
DiehardVars
'[ "emptySmall" # 'Unfair
, "emptyBig" # 'Unfair
, "fillSmall" # 'Unfair
, "fillBig" # 'Unfair
, "smallToBig" # 'Unfair
, "bigToSmall" # 'Unfair
]
'[ "isSolved" # 'Always
]
type DiehardVars =
'[ "smallJug" # Int
, "bigJug" # Int
]
emptySmall :: Action DiehardVars Bool
emptySmall = do
#smallJug .= pure 0
return True
emptyBig :: Action DiehardVars Bool
emptyBig = do
#bigJug .= pure 0
pure True
fillSmall :: Action DiehardVars Bool
fillSmall = do
#smallJug .= pure 3
return True
fillBig :: Action DiehardVars Bool
fillBig = do
#bigJug .= pure 5
pure True
bigToSmall :: Action DiehardVars Bool
bigToSmall = do
bigJug <- plain #bigJug
smallJug <- plain #smallJug
#smallJug .= pure (min (bigJug + smallJug) 3)
#bigJug .= do
smallJug' <- prime #smallJug
pure (bigJug - (smallJug' - smallJug))
pure True
smallToBig :: Action DiehardVars Bool
smallToBig = do
bigJug <- plain #bigJug
smallJug <- plain #smallJug
#bigJug .= pure (min (bigJug + smallJug) 5)
#smallJug .= do
bigJug' <- prime #bigJug
pure (smallJug - (bigJug' - bigJug))
pure True
isSolved :: Temporal DiehardVars Bool
isSolved = do
bigJug <- plain #bigJug
pure (bigJug /= 4)
diehardSpec :: DiehardSpec
diehardSpec =
Specification
{ specInit =
ConF #smallJug (pure 0)
. ConF #bigJug (pure 0)
$ NilF
, specNext =
ConF #emptySmall (ActionUF emptySmall)
. ConF #emptyBig (ActionUF emptyBig)
. ConF #fillSmall (ActionUF fillSmall)
. ConF #fillBig (ActionUF fillBig)
. ConF #smallToBig (ActionUF smallToBig)
. ConF #bigToSmall (ActionUF bigToSmall)
$ NilF
, specProp =
ConF #isSolved (PropG isSolved) NilF
}
|
951dbec2e18571187ad56d4e53a6c0b74b1df303053beeaea10eb991e7a50bdd | aharisu/Gauche-CV | save.scm | (use cv)
(let* ([src (cv-load-image "data/image/items.jpg")]
[gray (make-image (slot-ref src 'width)
(slot-ref src 'height)
IPL_DEPTH_8U 1)])
(cv-cvt-color src gray CV_BGR2GRAY)
(cv-set-image-roi gray (make-cv-rect 0 0
(quotient (slot-ref gray 'width) 2)
(quotient (slot-ref gray 'height) 2)))
(cv-threshold gray gray 90 255 CV_THRESH_BINARY)
(cv-reset-image-roi gray)
(cv-save "image.xml" gray)
(cv-show-image "src" gray)
(cv-wait-key 0))
| null | https://raw.githubusercontent.com/aharisu/Gauche-CV/5e4c51501431c72270765121ea4d92693f11d60b/sample/save.scm | scheme | (use cv)
(let* ([src (cv-load-image "data/image/items.jpg")]
[gray (make-image (slot-ref src 'width)
(slot-ref src 'height)
IPL_DEPTH_8U 1)])
(cv-cvt-color src gray CV_BGR2GRAY)
(cv-set-image-roi gray (make-cv-rect 0 0
(quotient (slot-ref gray 'width) 2)
(quotient (slot-ref gray 'height) 2)))
(cv-threshold gray gray 90 255 CV_THRESH_BINARY)
(cv-reset-image-roi gray)
(cv-save "image.xml" gray)
(cv-show-image "src" gray)
(cv-wait-key 0))
| |
c4634d9607d1c7d1bb680658b14a73e5cf9d75b494dcdbbc92c7e932727c380b | otto-de-legacy/oscillator | dsl.clj | (ns de.otto.oscillator.graphite.dsl
(:require [clojure.string :as cs]))
(defn aliaz [target name]
(str "alias(" target ",\"" name "\")"))
(defn sum-series [target]
(str "sumSeries(" target ")"))
(defn max-series [& targets]
(str "maxSeries(" (cs/join "," targets) ")"))
(defn group [& targets]
(str "group(" (cs/join "," targets) ")"))
(defn most-deviant [amount & targets]
(str "mostDeviant(" (cs/join "," targets) "," amount ")"))
(defn summarize [target, timespan]
(str "summarize(" target ",\"" timespan "\",\"avg\")"))
(defn diff-series [target1, target2]
(str "diffSeries(" target1 ", " target2 ")"))
(defn non-negative-derivative [target]
(str "nonNegativeDerivative(" target ")"))
(defn average-series [target]
(str "averageSeries(" target ")"))
(defn keep-last-value [target]
(str "keepLastValue(" target ")"))
(defn divide-series [dividend-target divisor-target]
(str "divideSeries(" dividend-target "," divisor-target ")"))
(defn scale [target factor]
(str "scale("target "," factor ")"))
(defn scale-to-seconds [target seconds]
(str "scaleToSeconds(" target "," seconds ")"))
(defn time-shift [target shift]
(str "timeShift(" target ",\"" shift "\")"))
| null | https://raw.githubusercontent.com/otto-de-legacy/oscillator/c1ddf409a70cb91329f0039b95c7bc12289922e2/src/de/otto/oscillator/graphite/dsl.clj | clojure | (ns de.otto.oscillator.graphite.dsl
(:require [clojure.string :as cs]))
(defn aliaz [target name]
(str "alias(" target ",\"" name "\")"))
(defn sum-series [target]
(str "sumSeries(" target ")"))
(defn max-series [& targets]
(str "maxSeries(" (cs/join "," targets) ")"))
(defn group [& targets]
(str "group(" (cs/join "," targets) ")"))
(defn most-deviant [amount & targets]
(str "mostDeviant(" (cs/join "," targets) "," amount ")"))
(defn summarize [target, timespan]
(str "summarize(" target ",\"" timespan "\",\"avg\")"))
(defn diff-series [target1, target2]
(str "diffSeries(" target1 ", " target2 ")"))
(defn non-negative-derivative [target]
(str "nonNegativeDerivative(" target ")"))
(defn average-series [target]
(str "averageSeries(" target ")"))
(defn keep-last-value [target]
(str "keepLastValue(" target ")"))
(defn divide-series [dividend-target divisor-target]
(str "divideSeries(" dividend-target "," divisor-target ")"))
(defn scale [target factor]
(str "scale("target "," factor ")"))
(defn scale-to-seconds [target seconds]
(str "scaleToSeconds(" target "," seconds ")"))
(defn time-shift [target shift]
(str "timeShift(" target ",\"" shift "\")"))
| |
1bf9470c14e31361811d07bad5e8e5ffe3d186f4b19f2dcb1fce025731cbee9b | stepcut/plugins | B.hs | module B where
resource = "i'm in b"
| null | https://raw.githubusercontent.com/stepcut/plugins/52c660b5bc71182627d14c1d333d0234050cac01/testsuite/load/thiemann2/prog/B.hs | haskell | module B where
resource = "i'm in b"
| |
6f4f9c255b47e4b69b87251b4ffdf2c0aff643b82088e5e58cdaa8b950e12cb6 | sjl/coding-math | ballistics.lisp | (in-package #:coding-math.2d.ballistics)
;;;; Config
(defparameter *demo* nil)
(defparameter *width* 600)
(defparameter *height* 400)
(defparameter *center-x* (/ *width* 2))
(defparameter *center-y* (/ *height* 2))
;;;; Drawing
(defparameter *gun-pen* (make-pen :stroke (gray 0.0) :fill (gray 0.0)))
(defparameter *ball-pen* (make-pen :stroke (gray 0.1) :fill (gray 0.6)))
(defparameter *force-bg-pen* (make-pen :fill (gray 0.6)))
(defparameter *target-pen* (make-pen :stroke (rgb 0.6 0 0) :weight 2 :fill (rgb 1.0 0 0)))
(defparameter *force-fg-pen* (make-pen :fill (rgb 1.000 0.478 0.749)))
(defun draw-gun (gun)
(in-context
(translate (getf gun 'x) (getf gun 'y))
(with-pen *gun-pen*
(circle 0 0 25)
(rotate (degrees (getf gun 'angle)))
(rect 0 -8 40 16))))
(defun draw-ball (ball)
(with-pen *ball-pen*
(circle (particle-x ball) (particle-y ball) (particle-radius ball))))
(defun draw-force (force)
(with-pen *force-bg-pen*
(circle 20 (- *height* 50) 15))
(with-pen *force-fg-pen*
(circle 20
(- *height* 50)
(losh:map-range -1.0 1.0 0 15 force))))
(defun draw-target (target)
(when target
(with-pen *target-pen*
(circle (getf target :x)
(getf target :y)
(getf target :radius)))))
;;;; Game
(defun aim (gun x y)
(setf (getf gun 'angle)
(losh:clamp (- (/ losh:tau 4))
-0.3
(atan (- y (getf gun 'y))
(- x (getf gun 'x))))))
(defun shoot (game)
(force-output)
(with-slots (gun cannonball firedp raw-force) game
(let ((angle (getf gun 'angle)))
(setf
firedp t
(particle-x cannonball) (+ (getf gun 'x) (* 40 (cos angle)))
(particle-y cannonball) (+ (getf gun 'y) (* 40 (sin angle)))
(particle-speed cannonball) (losh:map-range -1.0 1.0 2 20.0 raw-force)
(particle-direction cannonball) angle))))
(defun update-ball (game)
(with-slots (cannonball firedp) game
(particle-update! cannonball)
(when (> (- (particle-y cannonball)
(particle-radius cannonball))
*height*)
(setf firedp nil))))
(defun check-target (game)
(when (and (game-target game)
(circles-collide-p (game-cannonball game)
(game-target game)))
(setf (game-win game) t)))
(defun random-target ()
(list :x (random-range 200 *width*)
:y *height*
:radius (random-range 10 40)))
(defsketch game
((width *width*)
(height *height*)
(aiming)
(gun)
(cannonball)
(can-shoot-p)
(firedp)
(force-speed 0.05)
(force-angle 0.0)
(raw-force)
(target)
(win)
)
(with-fps
(background (gray 1))
;;
(when (not firedp)
(incf force-angle force-speed)
(setf raw-force (sin force-angle)))
(when (not target)
(setf target (random-target)))
(draw-ball cannonball)
(draw-gun gun)
(draw-force raw-force)
(draw-target target)
(when (and *demo* firedp)
(update-ball *demo*)
(check-target *demo*))
(when win
(text "You win!" *center-x* *center-y*))
;;
))
;;;; Mouse
(defmethod kit.sdl2:mousebutton-event
((game game) state timestamp button x y)
(declare (ignore timestamp x y))
(when (= 1 button)
(case state
(:mousebuttondown (setf (slot-value game 'aiming) t))
(:mousebuttonup (setf (slot-value game 'aiming) nil)))))
(defmethod kit.sdl2:mousemotion-event
((game game) timestamp button-mask x y xrel yrel)
(declare (ignore timestamp button-mask xrel yrel))
(when (slot-value game 'aiming)
(aim (slot-value game 'gun) x y)))
;;;; Keyboard
(defun keydown (game scancode)
(declare (ignore game))
(scancode-case scancode
(:scancode-space
nil)))
(defun keyup (game scancode)
(scancode-case scancode
(:scancode-space
(when (not (game-firedp game))
(shoot game)))))
(defmethod kit.sdl2:keyboard-event ((instance game) state timestamp repeatp keysym)
(declare (ignore timestamp repeatp))
(cond
((eql state :keyup) (keyup instance (sdl2:scancode-value keysym)))
((eql state :keydown) (keydown instance (sdl2:scancode-value keysym)))
(t nil)))
;;;; Run
;; (defparameter *demo* (make-instance 'game))
| null | https://raw.githubusercontent.com/sjl/coding-math/8e2add14d033da41cb3ac0aac63ad67edb4dd66a/src/2d/ballistics.lisp | lisp | Config
Drawing
Game
Mouse
Keyboard
Run
(defparameter *demo* (make-instance 'game)) | (in-package #:coding-math.2d.ballistics)
(defparameter *demo* nil)
(defparameter *width* 600)
(defparameter *height* 400)
(defparameter *center-x* (/ *width* 2))
(defparameter *center-y* (/ *height* 2))
(defparameter *gun-pen* (make-pen :stroke (gray 0.0) :fill (gray 0.0)))
(defparameter *ball-pen* (make-pen :stroke (gray 0.1) :fill (gray 0.6)))
(defparameter *force-bg-pen* (make-pen :fill (gray 0.6)))
(defparameter *target-pen* (make-pen :stroke (rgb 0.6 0 0) :weight 2 :fill (rgb 1.0 0 0)))
(defparameter *force-fg-pen* (make-pen :fill (rgb 1.000 0.478 0.749)))
(defun draw-gun (gun)
(in-context
(translate (getf gun 'x) (getf gun 'y))
(with-pen *gun-pen*
(circle 0 0 25)
(rotate (degrees (getf gun 'angle)))
(rect 0 -8 40 16))))
(defun draw-ball (ball)
(with-pen *ball-pen*
(circle (particle-x ball) (particle-y ball) (particle-radius ball))))
(defun draw-force (force)
(with-pen *force-bg-pen*
(circle 20 (- *height* 50) 15))
(with-pen *force-fg-pen*
(circle 20
(- *height* 50)
(losh:map-range -1.0 1.0 0 15 force))))
(defun draw-target (target)
(when target
(with-pen *target-pen*
(circle (getf target :x)
(getf target :y)
(getf target :radius)))))
(defun aim (gun x y)
(setf (getf gun 'angle)
(losh:clamp (- (/ losh:tau 4))
-0.3
(atan (- y (getf gun 'y))
(- x (getf gun 'x))))))
(defun shoot (game)
(force-output)
(with-slots (gun cannonball firedp raw-force) game
(let ((angle (getf gun 'angle)))
(setf
firedp t
(particle-x cannonball) (+ (getf gun 'x) (* 40 (cos angle)))
(particle-y cannonball) (+ (getf gun 'y) (* 40 (sin angle)))
(particle-speed cannonball) (losh:map-range -1.0 1.0 2 20.0 raw-force)
(particle-direction cannonball) angle))))
(defun update-ball (game)
(with-slots (cannonball firedp) game
(particle-update! cannonball)
(when (> (- (particle-y cannonball)
(particle-radius cannonball))
*height*)
(setf firedp nil))))
(defun check-target (game)
(when (and (game-target game)
(circles-collide-p (game-cannonball game)
(game-target game)))
(setf (game-win game) t)))
(defun random-target ()
(list :x (random-range 200 *width*)
:y *height*
:radius (random-range 10 40)))
(defsketch game
((width *width*)
(height *height*)
(aiming)
(gun)
(cannonball)
(can-shoot-p)
(firedp)
(force-speed 0.05)
(force-angle 0.0)
(raw-force)
(target)
(win)
)
(with-fps
(background (gray 1))
(when (not firedp)
(incf force-angle force-speed)
(setf raw-force (sin force-angle)))
(when (not target)
(setf target (random-target)))
(draw-ball cannonball)
(draw-gun gun)
(draw-force raw-force)
(draw-target target)
(when (and *demo* firedp)
(update-ball *demo*)
(check-target *demo*))
(when win
(text "You win!" *center-x* *center-y*))
))
(defmethod kit.sdl2:mousebutton-event
((game game) state timestamp button x y)
(declare (ignore timestamp x y))
(when (= 1 button)
(case state
(:mousebuttondown (setf (slot-value game 'aiming) t))
(:mousebuttonup (setf (slot-value game 'aiming) nil)))))
(defmethod kit.sdl2:mousemotion-event
((game game) timestamp button-mask x y xrel yrel)
(declare (ignore timestamp button-mask xrel yrel))
(when (slot-value game 'aiming)
(aim (slot-value game 'gun) x y)))
(defun keydown (game scancode)
(declare (ignore game))
(scancode-case scancode
(:scancode-space
nil)))
(defun keyup (game scancode)
(scancode-case scancode
(:scancode-space
(when (not (game-firedp game))
(shoot game)))))
(defmethod kit.sdl2:keyboard-event ((instance game) state timestamp repeatp keysym)
(declare (ignore timestamp repeatp))
(cond
((eql state :keyup) (keyup instance (sdl2:scancode-value keysym)))
((eql state :keydown) (keydown instance (sdl2:scancode-value keysym)))
(t nil)))
|
6d960554747865a5595d8f422ec6147de3453df9451ea345b1d98e8bbb0a63ab | daypack-dev/telltime | from_now_cmd.ml | open Cmdliner
let expr_arg =
let doc = "Duration expression" in
Arg.(required & pos 0 (some string) None & info [] ~docv:"EXPR" ~doc)
let run (duration_expr : string) : unit =
match Timere_parse.duration duration_expr with
| Error msg -> print_endline msg
| Ok duration ->
let duration_in_seconds = Timere.Duration.to_seconds duration in
Fmt.pr "Now : %a\n" (Timere.pp_timestamp ())
Config.cur_timestamp;
Fmt.pr "Duration (original) : %s\n" duration_expr;
Fmt.pr "Duration (normalized) : %a\n" Timere.Duration.pp duration;
Fmt.pr "Now + duration : %a\n" (Timere.pp_timestamp ())
(Int64.add Config.cur_timestamp duration_in_seconds)
let cmd = (Term.(const run $ expr_arg), Term.info "from-now")
| null | https://raw.githubusercontent.com/daypack-dev/telltime/44397926939dfc27da600903a471219cd1ff1c47/src/from_now_cmd.ml | ocaml | open Cmdliner
let expr_arg =
let doc = "Duration expression" in
Arg.(required & pos 0 (some string) None & info [] ~docv:"EXPR" ~doc)
let run (duration_expr : string) : unit =
match Timere_parse.duration duration_expr with
| Error msg -> print_endline msg
| Ok duration ->
let duration_in_seconds = Timere.Duration.to_seconds duration in
Fmt.pr "Now : %a\n" (Timere.pp_timestamp ())
Config.cur_timestamp;
Fmt.pr "Duration (original) : %s\n" duration_expr;
Fmt.pr "Duration (normalized) : %a\n" Timere.Duration.pp duration;
Fmt.pr "Now + duration : %a\n" (Timere.pp_timestamp ())
(Int64.add Config.cur_timestamp duration_in_seconds)
let cmd = (Term.(const run $ expr_arg), Term.info "from-now")
| |
9ed9085a62cca87d66d27d5cc18f1c18ff2b64d05e5aff3db101d37df9ef186c | brianium/yoose | spec.clj | (ns brianium.yoose.spec
(:require [clojure.spec.alpha :as s]
[brianium.yoose :as yoose]))
(s/def ::use-case yoose/use-case?)
(s/def ::pull-handler (s/fspec :args (s/cat :x any?)))
(s/def ::port-value (complement nil?))
(s/fdef yoose/push!
:args (s/cat :use-case ::use-case :value ::port-value)
:ret ::use-case)
(s/fdef yoose/pull!
:args (s/cat :use-case ::use-case :fn1-handler ::pull-handler)
:ret ::use-case)
(s/fdef yoose/pull!!
:args (s/cat :use-case ::use-case)
:ret ::port-value)
(s/fdef yoose/<in
:args (s/cat :use-case ::use-case)
:ret ::port-value)
(s/fdef yoose/>out
:args (s/cat :use-case ::use-case :value ::port-value)
:ret ::use-case)
(s/fdef yoose/close!
:args (s/cat :use-case ::use-case)
:ret nil?)
| null | https://raw.githubusercontent.com/brianium/yoose/c4f2892798cbdfef726ce221723c9397108e472e/src/brianium/yoose/spec.clj | clojure | (ns brianium.yoose.spec
(:require [clojure.spec.alpha :as s]
[brianium.yoose :as yoose]))
(s/def ::use-case yoose/use-case?)
(s/def ::pull-handler (s/fspec :args (s/cat :x any?)))
(s/def ::port-value (complement nil?))
(s/fdef yoose/push!
:args (s/cat :use-case ::use-case :value ::port-value)
:ret ::use-case)
(s/fdef yoose/pull!
:args (s/cat :use-case ::use-case :fn1-handler ::pull-handler)
:ret ::use-case)
(s/fdef yoose/pull!!
:args (s/cat :use-case ::use-case)
:ret ::port-value)
(s/fdef yoose/<in
:args (s/cat :use-case ::use-case)
:ret ::port-value)
(s/fdef yoose/>out
:args (s/cat :use-case ::use-case :value ::port-value)
:ret ::use-case)
(s/fdef yoose/close!
:args (s/cat :use-case ::use-case)
:ret nil?)
| |
91c29b6312b2afa4b41939f6aa93927643e3e9d8c9b27863554f01cb2fc860e7 | metabase/toucan | phone_number.clj | (ns toucan.test-models.phone-number
(:require [toucan.models :as models]))
(models/defmodel PhoneNumber :phone_numbers
models/IModel
(primary-key [_] :number))
| null | https://raw.githubusercontent.com/metabase/toucan/29a921750f3051dce350255cfbd33512428bc3f8/test/toucan/test_models/phone_number.clj | clojure | (ns toucan.test-models.phone-number
(:require [toucan.models :as models]))
(models/defmodel PhoneNumber :phone_numbers
models/IModel
(primary-key [_] :number))
| |
595b7b42a46d0157ad20ed757f31d11775de52afbec9fa724b9a199b48dfecc7 | GrammaticalFramework/gf-core | Interpreter.hs | module GF.Command.Interpreter (
CommandEnv(..),mkCommandEnv,
interpretCommandLine,
getCommandOp
) where
import GF.Command.CommandInfo
import GF.Command.Abstract
import GF.Command.Parse
import PGF.Internal(Expr(..))
import GF.Infra.UseIO(putStrLnE)
import Control.Monad(when)
import qualified Data.Map as Map
import GF.Infra.UseIO (Output)
import qualified Control.Monad.Fail as Fail
data CommandEnv m = CommandEnv {
commands :: Map.Map String (CommandInfo m),
commandmacros :: Map.Map String CommandLine,
expmacros :: Map.Map String Expr
}
--mkCommandEnv :: PGFEnv -> CommandEnv
mkCommandEnv cmds = CommandEnv cmds Map.empty Map.empty
: : CommandEnv - > String - > SIO ( )
interpretCommandLine :: (Fail.MonadFail m, Output m, TypeCheckArg m) => CommandEnv m -> String -> m ()
interpretCommandLine env line =
case readCommandLine line of
Just [] -> return ()
Just pipes -> mapM_ (interpretPipe env) pipes
Nothing -> putStrLnE $ "command not parsed: "++line
interpretPipe env cs = do
Piped v@(_,s) <- intercs cs void
putStrLnE s
return ()
where
intercs [] args = return args
intercs (c:cs) (Piped (args,_)) = interc c args >>= intercs cs
interc comm@(Command co opts arg) args =
case co of
'%':f -> case Map.lookup f (commandmacros env) of
Just css ->
do args <- getCommandTrees env False arg args
mapM_ (interpretPipe env) (appLine args css)
return void
Nothing -> do
putStrLnE $ "command macro " ++ co ++ " not interpreted"
return void
_ -> interpret env args comm
appLine = map . map . appCommand
-- | macro definition applications: replace ?i by (exps !! i)
appCommand :: CommandArguments -> Command -> Command
appCommand args c@(Command i os arg) = case arg of
AExpr e -> Command i os (AExpr (app e))
_ -> c
where
xs = toExprs args
app e = case e of
EAbs b x e -> EAbs b x (app e)
EApp e1 e2 -> EApp (app e1) (app e2)
ELit l -> ELit l
EMeta i -> xs !! i
EFun x -> EFun x
-- | return the trees to be sent in pipe, and the output possibly printed
interpret : : CommandEnv - > [ Expr ] - > Command - > SIO CommandOutput
interpret env trees comm =
do (info,opts,trees) <- getCommand env trees comm
tss@(Piped (_,s)) <- exec info opts trees
when (isOpt "tr" opts) $ putStrLnE s
return tss
-- | analyse command parse tree to a uniform datastructure, normalizing comm name
--- the env is needed for macro lookup
getCommand : : CommandEnv - > [ Expr ] - > Command - > Either String ( CommandInfo PGFEnv,[Option],[Expr ] )
getCommand env es co@(Command c opts arg) =
do info <- getCommandInfo env c
checkOpts info opts
es <- getCommandTrees env (needsTypeCheck info) arg es
return (info,opts,es)
getCommandInfo : : CommandEnv - > String - > Either String ( CommandInfo PGFEnv )
getCommandInfo env cmd =
case Map.lookup (getCommandOp cmd) (commands env) of
Just info -> return info
Nothing -> fail $ "command not found: " ++ cmd
checkOpts : : CommandInfo env - > [ Option ] - > Either String ( )
checkOpts info opts =
case
[o | OOpt o <- opts, notElem o ("tr" : map fst (options info))] ++
[o | OFlag o _ <- opts, notElem o (map fst (flags info))]
of
[] -> return ()
[o] -> fail $ "option not interpreted: " ++ o
os -> fail $ "options not interpreted: " ++ unwords os
getCommandTrees : : CommandEnv - [ Expr ] - > Either String [ Expr ]
getCommandTrees env needsTypeCheck a args =
case a of
AMacro m -> case Map.lookup m (expmacros env) of
Just e -> one e
_ -> return (Exprs []) -- report error?
AExpr e -> if needsTypeCheck
then one =<< typeCheckArg e
else one e
ATerm t -> return (Term t)
ANoArg -> return args -- use piped
where
one e = return (Exprs [e]) -- ignore piped
| null | https://raw.githubusercontent.com/GrammaticalFramework/gf-core/9b4f2dd18b64b770aaebfa1885085e8e3447f119/src/compiler/GF/Command/Interpreter.hs | haskell | mkCommandEnv :: PGFEnv -> CommandEnv
| macro definition applications: replace ?i by (exps !! i)
| return the trees to be sent in pipe, and the output possibly printed
| analyse command parse tree to a uniform datastructure, normalizing comm name
- the env is needed for macro lookup
report error?
use piped
ignore piped | module GF.Command.Interpreter (
CommandEnv(..),mkCommandEnv,
interpretCommandLine,
getCommandOp
) where
import GF.Command.CommandInfo
import GF.Command.Abstract
import GF.Command.Parse
import PGF.Internal(Expr(..))
import GF.Infra.UseIO(putStrLnE)
import Control.Monad(when)
import qualified Data.Map as Map
import GF.Infra.UseIO (Output)
import qualified Control.Monad.Fail as Fail
data CommandEnv m = CommandEnv {
commands :: Map.Map String (CommandInfo m),
commandmacros :: Map.Map String CommandLine,
expmacros :: Map.Map String Expr
}
mkCommandEnv cmds = CommandEnv cmds Map.empty Map.empty
: : CommandEnv - > String - > SIO ( )
interpretCommandLine :: (Fail.MonadFail m, Output m, TypeCheckArg m) => CommandEnv m -> String -> m ()
interpretCommandLine env line =
case readCommandLine line of
Just [] -> return ()
Just pipes -> mapM_ (interpretPipe env) pipes
Nothing -> putStrLnE $ "command not parsed: "++line
interpretPipe env cs = do
Piped v@(_,s) <- intercs cs void
putStrLnE s
return ()
where
intercs [] args = return args
intercs (c:cs) (Piped (args,_)) = interc c args >>= intercs cs
interc comm@(Command co opts arg) args =
case co of
'%':f -> case Map.lookup f (commandmacros env) of
Just css ->
do args <- getCommandTrees env False arg args
mapM_ (interpretPipe env) (appLine args css)
return void
Nothing -> do
putStrLnE $ "command macro " ++ co ++ " not interpreted"
return void
_ -> interpret env args comm
appLine = map . map . appCommand
appCommand :: CommandArguments -> Command -> Command
appCommand args c@(Command i os arg) = case arg of
AExpr e -> Command i os (AExpr (app e))
_ -> c
where
xs = toExprs args
app e = case e of
EAbs b x e -> EAbs b x (app e)
EApp e1 e2 -> EApp (app e1) (app e2)
ELit l -> ELit l
EMeta i -> xs !! i
EFun x -> EFun x
interpret : : CommandEnv - > [ Expr ] - > Command - > SIO CommandOutput
interpret env trees comm =
do (info,opts,trees) <- getCommand env trees comm
tss@(Piped (_,s)) <- exec info opts trees
when (isOpt "tr" opts) $ putStrLnE s
return tss
getCommand : : CommandEnv - > [ Expr ] - > Command - > Either String ( CommandInfo PGFEnv,[Option],[Expr ] )
getCommand env es co@(Command c opts arg) =
do info <- getCommandInfo env c
checkOpts info opts
es <- getCommandTrees env (needsTypeCheck info) arg es
return (info,opts,es)
getCommandInfo : : CommandEnv - > String - > Either String ( CommandInfo PGFEnv )
getCommandInfo env cmd =
case Map.lookup (getCommandOp cmd) (commands env) of
Just info -> return info
Nothing -> fail $ "command not found: " ++ cmd
checkOpts : : CommandInfo env - > [ Option ] - > Either String ( )
checkOpts info opts =
case
[o | OOpt o <- opts, notElem o ("tr" : map fst (options info))] ++
[o | OFlag o _ <- opts, notElem o (map fst (flags info))]
of
[] -> return ()
[o] -> fail $ "option not interpreted: " ++ o
os -> fail $ "options not interpreted: " ++ unwords os
getCommandTrees : : CommandEnv - [ Expr ] - > Either String [ Expr ]
getCommandTrees env needsTypeCheck a args =
case a of
AMacro m -> case Map.lookup m (expmacros env) of
Just e -> one e
AExpr e -> if needsTypeCheck
then one =<< typeCheckArg e
else one e
ATerm t -> return (Term t)
where
|
66861030bea7e2cdb8ac73abfaa01b73c6f17d3a903c5d23a3512c2e93f97e63 | jebberjeb/specviz | spec.cljc | (ns specviz.spec
"Analyze clojure.spec specs."
(:require
[clojure.spec.alpha :as s]))
(defn registered?
"Returns true if `x` is the keyword of a registered spec?"
[x]
(some? (s/get-spec x)))
(defn depends-on*
[names spec-form]
(cond (coll? spec-form)
(doseq [s spec-form] (depends-on* names s))
(and (registered? spec-form)
(not (contains? @names spec-form)))
(do (swap! names conj spec-form)
(depends-on* names (s/form (s/get-spec spec-form))))))
(defn depends-on
"Returns a collection of the qualified-keywords of all specs referenced
by the spec-form, transatively."
[spec-name]
(let [names (atom #{spec-name})]
(depends-on* names (s/form (s/get-spec spec-name)))
@names))
(defn conform-or-throw
[spec x]
"Return the result of conforming `x` using `spec`. If `x` does not conform,
throw an exception."
(when-let [reason (s/explain-data spec x)]
(throw (ex-info "invalid spec" {:reason reason})))
(s/conform spec x))
(defn literal?
"Returns true if `x` is a spec literal, ex: `(clojure.spec/coll-of int?)`."
[x]
(when (coll? x)
(= (namespace (first x))
"clojure.spec")))
| null | https://raw.githubusercontent.com/jebberjeb/specviz/89d3ff269232dc21d1d1be226dd0bc10388ec68f/src/specviz/spec.cljc | clojure | (ns specviz.spec
"Analyze clojure.spec specs."
(:require
[clojure.spec.alpha :as s]))
(defn registered?
"Returns true if `x` is the keyword of a registered spec?"
[x]
(some? (s/get-spec x)))
(defn depends-on*
[names spec-form]
(cond (coll? spec-form)
(doseq [s spec-form] (depends-on* names s))
(and (registered? spec-form)
(not (contains? @names spec-form)))
(do (swap! names conj spec-form)
(depends-on* names (s/form (s/get-spec spec-form))))))
(defn depends-on
"Returns a collection of the qualified-keywords of all specs referenced
by the spec-form, transatively."
[spec-name]
(let [names (atom #{spec-name})]
(depends-on* names (s/form (s/get-spec spec-name)))
@names))
(defn conform-or-throw
[spec x]
"Return the result of conforming `x` using `spec`. If `x` does not conform,
throw an exception."
(when-let [reason (s/explain-data spec x)]
(throw (ex-info "invalid spec" {:reason reason})))
(s/conform spec x))
(defn literal?
"Returns true if `x` is a spec literal, ex: `(clojure.spec/coll-of int?)`."
[x]
(when (coll? x)
(= (namespace (first x))
"clojure.spec")))
| |
1dfd73acc12602e3858e4165ec21c0b6e3054c3d89d112a11c972559364e80a8 | facebook/duckling | SK.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory .
module Duckling.Dimensions.SK
( allDimensions
) where
import Duckling.Dimensions.Types
allDimensions :: [Seal Dimension]
allDimensions =
[ Seal Numeral
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/Duckling/Dimensions/SK.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant | Copyright ( c ) 2016 - present , Facebook , Inc.
of patent rights can be found in the PATENTS file in the same directory .
module Duckling.Dimensions.SK
( allDimensions
) where
import Duckling.Dimensions.Types
allDimensions :: [Seal Dimension]
allDimensions =
[ Seal Numeral
]
|
bf0f18503625b85c0f2be6c7e48bb944bf8ed596c042fe3087619d9925a75192 | Raynes/conch | conch.clj | (ns me.raynes.conch
(:require [me.raynes.conch.low-level :as conch]
[clojure.java.io :as io]
[clojure.string :as string])
(:import java.util.concurrent.LinkedBlockingQueue))
(def ^:dynamic *throw*
"If set to false, exit codes are ignored. If true (default),
throw exceptions for non-zero exit codes."
true)
(defprotocol Redirectable
(redirect [this options k proc]))
(defn byte? [x]
(and (not (nil? x))
(= java.lang.Byte (.getClass x))))
(defn test-array
[t]
(let [check (type (t []))]
(fn [arg] (instance? check arg))))
(def byte-array?
(test-array byte-array))
(defn write-to-writer [writer s is-binary]
(cond
(byte? (first s)) (.write writer (byte-array s))
(or (not is-binary)
(byte-array? (first s))) (if (char? (first s))
(.write writer (apply str s))
(doseq [x s] (.write writer x)))))
(extend-type java.io.File
Redirectable
(redirect [f options k proc]
(let [s (k proc)
is-binary (:binary options)]
(with-open [writer (if is-binary (io/output-stream f) (java.io.FileWriter. f))]
(write-to-writer writer s is-binary)))))
(extend-type clojure.lang.IFn
Redirectable
(redirect [f options k proc]
(doseq [buffer (get proc k)]
(f buffer proc))))
(extend-type java.io.Writer
Redirectable
(redirect [w options k proc]
(let [s (get proc k)]
(write-to-writer w s (:binary options)))))
(defn seqify? [options k]
(let [seqify (:seq options)]
(or (= seqify k)
(true? seqify))))
(extend-type nil
Redirectable
(redirect [_ options k proc]
(let [seqify (:seq options)
s (k proc)]
(cond
(seqify? options k) s
(byte? (first s)) (byte-array s)
(byte-array? (first s)) (byte-array (mapcat seq s))
:else (string/join s)))))
(defprotocol Drinkable
(drink [this proc]))
(extend-type clojure.lang.ISeq
Drinkable
(drink [s proc]
(with-open [writer (java.io.PrintWriter. (:in proc))]
(binding [*out* writer]
(doseq [x s]
(println x))))
(conch/done proc)))
(extend-type java.io.Reader
Drinkable
(drink [r proc]
(conch/feed-from proc r)
(conch/done proc)))
(extend-type java.io.File
Drinkable
(drink [f proc]
(drink (io/reader f) proc)))
(extend-type java.lang.String
Drinkable
(drink [s proc]
(conch/feed-from-string proc s)
(conch/done proc)))
(defn get-drunk [item proc]
(drink
(if (coll? item)
(seq item)
item)
proc))
(defn add-proc-args [args options]
(if (seq options)
(apply concat args
(select-keys options
[:redirect-err
:env
:clear-env
:dir]))
args))
(defn queue-seq [q]
(lazy-seq
(let [x (.take q)]
(when-not (= x :eof)
(cons x (queue-seq q))))))
(defmulti buffer (fn [kind _ _]
(if (number? kind)
:number
kind)))
(defmethod buffer :number [kind reader binary]
#(try
(let [cbuf (make-array (if binary Byte/TYPE Character/TYPE) kind)
size (.read reader cbuf)]
(when-not (neg? size)
(let [result (if (= size kind)
cbuf
(take size cbuf))]
(if binary
(if (seq? result) (byte-array result) result)
(string/join result)))))
(catch java.io.IOException _)))
(defn ubyte [val]
(if (>= val 128)
(byte (- val 256))
(byte val)))
(defmethod buffer :none [_ reader binary]
#(try
(let [c (.read reader)]
(when-not (neg? c)
(if binary
;; Return a byte (convert from unsigned value)
(ubyte c)
;; Return a char
(char c))))
(catch java.io.IOException _)))
(defmethod buffer :line [_ reader binary]
#(try
(.readLine reader)
(catch java.io.IOException _)))
(defn queue-stream [stream buffer-type binary]
(let [queue (LinkedBlockingQueue.)
read-object (if binary stream (io/reader stream))]
(.start
(Thread.
(fn []
(doseq [x (take-while identity (repeatedly (buffer buffer-type read-object binary)))]
(.put queue x))
(.put queue :eof))))
(queue-seq queue)))
(defn queue-output [proc buffer-type binary]
(assoc proc
:out (queue-stream (:out proc) buffer-type binary)
:err (queue-stream (:err proc) buffer-type binary)))
(defn compute-buffer [options]
(update-in options [:buffer]
#(if-let [buffer %]
buffer
(if (and (not (:binary options))
(or (:seq options)
(:pipe options)
(ifn? (:out options))
(ifn? (:err options))))
:line
1024))))
(defn exit-exception
  "Throw an ExceptionInfo for a non-zero exit code.  The message
  embeds the (dereferenced) exit code and the full verbose map rides
  along as the exception's data."
  [verbose]
  (let [code @(:exit-code verbose)]
    (throw (ex-info (str "Program returned non-zero exit code " code)
                    verbose))))
(defn run-command
  "Launch program `name` with `args`, wiring stdin/stdout/stderr
  according to `options` (see execute).  Returns the redirected output
  (or the verbose map when :verbose is set).  Throws via
  exit-exception on a non-zero exit code unless :throw is explicitly
  false, or *throw* is rebound to false and no :throw key is given."
  [name args options]
  (let [proc (apply conch/proc name (add-proc-args (map str args) options))
        options (compute-buffer options)
        {:keys [buffer out in err timeout verbose binary]} options
        proc (queue-output proc buffer binary)
        ;; exit code is awaited on a future so output redirection can
        ;; proceed concurrently; a timeout is honored when given
        exit-code (future (if timeout
                            (conch/exit-code proc timeout)
                            (conch/exit-code proc)))]
    (when in (future (get-drunk in proc)))
    (let [proc-out (future (redirect out options :out proc))
          proc-err (future (redirect err options :err proc))
          proc-out @proc-out
          proc-err @proc-err
          verbose-out {:proc proc
                       :exit-code exit-code
                       :stdout proc-out
                       :stderr proc-err}
          result (cond
                  verbose verbose-out
                  (= (:seq options) :err) proc-err
                  :else proc-out)]
      ;; Not using `zero?` here because exit-code can be a keyword
      ;; (e.g. :timeout).  NOTE(review): this line was previously bare
      ;; text missing its ";;" prefix, which broke the form.
      (if (= 0 @exit-code)
        result
        (cond (and (contains? options :throw)
                   (:throw options))
              (exit-exception verbose-out)
              (and (not (contains? options :throw))
                   *throw*)
              (exit-exception verbose-out)
              :else result)))))
;; NOTE(review): this definition is immediately shadowed by the second
;; `defn execute` below, which re-implements the argument parsing (and
;; additionally extracts a seq argument as :in).  Only the later
;; definition is effective at load time; this one is dead code kept
;; as-is — consider removing it upstream.
(defn execute [name & args]
  (let [[[options] args] ((juxt filter remove) map? args)]
    (if (:background options)
      (future (run-command name args options))
      (run-command name args options))))
(defn execute
  "Run program `name` with the given arguments.  A trailing map is
  treated as the options map, and any seq argument is pulled out and
  passed as the process's stdin via :in.  When :background is set the
  command runs in a future; otherwise it runs synchronously (see
  run-command for the full option set and return value)."
  [name & args]
  (let [end        (last args)
        options    (when (map? end) end)
        in-arg     (first (filter seq? args))
        positional (remove seq? args)
        positional (if options (drop-last positional) positional)
        options    (if in-arg (assoc options :in in-arg) options)]
    (if (:background options)
      (future (run-command name positional options))
      (run-command name positional options))))
(defmacro programs
  "Creates functions corresponding to programs on the PATH, named by
  names.  Each generated fn applies `execute` to the program name and
  its runtime arguments, e.g. (programs ls) defines (ls \"-la\")."
  [& names]
  `(do ~@(for [name names]
           `(defn ~name [& ~'args]
              (apply execute ~(str name) ~'args)))))
;; Build the form for an anonymous fn that shells out to `prog` via
;; execute; used by let-programs and with-programs to bind program
;; names in a local scope.
(defn- program-form [prog]
  `(fn [& args#] (apply execute ~prog args#)))
(defn map-nth
  "Calls f on every nth element of coll. If start is passed, starts
  at that element (counting from zero), otherwise starts with zero.
  All other elements pass through unchanged."
  ([f nth coll] (map-nth f 0 nth coll))
  ([f start nth coll]
   ;; Build an infinite pattern of transforms: `start` identities,
   ;; then f followed by (nth - 1) identities, repeated; zip it with
   ;; coll by applying each transform to its element.
   (let [transforms (concat (repeat start identity)
                            (cycle (cons f (repeat (dec nth) identity))))]
     (map (fn [transform x] (transform x)) transforms coll))))
;; Bindings come in [sym path sym path ...] pairs; map-nth wraps every
;; second element (the path strings) in program-form so each symbol is
;; bound to a fn that executes that path.
(defmacro let-programs
  "Like let, but expects bindings to be symbols to strings of paths to
  programs."
  [bindings & body]
  `(let [~@(map-nth #(program-form %) 1 2 bindings)]
     ~@body))
;; Each symbol in `programs` is bound to a fn executing the program of
;; the same name (the symbol is stringified via program-form), scoped
;; to the body only — unlike the `programs` macro, no vars are defined.
(defmacro with-programs
  "Like programs, but only binds names in the scope of the with-programs call."
  [programs & body]
  `(let [~@(interleave programs (map (comp program-form str) programs))]
     ~@body))
| null | https://raw.githubusercontent.com/Raynes/conch/3acbfe564173aa7d6d0ef46d69f17f8e9993db75/src/me/raynes/conch.clj | clojure | Return a byte (convert from unsigned value)
Return a char | (ns me.raynes.conch
(:require [me.raynes.conch.low-level :as conch]
[clojure.java.io :as io]
[clojure.string :as string])
(:import java.util.concurrent.LinkedBlockingQueue))
(def ^:dynamic *throw*
"If set to false, exit codes are ignored. If true (default),
throw exceptions for non-zero exit codes."
true)
(defprotocol Redirectable
(redirect [this options k proc]))
(defn byte? [x]
(and (not (nil? x))
(= java.lang.Byte (.getClass x))))
(defn test-array
[t]
(let [check (type (t []))]
(fn [arg] (instance? check arg))))
(def byte-array?
(test-array byte-array))
(defn write-to-writer [writer s is-binary]
(cond
(byte? (first s)) (.write writer (byte-array s))
(or (not is-binary)
(byte-array? (first s))) (if (char? (first s))
(.write writer (apply str s))
(doseq [x s] (.write writer x)))))
(extend-type java.io.File
Redirectable
(redirect [f options k proc]
(let [s (k proc)
is-binary (:binary options)]
(with-open [writer (if is-binary (io/output-stream f) (java.io.FileWriter. f))]
(write-to-writer writer s is-binary)))))
(extend-type clojure.lang.IFn
Redirectable
(redirect [f options k proc]
(doseq [buffer (get proc k)]
(f buffer proc))))
(extend-type java.io.Writer
Redirectable
(redirect [w options k proc]
(let [s (get proc k)]
(write-to-writer w s (:binary options)))))
(defn seqify? [options k]
(let [seqify (:seq options)]
(or (= seqify k)
(true? seqify))))
(extend-type nil
Redirectable
(redirect [_ options k proc]
(let [seqify (:seq options)
s (k proc)]
(cond
(seqify? options k) s
(byte? (first s)) (byte-array s)
(byte-array? (first s)) (byte-array (mapcat seq s))
:else (string/join s)))))
(defprotocol Drinkable
(drink [this proc]))
(extend-type clojure.lang.ISeq
Drinkable
(drink [s proc]
(with-open [writer (java.io.PrintWriter. (:in proc))]
(binding [*out* writer]
(doseq [x s]
(println x))))
(conch/done proc)))
(extend-type java.io.Reader
Drinkable
(drink [r proc]
(conch/feed-from proc r)
(conch/done proc)))
(extend-type java.io.File
Drinkable
(drink [f proc]
(drink (io/reader f) proc)))
(extend-type java.lang.String
Drinkable
(drink [s proc]
(conch/feed-from-string proc s)
(conch/done proc)))
(defn get-drunk [item proc]
(drink
(if (coll? item)
(seq item)
item)
proc))
(defn add-proc-args [args options]
(if (seq options)
(apply concat args
(select-keys options
[:redirect-err
:env
:clear-env
:dir]))
args))
(defn queue-seq [q]
(lazy-seq
(let [x (.take q)]
(when-not (= x :eof)
(cons x (queue-seq q))))))
(defmulti buffer (fn [kind _ _]
(if (number? kind)
:number
kind)))
(defmethod buffer :number [kind reader binary]
#(try
(let [cbuf (make-array (if binary Byte/TYPE Character/TYPE) kind)
size (.read reader cbuf)]
(when-not (neg? size)
(let [result (if (= size kind)
cbuf
(take size cbuf))]
(if binary
(if (seq? result) (byte-array result) result)
(string/join result)))))
(catch java.io.IOException _)))
(defn ubyte [val]
(if (>= val 128)
(byte (- val 256))
(byte val)))
(defmethod buffer :none [_ reader binary]
#(try
(let [c (.read reader)]
(when-not (neg? c)
(if binary
(ubyte c)
(char c))))
(catch java.io.IOException _)))
(defmethod buffer :line [_ reader binary]
#(try
(.readLine reader)
(catch java.io.IOException _)))
(defn queue-stream [stream buffer-type binary]
(let [queue (LinkedBlockingQueue.)
read-object (if binary stream (io/reader stream))]
(.start
(Thread.
(fn []
(doseq [x (take-while identity (repeatedly (buffer buffer-type read-object binary)))]
(.put queue x))
(.put queue :eof))))
(queue-seq queue)))
(defn queue-output [proc buffer-type binary]
(assoc proc
:out (queue-stream (:out proc) buffer-type binary)
:err (queue-stream (:err proc) buffer-type binary)))
(defn compute-buffer [options]
(update-in options [:buffer]
#(if-let [buffer %]
buffer
(if (and (not (:binary options))
(or (:seq options)
(:pipe options)
(ifn? (:out options))
(ifn? (:err options))))
:line
1024))))
(defn exit-exception [verbose]
(throw (ex-info (str "Program returned non-zero exit code "
@(:exit-code verbose))
verbose)))
(defn run-command [name args options]
(let [proc (apply conch/proc name (add-proc-args (map str args) options))
options (compute-buffer options)
{:keys [buffer out in err timeout verbose binary]} options
proc (queue-output proc buffer binary)
exit-code (future (if timeout
(conch/exit-code proc timeout)
(conch/exit-code proc)))]
(when in (future (get-drunk in proc)))
(let [proc-out (future (redirect out options :out proc))
proc-err (future (redirect err options :err proc))
proc-out @proc-out
proc-err @proc-err
verbose-out {:proc proc
:exit-code exit-code
:stdout proc-out
:stderr proc-err}
result (cond
verbose verbose-out
(= (:seq options) :err) proc-err
:else proc-out)]
Not using ` zero ? ` here because exit - code can be a keyword .
(if (= 0 @exit-code)
result
(cond (and (contains? options :throw)
(:throw options))
(exit-exception verbose-out)
(and (not (contains? options :throw))
*throw*)
(exit-exception verbose-out)
:else result)))))
(defn execute [name & args]
(let [[[options] args] ((juxt filter remove) map? args)]
(if (:background options)
(future (run-command name args options))
(run-command name args options))))
(defn execute [name & args]
(let [end (last args)
in-arg (first (filter #(seq? %) args))
args (remove #(seq? %) args)
options (when (map? end) end)
args (if options (drop-last args) args)
options (if in-arg (assoc options :in in-arg) options)]
(if (:background options)
(future (run-command name args options))
(run-command name args options))))
(defmacro programs
"Creates functions corresponding to progams on the PATH, named by names."
[& names]
`(do ~@(for [name names]
`(defn ~name [& ~'args]
(apply execute ~(str name) ~'args)))))
(defn- program-form [prog]
`(fn [& args#] (apply execute ~prog args#)))
(defn map-nth
"Calls f on every nth element of coll. If start is passed, starts
at that element (counting from zero), otherwise starts with zero."
([f nth coll] (map-nth f 0 nth coll))
([f start nth coll]
(map #(% %2)
(concat (repeat start identity)
(cycle (cons f (repeat (dec nth) identity))))
coll)))
(defmacro let-programs
"Like let, but expects bindings to be symbols to strings of paths to
programs."
[bindings & body]
`(let [~@(map-nth #(program-form %) 1 2 bindings)]
~@body))
(defmacro with-programs
"Like programs, but only binds names in the scope of the with-programs call."
[programs & body]
`(let [~@(interleave programs (map (comp program-form str) programs))]
~@body))
|
b7d1b10c8370868979c1ad1998d5b900031faad51ff6e173b16e32490d86678e | Leapsight/plum_db | plum_db_dvvset.erl | %%-------------------------------------------------------------------
%%
%% File: plum_db_dvvset.erl
%%
@author < >
@author < >
%
The MIT License ( MIT )
Copyright ( C ) 2013
%%
%% Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files ( the " Software " ) , to deal in the Software without restriction ,
%% including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense , and/or sell copies of the Software , and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR IMPLIED , INCLUDING
%% BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
%%
%% @doc
%% An Erlang implementation of *compact* Dotted Version Vectors, which
%% provides a container for a set of concurrent values (siblings) with causal
%% order information.
%%
%% For further reading, visit the
< a href=" / ricardobcl / Dotted - Version - Vectors / tree / ompact">github page</a > .
%% @end
%%
%% <a href="">
%% Dotted Version Vectors: Logical Clocks for Optimistic Replication
%% </a>
%% @end
%%
%%-------------------------------------------------------------------
-module(plum_db_dvvset).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([new/1,
new/2,
sync/1,
join/1,
update/2,
update/3,
size/1,
ids/1,
values/1,
equal/2,
less/2,
map/2,
last/2,
lww/2,
reconcile/2
]).
-export_type([clock/0, vector/0, id/0, value/0]).
% % @doc
%% STRUCTURE details:
%% * entries() are sorted by id()
%% * each counter() also includes the number of values in that id()
%% * the values in each triple of entries() are causally ordered and each new value goes to the head of the list
-type clock() :: {entries(), values()}.
-type vector() :: [{id(), counter()}].
-type entries() :: [{id(), counter(), values()}].
-type id() :: any().
-type values() :: [value()].
-type value() :: any().
-type counter() :: non_neg_integer().
%% @doc Constructs a new clock set without causal history,
%% and receives a list of values that gos to the anonymous list.
-spec new(value() | [value()]) -> clock().
new(Vs) when is_list(Vs) -> {[], Vs};
new(V) -> {[], [V]}.
%% @doc Constructs a new clock set with the causal history
%% of the given version vector / vector clock,
%% and receives a list of values that gos to the anonymous list.
The version vector SHOULD BE a direct result of .
-spec new(vector(), value() | [value()]) -> clock().
new(VV, Vs) when is_list(Vs) ->
VVS = lists:sort(VV), % defense against non-order preserving serialization
{[{I, N, []} || {I, N} <- VVS], Vs};
new(VV, V) -> new(VV, [V]).
@doc Synchronizes a list of clocks using sync/2 .
%% It discards (causally) outdated values,
%% while merging all causal histories.
-spec sync([clock()]) -> clock().
sync(L) -> lists:foldl(fun sync/2, {}, L).
%% Private function
-spec sync(clock(), clock()) -> clock().
sync({}, C) -> C;
sync(C ,{}) -> C;
sync(C1={E1,V1},C2={E2,V2}) ->
V = case less(C1,C2) of
C1 < C2 = > return V2
false -> case less(C2,C1) of
C2 < C1 = > return V1
false -> % keep all unique anonymous values and sync entries()
sets:to_list(sets:from_list(V1++V2))
end
end,
{sync2(E1,E2),V}.
%% Private function
-spec sync2(entries(), entries()) -> entries().
sync2([], C) -> C;
sync2(C, []) -> C;
sync2([{I1, N1, L1}=H1 | T1]=C1, [{I2, N2, L2}=H2 | T2]=C2) ->
if
I1 < I2 -> [H1 | sync2(T1, C2)];
I1 > I2 -> [H2 | sync2(T2, C1)];
true -> [merge(I1, N1, L1, N2, L2) | sync2(T1, T2)]
end.
%% Private function
-spec merge(id(), counter(), values(), counter(), values()) -> {id(), counter(), values()}.
merge(I, N1, L1, N2, L2) ->
LL1 = length(L1),
LL2 = length(L2),
case N1 >= N2 of
true ->
case N1 - LL1 >= N2 - LL2 of
true -> {I, N1, L1};
false -> {I, N1, lists:sublist(L1, N1 - N2 + LL2)}
end;
false ->
case N2 - LL2 >= N1 - LL1 of
true -> {I, N2, L2};
false -> {I, N2, lists:sublist(L2, N2 - N1 + LL1)}
end
end.
%% @doc Return a version vector that represents the causal history.
-spec join(clock()) -> vector().
join({C,_}) -> [{I, N} || {I, N, _} <- C].
%% @doc Advances the causal history with the given id.
%% The new value is the *anonymous dot* of the clock.
The client clock SHOULD BE a direct result of .
-spec update(clock(), id()) -> clock().
update({C,[V]}, I) -> {event(C, I, V), []}.
%% @doc Advances the causal history of the
first clock with the given i d , while synchronizing
with the second clock , thus the new clock is
%% causally newer than both clocks in the argument.
%% The new value is the *anonymous dot* of the clock.
The first clock SHOULD BE a direct result of ,
%% which is intended to be the client clock with
%% the new value in the *anonymous dot* while
the second clock is from the local server .
-spec update(clock(), clock(), id()) -> clock().
update({Cc,[V]}, Cr, I) ->
Sync both clocks without the new value
{C,Vs} = sync({Cc,[]}, Cr),
%% We create a new event on the synced causal history,
%% with the id I and the new value.
%% The anonymous values that were synced still remain.
{event(C, I, V), Vs}.
%% Private function
-spec event(entries(), id(), value()) -> entries().
event([], I, V) -> [{I, 1, [V]}];
event([{I, N, L} | T], I, V) -> [{I, N+1, [V | L]} | T];
event([{I1, _, _} | _]=C, I, V) when I1 > I -> [{I, 1, [V]} | C];
event([H | T], I, V) -> [H | event(T, I, V)].
%% @doc Returns the total number of values in this clock set.
-spec size(clock()) -> non_neg_integer().
size({C,Vs}) -> lists:sum([length(L) || {_,_,L} <- C]) + length(Vs).
%% @doc Returns all the ids used in this clock set.
-spec ids(clock()) -> [id()].
ids({C,_}) -> ([I || {I,_,_} <- C]).
%% @doc Returns all the values used in this clock set,
%% including the anonymous values.
-spec values(clock()) -> [value()].
values({C,Vs}) -> Vs ++ lists:append([L || {_,_,L} <- C]).
%% @doc Compares the equality of both clocks, regarding
%% only the causal histories, thus ignoring the values.
-spec equal(clock() | vector(), clock() | vector()) -> boolean().
equal({C1,_},{C2,_}) -> equal2(C1,C2); % DVVSet
equal(C1,C2) when is_list(C1) and is_list(C2) -> equal2(C1,C2). %vector clocks
%% Private function
-spec equal2(vector(), vector()) -> boolean().
equal2([], []) -> true;
equal2([{I, C, L1} | T1], [{I, C, L2} | T2])
when length(L1) =:= length(L2) ->
equal2(T1, T2);
equal2(_, _) -> false.
@doc Returns True if the first clock is causally older than
the second clock , thus values on the first clock are outdated .
%% Returns False otherwise.
-spec less(clock(), clock()) -> boolean().
less({C1,_}, {C2,_}) -> greater(C2, C1, false).
%% Private function
-spec greater(vector(), vector(), boolean()) -> boolean().
greater([], [], Strict) -> Strict;
greater([_|_], [], _) -> true;
greater([], [_|_], _) -> false;
greater([{I, N1, _} | T1], [{I, N2, _} | T2], Strict) ->
if
N1 == N2 -> greater(T1, T2, Strict);
N1 > N2 -> greater(T1, T2, true);
N1 < N2 -> false
end;
greater([{I1, _, _} | T1], [{I2, _, _} | _]=C2, _) when I1 < I2 -> greater(T1, C2, true);
greater(_, _, _) -> false.
%% @doc Maps (applies) a function on all values in this clock set,
%% returning the same clock set with the updated values.
-spec map(fun((value()) -> value()), clock()) -> clock().
map(F, {C,Vs}) ->
{[ {I, N, lists:map(F, V)} || {I, N, V} <- C], lists:map(F, Vs)}.
@doc Return a clock with the same causal history , but with only one
%% value in the anonymous placeholder. This value is the result of
the function F , which takes all values and returns a single new value .
-spec reconcile(Winner::fun(([value()]) -> value()), clock()) -> clock().
reconcile(F, C) ->
V = F(values(C)),
new(join(C),[V]).
%% @doc Returns the latest value in the clock set,
%% according to function F(A,B), which returns *true* if
%% A compares less than or equal to B, false otherwise.
-spec last(LessOrEqual::fun((value(),value()) -> boolean()), clock()) -> value().
last(F, C) ->
{_ ,_ , V2} = find_entry(F, C),
V2.
@doc Return a clock with the same causal history , but with only one
%% value in its original position. This value is the newest value
%% in the given clock, according to function F(A,B), which returns *true*
%% if A compares less than or equal to B, false otherwise.
-spec lww(LessOrEqual::fun((value(),value()) -> boolean()), clock()) -> clock().
lww(F, C={E,_}) ->
case find_entry(F, C) of
{id, I, V} -> {join_and_replace(I, V, E),[]};
{anonym, _, V} -> new(join(C),[V])
end.
%% find_entry/2 - Private function
find_entry(F, {[], [V|T]}) -> find_entry(F, null, V, {[],T}, anonym);
find_entry(F, {[{_, _, []} | T], Vs}) -> find_entry(F, {T,Vs});
find_entry(F, {[{I, _, [V|_]} | T], Vs}) -> find_entry(F, I, V, {T,Vs}, id).
%% find_entry/5 - Private function
find_entry(F, I, V, C, Flag) ->
Fun = fun (A,B) ->
case F(A,B) of
false -> {left,A}; % A is newer than B
true -> {right,B} % A is older than B
end
end,
find_entry2(Fun, I, V, C, Flag).
%% find_entry2/5 - Private function
find_entry2(_, I, V, {[], []}, anonym) -> {anonym, I , V};
find_entry2(_, I, V, {[], []}, id) -> {id, I, V};
find_entry2(F, I, V, {[], [V1 | T]}, Flag) ->
case F(V, V1) of
{left,V2} -> find_entry2(F, I, V2, {[],T}, Flag);
{right,V2} -> find_entry2(F, I, V2, {[],T}, anonym)
end;
find_entry2(F, I, V, {[{_, _, []} | T], Vs}, Flag) -> find_entry2(F, I, V, {T, Vs}, Flag);
find_entry2(F, I, V, {[{I1, _, [V1|_]} | T], Vs}, Flag) ->
case F(V, V1) of
{left,V2} -> find_entry2(F, I, V2, {T, Vs}, Flag);
{right,V2} -> find_entry2(F, I1, V2, {T, Vs}, Flag)
end.
%% Private function
join_and_replace(Ir, V, C) ->
[if
I == Ir -> {I, N, [V]};
true -> {I, N, []}
end
|| {I, N, _} <- C].
%% ===================================================================
EUnit tests
%% ===================================================================
-ifdef(TEST).
join_test() ->
A = new([v1]),
A1 = update(A,a),
B = new(join(A1),[v2]),
B1 = update(B, A1, b),
?assertEqual( join(A) , [] ),
?assertEqual( join(A1) , [{a,1}] ),
?assertEqual( join(B1) , [{a,1},{b,1}] ),
ok.
update_test() ->
A0 = update(new([v1]),a),
A1 = update(new(join(A0),[v2]), A0, a),
A2 = update(new(join(A1),[v3]), A1, b),
A3 = update(new(join(A0),[v4]), A1, b),
A4 = update(new(join(A0),[v5]), A1, a),
?assertEqual( A0 , {[{a,1,[v1]}],[]} ),
?assertEqual( A1 , {[{a,2,[v2]}],[]} ),
?assertEqual( A2 , {[{a,2,[]}, {b,1,[v3]}],[]} ),
?assertEqual( A3 , {[{a,2,[v2]}, {b,1,[v4]}],[]} ),
?assertEqual( A4 , {[{a,3,[v5,v2]}],[]} ),
ok.
sync_test() ->
X = {[{x,1,[]}],[]},
A = update(new([v1]),a),
Y = update(new([v2]),b),
A1 = update(new(join(A),[v2]), a),
A3 = update(new(join(A1),[v3]), b),
A4 = update(new(join(A1),[v3]), c),
F = fun (L,R) -> L>R end,
W = {[{a,1,[]}],[]},
Z = {[{a,2,[v2,v1]}],[]},
?assertEqual( sync([W,Z]) , {[{a,2,[v2]}],[]} ),
?assertEqual( sync([W,Z]) , sync([Z,W]) ),
?assertEqual( sync([A,A1]) , sync([A1,A]) ),
?assertEqual( sync([A4,A3]) , sync([A3,A4]) ),
?assertEqual( sync([A4,A3]) , {[{a,2,[]}, {b,1,[v3]}, {c,1,[v3]}],[]} ),
?assertEqual( sync([X,A]) , {[{a,1,[v1]},{x,1,[]}],[]} ),
?assertEqual( sync([X,A]) , sync([A,X]) ),
?assertEqual( sync([X,A]) , sync([A,X]) ),
?assertEqual( sync([A,Y]) , {[{a,1,[v1]},{b,1,[v2]}],[]} ),
?assertEqual( sync([Y,A]) , sync([A,Y]) ),
?assertEqual( sync([Y,A]) , sync([A,Y]) ),
?assertEqual( sync([A,X]) , sync([X,A]) ),
?assertEqual( lww(F,A4) , sync([A4,lww(F,A4)]) ),
ok.
syn_update_test() ->
writes v1 w/o VV
reads v1 with version vector ( VV )
writes v2 w/o VV
writes v3 with VV from v1
?assertEqual( VV1 , [{a,1}] ),
?assertEqual( A0 , {[{a,1,[v1]}],[]} ),
?assertEqual( A1 , {[{a,2,[v2,v1]}],[]} ),
%% now A2 should only have v2 and v3, since v3 was causally newer than v1
?assertEqual( A2 , {[{a,3,[v3,v2]}],[]} ),
ok.
event_test() ->
{A,_} = update(new([v1]),a),
?assertEqual( event(A,a,v2) , [{a,2,[v2,v1]}] ),
?assertEqual( event(A,b,v2) , [{a,1,[v1]}, {b,1,[v2]}] ),
ok.
lww_last_test() ->
F = fun (A,B) -> A =< B end,
F2 = fun ({_,TS1}, {_,TS2}) -> TS1 =< TS2 end,
X = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[]},
Y = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[10,0]},
Z = {[{a,4,[5,2]}, {b,1,[1]}], [3]},
A = {[{a,4,[{5, 1002345}, {7, 1002340}]}, {b,1,[{4, 1001340}]}], [{2, 1001140}]},
?assertEqual( last(F,X) , 5 ),
?assertEqual( last(F,Y) , 10 ),
?assertEqual( lww(F,X) , {[{a,4,[5]},{b,1,[]},{c,1,[]}],[]} ),
?assertEqual( lww(F,Y) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[10]} ),
?assertEqual( lww(F,Z) , {[{a,4,[5]},{b,1,[]}],[]} ),
?assertEqual( lww(F2,A) , {[{a,4,[{5, 1002345}]}, {b,1,[]}], []} ),
ok.
fww_last_test() ->
F = fun (A,B) -> A >= B end,
F2 = fun ({_,TS1}, {_,TS2}) -> TS1 >= TS2 end,
X = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[]},
Y = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[10,0]},
Z = {[{a,4,[5,2]}, {b,1,[1]}], [3]},
A = {[{a,4,[{5, 1002345}, {7, 1002340}]}, {b,1,[{4, 1001340}]}], [{2, 1001140}]},
?assertEqual( last(F,X) , 3 ),
?assertEqual( last(F,Y) , 0 ),
?assertEqual( lww(F,X) , {[{a,4,[]},{b,1,[]},{c,1,[3]}],[]} ),
?assertEqual( lww(F,Y) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[0]} ),
?assertEqual( lww(F,Z) , {[{a,4,[]},{b,1,[1]}],[]} ),
?assertEqual( lww(F2,A) , {[{a,4,[]}, {b,1,[]}], [{2, 1001140}]} ),
ok.
reconcile_test() ->
F1 = fun (L) -> lists:sum(L) end,
F2 = fun (L) -> hd(lists:sort(L)) end,
X = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[]},
Y = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[10,0]},
?assertEqual( reconcile(F1,X) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[10]} ),
?assertEqual( reconcile(F1,Y) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[20]} ),
?assertEqual( reconcile(F2,X) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[2]} ),
?assertEqual( reconcile(F2,Y) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[0]} ),
ok.
less_test() ->
A = update(new(v1),[a]),
B = update(new(join(A),[v2]), a),
B2 = update(new(join(A),[v2]), b),
B3 = update(new(join(A),[v2]), z),
C = update(new(join(B),[v3]), A, c),
D = update(new(join(C),[v4]), B2, d),
?assert( less(A,B) ),
?assert( less(A,C) ),
?assert( less(B,C) ),
?assert( less(B,D) ),
?assert( less(B2,D) ),
?assert( less(A,D) ),
?assertNot( less(B2,C) ),
?assertNot( less(B,B2) ),
?assertNot( less(B2,B) ),
?assertNot( less(A,A) ),
?assertNot( less(C,C) ),
?assertNot( less(D,B2) ),
?assertNot( less(B3,D) ),
ok.
equal_test() ->
A = {[{a,4,[v5,v0]},{b,0,[]},{c,1,[v3]}], [v0]},
B = {[{a,4,[v555,v0]}, {b,0,[]}, {c,1,[v3]}], []},
C = {[{a,4,[v5,v0]},{b,0,[]}], [v6,v1]},
% compare only the causal history
?assert( equal(A,B) ),
?assert( equal(B,A) ),
?assertNot( equal(A,C) ),
?assertNot( equal(B,C) ),
ok.
size_test() ->
?assertEqual( 1 , ?MODULE:size(new([v1])) ),
?assertEqual( 5 , ?MODULE:size({[{a,4,[v5,v0]},{b,0,[]},{c,1,[v3]}],[v4,v1]}) ),
ok.
ids_values_test() ->
A = {[{a,4,[v0,v5]},{b,0,[]},{c,1,[v3]}], [v1]},
B = {[{a,4,[v0,v555]}, {b,0,[]}, {c,1,[v3]}], []},
C = {[{a,4,[]},{b,0,[]}], [v1,v6]},
?assertEqual( ids(A) , [a,b,c] ),
?assertEqual( ids(B) , [a,b,c] ),
?assertEqual( ids(C) , [a,b] ),
?assertEqual( lists:sort(values(A)) , [v0,v1,v3,v5] ),
?assertEqual( lists:sort(values(B)) , [v0,v3,v555] ),
?assertEqual( lists:sort(values(C)) , [v1,v6] ),
ok.
map_test() ->
A = {[{a,4,[]},{b,0,[]},{c,1,[]}],[10]},
B = {[{a,4,[5,0]},{b,0,[]},{c,1,[2]}],[20,10]},
F = fun (X) -> X*2 end,
?assertEqual( map(F,A) , {[{a,4,[]},{b,0,[]},{c,1,[]}],[20]} ),
?assertEqual( map(F,B) , {[{a,4,[10,0]},{b,0,[]},{c,1,[4]}],[40,20]} ),
ok.
-endif. | null | https://raw.githubusercontent.com/Leapsight/plum_db/76f4d5113ed26cce4b38ce504f6965a2bcda95ed/src/plum_db_dvvset.erl | erlang | -------------------------------------------------------------------
File: plum_db_dvvset.erl
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
including without limitation the rights to use, copy, modify, merge, publish, distribute,
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@doc
An Erlang implementation of *compact* Dotted Version Vectors, which
provides a container for a set of concurrent values (siblings) with causal
order information.
For further reading, visit the
@end
<a href="">
Dotted Version Vectors: Logical Clocks for Optimistic Replication
</a>
@end
-------------------------------------------------------------------
% @doc
STRUCTURE details:
* entries() are sorted by id()
* each counter() also includes the number of values in that id()
* the values in each triple of entries() are causally ordered and each new value goes to the head of the list
@doc Constructs a new clock set without causal history,
and receives a list of values that gos to the anonymous list.
@doc Constructs a new clock set with the causal history
of the given version vector / vector clock,
and receives a list of values that gos to the anonymous list.
defense against non-order preserving serialization
It discards (causally) outdated values,
while merging all causal histories.
Private function
keep all unique anonymous values and sync entries()
Private function
Private function
@doc Return a version vector that represents the causal history.
@doc Advances the causal history with the given id.
The new value is the *anonymous dot* of the clock.
@doc Advances the causal history of the
causally newer than both clocks in the argument.
The new value is the *anonymous dot* of the clock.
which is intended to be the client clock with
the new value in the *anonymous dot* while
We create a new event on the synced causal history,
with the id I and the new value.
The anonymous values that were synced still remain.
Private function
@doc Returns the total number of values in this clock set.
@doc Returns all the ids used in this clock set.
@doc Returns all the values used in this clock set,
including the anonymous values.
@doc Compares the equality of both clocks, regarding
only the causal histories, thus ignoring the values.
DVVSet
vector clocks
Private function
Returns False otherwise.
Private function
@doc Maps (applies) a function on all values in this clock set,
returning the same clock set with the updated values.
value in the anonymous placeholder. This value is the result of
@doc Returns the latest value in the clock set,
according to function F(A,B), which returns *true* if
A compares less than or equal to B, false otherwise.
value in its original position. This value is the newest value
in the given clock, according to function F(A,B), which returns *true*
if A compares less than or equal to B, false otherwise.
find_entry/2 - Private function
find_entry/5 - Private function
A is newer than B
A is older than B
find_entry2/5 - Private function
Private function
===================================================================
===================================================================
now A2 should only have v2 and v3, since v3 was causally newer than v1
compare only the causal history | @author < >
@author < >
The MIT License ( MIT )
Copyright ( C ) 2013
associated documentation files ( the " Software " ) , to deal in the Software without restriction ,
sublicense , and/or sell copies of the Software , and to permit persons to whom the Software is
substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR IMPLIED , INCLUDING
DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
< a href=" / ricardobcl / Dotted - Version - Vectors / tree / ompact">github page</a > .
-module(plum_db_dvvset).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([new/1,
new/2,
sync/1,
join/1,
update/2,
update/3,
size/1,
ids/1,
values/1,
equal/2,
less/2,
map/2,
last/2,
lww/2,
reconcile/2
]).
-export_type([clock/0, vector/0, id/0, value/0]).
-type clock() :: {entries(), values()}.
-type vector() :: [{id(), counter()}].
-type entries() :: [{id(), counter(), values()}].
-type id() :: any().
-type values() :: [value()].
-type value() :: any().
-type counter() :: non_neg_integer().
-spec new(value() | [value()]) -> clock().
new(Vs) when is_list(Vs) -> {[], Vs};
new(V) -> {[], [V]}.
The version vector SHOULD BE a direct result of .
-spec new(vector(), value() | [value()]) -> clock().
new(VV, Vs) when is_list(Vs) ->
{[{I, N, []} || {I, N} <- VVS], Vs};
new(VV, V) -> new(VV, [V]).
@doc Synchronizes a list of clocks using sync/2 .
-spec sync([clock()]) -> clock().
sync(L) -> lists:foldl(fun sync/2, {}, L).
-spec sync(clock(), clock()) -> clock().
sync({}, C) -> C;
sync(C ,{}) -> C;
sync(C1={E1,V1},C2={E2,V2}) ->
V = case less(C1,C2) of
C1 < C2 = > return V2
false -> case less(C2,C1) of
C2 < C1 = > return V1
sets:to_list(sets:from_list(V1++V2))
end
end,
{sync2(E1,E2),V}.
-spec sync2(entries(), entries()) -> entries().
sync2([], C) -> C;
sync2(C, []) -> C;
sync2([{I1, N1, L1}=H1 | T1]=C1, [{I2, N2, L2}=H2 | T2]=C2) ->
if
I1 < I2 -> [H1 | sync2(T1, C2)];
I1 > I2 -> [H2 | sync2(T2, C1)];
true -> [merge(I1, N1, L1, N2, L2) | sync2(T1, T2)]
end.
-spec merge(id(), counter(), values(), counter(), values()) -> {id(), counter(), values()}.
merge(I, N1, L1, N2, L2) ->
LL1 = length(L1),
LL2 = length(L2),
case N1 >= N2 of
true ->
case N1 - LL1 >= N2 - LL2 of
true -> {I, N1, L1};
false -> {I, N1, lists:sublist(L1, N1 - N2 + LL2)}
end;
false ->
case N2 - LL2 >= N1 - LL1 of
true -> {I, N2, L2};
false -> {I, N2, lists:sublist(L2, N2 - N1 + LL1)}
end
end.
-spec join(clock()) -> vector().
join({C,_}) -> [{I, N} || {I, N, _} <- C].
The client clock SHOULD BE a direct result of .
-spec update(clock(), id()) -> clock().
update({C,[V]}, I) -> {event(C, I, V), []}.
first clock with the given i d , while synchronizing
with the second clock , thus the new clock is
The first clock SHOULD BE a direct result of ,
the second clock is from the local server .
-spec update(clock(), clock(), id()) -> clock().
update({Cc,[V]}, Cr, I) ->
Sync both clocks without the new value
{C,Vs} = sync({Cc,[]}, Cr),
{event(C, I, V), Vs}.
-spec event(entries(), id(), value()) -> entries().
event([], I, V) -> [{I, 1, [V]}];
event([{I, N, L} | T], I, V) -> [{I, N+1, [V | L]} | T];
event([{I1, _, _} | _]=C, I, V) when I1 > I -> [{I, 1, [V]} | C];
event([H | T], I, V) -> [H | event(T, I, V)].
-spec size(clock()) -> non_neg_integer().
size({C,Vs}) -> lists:sum([length(L) || {_,_,L} <- C]) + length(Vs).
-spec ids(clock()) -> [id()].
ids({C,_}) -> ([I || {I,_,_} <- C]).
-spec values(clock()) -> [value()].
values({C,Vs}) -> Vs ++ lists:append([L || {_,_,L} <- C]).
-spec equal(clock() | vector(), clock() | vector()) -> boolean().
-spec equal2(vector(), vector()) -> boolean().
equal2([], []) -> true;
equal2([{I, C, L1} | T1], [{I, C, L2} | T2])
when length(L1) =:= length(L2) ->
equal2(T1, T2);
equal2(_, _) -> false.
@doc Returns True if the first clock is causally older than
the second clock , thus values on the first clock are outdated .
-spec less(clock(), clock()) -> boolean().
less({C1,_}, {C2,_}) -> greater(C2, C1, false).
-spec greater(vector(), vector(), boolean()) -> boolean().
greater([], [], Strict) -> Strict;
greater([_|_], [], _) -> true;
greater([], [_|_], _) -> false;
greater([{I, N1, _} | T1], [{I, N2, _} | T2], Strict) ->
if
N1 == N2 -> greater(T1, T2, Strict);
N1 > N2 -> greater(T1, T2, true);
N1 < N2 -> false
end;
greater([{I1, _, _} | T1], [{I2, _, _} | _]=C2, _) when I1 < I2 -> greater(T1, C2, true);
greater(_, _, _) -> false.
-spec map(fun((value()) -> value()), clock()) -> clock().
map(F, {C,Vs}) ->
{[ {I, N, lists:map(F, V)} || {I, N, V} <- C], lists:map(F, Vs)}.
@doc Return a clock with the same causal history , but with only one
the function F , which takes all values and returns a single new value .
-spec reconcile(Winner::fun(([value()]) -> value()), clock()) -> clock().
reconcile(F, C) ->
V = F(values(C)),
new(join(C),[V]).
-spec last(LessOrEqual::fun((value(),value()) -> boolean()), clock()) -> value().
last(F, C) ->
{_ ,_ , V2} = find_entry(F, C),
V2.
@doc Return a clock with the same causal history , but with only one
-spec lww(LessOrEqual::fun((value(),value()) -> boolean()), clock()) -> clock().
lww(F, C={E,_}) ->
case find_entry(F, C) of
{id, I, V} -> {join_and_replace(I, V, E),[]};
{anonym, _, V} -> new(join(C),[V])
end.
find_entry(F, {[], [V|T]}) -> find_entry(F, null, V, {[],T}, anonym);
find_entry(F, {[{_, _, []} | T], Vs}) -> find_entry(F, {T,Vs});
find_entry(F, {[{I, _, [V|_]} | T], Vs}) -> find_entry(F, I, V, {T,Vs}, id).
find_entry(F, I, V, C, Flag) ->
Fun = fun (A,B) ->
case F(A,B) of
end
end,
find_entry2(Fun, I, V, C, Flag).
find_entry2(_, I, V, {[], []}, anonym) -> {anonym, I , V};
find_entry2(_, I, V, {[], []}, id) -> {id, I, V};
find_entry2(F, I, V, {[], [V1 | T]}, Flag) ->
case F(V, V1) of
{left,V2} -> find_entry2(F, I, V2, {[],T}, Flag);
{right,V2} -> find_entry2(F, I, V2, {[],T}, anonym)
end;
find_entry2(F, I, V, {[{_, _, []} | T], Vs}, Flag) -> find_entry2(F, I, V, {T, Vs}, Flag);
find_entry2(F, I, V, {[{I1, _, [V1|_]} | T], Vs}, Flag) ->
case F(V, V1) of
{left,V2} -> find_entry2(F, I, V2, {T, Vs}, Flag);
{right,V2} -> find_entry2(F, I1, V2, {T, Vs}, Flag)
end.
join_and_replace(Ir, V, C) ->
[if
I == Ir -> {I, N, [V]};
true -> {I, N, []}
end
|| {I, N, _} <- C].
EUnit tests
-ifdef(TEST).
join_test() ->
A = new([v1]),
A1 = update(A,a),
B = new(join(A1),[v2]),
B1 = update(B, A1, b),
?assertEqual( join(A) , [] ),
?assertEqual( join(A1) , [{a,1}] ),
?assertEqual( join(B1) , [{a,1},{b,1}] ),
ok.
update_test() ->
A0 = update(new([v1]),a),
A1 = update(new(join(A0),[v2]), A0, a),
A2 = update(new(join(A1),[v3]), A1, b),
A3 = update(new(join(A0),[v4]), A1, b),
A4 = update(new(join(A0),[v5]), A1, a),
?assertEqual( A0 , {[{a,1,[v1]}],[]} ),
?assertEqual( A1 , {[{a,2,[v2]}],[]} ),
?assertEqual( A2 , {[{a,2,[]}, {b,1,[v3]}],[]} ),
?assertEqual( A3 , {[{a,2,[v2]}, {b,1,[v4]}],[]} ),
?assertEqual( A4 , {[{a,3,[v5,v2]}],[]} ),
ok.
sync_test() ->
X = {[{x,1,[]}],[]},
A = update(new([v1]),a),
Y = update(new([v2]),b),
A1 = update(new(join(A),[v2]), a),
A3 = update(new(join(A1),[v3]), b),
A4 = update(new(join(A1),[v3]), c),
F = fun (L,R) -> L>R end,
W = {[{a,1,[]}],[]},
Z = {[{a,2,[v2,v1]}],[]},
?assertEqual( sync([W,Z]) , {[{a,2,[v2]}],[]} ),
?assertEqual( sync([W,Z]) , sync([Z,W]) ),
?assertEqual( sync([A,A1]) , sync([A1,A]) ),
?assertEqual( sync([A4,A3]) , sync([A3,A4]) ),
?assertEqual( sync([A4,A3]) , {[{a,2,[]}, {b,1,[v3]}, {c,1,[v3]}],[]} ),
?assertEqual( sync([X,A]) , {[{a,1,[v1]},{x,1,[]}],[]} ),
?assertEqual( sync([X,A]) , sync([A,X]) ),
?assertEqual( sync([X,A]) , sync([A,X]) ),
?assertEqual( sync([A,Y]) , {[{a,1,[v1]},{b,1,[v2]}],[]} ),
?assertEqual( sync([Y,A]) , sync([A,Y]) ),
?assertEqual( sync([Y,A]) , sync([A,Y]) ),
?assertEqual( sync([A,X]) , sync([X,A]) ),
?assertEqual( lww(F,A4) , sync([A4,lww(F,A4)]) ),
ok.
syn_update_test() ->
writes v1 w/o VV
reads v1 with version vector ( VV )
writes v2 w/o VV
writes v3 with VV from v1
?assertEqual( VV1 , [{a,1}] ),
?assertEqual( A0 , {[{a,1,[v1]}],[]} ),
?assertEqual( A1 , {[{a,2,[v2,v1]}],[]} ),
?assertEqual( A2 , {[{a,3,[v3,v2]}],[]} ),
ok.
event_test() ->
{A,_} = update(new([v1]),a),
?assertEqual( event(A,a,v2) , [{a,2,[v2,v1]}] ),
?assertEqual( event(A,b,v2) , [{a,1,[v1]}, {b,1,[v2]}] ),
ok.
lww_last_test() ->
F = fun (A,B) -> A =< B end,
F2 = fun ({_,TS1}, {_,TS2}) -> TS1 =< TS2 end,
X = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[]},
Y = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[10,0]},
Z = {[{a,4,[5,2]}, {b,1,[1]}], [3]},
A = {[{a,4,[{5, 1002345}, {7, 1002340}]}, {b,1,[{4, 1001340}]}], [{2, 1001140}]},
?assertEqual( last(F,X) , 5 ),
?assertEqual( last(F,Y) , 10 ),
?assertEqual( lww(F,X) , {[{a,4,[5]},{b,1,[]},{c,1,[]}],[]} ),
?assertEqual( lww(F,Y) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[10]} ),
?assertEqual( lww(F,Z) , {[{a,4,[5]},{b,1,[]}],[]} ),
?assertEqual( lww(F2,A) , {[{a,4,[{5, 1002345}]}, {b,1,[]}], []} ),
ok.
fww_last_test() ->
F = fun (A,B) -> A >= B end,
F2 = fun ({_,TS1}, {_,TS2}) -> TS1 >= TS2 end,
X = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[]},
Y = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[10,0]},
Z = {[{a,4,[5,2]}, {b,1,[1]}], [3]},
A = {[{a,4,[{5, 1002345}, {7, 1002340}]}, {b,1,[{4, 1001340}]}], [{2, 1001140}]},
?assertEqual( last(F,X) , 3 ),
?assertEqual( last(F,Y) , 0 ),
?assertEqual( lww(F,X) , {[{a,4,[]},{b,1,[]},{c,1,[3]}],[]} ),
?assertEqual( lww(F,Y) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[0]} ),
?assertEqual( lww(F,Z) , {[{a,4,[]},{b,1,[1]}],[]} ),
?assertEqual( lww(F2,A) , {[{a,4,[]}, {b,1,[]}], [{2, 1001140}]} ),
ok.
reconcile_test() ->
F1 = fun (L) -> lists:sum(L) end,
F2 = fun (L) -> hd(lists:sort(L)) end,
X = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[]},
Y = {[{a,4,[5,2]},{b,1,[]},{c,1,[3]}],[10,0]},
?assertEqual( reconcile(F1,X) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[10]} ),
?assertEqual( reconcile(F1,Y) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[20]} ),
?assertEqual( reconcile(F2,X) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[2]} ),
?assertEqual( reconcile(F2,Y) , {[{a,4,[]},{b,1,[]},{c,1,[]}],[0]} ),
ok.
less_test() ->
A = update(new(v1),[a]),
B = update(new(join(A),[v2]), a),
B2 = update(new(join(A),[v2]), b),
B3 = update(new(join(A),[v2]), z),
C = update(new(join(B),[v3]), A, c),
D = update(new(join(C),[v4]), B2, d),
?assert( less(A,B) ),
?assert( less(A,C) ),
?assert( less(B,C) ),
?assert( less(B,D) ),
?assert( less(B2,D) ),
?assert( less(A,D) ),
?assertNot( less(B2,C) ),
?assertNot( less(B,B2) ),
?assertNot( less(B2,B) ),
?assertNot( less(A,A) ),
?assertNot( less(C,C) ),
?assertNot( less(D,B2) ),
?assertNot( less(B3,D) ),
ok.
equal_test() ->
A = {[{a,4,[v5,v0]},{b,0,[]},{c,1,[v3]}], [v0]},
B = {[{a,4,[v555,v0]}, {b,0,[]}, {c,1,[v3]}], []},
C = {[{a,4,[v5,v0]},{b,0,[]}], [v6,v1]},
?assert( equal(A,B) ),
?assert( equal(B,A) ),
?assertNot( equal(A,C) ),
?assertNot( equal(B,C) ),
ok.
size_test() ->
?assertEqual( 1 , ?MODULE:size(new([v1])) ),
?assertEqual( 5 , ?MODULE:size({[{a,4,[v5,v0]},{b,0,[]},{c,1,[v3]}],[v4,v1]}) ),
ok.
ids_values_test() ->
A = {[{a,4,[v0,v5]},{b,0,[]},{c,1,[v3]}], [v1]},
B = {[{a,4,[v0,v555]}, {b,0,[]}, {c,1,[v3]}], []},
C = {[{a,4,[]},{b,0,[]}], [v1,v6]},
?assertEqual( ids(A) , [a,b,c] ),
?assertEqual( ids(B) , [a,b,c] ),
?assertEqual( ids(C) , [a,b] ),
?assertEqual( lists:sort(values(A)) , [v0,v1,v3,v5] ),
?assertEqual( lists:sort(values(B)) , [v0,v3,v555] ),
?assertEqual( lists:sort(values(C)) , [v1,v6] ),
ok.
map_test() ->
A = {[{a,4,[]},{b,0,[]},{c,1,[]}],[10]},
B = {[{a,4,[5,0]},{b,0,[]},{c,1,[2]}],[20,10]},
F = fun (X) -> X*2 end,
?assertEqual( map(F,A) , {[{a,4,[]},{b,0,[]},{c,1,[]}],[20]} ),
?assertEqual( map(F,B) , {[{a,4,[10,0]},{b,0,[]},{c,1,[4]}],[40,20]} ),
ok.
-endif. |
78e8c31ac0831793b55d7a856f4c39dd22d32b9ae8b334104cc142918c3c49bb | GregorySchwartz/too-many-cells | Differential.hs | TooManyCells . Differential . Differential
Functions for finding the differential between groups of cells .
Gregory W. Schwartz
Functions for finding the differential between groups of cells.
-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
# LANGUAGE QuasiQuotes #
module TooManyCells.Differential.Differential
( scToTwoD
, getDEGraph
, getDEGraphKruskalWallis
, getDEString
, getDEStringKruskalWallis
, getSingleDiff
, combineNodesLabels
, getAllDEGraphKruskalWallis
, getAllDEStringKruskalWallis
) where
-- Remote
import Data.Bool (bool)
import BirchBeer.Types
import BirchBeer.Utility (getGraphLeaves, getGraphLeafItems)
import Control.Monad (join, mfilter)
import Data.Function (on)
import Data.List (sort, sortBy, groupBy, genericLength, partition, foldl')
import Data.Maybe (fromMaybe, catMaybes, isJust)
import Data.Monoid ((<>))
import Language.R as R
import Language.R.QQ (r)
import TextShow (showt)
import Control.Parallel.Strategies (parMap, withStrategy, parBuffer, rdeepseq)
import qualified "differential" Differential as Diff
import qualified "differential" Plot as Diff
import qualified "differential" Types as Diff
import qualified Control.Concurrent.Async as Async
import qualified Control.Foldl as Fold
import qualified Control.Lens as L
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.Csv as CSV
import qualified Data.Foldable as F
import qualified Data.Graph.Inductive as G
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Sparse.Common as S
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified H.Prelude as H
import qualified System.FilePath as FP
import qualified System.Random.MWC as MWC
import qualified System.Random.MWC.Distributions as MWC
-- Local
import TooManyCells.Differential.Types
import TooManyCells.MakeTree.Types
import TooManyCells.Matrix.Types
| Convert a single cell matrix to a two dimensional matrix .
scToTwoD :: [(Int, Cell, (Int, Diff.Status))] -> SingleCells -> Diff.TwoDMat
scToTwoD cellGroups sc =
Diff.TwoDMat rNames cNames statuses nRows nCols . S.toListSM $ filteredMat
where
rNames = fmap (Diff.Name . unFeature) . V.toList . _colNames $ sc
cNames = fmap (Diff.Name . unCell . L.view L._2) cellGroups -- We flip row and column because cells are columns here
statuses = fmap (Diff.Status . showt . L.view (L._3 . L._1)) cellGroups
nRows = S.nrows filteredMat
nCols = S.ncols filteredMat
filteredMat = S.fromColsL -- Here the columns should be observations.
. fmap (S.extractRow (unMatObsRow . _matrix $ sc) . L.view L._1)
. filter ((>) (S.nrows . unMatObsRow $ _matrix sc) . L.view L._1)
$ cellGroups
-- | Get the indices and statuses for a list of groups of nodes.
getStatuses
:: Maybe LabelMap
-> [([G.Node], Maybe (Set.Set Label))]
-> ClusterGraph CellInfo
-> [(Int, Cell, (Int, Diff.Status))]
getStatuses lm gs (ClusterGraph gr) =
sort
. concatMap F.toList
. zipWith (\x (a, b) -> collapseStatus x a b) [1..]
$ gs
where
collapseStatus s vs ls =
fmap (\ !x -> (unRow . _cellRow $ x, _barcode x, (s, Diff.Status $ statusName vs ls)))
. mfilter (validCellInfo lm ls)
. join
. mconcat
. fmap (fmap (fromMaybe mempty . snd) . getGraphLeaves gr)
$ vs
statusName vs Nothing = showt vs
statusName vs (Just ls) =
(T.intercalate " " . fmap unLabel . Set.toAscList $ ls) <> " " <> showt vs
-- | Get the indices and statuses for a list of groups of nodes and subsample if
-- desired.
subsampleGetStatuses
:: Seed
-> Maybe Subsample
-> Maybe LabelMap
-> [([G.Node], Maybe (Set.Set Label))]
-> ClusterGraph CellInfo
-> IO [(Int, Cell, (Int, Diff.Status))]
subsampleGetStatuses seed subN lm gs gr =
maybe return (subsampleGroups seed) subN $ getStatuses lm gs gr
-- | Filter barcodes by labels.
validCellInfo :: Maybe LabelMap -> Maybe (Set.Set Label) -> CellInfo -> Bool
validCellInfo Nothing _ = const True
validCellInfo _ Nothing = const True
validCellInfo (Just (LabelMap lm)) (Just ls) =
maybe False (flip Set.member ls)
. flip Map.lookup lm
. Id
. unCell
. L.view barcode
-- | Filter barcodes by labels.
validCell :: Maybe LabelMap -> Maybe (Set.Set Label) -> Cell -> Bool
validCell Nothing _ = const True
validCell _ Nothing = const True
validCell (Just (LabelMap lm)) (Just ls) =
maybe False (flip Set.member ls)
. flip Map.lookup lm
. Id
. unCell
| Get the differential expression of two sets of cells , filtered by labels .
getDEGraph :: Seed
-> Maybe Subsample
-> TopN
-> Maybe LabelMap
-> SingleCells
-> ([G.Node], Maybe (Set.Set Label))
-> ([G.Node], Maybe (Set.Set Label))
-> ClusterGraph CellInfo
-> R.R s [(Diff.Name, Double, Diff.PValue, Diff.FDR)]
getDEGraph seed subN (TopN topN) lm sc v1 v2 gr = do
cellGroups <- H.io $ subsampleGetStatuses seed subN lm [v1, v2] gr
let mat = scToTwoD cellGroups sc
Diff.edgeR topN mat
| Get the differential expression using of two sets of cells ,
-- filtered by labels.
getDEGraphKruskalWallis
:: Seed
-> Maybe Subsample
-> TopN
-> Maybe LabelMap
-> SingleCells
-> ([G.Node], Maybe (Set.Set Label))
-> ([G.Node], Maybe (Set.Set Label))
-> ClusterGraph CellInfo
-> IO [ ( Feature
, Diff.Log2Diff
, Maybe Diff.PValue
, Maybe Diff.FDR
, Maybe Diff.QValue
)
]
getDEGraphKruskalWallis seed subN (TopN topN) lm sc v1 v2 gr = do
cellGroups <- subsampleGetStatuses seed subN lm [v1, v2] gr
let fastFiveCheck = (< 5) . length . take 5
res = filter (isJust . L.view L._3)
. zipWith
(\name (!a, !b, !c, !d) -> (name, a, b, c, d))
(V.toList . L.view colNames $ sc)
. Diff.differentialMatrixFeatRow as bs
. S.transpose
. unMatObsRow
. L.view matrix
$ sc
(as, bs) = L.over L.both (fmap (L.view L._1))
. partition ((== 1) . L.view (L._3 . L._1))
$ cellGroups
if fastFiveCheck as || fastFiveCheck bs
then error "Less than five cells in one node to compare."
else return . take topN . sortBy (compare `on` L.view L._3) $ res
-- | Get the differential expression of each cluster to all other cells in the
data set using KruskalWallis .
getAllDEGraphKruskalWallis
:: Seed
-> Maybe Subsample
-> TopN
-> Maybe LabelMap
-> DiffLabels
-> SingleCells
-> ClusterGraph CellInfo
-> IO [(G.Node, Feature, Diff.Log2Diff, Maybe Diff.PValue, Maybe Diff.FDR, Maybe Diff.QValue)]
getAllDEGraphKruskalWallis seed subN topN lm ls sc gr =
mconcat
. catMaybes
<$> Async.mapConcurrently (\n -> compareClusterToOthersKruskalWallis n seed subN topN lm ls sc mat gr)
nodes
where
nodes = filter (/= 0) . G.nodes . unClusterGraph $ gr -- Don't want to look at root.
mat = S.transpose . unMatObsRow . L.view matrix $ sc
-- | Get the differential expression of a cluster (n) to all other cells in the
data set ( ns ) using KruskalWallis such that n / ns .
compareClusterToOthersKruskalWallis
:: G.Node
-> Seed
-> Maybe Subsample
-> TopN
-> Maybe LabelMap
-> DiffLabels
-> SingleCells
-> S.SpMatrix Double
-> ClusterGraph CellInfo
-> IO (Maybe [(G.Node, Feature, Diff.Log2Diff, Maybe Diff.PValue, Maybe Diff.FDR, Maybe Diff.QValue)])
compareClusterToOthersKruskalWallis n (Seed seed) subN (TopN topN) lm (DiffLabels (ls1, ls2)) sc mat gr = do
let fastFiveCheck = (< 5) . length . take 5
nCells' = F.toList $ getGraphLeafItems gr n
nCellsSet = Set.fromList . fmap (L.view barcode) $ nCells'
nCells = fmap (unRow . L.view cellRow)
. filter (validCellInfo lm ls2)
$ nCells' -- All cells from node and labels
nsCells =
fmap fst
. filter (\ (_, !x) -> validCell lm ls1 x && not (Set.member x nCellsSet)) -- All cells outside of node and from labels
. zip [0..]
. V.toList
. L.view rowNames
$ sc
sample n x =
maybe
return
(\s -> fmap (take s . V.toList) . flip MWC.uniformShuffle x . V.fromList)
n
subN' = bool (min (length nCells) (length nsCells)) (maybe 0 unSubsample subN)
. (/= Subsample 0)
<$> subN
g <- MWC.restore . MWC.toSeed $ V.fromList [fromIntegral seed]
nCellsFinal <- sample subN' g nCells
nsCellsFinal <- sample subN' g nsCells
let res = filter (isJust . L.view L._4)
. ( zipWith
(\name (!a, !b, !c, !d) -> (n, name, a, b, c, d))
(V.toList . L.view colNames $ sc)
)
$ Diff.differentialMatrixFeatRow nsCellsFinal nCellsFinal mat -- Here the matrix rows are features
if fastFiveCheck nCellsFinal || fastFiveCheck nsCellsFinal
then return Nothing
else return . Just . take topN . sortBy (compare `on` (L.view L._4)) $ res
| Get the differential expression of two sets of cells .
getDEString :: [(Diff.Name, Double, Diff.PValue, Diff.FDR)]
-> B.ByteString
getDEString xs = header <> "\n" <> body
where
edgeR calls q - values
body = CSV.encode
. fmap ( L.over L._1 Diff.unName
. L.over L._3 Diff.unPValue
. L.over L._4 Diff.unFDR
)
$ xs
-- | Get the differential expression of each node to all other nodes using
KruskalWallis .
getAllDEStringKruskalWallis
:: [(G.Node, Feature, Diff.Log2Diff, Maybe Diff.PValue, Maybe Diff.FDR, Maybe Diff.QValue)]
-> B.ByteString
getAllDEStringKruskalWallis xs = header <> "\n" <> body
where
header = "node,feature,log2FC,pVal,qVal"
body = CSV.encode
. fmap ( (\(!a, !b, !c, !d, !e, !f) -> (a,b,c,d,f))
. L.over L._6 (maybe "NA" (showt . Diff.unQValue))
. L.over L._4 (maybe "NA" (showt . Diff.unPValue))
. L.over L._3 Diff.unLog2Diff
. L.over L._2 unFeature
)
$ xs
| Get the differential expression string between two sets of nodes using
KruskalWallis .
getDEStringKruskalWallis
:: [(Feature, Diff.Log2Diff, Maybe Diff.PValue, Maybe Diff.FDR, Maybe Diff.QValue)]
-> B.ByteString
getDEStringKruskalWallis xs = header <> "\n" <> body
where
header = "feature,log2FC,pVal,qVal"
body = CSV.encode
. fmap ( (\(!a, !b, !c, !d, !e) -> (a,b,c,e))
. L.over L._5 (maybe "NA" (showt . Diff.unQValue))
. L.over L._4 (maybe "NA" (showt . Diff.unFDR))
. L.over L._3 (maybe "NA" (showt . Diff.unPValue))
. L.over L._2 Diff.unLog2Diff
. L.over L._1 unFeature
)
$ xs
-- | Convert a single cell matrix to a list of Entities with the specified
-- features. Also aggregates features by average value or not.
scToEntities :: Aggregate
-> [Feature]
-> [(Int, Cell, (Int, Diff.Status))]
-> SingleCells
-> [Diff.Entity]
scToEntities aggregate features cellGroups sc =
concatMap (\x -> toEntity aggregate x featureIdxs) cellGroups
where
mat = getMatrix sc
toEntity (Aggregate False) (cellIdx, (Cell b), (_, status)) =
fmap (\ (Feature feature, idx) -> Diff.Entity (Diff.Name feature) status (Diff.Id b)
$ S.lookupWD_SM mat (cellIdx, idx)
)
toEntity (Aggregate True) (cellIdx, (Cell b), (_, status)) =
(:[])
. Diff.Entity
(Diff.Name . T.intercalate " " . fmap (unFeature . fst) $ featureIdxs)
status
(Diff.Id b)
. (/ n)
. foldl' (+) 0
. fmap (\(_, idx) -> S.lookupWD_SM mat (cellIdx, idx))
n = genericLength featureIdxs
featureIdxs :: [(Feature, Int)]
featureIdxs = fmap (\ !x -> ( x
, fromMaybe (err x)
$ V.elemIndex (unFeature x) (getColNames sc)
)
) features
err x = error $ "Feature " <> show x <> " not found for differential."
-- | Get the differential expression plot of features (or aggregate of features
-- by average) over statuses, filtered by labels.
getSingleDiff :: Seed
-> Maybe Subsample
-> Bool
-> ViolinFlag
-> NoOutlierFlag
-> Aggregate
-> SeparateNodes
-> SeparateLabels
-> Maybe LabelMap
-> SingleCells
-> ([G.Node], Maybe (Set.Set Label))
-> ([G.Node], Maybe (Set.Set Label))
-> [Feature]
-> ClusterGraph CellInfo
-> R.R s (R.SomeSEXP s)
getSingleDiff seed subN normalize (ViolinFlag vf) (NoOutlierFlag noOutlierF) aggregate sn sl lm sc v1 v2 features gr = do
let splitNodeGroup (!ns, !ls) = fmap (\ !x -> ([x], ls)) ns
splitLabelGroup (!ns, !ls) =
maybe
[(ns, ls)]
(fmap (\ !l -> (ns, Just $ Set.singleton l)) . Set.toAscList)
ls
groupsAssign' = case (unSeparateNodes sn, unSeparateLabels sl) of
(False, False) -> [v1, v2]
(True, False) -> concatMap splitNodeGroup [v1, v2]
(False, True) -> concatMap splitLabelGroup [v1, v2]
(True, True) -> concatMap splitNodeGroup
. concatMap splitLabelGroup
$ [v1, v2]
cellGroups <- H.io $ subsampleGetStatuses seed subN lm groupsAssign' gr
let entities = scToEntities aggregate features cellGroups sc
Diff.plotSingleDiff normalize vf noOutlierF entities
-- | Combine nodes and labels.
combineNodesLabels
:: DiffNodes
-> Maybe DiffLabels
-> (([G.Node], Maybe (Set.Set Label)), ([G.Node], Maybe (Set.Set Label)))
combineNodesLabels (DiffNodes (v1, v2)) Nothing = ((v1, Nothing), (v2, Nothing))
combineNodesLabels (DiffNodes (v1, v2)) (Just (DiffLabels (l1, l2))) =
((v1, l1), (v2, l2))
-- | Subsample a cell group list with provided subsampling number or the
smallest of the two groups if not provided .
subsampleGroups :: Seed
-> Subsample
-> [(Int, Cell, (Int, Diff.Status))]
-> IO [(Int, Cell, (Int, Diff.Status))]
subsampleGroups (Seed seed) subN xs = do
g <- MWC.restore . MWC.toSeed $ V.fromList [fromIntegral seed]
let sample n x = fmap (take n . V.toList) . flip MWC.uniformShuffle x
grouped = fmap V.fromList
. groupBy ((==) `on` L.view (L._3 . L._1))
. sortBy (compare `on` L.view (L._3 . L._1))
$ xs
subN' = bool (minimum . fmap V.length $ grouped) (unSubsample subN)
. (/= 0)
$ unSubsample subN
fmap mconcat $ mapM (sample subN' g) grouped
| null | https://raw.githubusercontent.com/GregorySchwartz/too-many-cells/358391c358006e8728beb57c3947fef91ce8ca66/src/TooManyCells/Differential/Differential.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE OverloadedStrings #
# LANGUAGE PackageImports #
Remote
Local
We flip row and column because cells are columns here
Here the columns should be observations.
| Get the indices and statuses for a list of groups of nodes.
| Get the indices and statuses for a list of groups of nodes and subsample if
desired.
| Filter barcodes by labels.
| Filter barcodes by labels.
filtered by labels.
| Get the differential expression of each cluster to all other cells in the
Don't want to look at root.
| Get the differential expression of a cluster (n) to all other cells in the
All cells from node and labels
All cells outside of node and from labels
Here the matrix rows are features
| Get the differential expression of each node to all other nodes using
| Convert a single cell matrix to a list of Entities with the specified
features. Also aggregates features by average value or not.
| Get the differential expression plot of features (or aggregate of features
by average) over statuses, filtered by labels.
| Combine nodes and labels.
| Subsample a cell group list with provided subsampling number or the | TooManyCells . Differential . Differential
Functions for finding the differential between groups of cells .
Gregory W. Schwartz
Functions for finding the differential between groups of cells.
-}
# LANGUAGE QuasiQuotes #
module TooManyCells.Differential.Differential
( scToTwoD
, getDEGraph
, getDEGraphKruskalWallis
, getDEString
, getDEStringKruskalWallis
, getSingleDiff
, combineNodesLabels
, getAllDEGraphKruskalWallis
, getAllDEStringKruskalWallis
) where
import Data.Bool (bool)
import BirchBeer.Types
import BirchBeer.Utility (getGraphLeaves, getGraphLeafItems)
import Control.Monad (join, mfilter)
import Data.Function (on)
import Data.List (sort, sortBy, groupBy, genericLength, partition, foldl')
import Data.Maybe (fromMaybe, catMaybes, isJust)
import Data.Monoid ((<>))
import Language.R as R
import Language.R.QQ (r)
import TextShow (showt)
import Control.Parallel.Strategies (parMap, withStrategy, parBuffer, rdeepseq)
import qualified "differential" Differential as Diff
import qualified "differential" Plot as Diff
import qualified "differential" Types as Diff
import qualified Control.Concurrent.Async as Async
import qualified Control.Foldl as Fold
import qualified Control.Lens as L
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.Csv as CSV
import qualified Data.Foldable as F
import qualified Data.Graph.Inductive as G
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Sparse.Common as S
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified H.Prelude as H
import qualified System.FilePath as FP
import qualified System.Random.MWC as MWC
import qualified System.Random.MWC.Distributions as MWC
import TooManyCells.Differential.Types
import TooManyCells.MakeTree.Types
import TooManyCells.Matrix.Types
| Convert a single cell matrix to a two dimensional matrix .
scToTwoD :: [(Int, Cell, (Int, Diff.Status))] -> SingleCells -> Diff.TwoDMat
scToTwoD cellGroups sc =
Diff.TwoDMat rNames cNames statuses nRows nCols . S.toListSM $ filteredMat
where
rNames = fmap (Diff.Name . unFeature) . V.toList . _colNames $ sc
statuses = fmap (Diff.Status . showt . L.view (L._3 . L._1)) cellGroups
nRows = S.nrows filteredMat
nCols = S.ncols filteredMat
. fmap (S.extractRow (unMatObsRow . _matrix $ sc) . L.view L._1)
. filter ((>) (S.nrows . unMatObsRow $ _matrix sc) . L.view L._1)
$ cellGroups
getStatuses
:: Maybe LabelMap
-> [([G.Node], Maybe (Set.Set Label))]
-> ClusterGraph CellInfo
-> [(Int, Cell, (Int, Diff.Status))]
getStatuses lm gs (ClusterGraph gr) =
sort
. concatMap F.toList
. zipWith (\x (a, b) -> collapseStatus x a b) [1..]
$ gs
where
collapseStatus s vs ls =
fmap (\ !x -> (unRow . _cellRow $ x, _barcode x, (s, Diff.Status $ statusName vs ls)))
. mfilter (validCellInfo lm ls)
. join
. mconcat
. fmap (fmap (fromMaybe mempty . snd) . getGraphLeaves gr)
$ vs
statusName vs Nothing = showt vs
statusName vs (Just ls) =
(T.intercalate " " . fmap unLabel . Set.toAscList $ ls) <> " " <> showt vs
subsampleGetStatuses
:: Seed
-> Maybe Subsample
-> Maybe LabelMap
-> [([G.Node], Maybe (Set.Set Label))]
-> ClusterGraph CellInfo
-> IO [(Int, Cell, (Int, Diff.Status))]
subsampleGetStatuses seed subN lm gs gr =
maybe return (subsampleGroups seed) subN $ getStatuses lm gs gr
validCellInfo :: Maybe LabelMap -> Maybe (Set.Set Label) -> CellInfo -> Bool
validCellInfo Nothing _ = const True
validCellInfo _ Nothing = const True
validCellInfo (Just (LabelMap lm)) (Just ls) =
maybe False (flip Set.member ls)
. flip Map.lookup lm
. Id
. unCell
. L.view barcode
validCell :: Maybe LabelMap -> Maybe (Set.Set Label) -> Cell -> Bool
validCell Nothing _ = const True
validCell _ Nothing = const True
validCell (Just (LabelMap lm)) (Just ls) =
maybe False (flip Set.member ls)
. flip Map.lookup lm
. Id
. unCell
| Get the differential expression of two sets of cells , filtered by labels .
getDEGraph :: Seed
-> Maybe Subsample
-> TopN
-> Maybe LabelMap
-> SingleCells
-> ([G.Node], Maybe (Set.Set Label))
-> ([G.Node], Maybe (Set.Set Label))
-> ClusterGraph CellInfo
-> R.R s [(Diff.Name, Double, Diff.PValue, Diff.FDR)]
getDEGraph seed subN (TopN topN) lm sc v1 v2 gr = do
cellGroups <- H.io $ subsampleGetStatuses seed subN lm [v1, v2] gr
let mat = scToTwoD cellGroups sc
Diff.edgeR topN mat
| Get the differential expression using of two sets of cells ,
getDEGraphKruskalWallis
:: Seed
-> Maybe Subsample
-> TopN
-> Maybe LabelMap
-> SingleCells
-> ([G.Node], Maybe (Set.Set Label))
-> ([G.Node], Maybe (Set.Set Label))
-> ClusterGraph CellInfo
-> IO [ ( Feature
, Diff.Log2Diff
, Maybe Diff.PValue
, Maybe Diff.FDR
, Maybe Diff.QValue
)
]
getDEGraphKruskalWallis seed subN (TopN topN) lm sc v1 v2 gr = do
cellGroups <- subsampleGetStatuses seed subN lm [v1, v2] gr
let fastFiveCheck = (< 5) . length . take 5
res = filter (isJust . L.view L._3)
. zipWith
(\name (!a, !b, !c, !d) -> (name, a, b, c, d))
(V.toList . L.view colNames $ sc)
. Diff.differentialMatrixFeatRow as bs
. S.transpose
. unMatObsRow
. L.view matrix
$ sc
(as, bs) = L.over L.both (fmap (L.view L._1))
. partition ((== 1) . L.view (L._3 . L._1))
$ cellGroups
if fastFiveCheck as || fastFiveCheck bs
then error "Less than five cells in one node to compare."
else return . take topN . sortBy (compare `on` L.view L._3) $ res
data set using KruskalWallis .
getAllDEGraphKruskalWallis
:: Seed
-> Maybe Subsample
-> TopN
-> Maybe LabelMap
-> DiffLabels
-> SingleCells
-> ClusterGraph CellInfo
-> IO [(G.Node, Feature, Diff.Log2Diff, Maybe Diff.PValue, Maybe Diff.FDR, Maybe Diff.QValue)]
getAllDEGraphKruskalWallis seed subN topN lm ls sc gr =
mconcat
. catMaybes
<$> Async.mapConcurrently (\n -> compareClusterToOthersKruskalWallis n seed subN topN lm ls sc mat gr)
nodes
where
mat = S.transpose . unMatObsRow . L.view matrix $ sc
data set ( ns ) using KruskalWallis such that n / ns .
compareClusterToOthersKruskalWallis
:: G.Node
-> Seed
-> Maybe Subsample
-> TopN
-> Maybe LabelMap
-> DiffLabels
-> SingleCells
-> S.SpMatrix Double
-> ClusterGraph CellInfo
-> IO (Maybe [(G.Node, Feature, Diff.Log2Diff, Maybe Diff.PValue, Maybe Diff.FDR, Maybe Diff.QValue)])
compareClusterToOthersKruskalWallis n (Seed seed) subN (TopN topN) lm (DiffLabels (ls1, ls2)) sc mat gr = do
let fastFiveCheck = (< 5) . length . take 5
nCells' = F.toList $ getGraphLeafItems gr n
nCellsSet = Set.fromList . fmap (L.view barcode) $ nCells'
nCells = fmap (unRow . L.view cellRow)
. filter (validCellInfo lm ls2)
nsCells =
fmap fst
. zip [0..]
. V.toList
. L.view rowNames
$ sc
sample n x =
maybe
return
(\s -> fmap (take s . V.toList) . flip MWC.uniformShuffle x . V.fromList)
n
subN' = bool (min (length nCells) (length nsCells)) (maybe 0 unSubsample subN)
. (/= Subsample 0)
<$> subN
g <- MWC.restore . MWC.toSeed $ V.fromList [fromIntegral seed]
nCellsFinal <- sample subN' g nCells
nsCellsFinal <- sample subN' g nsCells
let res = filter (isJust . L.view L._4)
. ( zipWith
(\name (!a, !b, !c, !d) -> (n, name, a, b, c, d))
(V.toList . L.view colNames $ sc)
)
if fastFiveCheck nCellsFinal || fastFiveCheck nsCellsFinal
then return Nothing
else return . Just . take topN . sortBy (compare `on` (L.view L._4)) $ res
| Get the differential expression of two sets of cells .
getDEString :: [(Diff.Name, Double, Diff.PValue, Diff.FDR)]
-> B.ByteString
getDEString xs = header <> "\n" <> body
where
edgeR calls q - values
body = CSV.encode
. fmap ( L.over L._1 Diff.unName
. L.over L._3 Diff.unPValue
. L.over L._4 Diff.unFDR
)
$ xs
KruskalWallis .
getAllDEStringKruskalWallis
:: [(G.Node, Feature, Diff.Log2Diff, Maybe Diff.PValue, Maybe Diff.FDR, Maybe Diff.QValue)]
-> B.ByteString
getAllDEStringKruskalWallis xs = header <> "\n" <> body
where
header = "node,feature,log2FC,pVal,qVal"
body = CSV.encode
. fmap ( (\(!a, !b, !c, !d, !e, !f) -> (a,b,c,d,f))
. L.over L._6 (maybe "NA" (showt . Diff.unQValue))
. L.over L._4 (maybe "NA" (showt . Diff.unPValue))
. L.over L._3 Diff.unLog2Diff
. L.over L._2 unFeature
)
$ xs
| Get the differential expression string between two sets of nodes using
KruskalWallis .
getDEStringKruskalWallis
:: [(Feature, Diff.Log2Diff, Maybe Diff.PValue, Maybe Diff.FDR, Maybe Diff.QValue)]
-> B.ByteString
getDEStringKruskalWallis xs = header <> "\n" <> body
where
header = "feature,log2FC,pVal,qVal"
body = CSV.encode
. fmap ( (\(!a, !b, !c, !d, !e) -> (a,b,c,e))
. L.over L._5 (maybe "NA" (showt . Diff.unQValue))
. L.over L._4 (maybe "NA" (showt . Diff.unFDR))
. L.over L._3 (maybe "NA" (showt . Diff.unPValue))
. L.over L._2 Diff.unLog2Diff
. L.over L._1 unFeature
)
$ xs
scToEntities :: Aggregate
-> [Feature]
-> [(Int, Cell, (Int, Diff.Status))]
-> SingleCells
-> [Diff.Entity]
scToEntities aggregate features cellGroups sc =
concatMap (\x -> toEntity aggregate x featureIdxs) cellGroups
where
mat = getMatrix sc
toEntity (Aggregate False) (cellIdx, (Cell b), (_, status)) =
fmap (\ (Feature feature, idx) -> Diff.Entity (Diff.Name feature) status (Diff.Id b)
$ S.lookupWD_SM mat (cellIdx, idx)
)
toEntity (Aggregate True) (cellIdx, (Cell b), (_, status)) =
(:[])
. Diff.Entity
(Diff.Name . T.intercalate " " . fmap (unFeature . fst) $ featureIdxs)
status
(Diff.Id b)
. (/ n)
. foldl' (+) 0
. fmap (\(_, idx) -> S.lookupWD_SM mat (cellIdx, idx))
n = genericLength featureIdxs
featureIdxs :: [(Feature, Int)]
featureIdxs = fmap (\ !x -> ( x
, fromMaybe (err x)
$ V.elemIndex (unFeature x) (getColNames sc)
)
) features
err x = error $ "Feature " <> show x <> " not found for differential."
-- | Plot the differential expression of the given features between two
-- groups of nodes (each optionally restricted to a set of labels), in R.
-- Groups may be split out per node and/or per label before comparison.
getSingleDiff :: Seed
              -> Maybe Subsample
              -> Bool
              -> ViolinFlag
              -> NoOutlierFlag
              -> Aggregate
              -> SeparateNodes
              -> SeparateLabels
              -> Maybe LabelMap
              -> SingleCells
              -> ([G.Node], Maybe (Set.Set Label))
              -> ([G.Node], Maybe (Set.Set Label))
              -> [Feature]
              -> ClusterGraph CellInfo
              -> R.R s (R.SomeSEXP s)
getSingleDiff seed subN normalize (ViolinFlag vf) (NoOutlierFlag noOutlierF) aggregate sn sl lm sc v1 v2 features gr = do
      -- Expand a group into one group per node (keeping its label set).
  let splitNodeGroup (!ns, !ls) = fmap (\ !x -> ([x], ls)) ns
      -- Expand a group into one group per label; a group with no label
      -- restriction is left as-is.
      splitLabelGroup (!ns, !ls) =
        maybe
          [(ns, ls)]
          (fmap (\ !l -> (ns, Just $ Set.singleton l)) . Set.toAscList)
          ls
      -- Apply the requested node/label separation to both input groups.
      -- When both separations are on, labels are split first, then nodes.
      groupsAssign' = case (unSeparateNodes sn, unSeparateLabels sl) of
        (False, False) -> [v1, v2]
        (True, False) -> concatMap splitNodeGroup [v1, v2]
        (False, True) -> concatMap splitLabelGroup [v1, v2]
        (True, True) -> concatMap splitNodeGroup
                      . concatMap splitLabelGroup
                      $ [v1, v2]
  -- Assign each cell a group status, optionally subsampling each group.
  cellGroups <- H.io $ subsampleGetStatuses seed subN lm groupsAssign' gr
  let entities = scToEntities aggregate features cellGroups sc
  Diff.plotSingleDiff normalize vf noOutlierF entities
-- | Pair each node group with its (optional) label restriction. When no
-- label information is supplied, both groups are unrestricted.
combineNodesLabels
  :: DiffNodes
  -> Maybe DiffLabels
  -> (([G.Node], Maybe (Set.Set Label)), ([G.Node], Maybe (Set.Set Label)))
combineNodesLabels (DiffNodes (g1, g2)) mLabels = case mLabels of
  Nothing -> ((g1, Nothing), (g2, Nothing))
  Just (DiffLabels (l1, l2)) -> ((g1, l1), (g2, l2))
-- | Subsample each group, defaulting to the size of the
-- smallest of the two groups if not provided.
-- | Shuffle each status group with a seeded RNG and keep the first subN
-- elements of each; a subsample size of 0 means "use the size of the
-- smallest group" so all groups end up equally sized.
subsampleGroups :: Seed
                -> Subsample
                -> [(Int, Cell, (Int, Diff.Status))]
                -> IO [(Int, Cell, (Int, Diff.Status))]
subsampleGroups (Seed seed) subN xs = do
  -- Deterministic generator restored from the user-supplied seed.
  g <- MWC.restore . MWC.toSeed $ V.fromList [fromIntegral seed]
      -- NOTE: despite the name, `x` here is the RNG state, not the data;
      -- the group vector is the final (flipped) argument to uniformShuffle.
  let sample n x = fmap (take n . V.toList) . flip MWC.uniformShuffle x
      -- Partition rows into vectors by group index (the first component of
      -- the third tuple field); sort first so groupBy sees adjacent keys.
      grouped = fmap V.fromList
              . groupBy ((==) `on` L.view (L._3 . L._1))
              . sortBy (compare `on` L.view (L._3 . L._1))
              $ xs
      -- 0 is the "unspecified" sentinel: fall back to the smallest group.
      -- NOTE(review): `minimum` errors on empty input, so xs == [] with
      -- subN == 0 would crash — confirm callers guarantee non-empty input.
      subN' = bool (minimum . fmap V.length $ grouped) (unSubsample subN)
            . (/= 0)
            $ unSubsample subN
  fmap mconcat $ mapM (sample subN' g) grouped
|
a612590ceb069e77269c1f02587795e3ab0e50048d6fffe018b0602a67a39ea6 | gregr/racket-misc | cursor-monad.rkt | #lang racket/base
(provide
with-cursor
with-cursor*
:::^
:::^*
:::@
:::@?
:::.
:::=
:::~
:::@*
:::@?*
:::.*
:::=*
:::~*
)
(require
"cursor.rkt"
"either.rkt"
"monad.rkt"
"state-monad.rkt"
racket/function
racket/match
)
(module+ test
(require rackunit))
;; Run a monadic cursor computation against an existing cursor value.
;; Expands to running the state monad with `cursor` as the initial state;
;; the result is whatever state-run produces (per the tests below, a
;; (cons value final-state) pair).
(define-syntax with-cursor
  (syntax-rules ()
    ((_ cursor body ...)
     ((state-run (begin/with-monad state-monad body ...)) cursor))))
;; Run a cursor computation over a plain data `structure`: wrap it in a
;; fresh cursor (::0), run the body, then strip the cursor back off
;; (::^*.) so the caller gets (cons value final-structure) — see the
;; check-equal? test at the bottom of this file.
(define-syntax with-cursor*
  (syntax-rules ()
    ((_ structure body ...)
     (match-let (((cons val cursor)
                  ((state-run (begin/with-monad state-monad body ...))
                   (::0 structure))))
       (cons val (::^*. cursor))))))
;; State-monad wrappers over the plain cursor operations from cursor.rkt.
;; Each :::x action corresponds to the pure ::x operation applied to the
;; cursor held in the monad's state.

;; Move the cursor up via ::^ (presumably one level — defined in cursor.rkt).
(define :::^ (with-monad state-monad (modify ::^)))

;; Move the cursor up via ::^* (presumably all the way to the root).
(define :::^* (with-monad state-monad (modify ::^*)))

;; Descend the cursor along `path` (a list of keys).
(define (:::@ path)
  (begin/with-monad state-monad
    cur <- get
    (put (::@ cur path))))

;; Try to descend along `path`. On success the cursor moves and '() is
;; returned; on failure the cursor is left unchanged and the offending
;; key path is returned.
(define (:::@? path)
  (begin/with-monad state-monad
    cur <- get
    (match (::@? cur path)
      ((right cur) (begin/monad
                     _ <- (put cur)
                     (pure '())))
      ((left keys) (pure keys)))))

;; Read the value at `path` relative to the current cursor position.
(define (:::. path)
  (begin/with-monad state-monad
    cur <- get
    (pure (::. cur path))))

;; Replace the value at `path` with `val`.
(define (:::= val path)
  (begin/with-monad state-monad
    cur <- get
    (put (::= cur val path))))

;; Transform the value at `path` by applying `trans`.
(define (:::~ trans path)
  (begin/with-monad state-monad
    cur <- get
    (put (::~ cur trans path))))
;; Variadic conveniences: each :::x* takes its path components as rest
;; arguments instead of an explicit list and forwards to the list form.
(define :::@* (lambda path (:::@ path)))
(define :::@?* (lambda path (:::@? path)))
(define :::.* (lambda path (:::. path)))
(define :::=* (lambda (val . path) (:::= val path)))
(define :::~* (lambda (trans . path) (:::~ trans path)))
;; End-to-end exercise of the cursor monad over a nested list: reads,
;; navigation, in-place set/update, ascent, and a failed descent that
;; reports the unreachable key path.
(module+ test
  (check-equal?
   (with-cursor* '(1 (2 3) 4 (5 ((6) 7) 8))
     v0 <- (:::.* 'first)                               ; read 1
     v1 <- (:::.* 'rest 'first 'first)                  ; read 2
     _ <- (:::@* 'rest 'rest 'rest)                     ; descend to ((5 ...))
     v2 <- (:::.* 'first 'rest 'first 'first 'first)    ; read 6
     _ <- (:::=* 10 'first 'first)                      ; 5 -> 10
     _ <- (:::~* (curry + 10) 'first 'rest 'first 'rest 'first) ; 7 -> 17
     _ <- :::^                                          ; ascend
     v3 <- (:::.* 'first)                               ; read 4 (after ascent)
     keys <- (:::@?* 'first 'first 'rest)               ; failed descent
     (pure (list v0 v1 v2 v3 keys)))
   (cons (list 1 2 6 4 '(first rest))
         '(1 (2 3) 4 (10 ((6) 17) 8)))
   ))
| null | https://raw.githubusercontent.com/gregr/racket-misc/0a5c9d4875288795e209d06982b82848c989d08b/cursor-monad.rkt | racket | #lang racket/base
(provide
with-cursor
with-cursor*
:::^
:::^*
:::@
:::@?
:::.
:::=
:::~
:::@*
:::@?*
:::.*
:::=*
:::~*
)
(require
"cursor.rkt"
"either.rkt"
"monad.rkt"
"state-monad.rkt"
racket/function
racket/match
)
(module+ test
(require rackunit))
(define-syntax with-cursor
(syntax-rules ()
((_ cursor body ...)
((state-run (begin/with-monad state-monad body ...)) cursor))))
(define-syntax with-cursor*
(syntax-rules ()
((_ structure body ...)
(match-let (((cons val cursor)
((state-run (begin/with-monad state-monad body ...))
(::0 structure))))
(cons val (::^*. cursor))))))
(define :::^ (with-monad state-monad (modify ::^)))
(define :::^* (with-monad state-monad (modify ::^*)))
(define (:::@ path)
(begin/with-monad state-monad
cur <- get
(put (::@ cur path))))
(define (:::@? path)
(begin/with-monad state-monad
cur <- get
(match (::@? cur path)
((right cur) (begin/monad
_ <- (put cur)
(pure '())))
((left keys) (pure keys)))))
(define (:::. path)
(begin/with-monad state-monad
cur <- get
(pure (::. cur path))))
(define (:::= val path)
(begin/with-monad state-monad
cur <- get
(put (::= cur val path))))
(define (:::~ trans path)
(begin/with-monad state-monad
cur <- get
(put (::~ cur trans path))))
(define (:::@* . path) (:::@ path))
(define (:::@?* . path) (:::@? path))
(define (:::.* . path) (:::. path))
(define (:::=* val . path) (:::= val path))
(define (:::~* trans . path) (:::~ trans path))
(module+ test
(check-equal?
(with-cursor* '(1 (2 3) 4 (5 ((6) 7) 8))
v0 <- (:::.* 'first)
v1 <- (:::.* 'rest 'first 'first)
_ <- (:::@* 'rest 'rest 'rest)
v2 <- (:::.* 'first 'rest 'first 'first 'first)
_ <- (:::=* 10 'first 'first)
_ <- (:::~* (curry + 10) 'first 'rest 'first 'rest 'first)
_ <- :::^
v3 <- (:::.* 'first)
keys <- (:::@?* 'first 'first 'rest)
(pure (list v0 v1 v2 v3 keys)))
(cons (list 1 2 6 4 '(first rest))
'(1 (2 3) 4 (10 ((6) 17) 8)))
))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.