_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
2a5083a9987f1dcaf8419bbe55f7ccf7c9feb596ee64bb7ca0d82697f6557d92 | exercism/haskell | Poker.hs | module Poker (bestHands) where
import Data.Maybe (fromJust)
import Data.List (nub, elemIndex, sortBy)
validHand :: [String] -> Bool
validHand h = and [ length h == 5
, all (`elem` "A23456789TJQK") (head <$> h)
, all (`elem` "HSDC") (last <$> h)
, all ((==2) . length) h]
parseHand :: String -> [String]
parseHand = map f . words
where
f ('1':'0':xs) = 'T' : xs
f xs = xs
rankHand :: [String] -> (Int, [Int])
rankHand h | counts == [5] = (9, ranks')
| straight && flush = (8, ranks')
| counts == [4,1] = (7, ranks')
| counts == [3,2] = (6, ranks')
| flush = (5, ranks')
| straight = (4, ranks')
| counts == [3,1,1] = (3, ranks')
| counts == [2,2,1] = (2, ranks')
| counts == [2,1,1,1] = (1, ranks')
| otherwise = (0, ranks')
where
r = fromJust . flip elemIndex "..23456789TJQKA" . head <$> h
groups = let x = nub r in sortBy (flip compare) (zip (times r <$> x) x)
counts = fst <$> groups
ranks = snd <$> groups
ranks' = if ranks == [14,5,4,3,2] then [5,4,3,2,1] else ranks
straight = length counts == 5 && (maximum ranks' - minimum ranks') == 4
flush = length (nub (last <$> h)) == 1
times xs x = length $ filter (==x) xs
bestHands :: [String] -> Maybe [String]
bestHands hands | not (all validHand hands') = Nothing
| otherwise = Just $ f (0,[]) [] (zip hands hands')
where
hands' = parseHand <$> hands
f _ r [] = r
f m r ((x,y):xs) | null r || rank > m = f rank [x] xs
| rank == m = f m (x : r) xs
| otherwise = f m r xs
where
rank = rankHand y
| null | https://raw.githubusercontent.com/exercism/haskell/ae17e9fc5ca736a228db6dda5e3f3b057fa6f3d0/exercises/practice/poker/.meta/examples/success-standard/src/Poker.hs | haskell | module Poker (bestHands) where
import Data.Maybe (fromJust)
import Data.List (nub, elemIndex, sortBy)
validHand :: [String] -> Bool
validHand h = and [ length h == 5
, all (`elem` "A23456789TJQK") (head <$> h)
, all (`elem` "HSDC") (last <$> h)
, all ((==2) . length) h]
parseHand :: String -> [String]
parseHand = map f . words
where
f ('1':'0':xs) = 'T' : xs
f xs = xs
rankHand :: [String] -> (Int, [Int])
rankHand h | counts == [5] = (9, ranks')
| straight && flush = (8, ranks')
| counts == [4,1] = (7, ranks')
| counts == [3,2] = (6, ranks')
| flush = (5, ranks')
| straight = (4, ranks')
| counts == [3,1,1] = (3, ranks')
| counts == [2,2,1] = (2, ranks')
| counts == [2,1,1,1] = (1, ranks')
| otherwise = (0, ranks')
where
r = fromJust . flip elemIndex "..23456789TJQKA" . head <$> h
groups = let x = nub r in sortBy (flip compare) (zip (times r <$> x) x)
counts = fst <$> groups
ranks = snd <$> groups
ranks' = if ranks == [14,5,4,3,2] then [5,4,3,2,1] else ranks
straight = length counts == 5 && (maximum ranks' - minimum ranks') == 4
flush = length (nub (last <$> h)) == 1
times xs x = length $ filter (==x) xs
bestHands :: [String] -> Maybe [String]
bestHands hands | not (all validHand hands') = Nothing
| otherwise = Just $ f (0,[]) [] (zip hands hands')
where
hands' = parseHand <$> hands
f _ r [] = r
f m r ((x,y):xs) | null r || rank > m = f rank [x] xs
| rank == m = f m (x : r) xs
| otherwise = f m r xs
where
rank = rankHand y
| |
a4c03c1be5d17079d451d7afdfc29d70481068b100578b257bde3bb01bbabfe0 | haskell-suite/haskell-src-exts | SimpleDeriving.hs | data T = T deriving Eq
| null | https://raw.githubusercontent.com/haskell-suite/haskell-src-exts/84a4930e0e5c051b7d9efd20ef7c822d5fc1c33b/tests/examples/SimpleDeriving.hs | haskell | data T = T deriving Eq
| |
013134fe6278fa05554ea138f1324c57a4c00e84fdcf6ad87eac4ad3aeb28bb1 | CRogers/obc | util.ml |
* util.ml
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright ( c ) 2006
* All rights reserved
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are met :
*
* 1 . Redistributions of source code must retain the above copyright notice ,
* this list of conditions and the following disclaimer .
* 2 . Redistributions in binary form must reproduce the above copyright notice ,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution .
* 3 . The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission .
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ` ` AS IS '' AND ANY EXPRESS OR
* IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED .
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
* SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ;
* OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
* IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR
* OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
*
* $ I d : util.ml 519 2008 - 02 - 22 18:21:39Z
* util.ml
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright (c) 2006 J. M. Spivey
* All rights reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* $Id: util.ml 519 2008-02-22 18:21:39Z mike $
*)
let rcsid = "$Id: util.ml 519 2008-02-22 18:21:39Z mike $"
open List
(* copy -- list of n copies of a value *)
let rec copy n x =
if n = 0 then [] else x :: copy (n-1) x
take -- first n elements of list
let rec take n xs =
if n = 0 || xs = [] then []
else hd xs :: take (n-1) (tl xs)
drop -- all but first n elements of list
let rec drop n xs =
if n = 0 || xs = [] then xs
else drop (n-1) (tl xs)
let rec range a b =
if a > b then [] else a :: range (a+1) b
let split_at c s =
let i = String.index s c in
(String.sub s 0 i, String.sub s (i+1) (String.length s - i - 1))
let split_string s =
let n = String.length s in
let len = ref 0 and words = ref [] in
for i = 0 to n do
if i = n || s.[i] = ' ' || s.[i] = '\t' || s.[i] = '\n' then begin
if !len > 0 then words := !words @ [String.sub s (i - !len) !len];
len := 0
end
else begin
incr len
end
done;
!words
let hex_of_int n =
if n = 0 then "0" else Printf.sprintf "%#.8x" n
let hex_of_int32 n =
if n = Int32.of_int 0 then "0" else Printf.sprintf "%#.8lx" n
let float_as_string x = Printf.sprintf "%.20e" x
(* Make a hash table *)
let make_hash n ps =
let table = Hashtbl.create n in
List.iter (function (x, y) -> Hashtbl.add table x y) ps;
table
(* Search a directory path *)
let rec search_path fn =
function
[] -> raise Not_found
| d::ds ->
let name = if d = "." then fn else Filename.concat d fn in
try let f = open_in name in close_in f; name with
Sys_error _ -> search_path fn ds
let can f x = try f x; true with Not_found -> false
(* offset -- add base address and offset *)
let offset addr k = Int32.add addr (Int32.of_int k)
| null | https://raw.githubusercontent.com/CRogers/obc/49064db244e0c9d2ec2a83420c8d0ee917b54196/compiler/util.ml | ocaml | copy -- list of n copies of a value
Make a hash table
Search a directory path
offset -- add base address and offset |
* util.ml
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright ( c ) 2006
* All rights reserved
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are met :
*
* 1 . Redistributions of source code must retain the above copyright notice ,
* this list of conditions and the following disclaimer .
* 2 . Redistributions in binary form must reproduce the above copyright notice ,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution .
* 3 . The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission .
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ` ` AS IS '' AND ANY EXPRESS OR
* IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED .
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
* SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ;
* OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
* IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR
* OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
*
* $ I d : util.ml 519 2008 - 02 - 22 18:21:39Z
* util.ml
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright (c) 2006 J. M. Spivey
* All rights reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* $Id: util.ml 519 2008-02-22 18:21:39Z mike $
*)
let rcsid = "$Id: util.ml 519 2008-02-22 18:21:39Z mike $"
open List
let rec copy n x =
if n = 0 then [] else x :: copy (n-1) x
take -- first n elements of list
let rec take n xs =
if n = 0 || xs = [] then []
else hd xs :: take (n-1) (tl xs)
drop -- all but first n elements of list
let rec drop n xs =
if n = 0 || xs = [] then xs
else drop (n-1) (tl xs)
let rec range a b =
if a > b then [] else a :: range (a+1) b
let split_at c s =
let i = String.index s c in
(String.sub s 0 i, String.sub s (i+1) (String.length s - i - 1))
let split_string s =
let n = String.length s in
let len = ref 0 and words = ref [] in
for i = 0 to n do
if i = n || s.[i] = ' ' || s.[i] = '\t' || s.[i] = '\n' then begin
if !len > 0 then words := !words @ [String.sub s (i - !len) !len];
len := 0
end
else begin
incr len
end
done;
!words
let hex_of_int n =
if n = 0 then "0" else Printf.sprintf "%#.8x" n
let hex_of_int32 n =
if n = Int32.of_int 0 then "0" else Printf.sprintf "%#.8lx" n
let float_as_string x = Printf.sprintf "%.20e" x
let make_hash n ps =
let table = Hashtbl.create n in
List.iter (function (x, y) -> Hashtbl.add table x y) ps;
table
let rec search_path fn =
function
[] -> raise Not_found
| d::ds ->
let name = if d = "." then fn else Filename.concat d fn in
try let f = open_in name in close_in f; name with
Sys_error _ -> search_path fn ds
let can f x = try f x; true with Not_found -> false
let offset addr k = Int32.add addr (Int32.of_int k)
|
0037c543595ea58e7022640507f10569f0f17cf9792bd0fa6375576fb749caaf | rbkmoney/fistful-server | ff_identity_SUITE.erl | -module(ff_identity_SUITE).
-export([all/0]).
-export([init_per_suite/1]).
-export([end_per_suite/1]).
-export([init_per_testcase/2]).
-export([end_per_testcase/2]).
-export([get_missing_fails/1]).
-export([create_missing_fails/1]).
-export([create_ok/1]).
%%
-import(ff_pipeline, [unwrap/1]).
-type config() :: ct_helper:config().
-type test_case_name() :: ct_helper:test_case_name().
-type group_name() :: ct_helper:group_name().
-type test_return() :: _ | no_return().
-spec all() -> [test_case_name() | {group, group_name()}].
all() ->
[
get_missing_fails,
create_missing_fails,
create_ok
].
-spec get_missing_fails(config()) -> test_return().
-spec create_missing_fails(config()) -> test_return().
-spec create_ok(config()) -> test_return().
-spec init_per_suite(config()) -> config().
init_per_suite(C) ->
ct_helper:makeup_cfg(
[
ct_helper:test_case_name(init),
ct_payment_system:setup()
],
C
).
-spec end_per_suite(config()) -> _.
end_per_suite(C) ->
ok = ct_payment_system:shutdown(C),
ok.
%%
-spec init_per_testcase(test_case_name(), config()) -> config().
init_per_testcase(Name, C) ->
C1 = ct_helper:makeup_cfg([ct_helper:test_case_name(Name), ct_helper:woody_ctx()], C),
ok = ct_helper:set_context(C1),
C1.
-spec end_per_testcase(test_case_name(), config()) -> _.
end_per_testcase(_Name, _C) ->
ok = ct_helper:unset_context().
%%
get_missing_fails(_C) ->
ID = genlib:unique(),
{error, notfound} = ff_identity_machine:get(ID).
create_missing_fails(C) ->
ID = genlib:unique(),
Party = create_party(C),
Name = <<"Identity Name">>,
{error, {provider, notfound}} = ff_identity_machine:create(
#{
id => ID,
name => Name,
party => Party,
provider => <<"who">>
},
#{<<"com.rbkmoney.wapi">> => #{<<"name">> => Name}}
).
create_ok(C) ->
ID = genlib:unique(),
Party = create_party(C),
Name = <<"Identity Name">>,
ok = ff_identity_machine:create(
#{
id => ID,
name => Name,
party => Party,
provider => <<"good-one">>
},
#{<<"com.rbkmoney.wapi">> => #{<<"name">> => Name}}
),
I1 = ff_identity_machine:identity(unwrap(ff_identity_machine:get(ID))),
{ok, accessible} = ff_identity:is_accessible(I1),
Party = ff_identity:party(I1).
create_party(_C) ->
ID = genlib:bsuuid(),
_ = ff_party:create(ID),
ID.
| null | https://raw.githubusercontent.com/rbkmoney/fistful-server/f6155acb0475987e47a4fbc911758c595e129c80/apps/fistful/test/ff_identity_SUITE.erl | erlang | -module(ff_identity_SUITE).
-export([all/0]).
-export([init_per_suite/1]).
-export([end_per_suite/1]).
-export([init_per_testcase/2]).
-export([end_per_testcase/2]).
-export([get_missing_fails/1]).
-export([create_missing_fails/1]).
-export([create_ok/1]).
-import(ff_pipeline, [unwrap/1]).
-type config() :: ct_helper:config().
-type test_case_name() :: ct_helper:test_case_name().
-type group_name() :: ct_helper:group_name().
-type test_return() :: _ | no_return().
-spec all() -> [test_case_name() | {group, group_name()}].
all() ->
[
get_missing_fails,
create_missing_fails,
create_ok
].
-spec get_missing_fails(config()) -> test_return().
-spec create_missing_fails(config()) -> test_return().
-spec create_ok(config()) -> test_return().
-spec init_per_suite(config()) -> config().
init_per_suite(C) ->
ct_helper:makeup_cfg(
[
ct_helper:test_case_name(init),
ct_payment_system:setup()
],
C
).
-spec end_per_suite(config()) -> _.
end_per_suite(C) ->
ok = ct_payment_system:shutdown(C),
ok.
-spec init_per_testcase(test_case_name(), config()) -> config().
init_per_testcase(Name, C) ->
C1 = ct_helper:makeup_cfg([ct_helper:test_case_name(Name), ct_helper:woody_ctx()], C),
ok = ct_helper:set_context(C1),
C1.
-spec end_per_testcase(test_case_name(), config()) -> _.
end_per_testcase(_Name, _C) ->
ok = ct_helper:unset_context().
get_missing_fails(_C) ->
ID = genlib:unique(),
{error, notfound} = ff_identity_machine:get(ID).
create_missing_fails(C) ->
ID = genlib:unique(),
Party = create_party(C),
Name = <<"Identity Name">>,
{error, {provider, notfound}} = ff_identity_machine:create(
#{
id => ID,
name => Name,
party => Party,
provider => <<"who">>
},
#{<<"com.rbkmoney.wapi">> => #{<<"name">> => Name}}
).
create_ok(C) ->
ID = genlib:unique(),
Party = create_party(C),
Name = <<"Identity Name">>,
ok = ff_identity_machine:create(
#{
id => ID,
name => Name,
party => Party,
provider => <<"good-one">>
},
#{<<"com.rbkmoney.wapi">> => #{<<"name">> => Name}}
),
I1 = ff_identity_machine:identity(unwrap(ff_identity_machine:get(ID))),
{ok, accessible} = ff_identity:is_accessible(I1),
Party = ff_identity:party(I1).
create_party(_C) ->
ID = genlib:bsuuid(),
_ = ff_party:create(ID),
ID.
| |
a3fc0294de6d82cadb61d73d8742e26b6df2f9187e703783fce5be61211b0c4b | JeffreyBenjaminBrown/digraphs-with-text | replaceUsf.hs | replaceUsf :: Node -> Expr -> RSLT -> RSLT
replaceUsf n expr g =
let (Just (a,b,expr',d), g') = match n g
in if areLikeExprs expr expr' then (a,b,expr,d) & g'
else error "unlike Exprs"
| null | https://raw.githubusercontent.com/JeffreyBenjaminBrown/digraphs-with-text/34e47a52aa9abb6fd42028deba1623a92e278aae/stale/Dwt/replaceUsf.hs | haskell | replaceUsf :: Node -> Expr -> RSLT -> RSLT
replaceUsf n expr g =
let (Just (a,b,expr',d), g') = match n g
in if areLikeExprs expr expr' then (a,b,expr,d) & g'
else error "unlike Exprs"
| |
be7535ae395170153db2f0a7243d0982d58389b646026840733d30ae3ee6fbdb | wilkerlucio/tailwind-garden | tables.cljc | (ns com.wsscode.tailwind-garden.components.tables)
(defn border-collapse
"-collapse"
[]
[[:.border-collapse {:border-collapse "collapse"}]
[:.border-separate {:border-collapse "separate"}]])
(defn table-layout
"-layout"
[]
[[:.table-auto {:table-layout "auto"}]
[:.table-fixed {:table-layout "fixed"}]])
| null | https://raw.githubusercontent.com/wilkerlucio/tailwind-garden/4f8af13165dd997de15b20ac5e7dd06351821acd/src/main/com/wsscode/tailwind_garden/components/tables.cljc | clojure | (ns com.wsscode.tailwind-garden.components.tables)
(defn border-collapse
"-collapse"
[]
[[:.border-collapse {:border-collapse "collapse"}]
[:.border-separate {:border-collapse "separate"}]])
(defn table-layout
"-layout"
[]
[[:.table-auto {:table-layout "auto"}]
[:.table-fixed {:table-layout "fixed"}]])
| |
bf1c65c12d25802b50b28e9afbf45c2441411672ad2f27d85d82e952d20a9d25 | metabase/metabase | malli.cljs | (ns metabase.domain-entities.malli
(:require
[malli.core :as mc]
[malli.util :as mut]
[metabase.domain-entities.converters])
(:require-macros [metabase.domain-entities.malli]))
(clojure.core/defn schema-for-path
"Given a schema and a *value path* (as opposed to a *schema path*), finds the schema for that
path. Throws if there are multiple such paths and those paths have different schemas."
[schema path]
(let [paths (-> schema mc/schema (mut/in->paths path))]
(cond
(empty? paths) (throw (ex-info "Path does not match schema" {:schema schema :path path}))
(= (count paths) 1) (mut/get-in schema (first paths))
:else (let [child-schemas (map #(mut/get-in schema %) paths)]
(if (apply = child-schemas)
(first child-schemas)
(throw (ex-info "Value path has multiple schema paths, with different schemas"
{:schema schema
:paths paths
:child-schemas child-schemas})))))))
| null | https://raw.githubusercontent.com/metabase/metabase/4580eab946097e9dda36cc0bc0406fc10d5b01cd/src/metabase/domain_entities/malli.cljs | clojure | (ns metabase.domain-entities.malli
(:require
[malli.core :as mc]
[malli.util :as mut]
[metabase.domain-entities.converters])
(:require-macros [metabase.domain-entities.malli]))
(clojure.core/defn schema-for-path
"Given a schema and a *value path* (as opposed to a *schema path*), finds the schema for that
path. Throws if there are multiple such paths and those paths have different schemas."
[schema path]
(let [paths (-> schema mc/schema (mut/in->paths path))]
(cond
(empty? paths) (throw (ex-info "Path does not match schema" {:schema schema :path path}))
(= (count paths) 1) (mut/get-in schema (first paths))
:else (let [child-schemas (map #(mut/get-in schema %) paths)]
(if (apply = child-schemas)
(first child-schemas)
(throw (ex-info "Value path has multiple schema paths, with different schemas"
{:schema schema
:paths paths
:child-schemas child-schemas})))))))
| |
62336c6f9fc22f2ff16026615c3216a82d5aacdad35d2f4ea0a735dc55b183b3 | cicakhq/potato | user.lisp | (in-package :potato-client-clim)
(declaim (optimize (speed 0) (safety 3) (debug 3)))
(defclass user ()
((id :type string
:initarg :id
:reader user/id)
(description :type string
:initarg :description
:accessor user/description)
(nickname :type string
:initarg :nickname
:accessor user/nickname)))
(defmethod print-object ((obj user) stream)
(print-unreadable-safely (id description) obj stream
(format stream "ID ~s NAME ~s" id description)))
(defclass user-db ()
((users :type hash-table
:initform (make-hash-table :test 'equal)
:reader user-db/users)
(lock :type t
:initform (bordeaux-threads:make-lock "User database lock")
:reader user-db/lock)
(callback-fn :type (or null function)
:initarg :callback-fn
:initform nil
:reader user-db/callback-fn)))
(defun find-user (user-db uid)
(check-type user-db user-db)
(check-type uid string)
(bordeaux-threads:with-lock-held ((user-db/lock user-db))
(let ((user (gethash uid (user-db/users user-db))))
(cond ((null user)
(let ((u (make-instance 'user
:id uid
:description "empty"
:nickname "empty")))
(setf (gethash uid (user-db/users user-db)) u)
(log:warn "Currently not updating the user name")
u))
(t
user)))))
(defun update-user (user-db uid description nickname)
(let* ((user (gethash uid (user-db/users user-db))))
(cond (user
(setf (user/description user) description)
(setf (user/nickname user) nickname)
user)
(t
(setf (gethash uid (user-db/users user-db))
(make-instance 'user :id uid :description description :nickname nickname))))))
(defun update-users-from-channel (user-db conn cid)
(check-type user-db user-db)
(check-type cid string)
(let ((res (potato-client:list-users cid :connection conn)))
(bordeaux-threads:with-lock-held ((user-db/lock user-db))
(let ((updated (loop
for user-data in res
collect (update-user user-db
(cdr (assoc :id user-data))
(cdr (assoc :description user-data))
(cdr (assoc :nickname user-data))))))
(alexandria:when-let ((callback (user-db/callback-fn user-db)))
(funcall callback updated))))))
(defun users-in-db (user-db)
(check-type user-db user-db)
(sort (loop
for ch being each hash-value in (user-db/users user-db)
collect ch)
#'string< :key #'user/description))
(defmethod load-image-from-src ((user user) stream cache)
(handler-case
(progn
(potato-client:user-image (user/id user) stream :connection (image-cache/connection cache))
"image/png")
(potato-client:request-error (condition)
(cond ((= (potato-client:request-error/code condition) 404)
nil)
(t
(log:error "Error downloading image. code: ~a, reason: ~a"
(potato-client:request-error/code condition)
(potato-client:request-error/reason condition))
nil)))))
(defmethod make-image-cache-key ((user user))
(list :user (user/id user)))
| null | https://raw.githubusercontent.com/cicakhq/potato/88b6c92dbbc80a6c9552435604f7b1ae6f2a4026/contrib/potato-client-clim/src/user.lisp | lisp | (in-package :potato-client-clim)
(declaim (optimize (speed 0) (safety 3) (debug 3)))
(defclass user ()
((id :type string
:initarg :id
:reader user/id)
(description :type string
:initarg :description
:accessor user/description)
(nickname :type string
:initarg :nickname
:accessor user/nickname)))
(defmethod print-object ((obj user) stream)
(print-unreadable-safely (id description) obj stream
(format stream "ID ~s NAME ~s" id description)))
(defclass user-db ()
((users :type hash-table
:initform (make-hash-table :test 'equal)
:reader user-db/users)
(lock :type t
:initform (bordeaux-threads:make-lock "User database lock")
:reader user-db/lock)
(callback-fn :type (or null function)
:initarg :callback-fn
:initform nil
:reader user-db/callback-fn)))
(defun find-user (user-db uid)
(check-type user-db user-db)
(check-type uid string)
(bordeaux-threads:with-lock-held ((user-db/lock user-db))
(let ((user (gethash uid (user-db/users user-db))))
(cond ((null user)
(let ((u (make-instance 'user
:id uid
:description "empty"
:nickname "empty")))
(setf (gethash uid (user-db/users user-db)) u)
(log:warn "Currently not updating the user name")
u))
(t
user)))))
(defun update-user (user-db uid description nickname)
(let* ((user (gethash uid (user-db/users user-db))))
(cond (user
(setf (user/description user) description)
(setf (user/nickname user) nickname)
user)
(t
(setf (gethash uid (user-db/users user-db))
(make-instance 'user :id uid :description description :nickname nickname))))))
(defun update-users-from-channel (user-db conn cid)
(check-type user-db user-db)
(check-type cid string)
(let ((res (potato-client:list-users cid :connection conn)))
(bordeaux-threads:with-lock-held ((user-db/lock user-db))
(let ((updated (loop
for user-data in res
collect (update-user user-db
(cdr (assoc :id user-data))
(cdr (assoc :description user-data))
(cdr (assoc :nickname user-data))))))
(alexandria:when-let ((callback (user-db/callback-fn user-db)))
(funcall callback updated))))))
(defun users-in-db (user-db)
(check-type user-db user-db)
(sort (loop
for ch being each hash-value in (user-db/users user-db)
collect ch)
#'string< :key #'user/description))
(defmethod load-image-from-src ((user user) stream cache)
(handler-case
(progn
(potato-client:user-image (user/id user) stream :connection (image-cache/connection cache))
"image/png")
(potato-client:request-error (condition)
(cond ((= (potato-client:request-error/code condition) 404)
nil)
(t
(log:error "Error downloading image. code: ~a, reason: ~a"
(potato-client:request-error/code condition)
(potato-client:request-error/reason condition))
nil)))))
(defmethod make-image-cache-key ((user user))
(list :user (user/id user)))
| |
2f1d306ce179aba19a138f2d4d5bfdc99dd5b218feed3871f60f39965097fb13 | processone/stun | stun.erl | %%%-------------------------------------------------------------------
%%% File : stun.erl
Author : < >
Description : / RFC5766 implementation .
Created : 8 Aug 2009 by < >
%%%
%%%
Copyright ( C ) 2002 - 2023 ProcessOne , SARL . All Rights Reserved .
%%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% -2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%%-------------------------------------------------------------------
-module(stun).
-define(GEN_FSM, p1_fsm).
-behaviour(?GEN_FSM).
%% API
-export([start_link/2,
start/2,
stop/1,
socket_type/0,
tcp_init/2,
udp_init/2,
udp_recv/5]).
%% gen_fsm callbacks
-export([init/1,
handle_event/3,
handle_sync_event/4,
handle_info/3,
terminate/3,
code_change/4]).
%% gen_fsm states
-export([session_established/2]).
%% helper functions
-export([rand_uniform/0, rand_uniform/1, rand_uniform/2, unmap_v4_addr/1]).
-include("stun.hrl").
-include("stun_logger.hrl").
64 kb
1 minute
-define(TCP_ACTIVE, 500).
1 minute ( in usec )
-define(SERVER_NAME, <<"P1 STUN library">>).
-type addr() :: {inet:ip_address(), inet:port_number()}.
-record(state,
{sock :: inet:socket() | fast_tls:tls_socket() | undefined,
sock_mod = gen_tcp :: gen_udp | gen_tcp | fast_tls,
peer = {{0,0,0,0}, 0} :: addr(),
tref :: reference() | undefined,
use_turn = false :: boolean(),
relay_ipv4_ip = {127,0,0,1} :: inet:ip4_address(),
relay_ipv6_ip :: inet:ip6_address() | undefined,
min_port = 49152 :: non_neg_integer(),
max_port = 65535 :: non_neg_integer(),
max_allocs = 10 :: non_neg_integer() | infinity,
shaper = none :: stun_shaper:shaper(),
max_permissions = 10 :: non_neg_integer() | infinity,
blacklist = [] :: turn:accesslist(),
whitelist = [] :: turn:accesslist(),
auth = user :: anonymous | user,
nonces = treap:empty() :: treap:treap(),
realm = <<"">> :: binary(),
auth_fun :: function() | undefined,
hook_fun :: function() | undefined,
server_name = ?SERVER_NAME :: binary(),
buf = <<>> :: binary(),
session_id :: binary() | undefined}).
%%====================================================================
%% API
%%====================================================================
start({gen_tcp, Sock}, Opts) ->
supervisor:start_child(stun_tmp_sup, [Sock, Opts]).
stop(Pid) ->
?GEN_FSM:send_all_state_event(Pid, stop).
start_link(Sock, Opts) ->
?GEN_FSM:start_link(?MODULE, [Sock, Opts], []).
socket_type() ->
raw.
tcp_init(_Sock, Opts) ->
Opts.
udp_init(Sock, Opts) ->
prepare_state(Opts, Sock, {{0,0,0,0}, 0}, gen_udp).
udp_recv(Sock, Addr, Port, Data, State) ->
NewState = prepare_state(State, Sock, {Addr, Port}, gen_udp),
case stun_codec:decode(Data, datagram) of
{ok, Msg} ->
?LOG_DEBUG(#{verbatim => {"Received:~n~s", [stun_codec:pp(Msg)]}}),
process(NewState, Msg);
{error, Reason} ->
?LOG_DEBUG("Cannot parse packet: ~s", [Reason]),
NewState
end.
%%====================================================================
%% gen_fsm callbacks
%%====================================================================
init([Sock, Opts]) ->
process_flag(trap_exit, true),
case get_peername(Sock, Opts) of
{ok, Addr} ->
case get_sockmod(Opts, Sock) of
{ok, SockMod} ->
State = prepare_state(Opts, Sock, Addr, SockMod),
case maybe_starttls(Sock, SockMod, Opts) of
{ok, NewSock} ->
TRef = erlang:start_timer(?TIMEOUT, self(), stop),
NewState = State#state{sock = NewSock, tref = TRef},
activate_socket(NewState),
{ok, session_established, NewState};
{error, Reason} ->
{stop, Reason}
end
end;
{error, Reason} ->
{stop, Reason}
end.
session_established(Event, State) ->
?LOG_ERROR("Unexpected event in 'session_established': ~p", [Event]),
{next_state, session_established, State}.
handle_event(stop, _StateName, State) ->
{stop, normal, State};
handle_event(_Event, StateName, State) ->
{next_state, StateName, State}.
handle_sync_event(_Event, _From, StateName, State) ->
{reply, {error, badarg}, StateName, State}.
%% Socket and timer events. Clause order matters: the fast_tls clause
%% must shadow the generic {tcp, ...} clause.
handle_info({tcp, _Sock, TLSData}, StateName,
            #state{sock_mod = fast_tls} = State) ->
    %% TLS transport: shape the incoming ciphertext, then decrypt
    %% before handing it to the STUN decoder.
    NewState = update_shaper(State, TLSData),
    case fast_tls:recv_data(NewState#state.sock, TLSData) of
        {ok, Data} ->
            process_data(StateName, NewState, Data);
        {error, Reason} ->
            ?LOG_INFO("Connection failure: ~s", [Reason]),
            {stop, normal, NewState}
    end;
handle_info({tcp, _Sock, Data}, StateName, State) ->
    %% Plain TCP: shape and parse directly.
    NewState = update_shaper(State, Data),
    process_data(StateName, NewState, Data);
handle_info({tcp_passive, _Sock}, StateName, State) ->
    %% {active, N} budget exhausted: re-arm the socket.
    activate_socket(State),
    {next_state, StateName, State};
handle_info({tcp_closed, _Sock}, _StateName, State) ->
    ?LOG_INFO("Connection reset by peer"),
    {stop, normal, State};
handle_info({tcp_error, _Sock, _Reason}, _StateName, State) ->
    ?LOG_INFO("Connection error: ~p", [_Reason]),
    {stop, normal, State};
handle_info({timeout, TRef, stop}, _StateName,
            #state{tref = TRef} = State) ->
    %% Inactivity timer (armed in init/1) fired.
    ?LOG_INFO("Connection timed out"),
    {stop, normal, State};
handle_info({timeout, _TRef, activate}, StateName, State) ->
    %% Shaper pause elapsed: resume reading.
    activate_socket(State),
    {next_state, StateName, State};
handle_info(Info, StateName, State) ->
    ?LOG_ERROR("Unexpected info in '~s': ~p", [StateName, Info]),
    {next_state, StateName, State}.
%% Close the transport socket on shutdown. The record accesses are
%% deliberately inside 'catch' so a malformed state cannot crash the
%% terminate callback.
terminate(_Reason, _StateName, State) ->
    catch (State#state.sock_mod):close(State#state.sock),
    ok.
%% No state migration needed across releases.
code_change(_OldVsn, StateName, State, _Extra) ->
    {ok, StateName, State}.
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
%% Top-level STUN/TURN message dispatch (authentication stage).
%% Requests without MESSAGE-INTEGRITY are either answered directly
%% (Binding, or any request under anonymous auth) or challenged with
%% 401 plus a freshly minted nonce. Credentialed requests are verified
%% via auth_fun and then handed to process/4. Returns the possibly
%% updated #state{}. Clause order is significant.
process(State, #stun{class = request,
                     method = ?STUN_METHOD_BINDING,
                     'MESSAGE-INTEGRITY' = undefined} = Msg) ->
    %% Unauthenticated Binding requests are always allowed.
    process(State, Msg, undefined);
process(#state{auth = anonymous} = State,
        #stun{class = request, 'MESSAGE-INTEGRITY' = undefined} = Msg) ->
    %% Anonymous service: no credentials required for any method.
    process(State, Msg, undefined);
process(#state{auth = user} = State,
        #stun{class = request, 'MESSAGE-INTEGRITY' = undefined} = Msg) ->
    %% Long-term credential mechanism: challenge with 401 + realm/nonce.
    Resp = prepare_response(State, Msg),
    {Nonce, Nonces} = make_nonce(State#state.peer,
                                 State#state.nonces),
    R = Resp#stun{class = error,
                  'ERROR-CODE' = stun_codec:error(401),
                  'REALM' = State#state.realm,
                  'NONCE' = Nonce},
    send(State#state{nonces = Nonces}, R);
process(#state{auth = anonymous} = State,
        #stun{class = request,
              'USERNAME' = User,
              'REALM' = Realm,
              'NONCE' = Nonce} = Msg)
  when User /= undefined, Realm /= undefined, Nonce /= undefined ->
    ?LOG_NOTICE("Rejecting request: Credentials provided for anonymous "
                "service"),
    Resp = prepare_response(State, Msg),
    R = Resp#stun{class = error,
                  'ERROR-CODE' = stun_codec:error(401)},
    run_hook(protocol_error, State, R),
    send(State, R);
process(#state{auth = user} = State,
        #stun{class = request,
              'USERNAME' = User,
              'REALM' = Realm,
              'NONCE' = Nonce} = Msg)
  when User /= undefined, Realm /= undefined, Nonce /= undefined ->
    stun_logger:add_metadata(#{stun_user => User}),
    Resp = prepare_response(State, Msg),
    {HaveNonce, Nonces} = have_nonce(Nonce, State#state.nonces),
    case HaveNonce of
        true ->
            NewState = State#state{nonces = Nonces},
            %% Prebuilt 401 reply, reused on any authentication failure.
            R = Resp#stun{class = error,
                          'ERROR-CODE' = stun_codec:error(401),
                          'REALM' = State#state.realm,
                          'NONCE' = Nonce},
            case (State#state.auth_fun)(User, Realm) of
                <<"">> ->
                    %% Empty password means: unknown user.
                    ?LOG_NOTICE("Failed long-term STUN/TURN authentication"),
                    run_hook(protocol_error, State, R),
                    send(NewState, R);
                Pass0 ->
                    {Pass, IsExpired} = check_expired_tag(Pass0),
                    case check_integrity(User, Realm, Msg, Pass) of
                        {true, Key} ->
                            ?LOG_INFO("Accepting long-term STUN/TURN "
                                      "authentication"),
                            process(NewState, Msg, Key, IsExpired);
                        false ->
                            ?LOG_NOTICE("Failed long-term STUN/TURN "
                                        "authentication"),
                            run_hook(protocol_error, State, R),
                            send(NewState, R)
                    end
            end;
        false ->
            %% Unknown or expired nonce: 438 (Stale Nonce) + fresh one.
            ?LOG_NOTICE("Rejecting request: Nonexistent nonce"),
            {NewNonce, NewNonces} = make_nonce(State#state.peer, Nonces),
            R = Resp#stun{class = error,
                          'ERROR-CODE' = stun_codec:error(438),
                          'REALM' = State#state.realm,
                          'NONCE' = NewNonce},
            run_hook(protocol_error, State, R),
            send(State#state{nonces = NewNonces}, R)
    end;
process(State, #stun{class = request,
                     'USERNAME' = User,
                     'REALM' = undefined,
                     'NONCE' = undefined} = Msg) when User /= undefined ->
    ?LOG_NOTICE("Rejecting request: Missing realm and nonce"),
    Resp = prepare_response(State, Msg),
    R = Resp#stun{class = error,
                  'ERROR-CODE' = stun_codec:error(401)},
    run_hook(protocol_error, State, R),
    send(State, R);
process(State, #stun{class = request} = Msg) ->
    %% Partially supplied credentials or otherwise malformed request.
    ?LOG_NOTICE("Rejecting malformed request"),
    Resp = prepare_response(State, Msg),
    R = Resp#stun{class = error,
                  'ERROR-CODE' = stun_codec:error(400)},
    run_hook(protocol_error, State, R),
    send(State, R);
process(State, #stun{class = indication,
                     method = ?STUN_METHOD_SEND} = Msg) ->
    %% TURN Send indications go to the peer's allocation, if any.
    route_on_turn(State, Msg);
process(State, Msg) when is_record(Msg, turn) ->
    %% TURN ChannelData messages go straight to the allocation.
    route_on_turn(State, Msg);
process(State, _Msg) ->
    %% Anything else (e.g. unknown indications) is silently dropped.
    State.
%% Authenticated entry without expiry info: credentials are current.
process(State, Msg, Secret) ->
    process(State, Msg, Secret, false).
%% Second-stage processing: the request has passed (or legitimately
%% skipped) authentication. Secret is the MESSAGE-INTEGRITY key (or
%% undefined); IsExpired marks credentials tagged as expired by the
%% auth callback (only honored when creating a new allocation).
process(State, #stun{class = request, unsupported = [_|_] = Unsupported} = Msg,
        Secret, _IsExpired) ->
    %% Unknown comprehension-required attributes -> 420.
    ?LOG_DEBUG("Rejecting request with unknown attribute(s): ~p",
               [Unsupported]),
    Resp = prepare_response(State, Msg),
    R = Resp#stun{class = error,
                  'UNKNOWN-ATTRIBUTES' = Unsupported,
                  'ERROR-CODE' = stun_codec:error(420)},
    run_hook(protocol_error, State, R),
    send(State, R, Secret);
process(State, #stun{class = request,
                     method = ?STUN_METHOD_BINDING} = Msg, Secret,
        _IsExpired) ->
    Resp = prepare_response(State, Msg),
    AddrPort = unmap_v4_addr(State#state.peer),
    R = case stun_codec:version(Msg) of
            old ->
                %% RFC 3489 clients expect MAPPED-ADDRESS.
                ?LOG_DEBUG("Responding to 'classic' STUN request"),
                Resp#stun{class = response, 'MAPPED-ADDRESS' = AddrPort};
            new ->
                ?LOG_DEBUG("Responding to STUN request"),
                Resp#stun{class = response, 'XOR-MAPPED-ADDRESS' = AddrPort}
        end,
    run_hook(stun_query, State, Msg),
    send(State, R, Secret);
process(#state{use_turn = false} = State,
        #stun{class = request} = Msg, Secret, _IsExpired) ->
    %% Any non-Binding request is a TURN request; refuse if disabled.
    ?LOG_NOTICE("Rejecting TURN request: TURN is disabled"),
    Resp = prepare_response(State, Msg),
    R = Resp#stun{class = error, 'ERROR-CODE' = stun_codec:error(405)},
    run_hook(protocol_error, State, R),
    send(State, R, Secret);
process(State, #stun{class = request,
                     method = ?STUN_METHOD_ALLOCATE} = Msg,
        Secret, IsExpired) ->
    Resp = prepare_response(State, Msg),
    AddrPort = State#state.peer,
    SockMod = State#state.sock_mod,
    case turn_sm:find_allocation(AddrPort) of
        {ok, Pid} ->
            %% Existing allocation: let the TURN process answer
            %% (handles retransmissions and refreshes).
            turn:route(Pid, Msg),
            State;
        _ when IsExpired ->
            %% Expired credentials may keep an existing allocation
            %% alive, but cannot create a new one.
            ?LOG_NOTICE("Rejecting request: credentials expired"),
            R = Resp#stun{class = error, 'ERROR-CODE' = stun_codec:error(401)},
            run_hook(protocol_error, State, R),
            send(State, R);
        _ ->
            Opts = [{sock, State#state.sock},
                    {sock_mod, SockMod},
                    {username, Msg#stun.'USERNAME'},
                    {realm, State#state.realm},
                    {key, Secret},
                    {server_name, State#state.server_name},
                    {max_allocs, State#state.max_allocs},
                    {max_permissions, State#state.max_permissions},
                    {blacklist, State#state.blacklist},
                    {whitelist, State#state.whitelist},
                    {addr, AddrPort},
                    {relay_ipv4_ip, State#state.relay_ipv4_ip},
                    {relay_ipv6_ip, State#state.relay_ipv6_ip},
                    {min_port, State#state.min_port},
                    {max_port, State#state.max_port},
                    {hook_fun, State#state.hook_fun},
                    {session_id, State#state.session_id} |
                    %% Stream transports tie the TURN process to this
                    %% connection handler's lifetime.
                    if SockMod /= gen_udp ->
                            [{owner, self()}];
                       true ->
                            []
                    end],
            case turn:start(Opts) of
                {ok, Pid} ->
                    %% The TURN session owns the lifetime now; disarm
                    %% the inactivity timer.
                    cancel_timer(State#state.tref),
                    turn:route(Pid, Msg),
                    State;
                {error, limit} ->
                    ?LOG_NOTICE("Rejecting request: Allocation quota reached"),
                    R = Resp#stun{class = error,
                                  'ERROR-CODE' = stun_codec:error(486)},
                    run_hook(protocol_error, State, R),
                    send(State, R, Secret);
                {error, stale} ->
                    ?LOG_NOTICE("Rejecting request: Stale nonce"),
                    R = Resp#stun{class = error,
                                  'ERROR-CODE' = stun_codec:error(438)},
                    run_hook(protocol_error, State, R),
                    send(State, R);
                {error, Reason} ->
                    ?LOG_ERROR("Cannot start TURN session: ~s", [Reason]),
                    R = Resp#stun{class = error,
                                  'ERROR-CODE' = stun_codec:error(500)},
                    run_hook(protocol_error, State, R),
                    send(State, R, Secret)
            end
    end;
process(State, #stun{class = request,
                     method = ?STUN_METHOD_REFRESH} = Msg, Secret,
        _IsExpired) ->
    route_on_turn(State, Msg, Secret);
process(State, #stun{class = request,
                     method = ?STUN_METHOD_CREATE_PERMISSION} = Msg, Secret,
        _IsExpired) ->
    route_on_turn(State, Msg, Secret);
process(State, #stun{class = request,
                     method = ?STUN_METHOD_CHANNEL_BIND} = Msg, Secret,
        _IsExpired) ->
    route_on_turn(State, Msg, Secret);
process(State, #stun{class = request} = Msg, Secret, _IsExpired) ->
    ?LOG_NOTICE("Rejecting request: Method not allowed"),
    Resp = prepare_response(State, Msg),
    R = Resp#stun{class = error, 'ERROR-CODE' = stun_codec:error(405)},
    run_hook(protocol_error, State, R),
    send(State, R, Secret).
%% Consume stream data: append to the pending buffer, decode as many
%% complete STUN messages as possible, and bound buffering by
%% ?MAX_BUF_SIZE (oversized partial messages kill the connection).
process_data(NextStateName, #state{buf = Buf} = State, Data) ->
    NewBuf = <<Buf/binary, Data/binary>>,
    case stun_codec:decode(NewBuf, stream) of
        {ok, Msg, Tail} ->
            ?LOG_DEBUG(#{verbatim => {"Received:~n~s", [stun_codec:pp(Msg)]}}),
            NewState = process(State, Msg),
            %% Recurse: Tail may hold further complete messages.
            process_data(NextStateName, NewState#state{buf = <<>>}, Tail);
        empty ->
            NewState = State#state{buf = <<>>},
            {next_state, NextStateName, NewState};
        more when size(NewBuf) < ?MAX_BUF_SIZE ->
            %% Partial message: keep buffering (bounded).
            NewState = State#state{buf = NewBuf},
            {next_state, NextStateName, NewState};
        {error, Reason} ->
            %% Covers parse errors and buffer overflow alike.
            ?LOG_DEBUG("Cannot parse packet: ~p", [Reason]),
            {stop, normal, State}
    end.
%% Account incoming bytes against the traffic shaper. When the shaper
%% demands a pause, schedule a delayed socket reactivation; otherwise
%% reactivate immediately. Returns the state with the updated shaper.
update_shaper(#state{shaper = none} = State, _Data) ->
    State;
update_shaper(#state{shaper = Shaper} = State, Data) ->
    {NewShaper, Pause} = stun_shaper:update(Shaper, size(Data)),
    case Pause > 0 of
        true ->
            erlang:start_timer(Pause, self(), activate);
        false ->
            activate_socket(State)
    end,
    State#state{shaper = NewShaper}.
%% Transport-level send. Binary payloads go out via the configured
%% socket module (UDP uses the stored peer address); on stream
%% transports a failed send terminates this process via exit(normal).
%% Non-binary arguments are message records and are encoded first
%% (send/3). Returns State.
send(State, Data) when is_binary(Data) ->
    SockMod = State#state.sock_mod,
    Sock = State#state.sock,
    case SockMod of
        gen_udp ->
            {Addr, Port} = State#state.peer,
            gen_udp:send(Sock, Addr, Port, Data);
        _ ->
            case SockMod:send(Sock, Data) of
                ok -> ok;
                %% Peer is gone; no point keeping this handler alive.
                _ -> exit(normal)
            end
    end,
    State;
send(State, Msg) ->
    send(State, Msg, undefined).
%% Encode a message record (signing it with Pass when defined) and
%% transmit it via send/2.
send(State, Msg, Pass) ->
    ?LOG_DEBUG(#{verbatim => {"Sending:~n~s", [stun_codec:pp(Msg)]}}),
    Encoded = stun_codec:encode(Msg, Pass),
    send(State, Encoded).
%% Route without a message-integrity key (indications, ChannelData).
route_on_turn(State, Msg) ->
    route_on_turn(State, Msg, undefined).
%% Forward a message to the peer's TURN allocation process. Requests
%% without a matching allocation get a 437 (Allocation Mismatch);
%% indications and ChannelData without one are silently dropped.
route_on_turn(State, Msg, Pass) ->
    case turn_sm:find_allocation(State#state.peer) of
        {ok, Pid} ->
            turn:route(Pid, Msg),
            State;
        _ ->
            case Msg of
                #stun{class = request} ->
                    ?LOG_NOTICE("Rejecting request: Allocation mismatch"),
                    Resp = prepare_response(State, Msg),
                    R = Resp#stun{class = error,
                                  'ERROR-CODE' = stun_codec:error(437)},
                    run_hook(protocol_error, State, R),
                    send(State, R, Pass);
                _ ->
                    State
            end
    end.
%% Build the initial #state{} from listener options. Restores the
%% session ID handed down by the listener (or mints a fresh one) and
%% initializes logging metadata. With 'use_turn', every option is
%% validated individually: ill-typed values are logged and skipped so
%% a single bad option cannot prevent startup.
%%
%% Fix: the binary-session-ID branch of the 'case' had lost its body
%% (it must return ID0); without it the clause does not compile.
prepare_state(Opts, Sock, Peer, SockMod) when is_list(Opts) ->
    ID = case proplists:get_value(session_id, Opts) of
             ID0 when is_binary(ID0) ->
                 ID0; % Stick to listener's session ID.
             undefined ->
                 stun_logger:make_id()
         end,
    stun_logger:set_metadata(stun, SockMod, ID, Peer),
    case proplists:get_bool(use_turn, Opts) of
        true ->
            lists:foldl(
              fun({turn_ip, IP}, State) ->
                      case prepare_addr(IP) of
                          {ok, Addr} ->
                              ?LOG_WARNING("'turn_ip' is deprecated, specify "
                                           "'turn_ipv4_address' and optionally "
                                           "'turn_ipv6_address' instead"),
                              State#state{relay_ipv4_ip = Addr};
                          {error, _} ->
                              ?LOG_ERROR("Wrong 'turn_ip' value: ~p", [IP]),
                              State
                      end;
                 ({turn_ipv4_address, IP}, State) ->
                      case prepare_addr(IP) of
                          {ok, Addr} ->
                              State#state{relay_ipv4_ip = Addr};
                          {error, _} ->
                              ?LOG_ERROR("Wrong 'turn_ipv4_address' value: ~p",
                                         [IP]),
                              State
                      end;
                 ({turn_ipv6_address, IP}, State) ->
                      case prepare_addr(IP) of
                          {ok, Addr} ->
                              State#state{relay_ipv6_ip = Addr};
                          {error, _} ->
                              ?LOG_ERROR("Wrong 'turn_ipv6_address' value: ~p",
                                         [IP]),
                              State
                      end;
                 ({turn_min_port, Min}, State)
                    when is_integer(Min), Min > 1024, Min < 65536 ->
                      State#state{min_port = Min};
                 ({turn_min_port, Wrong}, State) ->
                      ?LOG_ERROR("Wrong 'turn_min_port' value: ~p", [Wrong]),
                      State;
                 ({turn_max_port, Max}, State)
                    when is_integer(Max), Max > 1024, Max < 65536 ->
                      State#state{max_port = Max};
                 ({turn_max_port, Wrong}, State) ->
                      ?LOG_ERROR("Wrong 'turn_max_port' value: ~p", [Wrong]),
                      State;
                 ({turn_max_allocations, N}, State)
                    when (is_integer(N) andalso N > 0) orelse is_atom(N) ->
                      State#state{max_allocs = N};
                 ({turn_max_allocations, Wrong}, State) ->
                      ?LOG_ERROR("Wrong 'turn_max_allocations' value: ~p",
                                 [Wrong]),
                      State;
                 ({turn_max_permissions, N}, State)
                    when (is_integer(N) andalso N > 0) orelse is_atom(N) ->
                      State#state{max_permissions = N};
                 ({turn_max_permissions, Wrong}, State) ->
                      ?LOG_ERROR("Wrong 'turn_max_permissions' value: ~p",
                                 [Wrong]),
                      State;
                 ({turn_blacklist, B}, State) ->
                      case lists:all(fun is_valid_subnet/1, B) of
                          true ->
                              State#state{blacklist = B};
                          false ->
                              ?LOG_ERROR("Wrong 'turn_blacklist' value: ~p",
                                         [B]),
                              State
                      end;
                 ({turn_whitelist, B}, State) ->
                      case lists:all(fun is_valid_subnet/1, B) of
                          true ->
                              State#state{whitelist = B};
                          false ->
                              ?LOG_ERROR("Wrong 'turn_whitelist' value: ~p",
                                         [B]),
                              State
                      end;
                 ({shaper, S}, State)
                    when S == none orelse (is_integer(S) andalso (S > 0)) ->
                      State#state{shaper = stun_shaper:new(S)};
                 ({shaper, Wrong}, State) ->
                      ?LOG_ERROR("Wrong 'shaper' value: ~p", [Wrong]),
                      State;
                 ({server_name, S}, State) ->
                      try
                          State#state{server_name = iolist_to_binary(S)}
                      catch _:_ ->
                              ?LOG_ERROR("Wrong 'server_name' value: ~p", [S]),
                              State
                      end;
                 ({auth_realm, R}, State) ->
                      try
                          State#state{realm = iolist_to_binary(R)}
                      catch _:_ ->
                              ?LOG_ERROR("Wrong 'auth_realm' value: ~p", [R]),
                              State
                      end;
                 ({auth_fun, F}, State) when is_function(F) ->
                      State#state{auth_fun = F};
                 ({auth_fun, Wrong}, State) ->
                      ?LOG_ERROR("Wrong 'auth_fun' value: ~p", [Wrong]),
                      State;
                 ({hook_fun, F}, State) when is_function(F) ->
                      State#state{hook_fun = F};
                 ({hook_fun, Wrong}, State) ->
                      ?LOG_ERROR("Wrong 'hook_fun' value: ~p", [Wrong]),
                      State;
                 ({auth_type, anonymous}, State) ->
                      State#state{auth = anonymous};
                 ({auth_type, user}, State) ->
                      State#state{auth = user};
                 ({auth_type, Wrong}, State) ->
                      ?LOG_ERROR("Wrong 'auth_type' value: ~p", [Wrong]),
                      State;
                 %% Options consumed by the listener/transport layer are
                 %% accepted here without effect.
                 ({use_turn, _}, State) -> State;
                 (use_turn, State) -> State;
                 (inet, State) -> State;
                 ({ip, _}, State) -> State;
                 ({backlog, _}, State) -> State;
                 ({certfile, _}, State) -> State;
                 ({dhfile, _}, State) -> State;
                 ({ciphers, _}, State) -> State;
                 ({protocol_options, _}, State) -> State;
                 ({tls, _}, State) -> State;
                 (tls, State) -> State;
                 ({proxy_protocol, _}, State) -> State;
                 (proxy_protocol, State) -> State;
                 ({sock_peer_name, _}, State) -> State;
                 ({session_id, _}, State) -> State;
                 (Opt, State) ->
                      ?LOG_ERROR("Ignoring unknown option '~p'", [Opt]),
                      State
              end,
              #state{session_id = ID, peer = Peer, sock = Sock,
                     sock_mod = SockMod, use_turn = true},
              Opts);
        _ ->
            %% Plain STUN: minimal state with anonymous auth.
            #state{session_id = ID, sock = Sock, sock_mod = SockMod,
                   peer = Peer, hook_fun = proplists:get_value(hook_fun, Opts),
                   auth = anonymous}
    end;
%% Per-packet (UDP) refresh: reuse the existing state, binding a fresh
%% session ID and the current peer for logging.
prepare_state(State, _Sock, Peer, SockMod) ->
    SessionID = stun_logger:make_id(),
    stun_logger:set_metadata(stun, SockMod, SessionID, Peer),
    State#state{peer = Peer, session_id = SessionID}.
%% Normalize an IP given as binary, string, or tuple into a parsed
%% inet:ip_address(); returns {ok, Addr} or {error, Reason}.
prepare_addr(IP) when is_binary(IP) ->
    prepare_addr(binary_to_list(IP));
prepare_addr(IP) when is_list(IP) ->
    inet_parse:address(IP);
prepare_addr(IP) when is_tuple(IP) ->
    %% Round-trip through text form to validate the tuple's shape.
    try inet_parse:address(inet_parse:ntoa(IP))
    catch _:_ ->
            {error, einval}
    end.
%% (Re)enable reading from the socket. Without a shaper, {active, N}
%% batching is used; with a shaper, one message at a time so the rate
%% can be re-evaluated after every chunk. Clause order matters: the
%% shaper = none clauses must come first.
activate_socket(#state{sock = Sock, sock_mod = gen_tcp, shaper = none}) ->
    inet:setopts(Sock, [{active, ?TCP_ACTIVE}]);
activate_socket(#state{sock = Sock, sock_mod = SockMod, shaper = none}) ->
    SockMod:setopts(Sock, [{active, ?TCP_ACTIVE}]);
activate_socket(#state{sock = Sock, sock_mod = gen_tcp}) ->
    inet:setopts(Sock, [{active, once}]);
activate_socket(#state{sock = Sock, sock_mod = SockMod}) ->
    SockMod:setopts(Sock, [{active, once}]).
%% Cancel an inactivity timer and flush a possibly already-delivered
%% timeout message from the mailbox, so a stale {timeout, ...} cannot
%% stop the process later.
cancel_timer(undefined) ->
    ok;
cancel_timer(TRef) ->
    case erlang:cancel_timer(TRef) of
        false ->
            %% Timer already fired: discard the pending message, if any.
            receive
                {timeout, TRef, _} ->
                    ok
            after 0 ->
                    ok
            end;
        _ ->
            ok
    end.
%% Monotonic {timestamp_us, unique} pair, used as treap priority so
%% nonce entries order by creation time with unique tie-breaking.
now_priority() ->
    Stamp = p1_time_compat:monotonic_time(micro_seconds),
    Unique = p1_time_compat:unique_integer([monotonic]),
    {Stamp, Unique}.
%% Drop treap roots whose timestamp exceeds CleanPriority. With
%% now_priority() ordering, the root is the most recently inserted
%% entry, so this prunes entries newer than the cutoff.
clean_treap(Treap, CleanPriority) ->
    case treap:is_empty(Treap) of
        true ->
            Treap;
        false ->
            {_Key, {Stamp, _}, _Value} = treap:get_root(Treap),
            case Stamp > CleanPriority of
                true ->
                    clean_treap(treap:delete_root(Treap), CleanPriority);
                false ->
                    Treap
            end
    end.
%% Mint a random nonce for Addr, pruning expired entries, and insert
%% it into the nonce treap. Returns {Nonce, NewNonces}.
make_nonce(Addr, Nonces) ->
    {TS, _} = Priority = now_priority(),
    Nonce = integer_to_binary(rand_uniform(1 bsl 32)),
    Pruned = clean_treap(Nonces, TS + ?NONCE_LIFETIME),
    {Nonce, treap:insert(Nonce, Priority, Addr, Pruned)}.
%% Check whether Nonce is still known, pruning expired entries first.
%% Returns {Found :: boolean(), NewNonces}.
have_nonce(Nonce, Nonces) ->
    Now = p1_time_compat:monotonic_time(micro_seconds),
    Pruned = clean_treap(Nonces, Now + ?NONCE_LIFETIME),
    Found = case treap:lookup(Nonce, Pruned) of
                {ok, _, _} -> true;
                _ -> false
            end,
    {Found, Pruned}.
%% Verify MESSAGE-INTEGRITY against a password or a list of candidate
%% passwords; returns {true, Key} with the matching key, else false.
check_integrity(User, Realm, Msg, Pass) when is_binary(Pass) ->
    check_integrity(User, Realm, Msg, [Pass]);
check_integrity(_User, _Realm, _Msg, []) ->
    false;
check_integrity(User, Realm, Msg, [Candidate | Rest]) ->
    Key = {User, Realm, Candidate},
    case stun_codec:check_integrity(Msg, Key) of
        true ->
            {true, Key};
        false ->
            check_integrity(User, Realm, Msg, Rest)
    end.
%% Strip the optional {expired, _} wrapper the auth callback may add,
%% returning {Password, IsExpired}.
check_expired_tag(Tagged) ->
    case Tagged of
        {expired, Pass} ->
            {Pass, true};
        Pass ->
            {Pass, false}
    end.
%% Convert an IPv4-mapped IPv6 address (::ffff:a.b.c.d) back to its
%% plain IPv4 tuple; any other term passes through unchanged.
unmap_v4_addr({{0, 0, 0, 0, 0, 16#FFFF, Hi, Lo}, Port}) ->
    {{Hi div 256, Hi band 16#FF, Lo div 256, Lo band 16#FF}, Port};
unmap_v4_addr(Other) ->
    Other.
%% Validate a {Address, PrefixLength} subnet specification: IPv4
%% octets in 0..255 with mask 0..32, or IPv6 groups in 0..65535 with
%% mask 0..128. Anything else is invalid.
is_valid_subnet({{A, B, C, D}, Mask}) ->
    Octet = fun(X) -> X >= 0 andalso X =< 255 end,
    lists:all(Octet, [A, B, C, D])
        andalso Mask >= 0 andalso Mask =< 32;
is_valid_subnet({{G1, G2, G3, G4, G5, G6, G7, G8}, Mask}) ->
    Group = fun(X) -> X >= 0 andalso X =< 65535 end,
    lists:all(Group, [G1, G2, G3, G4, G5, G6, G7, G8])
        andalso Mask >= 0 andalso Mask =< 128;
is_valid_subnet(_) ->
    false.
%% Select the socket module from the 'tls' option: true -> fast_tls,
%% false -> gen_tcp, optional -> sniff the first bytes on the wire.
get_sockmod(Opts, Sock) ->
    Mod = case proplists:get_value(tls, Opts, false) of
              true ->
                  fast_tls;
              false ->
                  gen_tcp;
              optional ->
                  case is_tls_handshake(Sock) of
                      true -> fast_tls;
                      false -> gen_tcp
                  end
          end,
    {ok, Mod}.
%% Determine the client address: prefer the address forwarded via the
%% 'sock_peer_name' option (e.g. from the proxy protocol), otherwise
%% ask the socket itself.
get_peername(Sock, Opts) ->
    case proplists:get_value(sock_peer_name, Opts) of
        undefined ->
            inet:peername(Sock);
        {_, Addr} ->
            {ok, Addr}
    end.
-ifdef(USE_OLD_INET_BACKEND).
%% Peeking at the socket requires the 'socket' NIF backend; on older
%% OTP releases TCP/TLS multiplexing is simply unsupported.
-dialyzer({[no_match], [get_sockmod/2]}).
is_tls_handshake(_Sock) ->
    ?LOG_ERROR("Multiplexing TCP and TLS requires a newer Erlang/OTP version"),
    {error, eprotonosupport}.
-else.
%% Peek (without consuming) the first 10 bytes and match a TLS
%% ClientHello record header (content type 22, version 3.x) to decide
%% between TLS and plain TCP.
%% NOTE(review): the {_, _, {_, Socket}} pattern relies on the
%% internal representation of gen_tcp sockets under the 'socket'
%% backend — confirm on OTP upgrades.
is_tls_handshake({_, _, {_, Socket}}) ->
    case socket:recvfrom(Socket, 10, [peek], ?TIMEOUT) of
        {ok, {_, <<22, 3, _:4/binary, 0, _:2/binary, 3>>}} ->
            ?LOG_DEBUG("Determined transport protocol: TLS"),
            true;
        {ok, {_, _}} ->
            ?LOG_DEBUG("Determined transport protocol: TCP"),
            false;
        {error, Reason} ->
            %% Default to plain TCP when sniffing fails.
            ?LOG_INFO("Cannot determine transport protocol: ~s", [Reason]),
            false
    end.
-endif.
%% Upgrade the socket to TLS when fast_tls was selected; plain TCP
%% sockets pass through untouched. Only the TLS-relevant listener
%% options are forwarded to fast_tls.
maybe_starttls(Sock, fast_tls, Opts) ->
    case proplists:is_defined(certfile, Opts) of
        true ->
            Wanted = [certfile, dhfile, ciphers, protocol_options],
            TLSOpts = [Opt || {Key, _} = Opt <- Opts,
                              lists:member(Key, Wanted)],
            fast_tls:tcp_to_tls(Sock, [verify_none | TLSOpts]);
        false ->
            ?LOG_ERROR("Cannot accept TLS connection: "
                       "option 'certfile' is not set"),
            {error, eprotonosupport}
    end;
maybe_starttls(Sock, gen_tcp, _Opts) ->
    {ok, Sock}.
%% Skeleton reply: echo the request's method, magic cookie and
%% transaction ID, and advertise our server name via SOFTWARE.
prepare_response(State, Msg) ->
    #stun{trid = Msg#stun.trid,
          magic = Msg#stun.magic,
          method = Msg#stun.method,
          'SOFTWARE' = State#state.server_name}.
%% Invoke the user-supplied notification callback (if configured) with
%% a map describing the event. Callback errors are logged and never
%% propagated into the connection handler.
run_hook(HookName,
         #state{session_id = ID,
                peer = Client,
                sock_mod = SockMod,
                hook_fun = HookFun},
         #stun{'USERNAME' = User,
               'REALM' = Realm,
               'ERROR-CODE' = Reason} = Msg)
  when is_function(HookFun) ->
    Info = #{id => ID,
             user => User,
             realm => Realm,
             client => Client,
             transport => stun_logger:encode_transport(SockMod),
             version => stun_codec:version(Msg),
             reason => Reason},
    ?LOG_DEBUG("Running '~s' hook", [HookName]),
    try HookFun(HookName, Info)
    catch _:Err -> ?LOG_ERROR("Hook '~s' failed: ~p", [HookName, Err])
    end;
run_hook(HookName, _State, _Msg) ->
    ?LOG_DEBUG("No callback function specified for '~s' hook", [HookName]),
    ok.
%% 2^64: upper bound used to derive a uniform float in the crypto
%% fallback below.
-define(THRESHOLD, 16#10000000000000000).
-ifdef(RAND_UNIFORM).
%% Modern OTP: delegate to the 'rand' module.
rand_uniform() ->
    rand:uniform().
rand_uniform(N) ->
    rand:uniform(N).
%% Uniform integer in [N, M], inclusive.
rand_uniform(N, M) ->
    rand:uniform(M-N+1) + N-1.
-else.
%% Legacy fallback for OTP releases without 'rand'
%% (crypto:rand_uniform/2 was removed in later OTP versions).
rand_uniform() ->
    crypto:rand_uniform(0, ?THRESHOLD)/?THRESHOLD.
rand_uniform(N) ->
    crypto:rand_uniform(1, N+1).
rand_uniform(N, M) ->
    crypto:rand_uniform(N, M+1).
-endif.
| null | https://raw.githubusercontent.com/processone/stun/5e41e347d97da0f68182a0870af806354996ab99/src/stun.erl | erlang | -------------------------------------------------------------------
File : stun.erl
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-------------------------------------------------------------------
API
gen_fsm callbacks
gen_fsm states
helper functions
====================================================================
API
====================================================================
====================================================================
gen_fsm callbacks
====================================================================
--------------------------------------------------------------------
-------------------------------------------------------------------- | Author : < >
Description : / RFC5766 implementation .
Created : 8 Aug 2009 by < >
Copyright ( C ) 2002 - 2023 ProcessOne , SARL . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(stun).
-define(GEN_FSM, p1_fsm).
-behaviour(?GEN_FSM).
-export([start_link/2,
start/2,
stop/1,
socket_type/0,
tcp_init/2,
udp_init/2,
udp_recv/5]).
-export([init/1,
handle_event/3,
handle_sync_event/4,
handle_info/3,
terminate/3,
code_change/4]).
-export([session_established/2]).
-export([rand_uniform/0, rand_uniform/1, rand_uniform/2, unmap_v4_addr/1]).
-include("stun.hrl").
-include("stun_logger.hrl").
64 kb
1 minute
-define(TCP_ACTIVE, 500).
1 minute ( in usec )
-define(SERVER_NAME, <<"P1 STUN library">>).
-type addr() :: {inet:ip_address(), inet:port_number()}.
-record(state,
{sock :: inet:socket() | fast_tls:tls_socket() | undefined,
sock_mod = gen_tcp :: gen_udp | gen_tcp | fast_tls,
peer = {{0,0,0,0}, 0} :: addr(),
tref :: reference() | undefined,
use_turn = false :: boolean(),
relay_ipv4_ip = {127,0,0,1} :: inet:ip4_address(),
relay_ipv6_ip :: inet:ip6_address() | undefined,
min_port = 49152 :: non_neg_integer(),
max_port = 65535 :: non_neg_integer(),
max_allocs = 10 :: non_neg_integer() | infinity,
shaper = none :: stun_shaper:shaper(),
max_permissions = 10 :: non_neg_integer() | infinity,
blacklist = [] :: turn:accesslist(),
whitelist = [] :: turn:accesslist(),
auth = user :: anonymous | user,
nonces = treap:empty() :: treap:treap(),
realm = <<"">> :: binary(),
auth_fun :: function() | undefined,
hook_fun :: function() | undefined,
server_name = ?SERVER_NAME :: binary(),
buf = <<>> :: binary(),
session_id :: binary() | undefined}).
start({gen_tcp, Sock}, Opts) ->
supervisor:start_child(stun_tmp_sup, [Sock, Opts]).
stop(Pid) ->
?GEN_FSM:send_all_state_event(Pid, stop).
start_link(Sock, Opts) ->
?GEN_FSM:start_link(?MODULE, [Sock, Opts], []).
socket_type() ->
raw.
tcp_init(_Sock, Opts) ->
Opts.
udp_init(Sock, Opts) ->
prepare_state(Opts, Sock, {{0,0,0,0}, 0}, gen_udp).
udp_recv(Sock, Addr, Port, Data, State) ->
NewState = prepare_state(State, Sock, {Addr, Port}, gen_udp),
case stun_codec:decode(Data, datagram) of
{ok, Msg} ->
?LOG_DEBUG(#{verbatim => {"Received:~n~s", [stun_codec:pp(Msg)]}}),
process(NewState, Msg);
{error, Reason} ->
?LOG_DEBUG("Cannot parse packet: ~s", [Reason]),
NewState
end.
init([Sock, Opts]) ->
process_flag(trap_exit, true),
case get_peername(Sock, Opts) of
{ok, Addr} ->
case get_sockmod(Opts, Sock) of
{ok, SockMod} ->
State = prepare_state(Opts, Sock, Addr, SockMod),
case maybe_starttls(Sock, SockMod, Opts) of
{ok, NewSock} ->
TRef = erlang:start_timer(?TIMEOUT, self(), stop),
NewState = State#state{sock = NewSock, tref = TRef},
activate_socket(NewState),
{ok, session_established, NewState};
{error, Reason} ->
{stop, Reason}
end
end;
{error, Reason} ->
{stop, Reason}
end.
session_established(Event, State) ->
?LOG_ERROR("Unexpected event in 'session_established': ~p", [Event]),
{next_state, session_established, State}.
handle_event(stop, _StateName, State) ->
{stop, normal, State};
handle_event(_Event, StateName, State) ->
{next_state, StateName, State}.
handle_sync_event(_Event, _From, StateName, State) ->
{reply, {error, badarg}, StateName, State}.
handle_info({tcp, _Sock, TLSData}, StateName,
#state{sock_mod = fast_tls} = State) ->
NewState = update_shaper(State, TLSData),
case fast_tls:recv_data(NewState#state.sock, TLSData) of
{ok, Data} ->
process_data(StateName, NewState, Data);
{error, Reason} ->
?LOG_INFO("Connection failure: ~s", [Reason]),
{stop, normal, NewState}
end;
handle_info({tcp, _Sock, Data}, StateName, State) ->
NewState = update_shaper(State, Data),
process_data(StateName, NewState, Data);
handle_info({tcp_passive, _Sock}, StateName, State) ->
activate_socket(State),
{next_state, StateName, State};
handle_info({tcp_closed, _Sock}, _StateName, State) ->
?LOG_INFO("Connection reset by peer"),
{stop, normal, State};
handle_info({tcp_error, _Sock, _Reason}, _StateName, State) ->
?LOG_INFO("Connection error: ~p", [_Reason]),
{stop, normal, State};
handle_info({timeout, TRef, stop}, _StateName,
#state{tref = TRef} = State) ->
?LOG_INFO("Connection timed out"),
{stop, normal, State};
handle_info({timeout, _TRef, activate}, StateName, State) ->
activate_socket(State),
{next_state, StateName, State};
handle_info(Info, StateName, State) ->
?LOG_ERROR("Unexpected info in '~s': ~p", [StateName, Info]),
{next_state, StateName, State}.
terminate(_Reason, _StateName, State) ->
catch (State#state.sock_mod):close(State#state.sock),
ok.
code_change(_OldVsn, StateName, State, _Extra) ->
{ok, StateName, State}.
Internal functions
process(State, #stun{class = request,
method = ?STUN_METHOD_BINDING,
'MESSAGE-INTEGRITY' = undefined} = Msg) ->
process(State, Msg, undefined);
process(#state{auth = anonymous} = State,
#stun{class = request, 'MESSAGE-INTEGRITY' = undefined} = Msg) ->
process(State, Msg, undefined);
process(#state{auth = user} = State,
#stun{class = request, 'MESSAGE-INTEGRITY' = undefined} = Msg) ->
Resp = prepare_response(State, Msg),
{Nonce, Nonces} = make_nonce(State#state.peer,
State#state.nonces),
R = Resp#stun{class = error,
'ERROR-CODE' = stun_codec:error(401),
'REALM' = State#state.realm,
'NONCE' = Nonce},
send(State#state{nonces = Nonces}, R);
process(#state{auth = anonymous} = State,
#stun{class = request,
'USERNAME' = User,
'REALM' = Realm,
'NONCE' = Nonce} = Msg)
when User /= undefined, Realm /= undefined, Nonce /= undefined ->
?LOG_NOTICE("Rejecting request: Credentials provided for anonymous "
"service"),
Resp = prepare_response(State, Msg),
R = Resp#stun{class = error,
'ERROR-CODE' = stun_codec:error(401)},
run_hook(protocol_error, State, R),
send(State, R);
process(#state{auth = user} = State,
#stun{class = request,
'USERNAME' = User,
'REALM' = Realm,
'NONCE' = Nonce} = Msg)
when User /= undefined, Realm /= undefined, Nonce /= undefined ->
stun_logger:add_metadata(#{stun_user => User}),
Resp = prepare_response(State, Msg),
{HaveNonce, Nonces} = have_nonce(Nonce, State#state.nonces),
case HaveNonce of
true ->
NewState = State#state{nonces = Nonces},
R = Resp#stun{class = error,
'ERROR-CODE' = stun_codec:error(401),
'REALM' = State#state.realm,
'NONCE' = Nonce},
case (State#state.auth_fun)(User, Realm) of
<<"">> ->
?LOG_NOTICE("Failed long-term STUN/TURN authentication"),
run_hook(protocol_error, State, R),
send(NewState, R);
Pass0 ->
{Pass, IsExpired} = check_expired_tag(Pass0),
case check_integrity(User, Realm, Msg, Pass) of
{true, Key} ->
?LOG_INFO("Accepting long-term STUN/TURN "
"authentication"),
process(NewState, Msg, Key, IsExpired);
false ->
?LOG_NOTICE("Failed long-term STUN/TURN "
"authentication"),
run_hook(protocol_error, State, R),
send(NewState, R)
end
end;
false ->
?LOG_NOTICE("Rejecting request: Nonexistent nonce"),
{NewNonce, NewNonces} = make_nonce(State#state.peer, Nonces),
R = Resp#stun{class = error,
'ERROR-CODE' = stun_codec:error(438),
'REALM' = State#state.realm,
'NONCE' = NewNonce},
run_hook(protocol_error, State, R),
send(State#state{nonces = NewNonces}, R)
end;
process(State, #stun{class = request,
'USERNAME' = User,
'REALM' = undefined,
'NONCE' = undefined} = Msg) when User /= undefined ->
?LOG_NOTICE("Rejecting request: Missing realm and nonce"),
Resp = prepare_response(State, Msg),
R = Resp#stun{class = error,
'ERROR-CODE' = stun_codec:error(401)},
run_hook(protocol_error, State, R),
send(State, R);
process(State, #stun{class = request} = Msg) ->
?LOG_NOTICE("Rejecting malformed request"),
Resp = prepare_response(State, Msg),
R = Resp#stun{class = error,
'ERROR-CODE' = stun_codec:error(400)},
run_hook(protocol_error, State, R),
send(State, R);
process(State, #stun{class = indication,
method = ?STUN_METHOD_SEND} = Msg) ->
route_on_turn(State, Msg);
process(State, Msg) when is_record(Msg, turn) ->
route_on_turn(State, Msg);
process(State, _Msg) ->
State.
process(State, Msg, Secret) ->
process(State, Msg, Secret, false).
%% @doc Handle a decoded STUN/TURN message after credential checks.
%% `Secret' is the key material used to sign/encode the reply (`undefined'
%% means the reply goes out unsigned); `IsExpired' is true when the
%% presented credentials were recognised but expired (only the ALLOCATE
%% clause distinguishes that case).
%%
%% Clause: unknown comprehension-required attributes -> 420 (RFC 5389).
process(State, #stun{class = request, unsupported = [_|_] = Unsupported} = Msg,
	Secret, _IsExpired) ->
    ?LOG_DEBUG("Rejecting request with unknown attribute(s): ~p",
	       [Unsupported]),
    Resp = prepare_response(State, Msg),
    R = Resp#stun{class = error,
		  'UNKNOWN-ATTRIBUTES' = Unsupported,
		  'ERROR-CODE' = stun_codec:error(420)},
    run_hook(protocol_error, State, R),
    send(State, R, Secret);
%% Clause: BINDING request. 'Classic' (RFC 3489) clients get MAPPED-ADDRESS,
%% newer clients get XOR-MAPPED-ADDRESS; IPv4-mapped IPv6 peers are unmapped.
process(State, #stun{class = request,
		     method = ?STUN_METHOD_BINDING} = Msg, Secret,
	_IsExpired) ->
    Resp = prepare_response(State, Msg),
    AddrPort = unmap_v4_addr(State#state.peer),
    R = case stun_codec:version(Msg) of
	    old ->
		?LOG_DEBUG("Responding to 'classic' STUN request"),
		Resp#stun{class = response, 'MAPPED-ADDRESS' = AddrPort};
	    new ->
		?LOG_DEBUG("Responding to STUN request"),
		Resp#stun{class = response, 'XOR-MAPPED-ADDRESS' = AddrPort}
	end,
    run_hook(stun_query, State, Msg),
    send(State, R, Secret);
%% Clause: any non-BINDING request while TURN is disabled -> 405.
process(#state{use_turn = false} = State,
	#stun{class = request} = Msg, Secret, _IsExpired) ->
    ?LOG_NOTICE("Rejecting TURN request: TURN is disabled"),
    Resp = prepare_response(State, Msg),
    R = Resp#stun{class = error, 'ERROR-CODE' = stun_codec:error(405)},
    run_hook(protocol_error, State, R),
    send(State, R, Secret);
%% Clause: ALLOCATE. Route to an existing allocation if the peer already has
%% one; otherwise spawn a new TURN session process with options derived from
%% the listener state.
process(State, #stun{class = request,
		     method = ?STUN_METHOD_ALLOCATE} = Msg,
	Secret, IsExpired) ->
    Resp = prepare_response(State, Msg),
    AddrPort = State#state.peer,
    SockMod = State#state.sock_mod,
    case turn_sm:find_allocation(AddrPort) of
	{ok, Pid} ->
	    turn:route(Pid, Msg),
	    State;
	_ when IsExpired ->
	    %% Expired credentials cannot create a new allocation -> 401.
	    %% NOTE(review): sent via send/2 (unsigned), unlike sibling
	    %% branches that sign with Secret -- confirm intentional.
	    ?LOG_NOTICE("Rejecting request: credentials expired"),
	    R = Resp#stun{class = error, 'ERROR-CODE' = stun_codec:error(401)},
	    run_hook(protocol_error, State, R),
	    send(State, R);
	_ ->
	    Opts = [{sock, State#state.sock},
		    {sock_mod, SockMod},
		    {username, Msg#stun.'USERNAME'},
		    {realm, State#state.realm},
		    {key, Secret},
		    {server_name, State#state.server_name},
		    {max_allocs, State#state.max_allocs},
		    {max_permissions, State#state.max_permissions},
		    {blacklist, State#state.blacklist},
		    {whitelist, State#state.whitelist},
		    {addr, AddrPort},
		    {relay_ipv4_ip, State#state.relay_ipv4_ip},
		    {relay_ipv6_ip, State#state.relay_ipv6_ip},
		    {min_port, State#state.min_port},
		    {max_port, State#state.max_port},
		    {hook_fun, State#state.hook_fun},
		    {session_id, State#state.session_id} |
		    %% Stream transports hand ownership of the connection
		    %% process to the TURN session.
		    if SockMod /= gen_udp ->
			    [{owner, self()}];
		       true ->
			    []
		    end],
	    case turn:start(Opts) of
		{ok, Pid} ->
		    cancel_timer(State#state.tref),
		    turn:route(Pid, Msg),
		    State;
		{error, limit} ->
		    %% Per-user/global allocation quota reached -> 486.
		    ?LOG_NOTICE("Rejecting request: Allocation quota reached"),
		    R = Resp#stun{class = error,
				  'ERROR-CODE' = stun_codec:error(486)},
		    run_hook(protocol_error, State, R),
		    send(State, R, Secret);
		{error, stale} ->
		    %% Stale nonce -> 438.
		    %% NOTE(review): sent unsigned via send/2 -- confirm.
		    ?LOG_NOTICE("Rejecting request: Stale nonce"),
		    R = Resp#stun{class = error,
				  'ERROR-CODE' = stun_codec:error(438)},
		    run_hook(protocol_error, State, R),
		    send(State, R);
		{error, Reason} ->
		    %% Any other startup failure -> 500.
		    ?LOG_ERROR("Cannot start TURN session: ~s", [Reason]),
		    R = Resp#stun{class = error,
				  'ERROR-CODE' = stun_codec:error(500)},
		    run_hook(protocol_error, State, R),
		    send(State, R, Secret)
	    end
    end;
%% Clauses: REFRESH / CREATE-PERMISSION / CHANNEL-BIND are forwarded to the
%% peer's existing TURN allocation (437 if none exists).
process(State, #stun{class = request,
		     method = ?STUN_METHOD_REFRESH} = Msg, Secret,
	_IsExpired) ->
    route_on_turn(State, Msg, Secret);
process(State, #stun{class = request,
		     method = ?STUN_METHOD_CREATE_PERMISSION} = Msg, Secret,
	_IsExpired) ->
    route_on_turn(State, Msg, Secret);
process(State, #stun{class = request,
		     method = ?STUN_METHOD_CHANNEL_BIND} = Msg, Secret,
	_IsExpired) ->
    route_on_turn(State, Msg, Secret);
%% Clause: any other request method -> 405.
process(State, #stun{class = request} = Msg, Secret, _IsExpired) ->
    ?LOG_NOTICE("Rejecting request: Method not allowed"),
    Resp = prepare_response(State, Msg),
    R = Resp#stun{class = error, 'ERROR-CODE' = stun_codec:error(405)},
    run_hook(protocol_error, State, R),
    send(State, R, Secret).
%% @doc Feed newly received stream bytes into the STUN codec, processing as
%% many complete messages as the (accumulated) buffer contains, and keep the
%% remainder buffered for the next TCP/TLS chunk.
%% NOTE(review): a `more' result with a buffer already at ?MAX_BUF_SIZE has
%% no matching clause, so the FSM crashes with case_clause -- presumably
%% deliberate overflow protection; confirm.
process_data(NextStateName, #state{buf = Buf} = State, Data) ->
    NewBuf = <<Buf/binary, Data/binary>>,
    case stun_codec:decode(NewBuf, stream) of
	{ok, Msg, Tail} ->
	    ?LOG_DEBUG(#{verbatim => {"Received:~n~s", [stun_codec:pp(Msg)]}}),
	    NewState = process(State, Msg),
	    %% Recurse on the tail: several messages may arrive in one chunk.
	    process_data(NextStateName, NewState#state{buf = <<>>}, Tail);
	empty ->
	    NewState = State#state{buf = <<>>},
	    {next_state, NextStateName, NewState};
	more when size(NewBuf) < ?MAX_BUF_SIZE ->
	    %% Incomplete message: keep buffering until it fits the limit.
	    NewState = State#state{buf = NewBuf},
	    {next_state, NextStateName, NewState};
	{error, Reason} ->
	    ?LOG_DEBUG("Cannot parse packet: ~p", [Reason]),
	    {stop, normal, State}
    end.
%% @doc Apply traffic shaping to an incoming chunk. With no shaper the state
%% is returned untouched; otherwise the shaper is updated with the chunk
%% size and, if a pause is required, socket re-activation is deferred via a
%% timer instead of being done immediately.
update_shaper(#state{shaper = none} = State, _Data) ->
    State;
update_shaper(#state{shaper = Shaper} = State, Data) ->
    {NewShaper, Pause} = stun_shaper:update(Shaper, size(Data)),
    if Pause > 0 ->
	    %% Throttle: re-activate the socket only after the pause.
	    erlang:start_timer(Pause, self(), activate);
       true ->
	    activate_socket(State)
    end,
    State#state{shaper = NewShaper}.
%% @doc Send data to the peer. The binary clause performs the raw transmit:
%% UDP sends to the stored peer address; stream transports (gen_tcp /
%% fast_tls) exit the connection process with `normal' on send failure.
%% The non-binary clause encodes a #stun{} message without signing.
send(State, Data) when is_binary(Data) ->
    SockMod = State#state.sock_mod,
    Sock = State#state.sock,
    case SockMod of
	gen_udp ->
	    {Addr, Port} = State#state.peer,
	    gen_udp:send(Sock, Addr, Port, Data);
	_ ->
	    case SockMod:send(Sock, Data) of
		ok -> ok;
		_ -> exit(normal) % peer gone: terminate the connection process
	    end
    end,
    State;
send(State, Msg) ->
    send(State, Msg, undefined).
%% @doc Encode a #stun{} message (signed with Pass unless `undefined') and
%% transmit it via send/2.
send(State, Msg, Pass) ->
    ?LOG_DEBUG(#{verbatim => {"Sending:~n~s", [stun_codec:pp(Msg)]}}),
    send(State, stun_codec:encode(Msg, Pass)).
%% @doc Forward a message to the peer's TURN allocation process, if any.
%% When no allocation exists, requests are answered with 437 (Allocation
%% Mismatch); indications and channel data are silently dropped.
route_on_turn(State, Msg) ->
    route_on_turn(State, Msg, undefined).

route_on_turn(State, Msg, Pass) ->
    case turn_sm:find_allocation(State#state.peer) of
	{ok, Pid} ->
	    turn:route(Pid, Msg),
	    State;
	_ ->
	    case Msg of
		#stun{class = request} ->
		    ?LOG_NOTICE("Rejecting request: Allocation mismatch"),
		    Resp = prepare_response(State, Msg),
		    R = Resp#stun{class = error,
				  'ERROR-CODE' = stun_codec:error(437)},
		    run_hook(protocol_error, State, R),
		    send(State, R, Pass);
		_ ->
		    %% Non-request traffic with no allocation: drop.
		    State
	    end
    end.
%% @doc Build the initial #state{} for a new connection/datagram handler.
%%
%% With a proplist of listener options, a session ID is picked (reusing the
%% listener's binary ID when provided), logger metadata is set, and -- when
%% TURN is enabled -- every recognised option is folded into the state,
%% logging and ignoring invalid values. With an existing #state{}, only the
%% session ID / peer / logger metadata are refreshed.
%%
%% Fix: the `ID0' clause body had been lost (only its stripped comment text
%% remained), leaving the `case' expression invalid; restored `ID0;'.
prepare_state(Opts, Sock, Peer, SockMod) when is_list(Opts) ->
    ID = case proplists:get_value(session_id, Opts) of
	     ID0 when is_binary(ID0) ->
		 ID0; %% Stick to listener's session ID.
	     undefined ->
		 stun_logger:make_id()
	 end,
    stun_logger:set_metadata(stun, SockMod, ID, Peer),
    case proplists:get_bool(use_turn, Opts) of
	true ->
	    lists:foldl(
	      %% Relay address options.
	      fun({turn_ip, IP}, State) ->
		      case prepare_addr(IP) of
			  {ok, Addr} ->
			      ?LOG_WARNING("'turn_ip' is deprecated, specify "
					   "'turn_ipv4_address' and optionally "
					   "'turn_ipv6_address' instead"),
			      State#state{relay_ipv4_ip = Addr};
			  {error, _} ->
			      ?LOG_ERROR("Wrong 'turn_ip' value: ~p", [IP]),
			      State
		      end;
		 ({turn_ipv4_address, IP}, State) ->
		      case prepare_addr(IP) of
			  {ok, Addr} ->
			      State#state{relay_ipv4_ip = Addr};
			  {error, _} ->
			      ?LOG_ERROR("Wrong 'turn_ipv4_address' value: ~p",
					 [IP]),
			      State
		      end;
		 ({turn_ipv6_address, IP}, State) ->
		      case prepare_addr(IP) of
			  {ok, Addr} ->
			      State#state{relay_ipv6_ip = Addr};
			  {error, _} ->
			      ?LOG_ERROR("Wrong 'turn_ipv6_address' value: ~p",
					 [IP]),
			      State
		      end;
		 %% Relay port range (both bounds in 1025..65535).
		 ({turn_min_port, Min}, State)
		    when is_integer(Min), Min > 1024, Min < 65536 ->
		      State#state{min_port = Min};
		 ({turn_min_port, Wrong}, State) ->
		      ?LOG_ERROR("Wrong 'turn_min_port' value: ~p", [Wrong]),
		      State;
		 ({turn_max_port, Max}, State)
		    when is_integer(Max), Max > 1024, Max < 65536 ->
		      State#state{max_port = Max};
		 ({turn_max_port, Wrong}, State) ->
		      ?LOG_ERROR("Wrong 'turn_max_port' value: ~p", [Wrong]),
		      State;
		 %% Quotas: a positive integer or an atom (e.g. infinity).
		 ({turn_max_allocations, N}, State)
		    when (is_integer(N) andalso N > 0) orelse is_atom(N) ->
		      State#state{max_allocs = N};
		 ({turn_max_allocations, Wrong}, State) ->
		      ?LOG_ERROR("Wrong 'turn_max_allocations' value: ~p",
				 [Wrong]),
		      State;
		 ({turn_max_permissions, N}, State)
		    when (is_integer(N) andalso N > 0) orelse is_atom(N) ->
		      State#state{max_permissions = N};
		 ({turn_max_permissions, Wrong}, State) ->
		      ?LOG_ERROR("Wrong 'turn_max_permissions' value: ~p",
				 [Wrong]),
		      State;
		 %% Peer address filters: lists of {Network, PrefixLen}.
		 ({turn_blacklist, B}, State) ->
		      case lists:all(fun is_valid_subnet/1, B) of
			  true ->
			      State#state{blacklist = B};
			  false ->
			      ?LOG_ERROR("Wrong 'turn_blacklist' value: ~p",
					 [B]),
			      State
		      end;
		 ({turn_whitelist, B}, State) ->
		      case lists:all(fun is_valid_subnet/1, B) of
			  true ->
			      State#state{whitelist = B};
			  false ->
			      ?LOG_ERROR("Wrong 'turn_whitelist' value: ~p",
					 [B]),
			      State
		      end;
		 ({shaper, S}, State)
		    when S == none orelse (is_integer(S) andalso (S > 0)) ->
		      State#state{shaper = stun_shaper:new(S)};
		 ({shaper, Wrong}, State) ->
		      ?LOG_ERROR("Wrong 'shaper' value: ~p", [Wrong]),
		      State;
		 ({server_name, S}, State) ->
		      try
			  State#state{server_name = iolist_to_binary(S)}
		      catch _:_ ->
			      ?LOG_ERROR("Wrong 'server_name' value: ~p", [S]),
			      State
		      end;
		 ({auth_realm, R}, State) ->
		      try
			  State#state{realm = iolist_to_binary(R)}
		      catch _:_ ->
			      ?LOG_ERROR("Wrong 'auth_realm' value: ~p", [R]),
			      State
		      end;
		 ({auth_fun, F}, State) when is_function(F) ->
		      State#state{auth_fun = F};
		 ({auth_fun, Wrong}, State) ->
		      ?LOG_ERROR("Wrong 'auth_fun' value: ~p", [Wrong]),
		      State;
		 ({hook_fun, F}, State) when is_function(F) ->
		      State#state{hook_fun = F};
		 ({hook_fun, Wrong}, State) ->
		      ?LOG_ERROR("Wrong 'hook_fun' value: ~p", [Wrong]),
		      State;
		 ({auth_type, anonymous}, State) ->
		      State#state{auth = anonymous};
		 ({auth_type, user}, State) ->
		      State#state{auth = user};
		 ({auth_type, Wrong}, State) ->
		      ?LOG_ERROR("Wrong 'auth_type' value: ~p", [Wrong]),
		      State;
		 %% Listener-level options already consumed elsewhere.
		 ({use_turn, _}, State) -> State;
		 (use_turn, State) -> State;
		 (inet, State) -> State;
		 ({ip, _}, State) -> State;
		 ({backlog, _}, State) -> State;
		 ({certfile, _}, State) -> State;
		 ({dhfile, _}, State) -> State;
		 ({ciphers, _}, State) -> State;
		 ({protocol_options, _}, State) -> State;
		 ({tls, _}, State) -> State;
		 (tls, State) -> State;
		 ({proxy_protocol, _}, State) -> State;
		 (proxy_protocol, State) -> State;
		 ({sock_peer_name, _}, State) -> State;
		 ({session_id, _}, State) -> State;
		 (Opt, State) ->
		      ?LOG_ERROR("Ignoring unknown option '~p'", [Opt]),
		      State
	      end,
	      #state{session_id = ID, peer = Peer, sock = Sock,
		     sock_mod = SockMod, use_turn = true},
	      Opts);
	_ ->
	    %% STUN-only mode: keep only what plain binding queries need.
	    #state{session_id = ID, sock = Sock, sock_mod = SockMod,
		   peer = Peer, hook_fun = proplists:get_value(hook_fun, Opts),
		   auth = anonymous}
    end;
prepare_state(State, _Sock, Peer, SockMod) ->
    ID = stun_logger:make_id(),
    stun_logger:set_metadata(stun, SockMod, ID, Peer),
    State#state{session_id = ID, peer = Peer}.
%% @doc Normalise a configured relay address into an inet address tuple.
%% Binaries and strings are parsed with inet_parse:address/1; tuples are
%% round-tripped through the formatter so that malformed tuples yield
%% {error, einval} instead of crashing.
prepare_addr(Addr) when is_binary(Addr) ->
    prepare_addr(binary_to_list(Addr));
prepare_addr(Addr) when is_list(Addr) ->
    inet_parse:address(Addr);
prepare_addr(Addr) when is_tuple(Addr) ->
    try inet_parse:address(inet_parse:ntoa(Addr))
    catch _:_ ->
	    {error, einval}
    end.
%% @doc Re-arm the socket for incoming data. Without a shaper the socket is
%% put into {active, ?TCP_ACTIVE} mode (batched delivery); with a shaper it
%% is armed one packet at a time ({active, once}) so the shaper can pace
%% reads. gen_tcp sockets use inet:setopts/2; fast_tls provides its own.
activate_socket(#state{sock = Sock, sock_mod = gen_tcp, shaper = none}) ->
    inet:setopts(Sock, [{active, ?TCP_ACTIVE}]);
activate_socket(#state{sock = Sock, sock_mod = SockMod, shaper = none}) ->
    SockMod:setopts(Sock, [{active, ?TCP_ACTIVE}]);
activate_socket(#state{sock = Sock, sock_mod = gen_tcp}) ->
    inet:setopts(Sock, [{active, once}]);
activate_socket(#state{sock = Sock, sock_mod = SockMod}) ->
    SockMod:setopts(Sock, [{active, once}]).
%% @doc Cancel a timer reference, tolerating `undefined'. If the timer has
%% already fired (cancel_timer/1 returns false), flush any pending
%% {timeout, TRef, _} message from the mailbox so stale timeouts are never
%% processed later.
cancel_timer(undefined) ->
    ok;
cancel_timer(TRef) ->
    case erlang:cancel_timer(TRef) of
	false ->
	    receive
		{timeout, TRef, _} ->
		    ok
	    after 0 ->
		    ok
	    end;
	_ ->
	    ok
    end.
%% @doc Treap priority for "now": monotonic microseconds plus a unique
%% monotonic integer as tie-breaker.
now_priority() ->
    {p1_time_compat:monotonic_time(micro_seconds), p1_time_compat:unique_integer([monotonic])}.

%% @doc Drop treap roots whose timestamp exceeds CleanPriority.
%% NOTE(review): which entries this expires depends on the treap's root
%% ordering (treap:get_root/1 returning the extreme priority); confirm that
%% entries older than ?NONCE_LIFETIME are the ones removed.
clean_treap(Treap, CleanPriority) ->
    case treap:is_empty(Treap) of
	true ->
	    Treap;
	false ->
	    {_Key, {TS, _}, _Value} = treap:get_root(Treap),
	    if TS > CleanPriority ->
		    clean_treap(treap:delete_root(Treap), CleanPriority);
	       true ->
		    Treap
	    end
    end.

%% @doc Mint a fresh random nonce for Addr and insert it into the nonce
%% treap, first purging entries per ?NONCE_LIFETIME. Returns {Nonce, Treap}.
make_nonce(Addr, Nonces) ->
    Priority = now_priority(),
    {TS, _} = Priority,
    Nonce = list_to_binary(integer_to_list(rand_uniform(1 bsl 32))),
    NewNonces = clean_treap(Nonces, TS + ?NONCE_LIFETIME),
    {Nonce, treap:insert(Nonce, Priority, Addr, NewNonces)}.

%% @doc Check whether a client-presented nonce is still known (after
%% purging expired entries). Returns {Found :: boolean(), NewTreap}.
have_nonce(Nonce, Nonces) ->
    TS = p1_time_compat:monotonic_time(micro_seconds),
    NewNonces = clean_treap(Nonces, TS + ?NONCE_LIFETIME),
    case treap:lookup(Nonce, NewNonces) of
	{ok, _, _} ->
	    {true, NewNonces};
	_ ->
	    {false, NewNonces}
    end.
%% @doc Verify a message's MESSAGE-INTEGRITY against one or more candidate
%% passwords. A single binary password is wrapped into a list; candidates
%% are tried in order. Returns {true, {User, Realm, Pass}} for the first
%% password that validates, or false if none do.
check_integrity(User, Realm, Msg, Pass) when is_binary(Pass) ->
    check_integrity(User, Realm, Msg, [Pass]);
check_integrity(_User, _Realm, _Msg, []) ->
    false;
check_integrity(User, Realm, Msg, [Pass | T]) ->
    Key = {User, Realm, Pass},
    case stun_codec:check_integrity(Msg, Key) of
	true ->
	    {true, Key};
	false ->
	    check_integrity(User, Realm, Msg, T)
    end.
%% @doc Split a password term into {Password, IsExpired}: auth callbacks may
%% wrap still-verifiable but expired credentials as {expired, Password}.
check_expired_tag(Tag) ->
    case Tag of
	{expired, Password} ->
	    {Password, true};
	Password ->
	    {Password, false}
    end.
%% @doc Convert an IPv4-mapped IPv6 peer address (::ffff:a.b.c.d) into a
%% plain IPv4 tuple; any other {Addr, Port} pair passes through untouched.
unmap_v4_addr({{0, 0, 0, 0, 0, 16#FFFF, W1, W2}, Port}) ->
    %% The last two 16-bit groups hold the four IPv4 octets.
    Octets = {W1 bsr 8, W1 band 16#FF, W2 bsr 8, W2 band 16#FF},
    {Octets, Port};
unmap_v4_addr(Other) ->
    Other.
%% @doc Validate a {Network, PrefixLength} pair as used in the TURN peer
%% black-/whitelists: 4-tuples are checked as IPv4 (octets 0..255, prefix
%% 0..32), 8-tuples as IPv6 (groups 0..65535, prefix 0..128); anything else
%% is invalid.
is_valid_subnet({Net, Mask}) when tuple_size(Net) == 4 ->
    lists:all(fun(Octet) -> (Octet >= 0) and (Octet =< 255) end,
	      tuple_to_list(Net))
	andalso (Mask >= 0) andalso (Mask =< 32);
is_valid_subnet({Net, Mask}) when tuple_size(Net) == 8 ->
    lists:all(fun(Group) -> (Group >= 0) and (Group =< 65535) end,
	      tuple_to_list(Net))
	andalso (Mask >= 0) andalso (Mask =< 128);
is_valid_subnet(_) ->
    false.
%% @doc Decide the transport module for an accepted connection from the
%% listener's `tls' option: `true' -> fast_tls, `false' -> gen_tcp, and
%% `optional' peeks at the first bytes to detect a TLS ClientHello.
get_sockmod(Opts, Sock) ->
    case proplists:get_value(tls, Opts, false) of
	true ->
	    {ok, fast_tls};
	false ->
	    {ok, gen_tcp};
	optional ->
	    case is_tls_handshake(Sock) of
		true ->
		    {ok, fast_tls};
		false ->
		    {ok, gen_tcp}
	    end
    end.
%% @doc Return the client's address/port, honouring a proxy-provided peer
%% name. When the listener passed a `sock_peer_name' option (e.g. from the
%% PROXY protocol) its recorded address wins; otherwise the socket itself
%% is queried.
get_peername(Sock, Opts) ->
    case proplists:get_value(sock_peer_name, Opts) of
	undefined ->
	    inet:peername(Sock);
	{_Source, Addr} ->
	    {ok, Addr}
    end.
%% Detect whether the first bytes on an accepted socket look like a TLS
%% handshake, without consuming them (socket:recvfrom with [peek]). Requires
%% the new socket-based inet backend; on older OTP releases multiplexing is
%% unsupported and an error is returned.
-ifdef(USE_OLD_INET_BACKEND).
-dialyzer({[no_match], [get_sockmod/2]}).
is_tls_handshake(_Sock) ->
    ?LOG_ERROR("Multiplexing TCP and TLS requires a newer Erlang/OTP version"),
    {error, eprotonosupport}.
-else.
is_tls_handshake({_, _, {_, Socket}}) ->
    %% Peek 10 bytes: content type 22 (handshake) and version-major 3
    %% identify a TLS record. NOTE(review): exact byte positions checked
    %% here encode a record-layer heuristic -- confirm against the TLS
    %% record format if this ever misclassifies clients.
    case socket:recvfrom(Socket, 10, [peek], ?TIMEOUT) of
	{ok, {_, <<22, 3, _:4/binary, 0, _:2/binary, 3>>}} ->
	    ?LOG_DEBUG("Determined transport protocol: TLS"),
	    true;
	{ok, {_, _}} ->
	    ?LOG_DEBUG("Determined transport protocol: TCP"),
	    false;
	{error, Reason} ->
	    ?LOG_INFO("Cannot determine transport protocol: ~s", [Reason]),
	    false
    end.
-endif.
%% @doc Upgrade an accepted TCP socket to TLS when the chosen transport is
%% fast_tls, forwarding only the TLS-related listener options; without a
%% `certfile' option the upgrade is refused with {error, eprotonosupport}.
%% Plain gen_tcp sockets pass through unchanged.
maybe_starttls(Sock, fast_tls, Opts) ->
    case proplists:is_defined(certfile, Opts) of
	true ->
	    TLSKeys = [certfile, dhfile, ciphers, protocol_options],
	    TLSOpts = [Opt || {Key, _} = Opt <- Opts,
			      lists:member(Key, TLSKeys)],
	    fast_tls:tcp_to_tls(Sock, [verify_none | TLSOpts]);
	false ->
	    ?LOG_ERROR("Cannot accept TLS connection: "
		       "option 'certfile' is not set"),
	    {error, eprotonosupport}
    end;
maybe_starttls(Sock, gen_tcp, _Opts) ->
    {ok, Sock}.
%% @doc Skeleton reply for Msg: same method, magic cookie and transaction
%% ID, with this server's SOFTWARE attribute; caller fills in class and the
%% response-specific attributes.
prepare_response(State, Msg) ->
    #stun{method = Msg#stun.method,
	  magic = Msg#stun.magic,
	  trid = Msg#stun.trid,
	  'SOFTWARE' = State#state.server_name}.
%% @doc Invoke the user-supplied hook callback (if any) with a map of
%% session/message details. Hook failures are logged, never propagated, so
%% a misbehaving callback cannot crash the connection process.
run_hook(HookName,
	 #state{session_id = ID,
		peer = Client,
		sock_mod = SockMod,
		hook_fun = HookFun},
	 #stun{'USERNAME' = User,
	       'REALM' = Realm,
	       'ERROR-CODE' = Reason} = Msg)
  when is_function(HookFun) ->
    Info = #{id => ID,
	     user => User,
	     realm => Realm,
	     client => Client,
	     transport => stun_logger:encode_transport(SockMod),
	     version => stun_codec:version(Msg),
	     reason => Reason},
    ?LOG_DEBUG("Running '~s' hook", [HookName]),
    try HookFun(HookName, Info)
    catch _:Err -> ?LOG_ERROR("Hook '~s' failed: ~p", [HookName, Err])
    end;
run_hook(HookName, _State, _Msg) ->
    ?LOG_DEBUG("No callback function specified for '~s' hook", [HookName]),
    ok.
%% Uniform random helpers: rand_uniform/0 yields a float in [0.0, 1.0),
%% rand_uniform/1 an integer in 1..N, rand_uniform/2 an integer in N..M.
%% The `rand' module is used when available; the fallback relies on
%% crypto:rand_uniform/2 (removed in recent OTP -- hence the conditional).
-define(THRESHOLD, 16#10000000000000000).
-ifdef(RAND_UNIFORM).
rand_uniform() ->
    rand:uniform().

rand_uniform(N) ->
    rand:uniform(N).

rand_uniform(N, M) ->
    rand:uniform(M-N+1) + N-1.
-else.
rand_uniform() ->
    crypto:rand_uniform(0, ?THRESHOLD)/?THRESHOLD.

rand_uniform(N) ->
    crypto:rand_uniform(1, N+1).

rand_uniform(N, M) ->
    crypto:rand_uniform(N, M+1).
-endif.
|
6c5d5a07e3788376b64c4d117f99457f1d88402a1fdaf1181036a3257fdd829b | liquidz/antq | shadow_test.clj | (ns antq.dep.shadow-test
(:require
[antq.dep.shadow :as sut]
[antq.record :as r]
[antq.util.env :as u.env]
[clojure.java.io :as io]
[clojure.test :as t]))
;; Fixed dummy path reported as the :file of every extracted dependency.
(def ^:private file-path
  "path/to/shadow-cljs.edn")

(defn- dependency
  "Build the expected Dependency record for a :java dep of the
  :shadow-cljs project located at `file-path`, merged with the
  name/version overrides in `m`."
  [m]
  (r/map->Dependency (merge {:project :shadow-cljs
                             :type :java
                             :file file-path}
                            m)))
;; extract-deps should return Dependency records for every coordinate in
;; the fixture shadow-cljs.edn, including deps carrying metadata.
(t/deftest extract-deps-test
  (let [deps (sut/extract-deps
              file-path
              (slurp (io/resource "dep/test_shadow-cljs.edn")))]
    (t/is (sequential? deps))
    (t/is (every? #(instance? antq.record.Dependency %) deps))
    (t/is (= #{(dependency {:name "foo/core" :version "1.0.0"})
               (dependency {:name "bar" :version "2.0.0"})
               (dependency {:name "baz" :version "3.0.0"})
               (dependency {:name "with/meta" :version "4.0.0"})}
             (set deps)))))
;; Versions written with the #env reader tag should resolve from the
;; (redefined) environment, falling back to the tag's default value when
;; the variable is unset (foo3/foo4).
(t/deftest extract-deps-with-env-tag-test
  (with-redefs [u.env/getenv {"ENV1" "1.0.0"
                              "ENV2" "2.0.0"
                              "ENV5" "5.0.0"}]
    (let [deps (sut/extract-deps
                file-path
                (slurp (io/resource "dep/test_shadow-cljs-env.edn")))]
      (t/is (sequential? deps))
      (t/is (every? #(instance? antq.record.Dependency %) deps))
      (t/is (= #{(dependency {:name "foo1" :version "1.0.0"})
                 (dependency {:name "foo2" :version "2.0.0"})
                 (dependency {:name "foo3" :version "default3"})
                 (dependency {:name "foo4" :version "default4"})
                 (dependency {:name "foo5" :version "5.0.0"})}
               (set deps))))))
;; load-deps reads the project file from a directory: an existing fixture
;; yields :java deps, a missing file yields nil.
(t/deftest load-deps-test
  (with-redefs [sut/project-file "test_shadow-cljs.edn"]
    (let [deps (sut/load-deps "test/resources/dep")]
      (t/is (seq deps))
      (t/is (every? #(= :java (:type %)) deps))))
  (with-redefs [sut/project-file "non_existing_file.edn"]
    (t/is (nil? (sut/load-deps "test/resources/dep")))))
| null | https://raw.githubusercontent.com/liquidz/antq/51b257d94761a4642c6d35e65774a060248624b7/test/antq/dep/shadow_test.clj | clojure | (ns antq.dep.shadow-test
(:require
[antq.dep.shadow :as sut]
[antq.record :as r]
[antq.util.env :as u.env]
[clojure.java.io :as io]
[clojure.test :as t]))
(def ^:private file-path
"path/to/shadow-cljs.edn")
(defn- dependency
[m]
(r/map->Dependency (merge {:project :shadow-cljs
:type :java
:file file-path}
m)))
(t/deftest extract-deps-test
(let [deps (sut/extract-deps
file-path
(slurp (io/resource "dep/test_shadow-cljs.edn")))]
(t/is (sequential? deps))
(t/is (every? #(instance? antq.record.Dependency %) deps))
(t/is (= #{(dependency {:name "foo/core" :version "1.0.0"})
(dependency {:name "bar" :version "2.0.0"})
(dependency {:name "baz" :version "3.0.0"})
(dependency {:name "with/meta" :version "4.0.0"})}
(set deps)))))
(t/deftest extract-deps-with-env-tag-test
(with-redefs [u.env/getenv {"ENV1" "1.0.0"
"ENV2" "2.0.0"
"ENV5" "5.0.0"}]
(let [deps (sut/extract-deps
file-path
(slurp (io/resource "dep/test_shadow-cljs-env.edn")))]
(t/is (sequential? deps))
(t/is (every? #(instance? antq.record.Dependency %) deps))
(t/is (= #{(dependency {:name "foo1" :version "1.0.0"})
(dependency {:name "foo2" :version "2.0.0"})
(dependency {:name "foo3" :version "default3"})
(dependency {:name "foo4" :version "default4"})
(dependency {:name "foo5" :version "5.0.0"})}
(set deps))))))
(t/deftest load-deps-test
(with-redefs [sut/project-file "test_shadow-cljs.edn"]
(let [deps (sut/load-deps "test/resources/dep")]
(t/is (seq deps))
(t/is (every? #(= :java (:type %)) deps))))
(with-redefs [sut/project-file "non_existing_file.edn"]
(t/is (nil? (sut/load-deps "test/resources/dep")))))
| |
a76ba1c53a8f766670428934faa9b5cbf370f2b9b31b56d6700714f3cbe7cc37 | den1k/vimsical | core_test.clj | (ns vimsical.backend.core-test
(:require
[vimsical.backend.core :as sut]
[clojure.test :as t]))
;; Placeholder smoke test: always passes, keeps the backend test runner wired up.
(t/deftest core-test (t/is true))
| null | https://raw.githubusercontent.com/den1k/vimsical/1e4a1f1297849b1121baf24bdb7a0c6ba3558954/test/backend/vimsical/backend/core_test.clj | clojure | (ns vimsical.backend.core-test
(:require
[vimsical.backend.core :as sut]
[clojure.test :as t]))
(t/deftest core-test (t/is true))
| |
85de58182586a5799656947155daf8722e6da268d340bb25b1016793fb0a827b | McMasterU/HashedExpression | OperationSpec.hs | # LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE DuplicateRecordFields #-}

-- |
-- Module      : HashedExpression.Internal.OperationSpec
-- Copyright   : (c) OCA 2020
-- License     : MIT (see the LICENSE file)
-- Maintainer  :
-- Stability   : provisional
-- Portability : unportable
--
-- This module contains the specification for all operations (each corresponding to a constructor of @Op@)
module HashedExpression.Internal.OperationSpec where
import Data.Function ((&))
import Data.List (sort)
import GHC.Stack (HasCallStack)
import HashedExpression.Internal.Base
import HashedExpression.Utils
-- | Specification of a unary operation: how to build the 'Op' node from its
-- argument, and how the result's shape and element type are derived.
data UnarySpec = UnarySpec
  { toOp :: Arg -> Op,
    decideShape :: Shape -> Shape,
    decideET :: ElementType -> ElementType
  }

-- | Specification of a binary operation.
data BinarySpec = BinarySpec
  { toOp :: Arg -> Arg -> Op,
    decideShape :: Shape -> Shape -> Shape,
    decideET :: ElementType -> ElementType -> ElementType
  }

-- | Specification of an n-ary operation over a homogeneous operand list.
data NarySpec = NarySpec
  { toOp :: [Arg] -> Op,
    decideShape :: [Shape] -> Shape,
    decideET :: [ElementType] -> ElementType
  }

-- | Specification of a conditional operation: one condition operand plus a
-- list of branch operands.
data ConditionarySpec = ConditionarySpec
  { toOp :: Arg -> [Arg] -> Op,
    decideShape :: Shape -> [Shape] -> Shape,
    decideET :: ElementType -> [ElementType] -> ElementType
  }

-- | Union of the four arities of operation specification.
data OperationSpec
  = Unary UnarySpec
  | Binary BinarySpec
  | Nary NarySpec
  | ConditionAry ConditionarySpec
-------------------------------------------------------------------------------
-- | Assert that all list elements are equal (per 'allEqual') and return the
-- second argument; errors with the offending values otherwise. Used to
-- enforce that operands agree on shape / element type.
assertSame :: (HasCallStack, Ord a, Show a) => [a] -> b -> b
assertSame xs y
  | allEqual xs = y
  | otherwise = error $ "must be equal " ++ show xs
-------------------------------------------------------------------------------
-- | Build a 'UnarySpec' that keeps the operand's shape unchanged and only
-- admits the given element types.
defaultUnary :: HasCallStack => (Arg -> Op) -> [ElementType] -> UnarySpec
defaultUnary f allowedETs = UnarySpec {toOp = f, decideShape = id, decideET = decideET}
  where
    decideET et
      | et `elem` allowedETs = et
      | otherwise = error "Element type is not allowed"
-- | Build a 'BinarySpec' requiring both operands to share shape and element
-- type (which the result inherits), with the first operand's element type
-- restricted to the allowed list.
defaultBinary :: HasCallStack => (Arg -> Arg -> Op) -> [ElementType] -> BinarySpec
defaultBinary f allowedETs = BinarySpec {toOp = f, decideShape = req, decideET = decideET}
  where
    req x y = assertSame [x, y] x
    decideET x y
      | x `elem` allowedETs = assertSame [x, y] x
      | otherwise = error "Element type is not allowed"
-------------------------------------------------------------------------------
-- | Spec for n-ary 'Sum': all operands must share shape and element type,
-- which the result inherits.
specSum :: HasCallStack => NarySpec
specSum =
  NarySpec
    { toOp = Sum,
      decideShape = \xs -> assertSame xs $ head xs,
      decideET = \xs -> assertSame xs $ head xs
    }

-- | Spec for n-ary elementwise 'Mul': same agreement rules as 'specSum'.
specMul :: HasCallStack => NarySpec
specMul =
  NarySpec
    { toOp = Mul,
      decideShape = \xs -> assertSame xs $ head xs,
      decideET = \xs -> assertSame xs $ head xs
    }
-- | Elementwise integer power; allowed on real and complex operands.
specPower :: HasCallStack => Int -> UnarySpec
specPower alpha = defaultUnary (Power alpha) [R, C]

-- | Elementwise negation; allowed on real and complex operands.
specNeg :: HasCallStack => UnarySpec
specNeg = defaultUnary Neg [R, C]
-- | Scalar multiplication: the first operand must be a scalar (empty
-- shape); the result takes the second operand's shape. Element types
-- combine as R·R = R, R·C = C, C·C = C; scaling a real operand by a
-- complex scalar (C, R) is rejected by the catch-all error clause.
specScale :: HasCallStack => BinarySpec
specScale =
  BinarySpec
    { toOp = Scale,
      decideShape = decideShape,
      decideET = decideET
    }
  where
    decideShape x y
      | null x = y
      | otherwise = error "First operand must be scalar"
    decideET :: ElementType -> ElementType -> ElementType
    decideET R R = R
    decideET R C = C
    decideET C C = C
    decideET x y = error $ "Scaling invalid et " ++ show x ++ " " ++ show y
-- Elementwise real-valued operations: each keeps the operand's shape and is
-- restricted to real (R) element type. 'specDiv' is elementwise division;
-- the rest are the usual transcendental / trigonometric functions.
specDiv :: HasCallStack => BinarySpec
specDiv = defaultBinary Div [R]

specSqrt :: HasCallStack => UnarySpec
specSqrt = defaultUnary Sqrt [R]

specSin :: HasCallStack => UnarySpec
specSin = defaultUnary Sin [R]

specCos :: HasCallStack => UnarySpec
specCos = defaultUnary Cos [R]

specTan :: HasCallStack => UnarySpec
specTan = defaultUnary Tan [R]

specExp :: HasCallStack => UnarySpec
specExp = defaultUnary Exp [R]

specLog :: HasCallStack => UnarySpec
specLog = defaultUnary Log [R]

specSinh :: HasCallStack => UnarySpec
specSinh = defaultUnary Sinh [R]

specCosh :: HasCallStack => UnarySpec
specCosh = defaultUnary Cosh [R]

specTanh :: HasCallStack => UnarySpec
specTanh = defaultUnary Tanh [R]

specAsin :: HasCallStack => UnarySpec
specAsin = defaultUnary Asin [R]

specAcos :: HasCallStack => UnarySpec
specAcos = defaultUnary Acos [R]

specAtan :: HasCallStack => UnarySpec
specAtan = defaultUnary Atan [R]

specAsinh :: HasCallStack => UnarySpec
specAsinh = defaultUnary Asinh [R]

specAcosh :: HasCallStack => UnarySpec
specAcosh = defaultUnary Acosh [R]

specAtanh :: HasCallStack => UnarySpec
specAtanh = defaultUnary Atanh [R]
-- | Combine two real operands of equal shape into one complex operand
-- (real part, imaginary part).
specRealImag :: HasCallStack => BinarySpec
specRealImag =
  BinarySpec {toOp = RealImag, decideShape = decideShape, decideET = decideET}
  where
    decideShape x y = assertSame [x, y] x
    decideET x y
      | x == R && y == R = C
      | otherwise = error $ "2 operands must be real" ++ show [x, y]
-- | Real part of a complex operand: shape preserved, element type C -> R.
specRealPart :: HasCallStack => UnarySpec
specRealPart =
  UnarySpec {toOp = RealPart, decideShape = id, decideET = decideET}
  where
    decideET x
      | x == C = R
      | otherwise = error "Must be complex"

-- | Imaginary part of a complex operand: shape preserved, C -> R.
specImagPart :: HasCallStack => UnarySpec
specImagPart =
  UnarySpec {toOp = ImagPart, decideShape = id, decideET = decideET}
  where
    decideET x
      | x == C = R
      | otherwise = error "Must be complex"

-- | Complex conjugate: shape and (complex) element type preserved.
specConjugate :: HasCallStack => UnarySpec
specConjugate = defaultUnary Conjugate [C]
-- | Inner product: operands must share shape and element type; the result
-- is a scalar (empty shape) of the common element type.
specInnerProd :: HasCallStack => BinarySpec
specInnerProd =
  BinarySpec
    { toOp = InnerProd,
      decideShape = decideShape,
      decideET = decideET
    }
  where
    decideShape x y = assertSame [x, y] []
    decideET x y = assertSame [x, y] x
-- | Piecewise selection: a real condition operand picks between branches
-- that all share its shape; N marks partition the real line into N + 1
-- intervals, so there must be exactly one more branch than marks. The
-- result takes the branches' element type.
specPiecewise :: HasCallStack => [Double] -> ConditionarySpec
specPiecewise marks =
  ConditionarySpec {toOp = Piecewise marks, decideShape = decideShape, decideET = decideET}
  where
    decideShape condition branches = assertSame (condition : branches) condition
    decideET condition branches
      | condition == R && length branches == length marks + 1 = head branches
      | otherwise = error "Condition must be real and number of branches must equal number of marks + 1"
-- | Cyclic rotation by the given amount: shape and element type preserved.
specRotate :: HasCallStack => RotateAmount -> UnarySpec
specRotate ra = defaultUnary (Rotate ra) [R, C]

-- | Fourier transform: complex operand, shape preserved.
specFT :: HasCallStack => UnarySpec
specFT = defaultUnary FT [C]

-- | Inverse Fourier transform: complex operand, shape preserved.
specIFT :: HasCallStack => UnarySpec
specIFT = defaultUnary IFT [C]
-------------------------------------------------------------------------------
-- | Compute the shape remaining after applying one 'DimSelector' per
-- dimension: a 'Range' keeps a (possibly wrapping, strided) slice of the
-- dimension, while 'At' collapses it entirely.
--
-- The two lists must have equal length; a mismatch now raises a
-- descriptive error instead of a bare non-exhaustive-pattern failure
-- (callers such as 'specProject' and 'specInject' validate lengths first,
-- so this clause is a guard against misuse).
processDimSelector :: Shape -> [DimSelector] -> Shape
processDimSelector [] [] = []
processDimSelector (size : xs) ((Range start end step) : ss) = (((end - start) `mod` size) `div` step + 1) : processDimSelector xs ss
processDimSelector (_ : xs) ((At _) : ss) = processDimSelector xs ss -- collapse the corresponding dimension
processDimSelector _ _ = error "processDimSelector: shape and dim selectors must have the same length"
-- | Projection (slicing/indexing): requires one selector per dimension and
-- derives the result shape via 'processDimSelector'; element type is kept.
specProject :: HasCallStack => [DimSelector] -> UnarySpec
specProject dmSelectors =
  UnarySpec {toOp = Project dmSelectors, decideShape = decideShape, decideET = id}
  where
    decideShape shape
      | length shape == length dmSelectors = processDimSelector shape dmSelectors
      | otherwise = error "dim selectors and shape must be of same length"
-- | Injection: write a sub-array into the region of a base array selected
-- by the dim selectors. The sub operand's shape must equal what projecting
-- the base by the same selectors would produce; the result keeps the base
-- shape, and both operands must share an element type.
specInject :: HasCallStack => [DimSelector] -> BinarySpec
specInject dmSelectors =
  BinarySpec {toOp = Inject dmSelectors, decideShape = decideShape, decideET = decideET}
  where
    decideShape subShape baseShape
      | length baseShape == length dmSelectors,
        subShape == processDimSelector baseShape dmSelectors =
        baseShape
      | otherwise = error $ "dim selectors, sub shape and base shape not valid" ++ show dmSelectors ++ " " ++ show subShape ++ " " ++ show baseShape
    decideET x y = assertSame [x, y] x
-------------------------------------------------------------------------------
-- | Matrix multiplication: @[m, n] x [p, q] -> [m, q]@ and matrix–vector
-- @[m, n] x [p] -> [m]@, both requiring @n == p@; other shape combinations
-- are rejected. Operands must share an element type.
specMatMul :: HasCallStack => BinarySpec
specMatMul = BinarySpec {toOp = MatMul, decideShape = decideShape, decideET = decideET}
  where
    decideShape [m, n] [p, q]
      | n == p = [m, q]
      | otherwise = error $ "invalid shape matrix multiplication" ++ show [m, n] ++ " " ++ show [p, q]
    decideShape [m, n] [p]
      | n == p = [m]
      | otherwise = error $ "invalid shape matrix multiplication" ++ show [m, n] ++ " " ++ show [p]
    decideShape shape1 shape2 = error $ "invalid shape matrix multiplication" ++ show shape1 ++ " " ++ show shape2
    decideET x y = assertSame [x, y] x
-- | Operation spec for 'Transpose'. Shape rules: an @[m, n]@ matrix becomes
-- @[n, m]@; a vector @[m]@ is promoted to a @[1, m]@ row matrix; any other
-- rank is rejected. The element type is unchanged.
specTranspose :: HasCallStack => UnarySpec
specTranspose = UnarySpec {toOp = Transpose, decideShape = decideShape, decideET = id}
  where
    decideShape [m, n] = [n, m]
    decideShape [m] = [1, m]
    decideShape _ = error "invalid shape transpose" -- fixed typo: was "tranpose"
-- | Coerce an operand to the given target shape, allowed only when one
-- shape is 'coercible' to the other (in either direction); element type is
-- kept. When the guard on the first clause fails, control falls through to
-- the catch-all error clause.
specCoerce :: HasCallStack => Shape -> UnarySpec
specCoerce targetShape = UnarySpec {toOp = Coerce targetShape, decideShape = decideShape, decideET = id}
  where
    decideShape shape
      | coercible shape targetShape || coercible targetShape shape = targetShape
    decideShape _ = error "not coercible"
| null | https://raw.githubusercontent.com/McMasterU/HashedExpression/cfe9f21165f1f3fc6d59ec27fb962c29e67a9bbb/src/HashedExpression/Internal/OperationSpec.hs | haskell | |
Copyright : (c) OCA 2020
Maintainer :
Stability : provisional
Portability : unportable
This modules contains specification for all operations (each corresponding to a constructor of @Op)
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
|
|
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
collapse the corresponding dimension
----------------------------------------------------------------------------- | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DuplicateRecordFields #
Module : HashedExpression . Internal . OperationSpec
License : MIT ( see the LICENSE file )
module HashedExpression.Internal.OperationSpec where
import Data.Function ((&))
import Data.List (sort)
import GHC.Stack (HasCallStack)
import HashedExpression.Internal.Base
import HashedExpression.Utils
data UnarySpec = UnarySpec
{ toOp :: Arg -> Op,
decideShape :: Shape -> Shape,
decideET :: ElementType -> ElementType
}
data BinarySpec = BinarySpec
{ toOp :: Arg -> Arg -> Op,
decideShape :: Shape -> Shape -> Shape,
decideET :: ElementType -> ElementType -> ElementType
}
data NarySpec = NarySpec
{ toOp :: [Arg] -> Op,
decideShape :: [Shape] -> Shape,
decideET :: [ElementType] -> ElementType
}
data ConditionarySpec = ConditionarySpec
{ toOp :: Arg -> [Arg] -> Op,
decideShape :: Shape -> [Shape] -> Shape,
decideET :: ElementType -> [ElementType] -> ElementType
}
data OperationSpec
= Unary UnarySpec
| Binary BinarySpec
| Nary NarySpec
| ConditionAry ConditionarySpec
assertSame :: (HasCallStack, Ord a, Show a) => [a] -> b -> b
assertSame xs y
| allEqual xs = y
| otherwise = error $ "must be equal " ++ show xs
defaultUnary :: HasCallStack => (Arg -> Op) -> [ElementType] -> UnarySpec
defaultUnary f allowedETs = UnarySpec {toOp = f, decideShape = id, decideET = decideET}
where
decideET et
| et `elem` allowedETs = et
| otherwise = error "Element type is not allowed"
defaultBinary :: HasCallStack => (Arg -> Arg -> Op) -> [ElementType] -> BinarySpec
defaultBinary f allowedETs = BinarySpec {toOp = f, decideShape = req, decideET = decideET}
where
req x y = assertSame [x, y] x
decideET x y
| x `elem` allowedETs = assertSame [x, y] x
| otherwise = error "Element type is not allowed"
specSum :: HasCallStack => NarySpec
specSum =
NarySpec
{ toOp = Sum,
decideShape = \xs -> assertSame xs $ head xs,
decideET = \xs -> assertSame xs $ head xs
}
specMul :: HasCallStack => NarySpec
specMul =
NarySpec
{ toOp = Mul,
decideShape = \xs -> assertSame xs $ head xs,
decideET = \xs -> assertSame xs $ head xs
}
specPower :: HasCallStack => Int -> UnarySpec
specPower alpha = defaultUnary (Power alpha) [R, C]
specNeg :: HasCallStack => UnarySpec
specNeg = defaultUnary Neg [R, C]
specScale :: HasCallStack => BinarySpec
specScale =
BinarySpec
{ toOp = Scale,
decideShape = decideShape,
decideET = decideET
}
where
decideShape x y
| null x = y
| otherwise = error "First operand must be scalar"
decideET :: ElementType -> ElementType -> ElementType
decideET R R = R
decideET R C = C
decideET C C = C
decideET x y = error $ "Scaling invalid et " ++ show x ++ " " ++ show y
specDiv :: HasCallStack => BinarySpec
specDiv = defaultBinary Div [R]
specSqrt :: HasCallStack => UnarySpec
specSqrt = defaultUnary Sqrt [R]
specSin :: HasCallStack => UnarySpec
specSin = defaultUnary Sin [R]
specCos :: HasCallStack => UnarySpec
specCos = defaultUnary Cos [R]
specTan :: HasCallStack => UnarySpec
specTan = defaultUnary Tan [R]
specExp :: HasCallStack => UnarySpec
specExp = defaultUnary Exp [R]
specLog :: HasCallStack => UnarySpec
specLog = defaultUnary Log [R]
specSinh :: HasCallStack => UnarySpec
specSinh = defaultUnary Sinh [R]
specCosh :: HasCallStack => UnarySpec
specCosh = defaultUnary Cosh [R]
specTanh :: HasCallStack => UnarySpec
specTanh = defaultUnary Tanh [R]
specAsin :: HasCallStack => UnarySpec
specAsin = defaultUnary Asin [R]
specAcos :: HasCallStack => UnarySpec
specAcos = defaultUnary Acos [R]
specAtan :: HasCallStack => UnarySpec
specAtan = defaultUnary Atan [R]
specAsinh :: HasCallStack => UnarySpec
specAsinh = defaultUnary Asinh [R]
specAcosh :: HasCallStack => UnarySpec
specAcosh = defaultUnary Acosh [R]
specAtanh :: HasCallStack => UnarySpec
specAtanh = defaultUnary Atanh [R]
specRealImag :: HasCallStack => BinarySpec
specRealImag =
BinarySpec {toOp = RealImag, decideShape = decideShape, decideET = decideET}
where
decideShape x y = assertSame [x, y] x
decideET x y
| x == R && y == R = C
| otherwise = error $ "2 operands must be real" ++ show [x, y]
specRealPart :: HasCallStack => UnarySpec
specRealPart =
UnarySpec {toOp = RealPart, decideShape = id, decideET = decideET}
where
decideET x
| x == C = R
| otherwise = error "Must be complex"
specImagPart :: HasCallStack => UnarySpec
specImagPart =
UnarySpec {toOp = ImagPart, decideShape = id, decideET = decideET}
where
decideET x
| x == C = R
| otherwise = error "Must be complex"
-- | Complex conjugation; accepts complex operands only.
specConjugate :: HasCallStack => UnarySpec
specConjugate = defaultUnary Conjugate [C]
-- | Inner product: operands must agree in shape and element type; the
-- result is a scalar (shape @[]@) of the shared element type.
specInnerProd :: HasCallStack => BinarySpec
specInnerProd =
  BinarySpec
    { toOp = InnerProd,
      decideShape = decideShape,
      decideET = decideET
    }
  where
    decideShape x y = assertSame [x, y] []
    decideET x y = assertSame [x, y] x
-- | Piecewise selection: @marks@ split the real line into
-- @length marks + 1@ intervals, with one branch expression per interval.
specPiecewise :: HasCallStack => [Double] -> ConditionarySpec
specPiecewise marks =
  ConditionarySpec {toOp = Piecewise marks, decideShape = decideShape, decideET = decideET}
  where
    -- Condition and all branches must share one shape.
    decideShape condition branches = assertSame (condition : branches) condition
    -- The guard pins the branch count to marks+1 (>= 1), so 'head' is safe;
    -- the result takes the element type of the first branch.
    decideET condition branches
      | condition == R && length branches == length marks + 1 = head branches
      | otherwise = error "Condition must be real and number of branches must equal number of marks + 1"
-- | Cyclic rotation by a fixed amount; defined for real and complex nodes.
specRotate :: HasCallStack => RotateAmount -> UnarySpec
specRotate ra = defaultUnary (Rotate ra) [R, C]
-- | Fourier transform; operand must be complex.
specFT :: HasCallStack => UnarySpec
specFT = defaultUnary FT [C]
-- | Inverse Fourier transform; operand must be complex.
specIFT :: HasCallStack => UnarySpec
specIFT = defaultUnary IFT [C]
-- | Shape that results from applying one 'DimSelector' per dimension.
-- A @Range start end step@ keeps @((end - start) `mod` size) `div` step + 1@
-- indices; the subtraction is taken modulo the dimension size, so a range is
-- allowed to wrap around the end of the axis.
--
-- NOTE(review): only 'Range' selectors are handled here.  Any other selector
-- constructor, or a length mismatch between shape and selectors, previously
-- died with an opaque non-exhaustive-pattern crash; it now raises a
-- descriptive error instead.
processDimSelector :: Shape -> [DimSelector] -> Shape
processDimSelector [] [] = []
processDimSelector (size : xs) ((Range start end step) : ss) =
  (((end - start) `mod` size) `div` step + 1) : processDimSelector xs ss
processDimSelector _ _ =
  error "processDimSelector: shape/selector length mismatch or unsupported selector"
-- | Projection (slicing): one 'DimSelector' per dimension of the operand;
-- the result shape is computed by 'processDimSelector', the element type
-- is unchanged.
specProject :: HasCallStack => [DimSelector] -> UnarySpec
specProject dmSelectors =
  UnarySpec {toOp = Project dmSelectors, decideShape = decideShape, decideET = id}
  where
    decideShape shape
      | length shape == length dmSelectors = processDimSelector shape dmSelectors
      | otherwise = error "dim selectors and shape must be of same length"
-- | Injection: write a sub-array into a base array.  The base needs one
-- selector per dimension and the sub-array's shape must equal what the
-- selectors pick out of the base; the result keeps the base shape and the
-- shared element type.
specInject :: HasCallStack => [DimSelector] -> BinarySpec
specInject dmSelectors =
  BinarySpec {toOp = Inject dmSelectors, decideShape = decideShape, decideET = decideET}
  where
    decideShape subShape baseShape
      | length baseShape == length dmSelectors,
        subShape == processDimSelector baseShape dmSelectors =
        baseShape
      | otherwise = error $ "dim selectors, sub shape and base shape not valid" ++ show dmSelectors ++ " " ++ show subShape ++ " " ++ show baseShape
    decideET x y = assertSame [x, y] x
-- | Matrix multiplication: matrix x matrix ([m,n] x [p,q] -> [m,q]) and
-- matrix x vector ([m,n] x [p] -> [m]); the inner dimensions must agree.
-- Operands must share one element type, which the result keeps.
specMatMul :: HasCallStack => BinarySpec
specMatMul = BinarySpec {toOp = MatMul, decideShape = decideShape, decideET = decideET}
  where
    decideShape [m, n] [p, q]
      | n == p = [m, q]
      | otherwise = error $ "invalid shape matrix multiplication" ++ show [m, n] ++ " " ++ show [p, q]
    decideShape [m, n] [p]
      | n == p = [m]
      | otherwise = error $ "invalid shape matrix multiplication" ++ show [m, n] ++ " " ++ show [p]
    decideShape shape1 shape2 = error $ "invalid shape matrix multiplication" ++ show shape1 ++ " " ++ show shape2
    decideET x y = assertSame [x, y] x
-- | Transpose: a matrix @[m, n]@ becomes @[n, m]@, and a vector @[m]@ is
-- promoted to a single-row matrix @[1, m]@.  Element type is unchanged.
specTranspose :: HasCallStack => UnarySpec
specTranspose = UnarySpec {toOp = Transpose, decideShape = swapDims, decideET = id}
  where
    swapDims shape = case shape of
      [m, n] -> [n, m]
      [m] -> [1, m]
      _ -> error "invalid shape tranpose"
-- | Reinterpret a node at @targetShape@; allowed when either shape is
-- coercible to the other.  Note: if the guard on the first equation fails,
-- evaluation falls through to the second equation, which raises.
specCoerce :: HasCallStack => Shape -> UnarySpec
specCoerce targetShape = UnarySpec {toOp = Coerce targetShape, decideShape = decideShape, decideET = id}
  where
    decideShape shape
      | coercible shape targetShape || coercible targetShape shape = targetShape
    decideShape _ = error "not coercible"
|
5e879159d66feac30dc799b7819cb95dd38ced04e15e77c2cc09929f1a29e2fc | abridgewater/nq-clim | standard-application-frame.lisp | ;;;
nq - clim / frame / standard - application - frame
;;;
;;; The normal superclass for an application frame class.
;;;
;;; Package definition for the standard application frame.
(cl:defpackage :nq-clim/frame/standard-application-frame
  (:use :cl
        :nq-clim/frame/application-frame
        :nq-clim/frame/application-frame-functions
        :nq-clim/frame/manageable-frame-mixin)
  (:export "STANDARD-APPLICATION-FRAME"))

(cl:in-package :nq-clim/frame/standard-application-frame)

;; The normal superclass for an application frame class: combines the
;; APPLICATION-FRAME protocol class with the manageable-frame behavior.
;; PRETTY-NAME is the user-visible frame title; TOPLEVEL-PANE (read via
;; FRAME-PANES) starts out empty.
(defclass standard-application-frame (application-frame
                                      manageable-frame-mixin)
  ((pretty-name :initarg :pretty-name :accessor frame-pretty-name)
   (toplevel-pane :initform nil :accessor frame-panes)))
EOF
| null | https://raw.githubusercontent.com/abridgewater/nq-clim/11d339fd0ac77b6d624fc5537b170294a191a3de/frame/standard-application-frame.lisp | lisp |
The normal superclass for an application frame class.
| nq - clim / frame / standard - application - frame
(cl:defpackage :nq-clim/frame/standard-application-frame
(:use :cl
:nq-clim/frame/application-frame
:nq-clim/frame/application-frame-functions
:nq-clim/frame/manageable-frame-mixin)
(:export
"STANDARD-APPLICATION-FRAME"))
(cl:in-package :nq-clim/frame/standard-application-frame)
(defclass standard-application-frame (application-frame
manageable-frame-mixin)
((pretty-name :initarg :pretty-name :accessor frame-pretty-name)
(toplevel-pane :initform nil :accessor frame-panes)))
EOF
|
b4cb3020a19c3642d5098b428bdfc06eae19743243c6416c0faeacb1aa6779fd | simnalamburt/snucse.pl | pp.ml | (*
* SNU 4190.310 Programming Languages
*
* SM5
*)
open K
open K
module KParseTreePrinter : sig val print : program -> unit end =
struct
let q x = ["\"" ^ x ^ "\""]
let pfx = " "
let indent l = List.map (fun s -> pfx ^ s) l
let rec comma = function [] -> []
| [h] -> [h ^ ","]
| (h :: t) -> h :: (comma t)
let ps s l =
match l with
[] -> [s]
| (h :: t) -> (s ^ "(") :: (List.fold_left (fun l x -> (comma l) @ (indent x)) (indent h) t)
@ [(")")]
let rec pe e =
match e with
NUM i -> ps "NUM" [[string_of_int i]]
| TRUE -> ps "TRUE" []
| FALSE -> ps "FALSE" []
| UNIT -> ps "UNIT" []
| VAR x -> ps "VAR" [q x]
| ADD (e1, e2) -> ps "ADD" [pe e1; pe e2]
| SUB (e1, e2) -> ps "SUB" [pe e1; pe e2]
| MUL (e1, e2) -> ps "MUL" [pe e1; pe e2]
| DIV (e1, e2) -> ps "DIV" [pe e1; pe e2]
| EQUAL (e1, e2) -> ps "EQUAL" [pe e1; pe e2]
| LESS (e1, e2) -> ps "LESS" [pe e1; pe e2]
| NOT e -> ps "NOT" [pe e]
| ASSIGN (i, e) -> ps "ASSIGN" [q i; pe e]
| SEQ (e1, e2) -> ps "SEQ" [pe e1; pe e2]
| IF (e1, e2, e3) -> ps "IF" [pe e1; pe e2; pe e3]
| WHILE (e1, e2) -> ps "WHILE" [pe e1; pe e2]
| FOR (i, e1, e2, e3) -> ps "FOR" [q i; pe e1; pe e2; pe e3]
| LETV (i, e1, e2) -> ps "LETV" [q i; pe e1; pe e2]
| LETF(f, x, e1, e2) -> ps "LETF" [q f; q x; pe e1; pe e2]
| CALLV (f, e) -> ps "CALLV" [q f; pe e]
| CALLR (f, y) -> ps "CALLR" [q f; q y]
| READ i -> ps "READ" [q i]
| WRITE e -> ps "WRITE" [pe e]
let print pgm = List.iter print_endline (pe pgm)
end
| null | https://raw.githubusercontent.com/simnalamburt/snucse.pl/a130f826c6f27224c88b0dd2e1588f35bccc9ebb/hw5/pp.ml | ocaml |
* SNU 4190.310 Programming Languages
*
* SM5
|
open K
open K
module KParseTreePrinter : sig val print : program -> unit end =
struct
let q x = ["\"" ^ x ^ "\""]
let pfx = " "
let indent l = List.map (fun s -> pfx ^ s) l
let rec comma = function [] -> []
| [h] -> [h ^ ","]
| (h :: t) -> h :: (comma t)
let ps s l =
match l with
[] -> [s]
| (h :: t) -> (s ^ "(") :: (List.fold_left (fun l x -> (comma l) @ (indent x)) (indent h) t)
@ [(")")]
let rec pe e =
match e with
NUM i -> ps "NUM" [[string_of_int i]]
| TRUE -> ps "TRUE" []
| FALSE -> ps "FALSE" []
| UNIT -> ps "UNIT" []
| VAR x -> ps "VAR" [q x]
| ADD (e1, e2) -> ps "ADD" [pe e1; pe e2]
| SUB (e1, e2) -> ps "SUB" [pe e1; pe e2]
| MUL (e1, e2) -> ps "MUL" [pe e1; pe e2]
| DIV (e1, e2) -> ps "DIV" [pe e1; pe e2]
| EQUAL (e1, e2) -> ps "EQUAL" [pe e1; pe e2]
| LESS (e1, e2) -> ps "LESS" [pe e1; pe e2]
| NOT e -> ps "NOT" [pe e]
| ASSIGN (i, e) -> ps "ASSIGN" [q i; pe e]
| SEQ (e1, e2) -> ps "SEQ" [pe e1; pe e2]
| IF (e1, e2, e3) -> ps "IF" [pe e1; pe e2; pe e3]
| WHILE (e1, e2) -> ps "WHILE" [pe e1; pe e2]
| FOR (i, e1, e2, e3) -> ps "FOR" [q i; pe e1; pe e2; pe e3]
| LETV (i, e1, e2) -> ps "LETV" [q i; pe e1; pe e2]
| LETF(f, x, e1, e2) -> ps "LETF" [q f; q x; pe e1; pe e2]
| CALLV (f, e) -> ps "CALLV" [q f; pe e]
| CALLR (f, y) -> ps "CALLR" [q f; q y]
| READ i -> ps "READ" [q i]
| WRITE e -> ps "WRITE" [pe e]
let print pgm = List.iter print_endline (pe pgm)
end
|
0fd6a0e279d3db87708bb1004e716154692e59bce3bd0da8e417bd283807a31c | rlepigre/pml | ahash.mli | (** Modified version of the [Hashbtl] library, to work with types containing
functions. Physical equality is used when [Pervasives.compare] raises an
exception. Note that this is only used when the hash function leads to a
collision (and thus quite rarely). *)
(** The type of hash tables from type ['a] to type ['b]. *)
type ('a, 'b) t
(** [Hashtbl.create n] creates a new, empty hash table with initial size [n]
([n] should be on the order of the expected number of elements that will
be in the table). The table is grown as needed so [n] is just an initial
guess. *)
val create : int -> ('a, 'b) t
(** Empty a hash table. Use [reset] instead of [clear] to shrink the size of
the bucket table to its initial size. *)
val clear : ('a, 'b) t -> unit
(** Empty a hash table and shrink the bucket table to its initial size. *)
val reset : ('a, 'b) t -> unit
(** Return a copy of the given hashtable. *)
val copy : ('a, 'b) t -> ('a, 'b) t
* [ Hashtbl.add tbl key v ] adds a binding of [ key ] to [ v ] in table [ tbl ] . A
previous bindings for [ x ] is not removed , but simply hidden . If any , the
previous binding can be restored by calling [ Hashtbl.remove key ] .
previous bindings for [x] is not removed, but simply hidden. If any, the
previous binding can be restored by calling [Hashtbl.remove tbl key]. *)
val add : ('a, 'b) t -> 'a -> 'b -> unit
(** [Hashtbl.find tbl key] returns the current binding of [key] in [tbl], or
raises [Not_found] if no such binding exists. *)
val find : ('a, 'b) t -> 'a -> 'b
* [ Hashtbl.find_all tbl key ] returns the list of all data associated with
[ key ] in [ tbl ] . The current binding is returned first , then the previous
bindings , in reverse order of introduction in the table .
[key] in [tbl]. The current binding is returned first, then the previous
bindings, in reverse order of introduction in the table. *)
val find_all : ('a, 'b) t -> 'a -> 'b list
* [ tbl key ] checks if [ key ] is bound in [ tbl ] .
val mem : ('a, 'b) t -> 'a -> bool
* [ Hashtbl.remove key ] removes the current binding of [ key ] in [ tbl ] ,
restoring the previous binding if it exists . It does nothing if [ key ] is
not bound in [ tbl ] .
restoring the previous binding if it exists. It does nothing if [key] is
not bound in [tbl]. *)
val remove : ('a, 'b) t -> 'a -> unit
* [ Hashtbl.replace key v ] replaces the current binding of [ key ] by [ v ]
in [ tbl ] . If [ key ] has no binding in [ tbl ] , a binding of [ key ] to [ v ] is
added to [ tbl ] .
in [tbl]. If [key] has no binding in [tbl], a binding of [key] to [v] is
added to [tbl]. *)
val replace : ('a, 'b) t -> 'a -> 'b -> unit
* [ Hashtbl.iter f tbl ] applies [ f ] to all bindings in table [ tbl ] . The key
if given to [ f ] as its first argument , and the value as its second . Each
Each binding is presented exactly once to [ f ] .
The order in which the bindings are passed to [ f ] is unspecified . If the
table contains several bindings for the same key , they are passed to [ f ]
in reverse order of introduction . In other words the most recent binding
is passed first .
if given to [f] as its first argument, and the value as its second. Each
Each binding is presented exactly once to [f].
The order in which the bindings are passed to [f] is unspecified. If the
table contains several bindings for the same key, they are passed to [f]
in reverse order of introduction. In other words the most recent binding
is passed first. *)
val iter : ('a -> 'b -> unit) -> ('a, 'b) t -> unit
* [ Hashtbl.fold f tbl init ] computes [ ( f kN dN ... ( f k1 d1 init ) ... ) ] in
which [ k1 ... kN ] are the keys of all bindings in [ tbl ] , and [ d1 ... dN ]
are the associated values . Each binding is presented exactly once to the
function [ f ] .
The order in which the bindings are passed to [ f ] is unspecified . If the
table contains several bindings for the same key , they are passed to [ f ]
in reverse order of introduction . In other words the most recent binding
is passed first .
which [k1 ... kN] are the keys of all bindings in [tbl], and [d1 ... dN]
are the associated values. Each binding is presented exactly once to the
function [f].
The order in which the bindings are passed to [f] is unspecified. If the
table contains several bindings for the same key, they are passed to [f]
in reverse order of introduction. In other words the most recent binding
is passed first. *)
val fold : ('a -> 'b -> 'c -> 'c) -> ('a, 'b) t -> 'c -> 'c
* [ Hashtbl.length ] returns the number of bindings in [ tbl ] in constant
time . Multiple bindings are counted .
time. Multiple bindings are counted. *)
val length : ('a, 'b) t -> int
| null | https://raw.githubusercontent.com/rlepigre/pml/cdfdea0eecc6767b16edc6a7bef917bc9dd746ed/src/util/ahash.mli | ocaml | * Modified version of the [Hashbtl] library, to work with types containing
functions. Physical equality is used when [Pervasives.compare] raises an
exception. Note that this is only used when the hash function leads to a
collision (and thus quite rarely).
* The type of hash tables from type ['a] to type ['b].
* [Hashtbl.create n] creates a new, empty hash table with initial size [n]
([n] should be on the order of the expected number of elements that will
be in the table). The table is grown as needed so [n] is just an initial
guess.
* Empty a hash table. Use [reset] instead of [clear] to shrink the size of
the bucket table to its initial size.
* Empty a hash table and shrink the bucket table to its initial size.
* Return a copy of the given hashtable.
* [Hashtbl.find tbl key] returns the current binding of [key] in [tbl], or
raises [Not_found] if no such binding exists. |
type ('a, 'b) t
val create : int -> ('a, 'b) t
val clear : ('a, 'b) t -> unit
val reset : ('a, 'b) t -> unit
val copy : ('a, 'b) t -> ('a, 'b) t
* [ Hashtbl.add tbl key v ] adds a binding of [ key ] to [ v ] in table [ tbl ] . A
previous bindings for [ x ] is not removed , but simply hidden . If any , the
previous binding can be restored by calling [ Hashtbl.remove key ] .
previous bindings for [x] is not removed, but simply hidden. If any, the
previous binding can be restored by calling [Hashtbl.remove tbl key]. *)
val add : ('a, 'b) t -> 'a -> 'b -> unit
val find : ('a, 'b) t -> 'a -> 'b
* [ Hashtbl.find_all tbl key ] returns the list of all data associated with
[ key ] in [ tbl ] . The current binding is returned first , then the previous
bindings , in reverse order of introduction in the table .
[key] in [tbl]. The current binding is returned first, then the previous
bindings, in reverse order of introduction in the table. *)
val find_all : ('a, 'b) t -> 'a -> 'b list
* [ tbl key ] checks if [ key ] is bound in [ tbl ] .
val mem : ('a, 'b) t -> 'a -> bool
* [ Hashtbl.remove key ] removes the current binding of [ key ] in [ tbl ] ,
restoring the previous binding if it exists . It does nothing if [ key ] is
not bound in [ tbl ] .
restoring the previous binding if it exists. It does nothing if [key] is
not bound in [tbl]. *)
val remove : ('a, 'b) t -> 'a -> unit
* [ Hashtbl.replace key v ] replaces the current binding of [ key ] by [ v ]
in [ tbl ] . If [ key ] has no binding in [ tbl ] , a binding of [ key ] to [ v ] is
added to [ tbl ] .
in [tbl]. If [key] has no binding in [tbl], a binding of [key] to [v] is
added to [tbl]. *)
val replace : ('a, 'b) t -> 'a -> 'b -> unit
* [ Hashtbl.iter f tbl ] applies [ f ] to all bindings in table [ tbl ] . The key
if given to [ f ] as its first argument , and the value as its second . Each
Each binding is presented exactly once to [ f ] .
The order in which the bindings are passed to [ f ] is unspecified . If the
table contains several bindings for the same key , they are passed to [ f ]
in reverse order of introduction . In other words the most recent binding
is passed first .
if given to [f] as its first argument, and the value as its second. Each
Each binding is presented exactly once to [f].
The order in which the bindings are passed to [f] is unspecified. If the
table contains several bindings for the same key, they are passed to [f]
in reverse order of introduction. In other words the most recent binding
is passed first. *)
val iter : ('a -> 'b -> unit) -> ('a, 'b) t -> unit
* [ Hashtbl.fold f tbl init ] computes [ ( f kN dN ... ( f k1 d1 init ) ... ) ] in
which [ k1 ... kN ] are the keys of all bindings in [ tbl ] , and [ d1 ... dN ]
are the associated values . Each binding is presented exactly once to the
function [ f ] .
The order in which the bindings are passed to [ f ] is unspecified . If the
table contains several bindings for the same key , they are passed to [ f ]
in reverse order of introduction . In other words the most recent binding
is passed first .
which [k1 ... kN] are the keys of all bindings in [tbl], and [d1 ... dN]
are the associated values. Each binding is presented exactly once to the
function [f].
The order in which the bindings are passed to [f] is unspecified. If the
table contains several bindings for the same key, they are passed to [f]
in reverse order of introduction. In other words the most recent binding
is passed first. *)
val fold : ('a -> 'b -> 'c -> 'c) -> ('a, 'b) t -> 'c -> 'c
* [ Hashtbl.length ] returns the number of bindings in [ tbl ] in constant
time . Multiple bindings are counted .
time. Multiple bindings are counted. *)
val length : ('a, 'b) t -> int
|
e39bc5f4bd446459df9e3e2e0d7e1343105b99b120a3f75aa698944ba1069c39 | avsm/mirage-duniverse | stdio.ml | open! Base
open! Import
(* Re-export the channel modules so [Stdio.In_channel]/[Stdio.Out_channel]
   work as a one-stop interface. *)
module In_channel = In_channel
module Out_channel = Out_channel
(* Convenience aliases for the three standard streams. *)
let stdin = In_channel.stdin
let stdout = Out_channel.stdout
let stderr = Out_channel.stderr
(* Convenience aliases for the common printing functions. *)
let eprintf = Out_channel.eprintf
let printf = Out_channel.printf
let print_endline = Out_channel.print_endline
let prerr_endline = Out_channel.prerr_endline
| null | https://raw.githubusercontent.com/avsm/mirage-duniverse/983e115ff5a9fb37e3176c373e227e9379f0d777/ocaml_modules/stdio/src/stdio.ml | ocaml | open! Base
open! Import
module In_channel = In_channel
module Out_channel = Out_channel
let stdin = In_channel.stdin
let stdout = Out_channel.stdout
let stderr = Out_channel.stderr
let eprintf = Out_channel.eprintf
let printf = Out_channel.printf
let print_endline = Out_channel.print_endline
let prerr_endline = Out_channel.prerr_endline
| |
484312c638bfc4878cdfe83bd9794d3068df15fc1bd6fb1ea520e1e20fda7402 | hswick/jutsu.ai | project.clj | (defproject hswick/jutsu.ai "0.1.5"
:description "Clojure wrapper for deeplearning4j intended to make machine learning on the JVM simpler"
:url ""
:dependencies [[org.clojure/clojure "1.8.0"]
[org.nd4j/nd4j-native-platform "1.0.0-beta" :scope "provided"]
[org.deeplearning4j/deeplearning4j-core "1.0.0-beta"]
[org.nd4j/nd4j-api "1.0.0-beta"]
[org.datavec/datavec-api "1.0.0-beta"]]
:license {:name "Eclipse Public License 1.0"
:url "-1.0.php"}
:resource-paths ["data"]
:profiles {:uberjar
{:main jutsu.ai.core}
:user
{:dependencies
[[nightlight "1.7.2"]
[hswick/jutsu.matrix "0.0.15"]
[org.clojure/tools.namespace "0.2.11"]]}})
| null | https://raw.githubusercontent.com/hswick/jutsu.ai/5f9a20b0ef0360b74b67137853344e084347b48c/project.clj | clojure | (defproject hswick/jutsu.ai "0.1.5"
:description "Clojure wrapper for deeplearning4j intended to make machine learning on the JVM simpler"
:url ""
:dependencies [[org.clojure/clojure "1.8.0"]
[org.nd4j/nd4j-native-platform "1.0.0-beta" :scope "provided"]
[org.deeplearning4j/deeplearning4j-core "1.0.0-beta"]
[org.nd4j/nd4j-api "1.0.0-beta"]
[org.datavec/datavec-api "1.0.0-beta"]]
:license {:name "Eclipse Public License 1.0"
:url "-1.0.php"}
:resource-paths ["data"]
:profiles {:uberjar
{:main jutsu.ai.core}
:user
{:dependencies
[[nightlight "1.7.2"]
[hswick/jutsu.matrix "0.0.15"]
[org.clojure/tools.namespace "0.2.11"]]}})
| |
5446525ffcbfb6c5db7bce5e93cabd65b9859402e08b2db19e95cba07f52456e | quchen/generative-art | Truchet.hs | module Main (main) where
import Control.Monad.Primitive
import Control.Monad.Reader.Class
import Control.Monad.ST
import Data.List (partition)
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Data.Traversable
import qualified Data.Vector as V
import System.Random.MWC
import Arc
import Draw
import Draw.Plotting
import Geometry
import Geometry.Algorithms.SimplexNoise
import Geometry.Coordinates.Hexagonal hiding (Polygon, rotateAround)
import Geometry.Shapes
-- | Size of a single hexagon cell, in output/plotter units; used both for
-- the hex-to-Cartesian conversions and as the base radius in 'toArc'.
cellSize :: Num a => a
cellSize = 5
-- | Produce both outputs: the prototile test sheet and the final triptych.
main :: IO ()
main = do
    testplot
    triptych
-- | Render a test sheet: three rows of hexagonal canvases (4/5/4), each
-- filled with a random tiling drawn from a different prototile set, plotted
-- with two pens (strands whose seed arc has index 2 go to the silver pen)
-- and written out as an SVG preview plus G-code.
testplot :: IO ()
testplot = do
    let picWidth, picHeight :: Num a => a
        picWidth = 400
        picHeight = 250
        -- Hex coordinates of the 13 canvas centers, one per prototile set.
        canvases = concat
            [ [ move UR 1 $ move R n hexZero | n <- [-2..1]]
            , [ move R n hexZero | n <- [-2..2]]
            , [ move DR 1 $ move R n hexZero | n <- [-2..1]]
            ]
        -- One candidate-tile vector per canvas; each 'mkTile' argument lists
        -- (from-edge, to-edge, strand indices) triples.
        configurations = zip canvases
            [ V.fromList $ allRotations =<< [ mkTile [(L, UL, [1..k]), (UR, R, [1..l]), (DR, DL, [1..m])] | k <- [0..3], l <- [0..3], m <- [0..3], k+l+m >= 7]
            , V.fromList $ allRotations $ mkTile [(UL, UR, [1..3]), (R, DR, [1..3]), (DL, L, [1..3])]
            , V.fromList [ mkTile [(DL, DR, [1..k]), (DR, R, [1..l]), (R, UR, [1..m]), (UR, UL, [1..n]), (UL, L, [1..o]), (L, DL, [1..p])] | k <- [0..3], l <- [0..3], m <- [0..3], n <- [0..3], o <- [0..3], p <- [0..3], k+l == 3, l+m == 3, m+n == 3, n+o == 3, o+p == 3, p+k == 3 ]
            , V.fromList [ mkTile [(DL, DR, [1..k]), (DR, R, [1..l]), (R, UR, [1..m]), (UR, UL, [1..n]), (UL, L, [1..o]), (L, DL, [1..p])] | k <- [1..3], l <- [1..3], m <- [1..3], n <- [1..3], o <- [1..3], p <- [1..3], k+l == 3, l+m == 3, m+n == 3, n+o == 3, o+p == 3, p+k == 3 ]
            , V.singleton $ mkTile [(L, UR, [1..3]), (R, DL, [1..2])]
            , V.fromList $ allRotations =<< [ mkTile [(L, UR, [1..k]), (R, DL, [1..l])] | k <- [0..3], l <- [0..2], k+l == 5 ]
            , V.fromList $ allRotations =<< concat
                [ [ mkTile [(L, UR, [1..k]), (R, DL, [1..l])] | k <- [0..3], l <- [0..2], k+l == 5 ]
                , [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (UL, UR, [1..m])] | k <- [0..3], l <- [0..2], m <- [0..3], k+m <= 5, k+l+m == 7 ]
                ]
            , V.fromList $ allRotations $ mkTile [(L, UR, [1, 2]), (R, DL, [1, 2])]
            , V.singleton $ mkTile [(R, UL, [1,2]), (R, DL, [1])]
            , V.fromList $ allRotations =<< [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (UL, UR, [1..m])] | k <- [0..3], l <- [0..2], m <- [0..3], k+m <= 5, k+l+m == 7 ]
            , V.fromList $ allRotations =<< [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (L, UL, [1..m]), (UL, UR, [1..n]), (UR, R, [1..m])] | k <- [0..3], l <- [2..3], m <- [0..3], n <- [0..3], if k == 0 then l == 3 else l == 2, m+n <= 3, k+m <= 3, k+n >= 4, k+n <= 5 ]
            , V.fromList $ allRotations =<< concat
                [ [ mkTile [(L, UL, [1..k]), (UR, R, [1..l]), (DR, DL, [1..m])] | k <- [0..3], l <- [0..3], m <- [0..3], k+l+m == 9]
                , [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (UL, UR, [1..m])] | k <- [0..3], l <- [0..2], m <- [0..3], k+m <= 5, k+l+m == 7 ]
                ]
            , V.fromList [ mkTile [(L, R, [1,2]), (UL, UR, [1..3]), (DL, DR, [1..2])] ]
            ]
    -- Plotter parameters (heights in mm above the drawing plane).
    let settings = def
            { _zTravelHeight = 3
            , _zDrawingHeight = -0.5
            , _feedrate = 1000
            , _previewPenTravelColor = Nothing
            , _previewPenWidth = 0.5
            }
        plotResult = runPlot settings $ do
            let optimizationSettings = MinimizePenHoveringSettings
                    { _getStartEndPoint = \arcs -> (fst (arcStartEnd (V.head arcs)), snd (arcStartEnd (V.last arcs)))
                    , _flipObject = Just (fmap reverseArc . V.reverse)
                    , _mergeObjects = Nothing -- Already taken care of in 'strands'
                    }
                optimize = concatMap V.toList . minimizePenHoveringBy optimizationSettings . S.fromList
                -- One entry per canvas: (hex outline, silver arcs, gold arcs),
                -- each strand rendered via 'toArc' and clipped to the hex mask.
                shapes =
                    [ transform align
                        ( [mask, transform (scale 1.02) mask]
                        , clipArc mask <$> optimize (V.map (uncurry toArc) <$> strandsColor1)
                        , clipArc mask <$> optimize (V.map (uncurry toArc) <$> strandsColor2)
                        )
                    | (hex, tiles) <- configurations
                    , let align = translate (toVec2 (8 * cellSize) hex +. Vec2 (picWidth/2) (picHeight/2)) <> rotate (deg 30)
                    , let mask = transform (scale (7.1 * cellSize)) (regularPolygon 6)
                    , let tiling = runST $ do
                            gen <- initialize (V.fromList [123, 987])
                            randomTiling (const tiles) gen (hexagonsInRange 4 hexZero)
                    , let allStrands = strands tiling
                    , let (strandsColor1, strandsColor2) = partition (\xs -> let (_, (_, i, _)) = V.head xs in i == 2) allStrands
                    ]
                -- Move to the origin and wait so the user can swap pens.
                penChange = withDrawingHeight 0 $ do
                    repositionTo zero
                    penDown
                    pause PauseUserConfirm
                    penUp
            comment "Silver pen"
            local (\s -> s { _previewPenColor = mathematica97 2 }) $
                for_ ((\(_, x, _) -> x) <$> shapes) plot
            penChange
            comment "Gold pen"
            local (\s -> s { _previewPenColor = mathematica97 3, _feedrate = 500 }) $ do -- gold pen requires veeeery low feedrate
                plot ((\(_, _, x) -> x) <$> shapes)
                plot ((\(x, _, _) -> x) <$> shapes)
    renderPreview "out/penplotting-truchet-testplot.svg" plotResult
    writeGCodeFile "truchet-testplot.g" plotResult
-- | Render the final triptych: three 400x400 panels, each one large tiling
-- (radius-25 hex patch) whose tile "density" is modulated radially by a
-- smooth bump function plus simplex noise, plotted with two pens and
-- written out as SVG preview plus G-code per panel.
triptych :: IO ()
triptych = do
    let picWidth, picHeight :: Num a => a
        picWidth = 400
        picHeight = 400
        -- Each prototile family maps a density value a in [0..~1] to the
        -- candidate tiles whose total strand count matches that density.
        prototiles1 a = V.fromList $ allRotations =<<
            [ mkTile [(L, UL, [1..k]), (UR, R, [1..l]), (DR, DL, [1..m])] | k <- [0..3], l <- [0..3], m <- [0..3], k+l+m == max 0 (min 9 (round (9 * a)))]
        prototiles2 a = V.fromList $ allRotations =<< concat
            [ [ mkTile [(L, UR, [1..k]), (R, DL, [1..l])] | k <- [0..3], l <- [0..2], k+l == max 0 (min 5 (round (5 * a))) ]
            , [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (UL, UR, [1..m])] | k <- [0..3], l <- [0..2], m <- [0..3], k+m <= 5, k+l+m == max 0 (min 7 (round (7 * a))) ]
            ]
        prototiles3 a = V.fromList $ allRotations =<<
            [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (UL, UR, [1..m])] | k <- [0..3], l <- [0..2], m <- [0..3], k+m <= 5, k+l+m == max 0 (min 7 (round (7 * a))) ]
        -- Deterministic (fixed-seed) tiling: density falls off radially via
        -- a smooth bump and is perturbed by simplex noise.
        generateTiling prototiles = runST $ do
            gen <- initialize (V.fromList [125])
            noise <- simplex2 def { _simplexFrequency = 1/50, _simplexOctaves = 4 } gen
            let bump d p = case norm p of
                    r | r < d -> exp (1 - 1 / (1 - (r/d)^2))
                      | otherwise -> 0
                variation p = bump (min picHeight picWidth / 2) p ** 0.4 * (1 + 0.1 * (noise p + 1) * 0.5)
            randomTiling (prototiles . variation) gen (hexagonsInRange 25 hexZero)
        settings = def
            { _zTravelHeight = 3
            , _zDrawingHeight = -0.5
            , _feedrate = 1000
            , _previewPenTravelColor = Nothing
            }
    for_ (zip [1..] (generateTiling <$> [prototiles1, prototiles2, prototiles3])) $ \(k, tiling) -> do
        let plotResult = runPlot settings $ do
                let allStrands = strands tiling
                    -- Strands whose seed arc has index 2 go to the silver pen.
                    (strandsColor1, strandsColor2) = partition (\xs -> let (_, (_, i, _)) = V.head xs in i == 2) allStrands
                    optimizationSettings = MinimizePenHoveringSettings
                        { _getStartEndPoint = \arcs -> (fst (arcStartEnd (V.head arcs)), snd (arcStartEnd (V.last arcs)))
                        , _flipObject = Just (fmap reverseArc . V.reverse)
                        , _mergeObjects = Nothing -- Already taken care of in 'strands'
                        }
                    optimize = concatMap V.toList . minimizePenHoveringBy optimizationSettings . S.fromList
                    -- Move to the origin and wait so the user can swap pens.
                    penChange = withDrawingHeight 0 $ do
                        repositionTo zero
                        penDown
                        pause PauseUserConfirm
                        penUp
                comment "Silver pen"
                local (\s -> s { _previewPenColor = mathematica97 2 }) $
                    for_ (transform (translate (Vec2 (picWidth/2) (picHeight/2))) $ optimize (V.map (uncurry toArc) <$> strandsColor1)) plot
                penChange
                comment "Gold pen"
                local (\s -> s { _previewPenColor = mathematica97 3, _feedrate = 500 }) $ -- gold pen requires veeeery low feedrate
                    for_ (transform (translate (Vec2 (picWidth/2) (picHeight/2))) $ optimize (V.map (uncurry toArc) <$> strandsColor2)) plot
                penChange
        print (_totalBoundingBox plotResult)
        renderPreview ("out/penplotting-truchet" ++ show k ++ "-preview.svg") plotResult
        writeGCodeFile ("truchet" ++ show k ++ ".g") plotResult
-- | A tile is a bundle of arcs: a map from (edge direction, strand index)
-- to the edge the strand exits through.  Every arc is stored twice, once
-- per endpoint, with mirrored strand indices (i and 4-i).
newtype Tile = Tile (M.Map (Direction, Int) Direction) deriving (Eq, Ord, Show)
-- | Build a tile from (from-edge, to-edge, strand indices) triples.
mkTile :: [(Direction, Direction, [Int])] -> Tile
mkTile = Tile . go M.empty
  where
    go :: M.Map (Direction, Int) Direction -> [(Direction, Direction, [Int])] -> M.Map (Direction, Int) Direction
    go m [] = m
    go m ((d1, d2, is) : xs) = foldl' (addArc d1 d2) (go m xs) is
    -- Insert one arc under both of its endpoints.
    addArc :: Direction -> Direction -> M.Map (Direction, Int) Direction -> Int -> M.Map (Direction, Int) Direction
    addArc d1 d2 m i = M.insert (d1, arcIndex d1 d2 i) d2 . M.insert (d2, arcIndex d2 d1 i) d1 $ m
    -- Canonicalize the strand index depending on edge orientation ('cyclic').
    arcIndex d1 d2 i = if cyclic d1 d2 then i else 4-i
-- | Canonical orientation of an edge pair; 'mkTile' and 'toArc' use this to
-- decide whether a strand index is stored as @i@ or mirrored to @4-i@.
-- Opposite edges break the tie via the 'Ord' instance; otherwise the pair
-- counts as cyclic when d1 is at most three enum steps past d2 (mod 6).
cyclic :: Direction -> Direction -> Bool
cyclic d1 d2 =
    if d1 == reverseDirection d2
        then d1 < d2
        else mod (6 + fromEnum d1 - fromEnum d2) 6 <= 3
-- | Pop the least arc (by map key) from a tile, if any, returning the arc
-- and the tile with both of its entries removed.
extractArc :: Tile -> Maybe ((Direction, Int, Direction), Tile)
extractArc t@(Tile xs) = case M.lookupMin xs of
    Nothing -> Nothing
    Just ((d1, i), d2) -> Just ((d1, i, d2), deleteArc t (d1, i, d2))
-- | Look up the arc leaving through the given (edge, strand index), if
-- present, returning it together with the tile minus that arc.
findArc :: Tile -> (Direction, Int) -> Maybe ((Direction, Int, Direction), Tile)
findArc (Tile xs) (d1, i) = fmap (\d2 -> ((d1, i, d2), deleteArc (Tile xs) (d1, i, d2))) (M.lookup (d1, i) xs)
-- | Remove both map entries of an arc (each arc is stored under both of its
-- endpoints, with mirrored indices i and 4-i; cf. 'mkTile').
deleteArc :: Tile -> (Direction, Int, Direction) -> Tile
deleteArc (Tile xs) (d1, i, d2) = Tile $ M.delete (d1, i) $ M.delete (d2, 4-i) xs
-- | The six rotations of a tile (including the identity rotation).
--
-- Fixed: the original enumerated @[0..6]@, i.e. seven rotations; since
-- 'rotateTile' works modulo 6, rotation 6 duplicated rotation 0, which
-- doubled the weight of the unrotated tile when sampling in 'randomTile'.
allRotations :: Tile -> [Tile]
allRotations tile = [ rotateTile i tile | i <- [0..5] ]
-- | Rotate a tile by @n@ sixth-turns: every endpoint direction is advanced
-- by n in the Direction Enum (mod 6); strand indices are unchanged.
-- (Whether that is clockwise or counter-clockwise depends on the Enum
-- ordering of 'Direction' — not verifiable from this file.)
rotateTile :: Int -> Tile -> Tile
rotateTile n (Tile xs) = Tile $ M.fromList $ (\((d1, i), d2) -> ((rotateDirection d1, i), rotateDirection d2)) <$> M.toList xs
  where
    rotateDirection d = toEnum ((fromEnum d + n) `mod` 6)
-- | A tiling assigns a tile to each hex coordinate.
type Tiling = M.Map Hex Tile
-- | Pick one random tile per coordinate.  The candidate set may depend on
-- the cell's Cartesian position (used for the density gradient in
-- 'triptych'; 'testplot' passes a constant set).
randomTiling :: PrimMonad m => (Vec2 -> V.Vector Tile) -> Gen (PrimState m) -> [Hex] -> m Tiling
randomTiling baseTiles gen coords = fmap M.fromList $ for coords $ \hex -> do
    let p = toVec2 cellSize hex
    tile <- randomTile (baseTiles p) gen
    pure (hex, tile)
-- | Uniformly random element of the candidate vector.
-- NOTE(review): assumes the vector is non-empty — an empty vector would
-- make the index range (0, -1) invalid; confirm against the callers.
randomTile :: PrimMonad m => V.Vector Tile -> Gen (PrimState m) -> m Tile
randomTile baseTiles = \gen -> do
    rnd <- uniformRM (0, countTiles - 1) gen
    pure (baseTiles V.! rnd)
  where countTiles = V.length baseTiles
-- | Decompose a tiling into connected strands.  Repeatedly pop an arc from
-- the minimal hex, follow it in both directions with 'strand', and stitch
-- the two halves together around the seed arc.  Followed arcs are removed
-- from the tiling, so every arc ends up in exactly one strand; a hex with
-- no arcs left is dropped and the search continues.
strands :: Tiling -> [V.Vector (Hex, (Direction, Int, Direction))]
strands tiling = case M.lookupMin tiling of
    Nothing -> []
    Just (startHex, t) -> case extractArc t of
        Nothing -> strands (M.delete startHex tiling)
        Just ((d, i, d'), t') ->
            let tiling' = M.insert startHex t' tiling
                (s, tiling'') = strand tiling' startHex (d, i)
                (s', tiling''') = strand tiling'' startHex (d', 4-i)
            in V.fromList (reverseStrand s ++ [(startHex, (d, i, d'))] ++ s') : strands tiling'''
-- | Follow a strand leaving @hex@ through edge @d@ with strand index @i@:
-- step into the neighbouring cell and look for the continuing arc entering
-- through the opposite edge (index mirrored to 4-i); repeat until no
-- continuation exists.  Returns the visited segments in order and the
-- tiling with those arcs removed.
strand :: Tiling -> Hex -> (Direction, Int) -> ([(Hex, (Direction, Int, Direction))], Tiling)
strand tiling hex (d, i) = let hex' = move d 1 hex in case M.lookup hex' tiling of
    Nothing -> ([], tiling)
    Just t -> case findArc t (reverseDirection d, 4-i) of
        Nothing -> ([], tiling)
        Just ((_, _, d'), t') ->
            let (s', tiling') = strand (M.insert hex' t' tiling) hex' (d', i)
            in ((hex', (reverseDirection d, 4-i, d')) : s', tiling')
-- | Walk a strand backwards: reverse the order of its segments and flip
-- each segment (entry/exit swapped, strand index mirrored to 4-i).
reverseStrand :: [(Hex, (Direction, Int, Direction))] -> [(Hex, (Direction, Int, Direction))]
reverseStrand xs = [ (h, (d2, 4 - i, d1)) | (h, (d1, i, d2)) <- reverse xs ]

-- | The opposite hexagon edge: advance by three sixth-turns (mod 6).
reverseDirection :: Direction -> Direction
reverseDirection = toEnum . (`mod` 6) . (+ 3) . fromEnum
-- | Concrete geometry for one arc of a tile: given the hex cell and an arc
-- (entry edge, strand index, exit edge), produce the 'Arc' to draw.  The
-- index n counts the parallel strands crossing an edge and is mirrored to
-- 4-n when the pair is stored against its canonical orientation ('cyclic').
toArc :: Hex -> (Direction, Int, Direction) -> Arc
toArc hex (d1, n, d2) = sketchArc (fromIntegral n') d1 d2
  where
    n' = if cyclic d1 d2 then n else 4-n
    center = toVec2 cellSize hex
    -- Midpoint of the edge shared with the neighbour in direction d.
    side d = 0.5 *. (center +. nextCenter d)
    nextCenter d = toVec2 cellSize (move d 1 hex)
    -- Corner shared by this cell and the two neighbours in directions d, d'.
    corner d d' = (center +. nextCenter d +. nextCenter d') /. 3
    [down, _lowerLeft, _upperLeft, _up, upperRight, lowerRight] = [ transform (rotate alpha) (Vec2 0 cellSize) | alpha <- deg <$> [0, 60 .. 300] ]
    -- Straight chords between opposite edges, offset per strand index.
    sketchArc i DR UL = straight ((0.5 - 0.25 * i) *. upperRight +. side DR) ((0.5 - 0.25 * i) *. upperRight +. side UL)
    sketchArc i UR DL = straight ((0.5 - 0.25 * i) *. lowerRight +. side UR) ((0.5 - 0.25 * i) *. lowerRight +. side DL)
    sketchArc i R L = straight ((0.5 - 0.25 * i) *. down +. side R) ((0.5 - 0.25 * i) *. down +. side L)
    sketchArc i UL DR = straight ((0.5 - 0.25 * i) *. upperRight +. side UL) ((0.5 - 0.25 * i) *. upperRight +. side DR)
    sketchArc i DL UR = straight ((0.5 - 0.25 * i) *. lowerRight +. side DL) ((0.5 - 0.25 * i) *. lowerRight +. side UR)
    sketchArc i L R = straight ((0.5 - 0.25 * i) *. down +. side L) ((0.5 - 0.25 * i) *. down +. side R)
    -- Wide arcs around a neighbouring cell's center (edges two apart);
    -- the ccw/cw variants cover the two traversal directions.
    sketchArc i UR L = ccwArc (nextCenter UL) ((1 + 0.25 * i) * cellSize) (deg 30) (deg 90)
    sketchArc i R UL = ccwArc (nextCenter UR) ((1 + 0.25 * i) * cellSize) (deg 90) (deg 150)
    sketchArc i DR UR = ccwArc (nextCenter R) ((1 + 0.25 * i) * cellSize) (deg 150) (deg 210)
    sketchArc i DL R = ccwArc (nextCenter DR) ((1 + 0.25 * i) * cellSize) (deg 210) (deg 270)
    sketchArc i L DR = ccwArc (nextCenter DL) ((1 + 0.25 * i) * cellSize) (deg 270) (deg 330)
    sketchArc i UL DL = ccwArc (nextCenter L) ((1 + 0.25 * i) * cellSize) (deg 330) (deg 30)
    sketchArc i L UR = cwArc (nextCenter UL) ((1 + 0.25 * i) * cellSize) (deg 90) (deg 30)
    sketchArc i UL R = cwArc (nextCenter UR) ((1 + 0.25 * i) * cellSize) (deg 150) (deg 90)
    sketchArc i UR DR = cwArc (nextCenter R) ((1 + 0.25 * i) * cellSize) (deg 210) (deg 150)
    sketchArc i R DL = cwArc (nextCenter DR) ((1 + 0.25 * i) * cellSize) (deg 270) (deg 210)
    sketchArc i DR L = cwArc (nextCenter DL) ((1 + 0.25 * i) * cellSize) (deg 330) (deg 270)
    sketchArc i DL UL = cwArc (nextCenter L) ((1 + 0.25 * i) * cellSize) (deg 30) (deg 330)
    -- Tight arcs around a shared corner (adjacent edges), again in both
    -- traversal directions.
    sketchArc i UL L = ccwArc (corner L UL) (0.25 * i * cellSize) (deg 330) (deg 90)
    sketchArc i UR UL = ccwArc (corner UL UR) (0.25 * i * cellSize) (deg 30) (deg 150)
    sketchArc i R UR = ccwArc (corner UR R) (0.25 * i * cellSize) (deg 90) (deg 210)
    sketchArc i DR R = ccwArc (corner R DR) (0.25 * i * cellSize) (deg 150) (deg 270)
    sketchArc i DL DR = ccwArc (corner DR DL) (0.25 * i * cellSize) (deg 210) (deg 330)
    sketchArc i L DL = ccwArc (corner DL L) (0.25 * i * cellSize) (deg 270) (deg 30)
    sketchArc i L UL = cwArc (corner L UL) (0.25 * i * cellSize) (deg 90) (deg 330)
    sketchArc i UL UR = cwArc (corner UL UR) (0.25 * i * cellSize) (deg 150) (deg 30)
    sketchArc i UR R = cwArc (corner UR R) (0.25 * i * cellSize) (deg 210) (deg 90)
    sketchArc i R DR = cwArc (corner R DR) (0.25 * i * cellSize) (deg 270) (deg 150)
    sketchArc i DR DL = cwArc (corner DR DL) (0.25 * i * cellSize) (deg 330) (deg 210)
    sketchArc i DL L = cwArc (corner DL L) (0.25 * i * cellSize) (deg 30) (deg 270)
    -- A tile can never connect an edge to itself.
    sketchArc _ d d' = error ("Illegal tile " ++ show (d, d'))
gold pen requires veeeery low feedrate
Already taken care of in 'strands'
gold pen requires veeeery low feedrate | module Main (main) where
import Control.Monad.Primitive
import Control.Monad.Reader.Class
import Control.Monad.ST
import Data.List (partition)
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Data.Traversable
import qualified Data.Vector as V
import System.Random.MWC
import Arc
import Draw
import Draw.Plotting
import Geometry
import Geometry.Algorithms.SimplexNoise
import Geometry.Coordinates.Hexagonal hiding (Polygon, rotateAround)
import Geometry.Shapes
-- | Edge length (circumradius) of one hexagonal cell, in plot units.
-- Polymorphic in the numeric type so it works for both geometry (Double)
-- and integer grid arithmetic.
cellSize :: Num a => a
cellSize = 5
-- | Entry point: render the tile-configuration test sheet first, then the
-- three triptych panels.
main :: IO ()
main = testplot >> triptych
-- | Render a grid of hexagonal test canvases -- one per tile configuration
-- listed below -- and emit both an SVG preview and a G-code file.
-- Two pen colors are plotted separately with a manual pen change between
-- them (strands whose slot index is 2 go to the first pen).
testplot :: IO ()
testplot = do
    let picWidth, picHeight :: Num a => a
        picWidth = 400
        picHeight = 250
        -- Hex-grid positions of the individual test canvases (3 rows).
        canvases = concat
            [ [ move UR 1 $ move R n hexZero | n <- [-2..1]]
            , [ move R n hexZero | n <- [-2..2]]
            , [ move DR 1 $ move R n hexZero | n <- [-2..1]]
            ]
        -- Each canvas gets its own vector of candidate tiles.
        configurations = zip canvases
            [ V.fromList $ allRotations =<< [ mkTile [(L, UL, [1..k]), (UR, R, [1..l]), (DR, DL, [1..m])] | k <- [0..3], l <- [0..3], m <- [0..3], k+l+m >= 7]
            , V.fromList $ allRotations $ mkTile [(UL, UR, [1..3]), (R, DR, [1..3]), (DL, L, [1..3])]
            , V.fromList [ mkTile [(DL, DR, [1..k]), (DR, R, [1..l]), (R, UR, [1..m]), (UR, UL, [1..n]), (UL, L, [1..o]), (L, DL, [1..p])] | k <- [0..3], l <- [0..3], m <- [0..3], n <- [0..3], o <- [0..3], p <- [0..3], k+l == 3, l+m == 3, m+n == 3, n+o == 3, o+p == 3, p+k == 3 ]
            , V.fromList [ mkTile [(DL, DR, [1..k]), (DR, R, [1..l]), (R, UR, [1..m]), (UR, UL, [1..n]), (UL, L, [1..o]), (L, DL, [1..p])] | k <- [1..3], l <- [1..3], m <- [1..3], n <- [1..3], o <- [1..3], p <- [1..3], k+l == 3, l+m == 3, m+n == 3, n+o == 3, o+p == 3, p+k == 3 ]
            , V.singleton $ mkTile [(L, UR, [1..3]), (R, DL, [1..2])]
            , V.fromList $ allRotations =<< [ mkTile [(L, UR, [1..k]), (R, DL, [1..l])] | k <- [0..3], l <- [0..2], k+l == 5 ]
            , V.fromList $ allRotations =<< concat
                [ [ mkTile [(L, UR, [1..k]), (R, DL, [1..l])] | k <- [0..3], l <- [0..2], k+l == 5 ]
                , [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (UL, UR, [1..m])] | k <- [0..3], l <- [0..2], m <- [0..3], k+m <= 5, k+l+m == 7 ]
                ]
            , V.fromList $ allRotations $ mkTile [(L, UR, [1, 2]), (R, DL, [1, 2])]
            , V.singleton $ mkTile [(R, UL, [1,2]), (R, DL, [1])]
            , V.fromList $ allRotations =<< [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (UL, UR, [1..m])] | k <- [0..3], l <- [0..2], m <- [0..3], k+m <= 5, k+l+m == 7 ]
            , V.fromList $ allRotations =<< [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (L, UL, [1..m]), (UL, UR, [1..n]), (UR, R, [1..m])] | k <- [0..3], l <- [2..3], m <- [0..3], n <- [0..3], if k == 0 then l == 3 else l == 2, m+n <= 3, k+m <= 3, k+n >= 4, k+n <= 5 ]
            , V.fromList $ allRotations =<< concat
                [ [ mkTile [(L, UL, [1..k]), (UR, R, [1..l]), (DR, DL, [1..m])] | k <- [0..3], l <- [0..3], m <- [0..3], k+l+m == 9]
                , [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (UL, UR, [1..m])] | k <- [0..3], l <- [0..2], m <- [0..3], k+m <= 5, k+l+m == 7 ]
                ]
            , V.fromList [ mkTile [(L, R, [1,2]), (UL, UR, [1..3]), (DL, DR, [1..2])] ]
            ]
    let settings = def
            { _zTravelHeight = 3
            , _zDrawingHeight = -0.5
            , _feedrate = 1000
            , _previewPenTravelColor = Nothing
            , _previewPenWidth = 0.5
            }
        plotResult = runPlot settings $ do
            let optimizationSettings = MinimizePenHoveringSettings
                    { _getStartEndPoint = \arcs -> (fst (arcStartEnd (V.head arcs)), snd (arcStartEnd (V.last arcs)))
                    , _flipObject = Just (fmap reverseArc . V.reverse)
                    }
                -- Reorder strands to minimize pen travel between them.
                optimize = concatMap V.toList . minimizePenHoveringBy optimizationSettings . S.fromList
                -- (hex mask outlines, color-1 arcs, color-2 arcs) per canvas.
                shapes =
                    [ transform align
                        ( [mask, transform (scale 1.02) mask]
                        , clipArc mask <$> optimize (V.map (uncurry toArc) <$> strandsColor1)
                        , clipArc mask <$> optimize (V.map (uncurry toArc) <$> strandsColor2)
                        )
                    | (hex, tiles) <- configurations
                    , let align = translate (toVec2 (8 * cellSize) hex +. Vec2 (picWidth/2) (picHeight/2)) <> rotate (deg 30)
                    , let mask = transform (scale (7.1 * cellSize)) (regularPolygon 6)
                    , let tiling = runST $ do
                            -- Fixed seed: reproducible output across runs.
                            gen <- initialize (V.fromList [123, 987])
                            randomTiling (const tiles) gen (hexagonsInRange 4 hexZero)
                    , let allStrands = strands tiling
                    , let (strandsColor1, strandsColor2) = partition (\xs -> let (_, (_, i, _)) = V.head xs in i == 2) allStrands
                    ]
                -- Park the pen at the origin and wait for a manual swap.
                penChange = withDrawingHeight 0 $ do
                    repositionTo zero
                    penDown
                    pause PauseUserConfirm
                    penUp
            comment "Silver pen"
            local (\s -> s { _previewPenColor = mathematica97 2 }) $
                for_ ((\(_, x, _) -> x) <$> shapes) plot
            penChange
            comment "Gold pen"
            plot ((\(_, _, x) -> x) <$> shapes)
            plot ((\(x, _, _) -> x) <$> shapes)
    renderPreview "out/penplotting-truchet-testplot.svg" plotResult
    writeGCodeFile "truchet-testplot.g" plotResult
-- | Render three large panels, one per prototile family.  Tile density
-- varies over the canvas: a radial bump modulated by simplex noise maps
-- each position to a strand count, and the prototile set for that count
-- is sampled.  Each panel is written as an SVG preview and a G-code file.
triptych :: IO ()
triptych = do
    let picWidth, picHeight :: Num a => a
        picWidth = 400
        picHeight = 400
        -- Prototile families parameterized by local density a in [0..1].
        prototiles1 a = V.fromList $ allRotations =<<
            [ mkTile [(L, UL, [1..k]), (UR, R, [1..l]), (DR, DL, [1..m])] | k <- [0..3], l <- [0..3], m <- [0..3], k+l+m == max 0 (min 9 (round (9 * a)))]
        prototiles2 a = V.fromList $ allRotations =<< concat
            [ [ mkTile [(L, UR, [1..k]), (R, DL, [1..l])] | k <- [0..3], l <- [0..2], k+l == max 0 (min 5 (round (5 * a))) ]
            , [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (UL, UR, [1..m])] | k <- [0..3], l <- [0..2], m <- [0..3], k+m <= 5, k+l+m == max 0 (min 7 (round (7 * a))) ]
            ]
        prototiles3 a = V.fromList $ allRotations =<<
            [ mkTile [(L, R, [1..k]), (DL, DR, [1..l]), (UL, UR, [1..m])] | k <- [0..3], l <- [0..2], m <- [0..3], k+m <= 5, k+l+m == max 0 (min 7 (round (7 * a))) ]
        generateTiling prototiles = runST $ do
            -- Fixed seed: reproducible output across runs.
            gen <- initialize (V.fromList [125])
            noise <- simplex2 def { _simplexFrequency = 1/50, _simplexOctaves = 4 } gen
            -- Smooth compactly-supported radial bump (1 at center, 0 at radius d).
            let bump d p = case norm p of
                    r | r < d -> exp (1 - 1 / (1 - (r/d)^2))
                      | otherwise -> 0
                variation p = bump (min picHeight picWidth / 2) p ** 0.4 * (1 + 0.1 * (noise p + 1) * 0.5)
            randomTiling (prototiles . variation) gen (hexagonsInRange 25 hexZero)
        settings = def
            { _zTravelHeight = 3
            , _zDrawingHeight = -0.5
            , _feedrate = 1000
            , _previewPenTravelColor = Nothing
            }
    for_ (zip [1..] (generateTiling <$> [prototiles1, prototiles2, prototiles3])) $ \(k, tiling) -> do
        let plotResult = runPlot settings $ do
                let allStrands = strands tiling
                    (strandsColor1, strandsColor2) = partition (\xs -> let (_, (_, i, _)) = V.head xs in i == 2) allStrands
                    optimizationSettings = MinimizePenHoveringSettings
                        { _getStartEndPoint = \arcs -> (fst (arcStartEnd (V.head arcs)), snd (arcStartEnd (V.last arcs)))
                        , _flipObject = Just (fmap reverseArc . V.reverse)
                        }
                    -- Reorder strands to minimize pen travel between them.
                    optimize = concatMap V.toList . minimizePenHoveringBy optimizationSettings . S.fromList
                    -- Park the pen at the origin and wait for a manual swap.
                    penChange = withDrawingHeight 0 $ do
                        repositionTo zero
                        penDown
                        pause PauseUserConfirm
                        penUp
                comment "Silver pen"
                local (\s -> s { _previewPenColor = mathematica97 2 }) $
                    for_ (transform (translate (Vec2 (picWidth/2) (picHeight/2))) $ optimize (V.map (uncurry toArc) <$> strandsColor1)) plot
                penChange
                comment "Gold pen"
                for_ (transform (translate (Vec2 (picWidth/2) (picHeight/2))) $ optimize (V.map (uncurry toArc) <$> strandsColor2)) plot
                penChange
        print (_totalBoundingBox plotResult)
        renderPreview ("out/penplotting-truchet" ++ show k ++ "-preview.svg") plotResult
        writeGCodeFile ("truchet" ++ show k ++ ".g") plotResult
newtype Tile = Tile (M.Map (Direction, Int) Direction) deriving (Eq, Ord, Show)
-- | Build a tile from bundles of arcs: each triple (from, to, slots)
-- registers one arc per slot index between the two edges.  Arcs are
-- inserted under both endpoints so they can be looked up from either side;
-- the slot index is mirrored (i vs 4-i) depending on arc orientation,
-- see 'cyclic'.
mkTile :: [(Direction, Direction, [Int])] -> Tile
mkTile = Tile . go M.empty
  where
    go :: M.Map (Direction, Int) Direction -> [(Direction, Direction, [Int])] -> M.Map (Direction, Int) Direction
    go m [] = m
    go m ((d1, d2, is) : xs) = foldl' (addArc d1 d2) (go m xs) is
    -- Insert a single arc under both of its endpoint ports.
    addArc :: Direction -> Direction -> M.Map (Direction, Int) Direction -> Int -> M.Map (Direction, Int) Direction
    addArc d1 d2 m i = M.insert (d1, arcIndex d1 d2 i) d2 . M.insert (d2, arcIndex d2 d1 i) d1 $ m
    arcIndex d1 d2 i = if cyclic d1 d2 then i else 4-i
-- | Canonical orientation of an arc between two edges: True if walking
-- from the first edge to the second goes the "short way" around the
-- hexagon (with a fixed tie-break for diametrically opposite edges).
cyclic :: Direction -> Direction -> Bool
cyclic from to =
    if from == reverseDirection to
        then from < to  -- opposite edges: break the tie by Ord
        else mod (6 + fromEnum from - fromEnum to) 6 <= 3
-- | Pop an arbitrary arc (the one at the minimal port) off the tile.
-- Returns Nothing when the tile has no arcs left.
extractArc :: Tile -> Maybe ((Direction, Int, Direction), Tile)
extractArc (Tile xs)
    | M.null xs = Nothing
    | otherwise =
        let ((d1, i), d2) = M.findMin xs
        in Just ((d1, i, d2), deleteArc (Tile xs) (d1, i, d2))
-- | Look up the arc attached to the given port, removing it from the tile
-- when present (so repeated traversal consumes the tile).
findArc :: Tile -> (Direction, Int) -> Maybe ((Direction, Int, Direction), Tile)
findArc (Tile xs) (d1, i) = fmap (\d2 -> ((d1, i, d2), deleteArc (Tile xs) (d1, i, d2))) (M.lookup (d1, i) xs)
-- | Remove an arc under both of its endpoint ports.  Slot i seen from one
-- endpoint corresponds to slot 4-i seen from the other.
deleteArc :: Tile -> (Direction, Int, Direction) -> Tile
deleteArc (Tile xs) (d1, i, d2) = Tile $ M.delete (d1, i) $ M.delete (d2, 4-i) xs
-- | All six rotational variants of a tile.  A hexagonal tile has exactly
-- six distinct rotations; the previous enumeration [0..6] produced seven
-- entries with @rotateTile 6 == rotateTile 0@ (rotation is taken mod 6),
-- so the unrotated variant appeared twice and was double-weighted in
-- random tile selection.
allRotations :: Tile -> [Tile]
allRotations tile = [ rotateTile i tile | i <- [0..5] ]
-- | Rotate a tile by n sixth-turns by renaming every port direction; slot
-- indices are unchanged.  Rotation sense follows 'Direction''s Enum order
-- (assumed counter-clockwise -- TODO confirm against the Hexagonal module).
rotateTile :: Int -> Tile -> Tile
rotateTile n (Tile xs) = Tile $ M.fromList $ (\((d1, i), d2) -> ((rotateDirection d1, i), rotateDirection d2)) <$> M.toList xs
  where
    rotateDirection d = toEnum ((fromEnum d + n) `mod` 6)
type Tiling = M.Map Hex Tile
-- | Pick a random tile for every coordinate.  The candidate vector may
-- depend on the cell's position in the plane, which is how 'triptych'
-- varies strand density spatially.
randomTiling :: PrimMonad m => (Vec2 -> V.Vector Tile) -> Gen (PrimState m) -> [Hex] -> m Tiling
randomTiling baseTiles gen coords = fmap M.fromList $ for coords $ \hex -> do
    let p = toVec2 cellSize hex
    tile <- randomTile (baseTiles p) gen
    pure (hex, tile)
-- | Draw one tile uniformly at random from the candidate vector.
randomTile :: PrimMonad m => V.Vector Tile -> Gen (PrimState m) -> m Tile
randomTile tiles gen = do
    idx <- uniformRM (0, V.length tiles - 1) gen
    pure (tiles V.! idx)
-- | Decompose a tiling into strands: maximal chains of arcs continuing
-- across tile boundaries.  A seed arc is popped from the minimal tile,
-- the strand is grown from both of its ends via 'strand' (consuming arcs
-- from the tiling as it goes), and the process repeats until empty.
strands :: Tiling -> [V.Vector (Hex, (Direction, Int, Direction))]
strands tiling = case M.lookupMin tiling of
    Nothing -> []
    Just (startHex, t) -> case extractArc t of
        -- Tile has no arcs left: drop it and continue.
        Nothing -> strands (M.delete startHex tiling)
        Just ((d, i, d'), t') ->
            let tiling' = M.insert startHex t' tiling
                (s, tiling'') = strand tiling' startHex (d, i)
                (s', tiling''') = strand tiling'' startHex (d', 4-i)
            in V.fromList (reverseStrand s ++ [(startHex, (d, i, d'))] ++ s') : strands tiling'''
-- | Follow a strand out of a tile: step into the neighbour in direction d
-- and consume its continuing arc (entering at the mirrored port
-- (reverseDirection d, 4-i)), recursing until the tiling edge or a tile
-- with no continuation.  Returns the steps walked and the reduced tiling.
strand :: Tiling -> Hex -> (Direction, Int) -> ([(Hex, (Direction, Int, Direction))], Tiling)
strand tiling hex (d, i) = let hex' = move d 1 hex in case M.lookup hex' tiling of
    Nothing -> ([], tiling)
    Just t -> case findArc t (reverseDirection d, 4-i) of
        Nothing -> ([], tiling)
        Just ((_, _, d'), t') ->
            let (s', tiling') = strand (M.insert hex' t' tiling) hex' (d', i)
            in ((hex', (reverseDirection d, 4-i, d')) : s', tiling')
-- | Reverse a strand: the step order is flipped and every step's entry
-- and exit directions are swapped (mirroring the slot index i to 4-i).
reverseStrand :: [(Hex, (Direction, Int, Direction))] -> [(Hex, (Direction, Int, Direction))]
reverseStrand steps = [ (h, (dOut, 4 - i, dIn)) | (h, (dIn, i, dOut)) <- reverse steps ]
-- | The opposite hexagonal direction (a half turn: +3 modulo 6 in the
-- Enum order of 'Direction').
reverseDirection :: Direction -> Direction
reverseDirection = toEnum . (`mod` 6) . (+ 3) . fromEnum
-- | Turn one strand step inside a hex cell into drawable geometry.  The
-- slot index selects one of several parallel lanes; it is mirrored via
-- 'cyclic' so lanes line up across tile boundaries.  The equations below
-- cover, in order: straight chords across the cell, large arcs around a
-- neighbouring cell's center (ccw then cw), and small arcs around a cell
-- corner (ccw then cw).
toArc :: Hex -> (Direction, Int, Direction) -> Arc
toArc hex (d1, n, d2) = sketchArc (fromIntegral n') d1 d2
  where
    n' = if cyclic d1 d2 then n else 4-n
    center = toVec2 cellSize hex
    side d = 0.5 *. (center +. nextCenter d)
    nextCenter d = toVec2 cellSize (move d 1 hex)
    corner d d' = (center +. nextCenter d +. nextCenter d') /. 3
    [down, _lowerLeft, _upperLeft, _up, upperRight, lowerRight] = [ transform (rotate alpha) (Vec2 0 cellSize) | alpha <- deg <$> [0, 60 .. 300] ]
    -- Straight chords between opposite edges, offset per lane.
    sketchArc i DR UL = straight ((0.5 - 0.25 * i) *. upperRight +. side DR) ((0.5 - 0.25 * i) *. upperRight +. side UL)
    sketchArc i UR DL = straight ((0.5 - 0.25 * i) *. lowerRight +. side UR) ((0.5 - 0.25 * i) *. lowerRight +. side DL)
    sketchArc i R L = straight ((0.5 - 0.25 * i) *. down +. side R) ((0.5 - 0.25 * i) *. down +. side L)
    sketchArc i UL DR = straight ((0.5 - 0.25 * i) *. upperRight +. side UL) ((0.5 - 0.25 * i) *. upperRight +. side DR)
    sketchArc i DL UR = straight ((0.5 - 0.25 * i) *. lowerRight +. side DL) ((0.5 - 0.25 * i) *. lowerRight +. side UR)
    sketchArc i L R = straight ((0.5 - 0.25 * i) *. down +. side L) ((0.5 - 0.25 * i) *. down +. side R)
    -- Wide arcs centered on a neighbouring cell, counter-clockwise.
    sketchArc i UR L = ccwArc (nextCenter UL) ((1 + 0.25 * i) * cellSize) (deg 30) (deg 90)
    sketchArc i R UL = ccwArc (nextCenter UR) ((1 + 0.25 * i) * cellSize) (deg 90) (deg 150)
    sketchArc i DR UR = ccwArc (nextCenter R) ((1 + 0.25 * i) * cellSize) (deg 150) (deg 210)
    sketchArc i DL R = ccwArc (nextCenter DR) ((1 + 0.25 * i) * cellSize) (deg 210) (deg 270)
    sketchArc i L DR = ccwArc (nextCenter DL) ((1 + 0.25 * i) * cellSize) (deg 270) (deg 330)
    sketchArc i UL DL = ccwArc (nextCenter L) ((1 + 0.25 * i) * cellSize) (deg 330) (deg 30)
    -- Same wide arcs, traversed clockwise.
    sketchArc i L UR = cwArc (nextCenter UL) ((1 + 0.25 * i) * cellSize) (deg 90) (deg 30)
    sketchArc i UL R = cwArc (nextCenter UR) ((1 + 0.25 * i) * cellSize) (deg 150) (deg 90)
    sketchArc i UR DR = cwArc (nextCenter R) ((1 + 0.25 * i) * cellSize) (deg 210) (deg 150)
    sketchArc i R DL = cwArc (nextCenter DR) ((1 + 0.25 * i) * cellSize) (deg 270) (deg 210)
    sketchArc i DR L = cwArc (nextCenter DL) ((1 + 0.25 * i) * cellSize) (deg 330) (deg 270)
    sketchArc i DL UL = cwArc (nextCenter L) ((1 + 0.25 * i) * cellSize) (deg 30) (deg 330)
    -- Tight arcs around a cell corner, counter-clockwise.
    sketchArc i UL L = ccwArc (corner L UL) (0.25 * i * cellSize) (deg 330) (deg 90)
    sketchArc i UR UL = ccwArc (corner UL UR) (0.25 * i * cellSize) (deg 30) (deg 150)
    sketchArc i R UR = ccwArc (corner UR R) (0.25 * i * cellSize) (deg 90) (deg 210)
    sketchArc i DR R = ccwArc (corner R DR) (0.25 * i * cellSize) (deg 150) (deg 270)
    sketchArc i DL DR = ccwArc (corner DR DL) (0.25 * i * cellSize) (deg 210) (deg 330)
    sketchArc i L DL = ccwArc (corner DL L) (0.25 * i * cellSize) (deg 270) (deg 30)
    -- Same corner arcs, traversed clockwise.
    sketchArc i L UL = cwArc (corner L UL) (0.25 * i * cellSize) (deg 90) (deg 330)
    sketchArc i UL UR = cwArc (corner UL UR) (0.25 * i * cellSize) (deg 150) (deg 30)
    sketchArc i UR R = cwArc (corner UR R) (0.25 * i * cellSize) (deg 210) (deg 90)
    sketchArc i R DR = cwArc (corner R DR) (0.25 * i * cellSize) (deg 270) (deg 150)
    sketchArc i DR DL = cwArc (corner DR DL) (0.25 * i * cellSize) (deg 330) (deg 210)
    sketchArc i DL L = cwArc (corner DL L) (0.25 * i * cellSize) (deg 30) (deg 270)
    sketchArc _ d d' = error ("Illegal tile " ++ show (d, d'))
|
0da617c258fc90645edf9b944b67fdeac430e6888823e3696195e00cd2a480cf | expipiplus1/vulkan | Camera.hs | # LANGUAGE DeriveGeneric #
{-# LANGUAGE DeriveAnyClass #-}
{-# OPTIONS_GHC -fplugin=Foreign.Storable.Generic.Plugin #-}
# OPTIONS_GHC -fplugin - opt = Foreign . Storable . Generic . Plugin:-v0 #
module Camera where
import Control.Lens
import Foreign.Storable.Generic
import GHC.Generics ( Generic )
import Linear
data Camera = Camera
{ camPosition :: V3 Float
, camOrientation :: Quaternion Float
, camAspect :: Float
, camFOV :: Float
-- ^ Vertical field of view in Radians
}
-- | Inverse view and projection matrices, bundled for upload (GStorable
-- provides the Storable layout via the storable-generic plugin enabled in
-- this module's header).
data CameraMatrices = CameraMatrices
  { cmViewInverse :: M44 Float
  , cmProjInverse :: M44 Float
  }
  deriving (Generic, GStorable)
-- | Default camera: 10 units behind the origin, identity-like orientation
-- (zero rotation about z), 16:9 aspect ratio, 45 degree vertical FOV.
initialCamera :: Camera
initialCamera =
  Camera (V3 0 0 (-10)) (axisAngle (V3 0 0 1) 0) (16 / 9) (pi / 4)
-- >>> viewMatrix initialCamera
-- V4 (V4 1.0 0.0 0.0 0.0) (V4 0.0 1.0 0.0 0.0) (V4 0.0 0.0 1.0 10.0) (V4 0.0 0.0 0.0 1.0)
-- | World-to-camera transform: the inverse of the camera's rigid-body
-- transform (orientation + position).
viewMatrix :: Camera -> M44 Float
viewMatrix Camera {..} = inv44 $ mkTransformation camOrientation camPosition
-- >>> projectionMatrix initialCamera
V4 ( V4 0.3611771 0.0 0.0 0.0 ) ( V4 0.0 0.6420926 0.0 0.0 ) ( V4 0.0 0.0 0.0 0.1 ) ( V4 0.0 0.0 1.0 0.0 )
--
-- >>> tan (1.5 / 2)
0.9315964599440725
-- | Infinite projection with w' = z and constant z' = zNear: after the
-- perspective divide, depth is zNear / z -- 1.0 at the near plane and
-- approaching 0 at infinity (matches the projectToScreen doctests below).
-- NOTE(review): this reversed-style depth presumably pairs with a
-- greater-than depth test -- confirm at the pipeline setup.
projectionMatrix :: Camera -> M44 Float
projectionMatrix Camera {..} =
  let cotFoV = 1 / tan (camFOV / 2) -- cotangent of half the vertical FOV
      dx = cotFoV / camAspect
      dy = cotFoV
      zNear = 0.1
  in V4 (V4 dx 0 0 0) (V4 0 dy 0 0) (V4 0 0 0 zNear) (V4 0 0 1 0)
-- >>> projectRay initialCamera (V2 0 0)
( V3 0.0 0.0 ( -10.0),V3 0.0 0.0 1.0 )
--
-- >>> projectRay initialCamera (V2 0 1)
( V3 0.0 0.0 ( -10.0),V3 0.0 0.38268346 0.9238795 )
--
-- >>> projectRay initialCamera (V2 1 0)
( V3 0.0 0.0 ( -10.0),V3 0.5929577 0.0 0.8052336 )
-- | Cast a world-space ray from the camera eye through a point on the
-- screen (normalized device coordinates), e.g. for mouse picking.
projectRay
  :: Camera
  -> V2 Float
  -- ^ position on screen in [ -1 .. 1]^2
  -> (V3 Float, V3 Float)
  -- ^ Origin, Direction
projectRay c scr2 =
  let viewInverse = inv44 $ viewMatrix c
      projInverse = inv44 $ projectionMatrix c
      -- Camera position in world space (eye point).
      origin = (viewInverse !* point (V3 0 0 0)) ^. _xyz
      targetScreenSpace = V4 (scr2 ^. _x) (scr2 ^. _y) 1 1
      target = projInverse !* targetScreenSpace
      -- Unproject, rotate into world space (w=0: direction), normalize.
      dir = normalize ((viewInverse !* vector (target ^. _xyz)) ^. _xyz)
  in (origin, dir)
-- >>> projectToScreen initialCamera (V3 0 0 (-9.8))
V3 0.0 0.0 0.5000005
--
-- >>> projectToScreen initialCamera (V3 0 0 (-10))
-- V3 NaN NaN Infinity
--
-- >>> projectToScreen initialCamera (V3 0 0 (-9.9))
V3 0.0 0.0 0.9999962
--
-- >>> projectToScreen initialCamera (V3 0 0 1000)
V3 0.0 0.0 9.900991e-5
-- | Project a world-space point to normalized device coordinates
-- (x, y in [-1..1]; depth = zNear / camera-space distance).  Degenerates
-- to NaN/Infinity for points in the camera plane -- see the doctests above.
projectToScreen :: Camera -> V3 Float -> V3 Float
projectToScreen c =
  normalizePoint . (projectionMatrix c !*) . (viewMatrix c !*) . point
| null | https://raw.githubusercontent.com/expipiplus1/vulkan/b1e33d1031779b4740c279c68879d05aee371659/examples/lib/Camera.hs | haskell | # LANGUAGE DeriveAnyClass #
# OPTIONS_GHC -fplugin=Foreign.Storable.Generic.Plugin #
>>> projectionMatrix initialCamera
>>> tan (1.5 / 2)
>>> projectRay initialCamera (V2 0 0)
>>> projectRay initialCamera (V2 0 1)
>>> projectRay initialCamera (V2 1 0)
^ Origin, Direction
>>> projectToScreen initialCamera (V3 0 0 (-9.8))
>>> projectToScreen initialCamera (V3 0 0 (-10))
V3 NaN NaN Infinity
>>> projectToScreen initialCamera (V3 0 0 (-9.9))
>>> projectToScreen initialCamera (V3 0 0 1000) | # LANGUAGE DeriveGeneric #
# OPTIONS_GHC -fplugin - opt = Foreign . Storable . Generic . Plugin:-v0 #
module Camera where
import Control.Lens
import Foreign.Storable.Generic
import GHC.Generics ( Generic )
import Linear
data Camera = Camera
{ camPosition :: V3 Float
, camOrientation :: Quaternion Float
, camAspect :: Float
, camFOV :: Float
^ Vertical field of view in Radians
}
data CameraMatrices = CameraMatrices
{ cmViewInverse :: M44 Float
, cmProjInverse :: M44 Float
}
deriving (Generic, GStorable)
initialCamera :: Camera
initialCamera =
Camera (V3 0 0 (-10)) (axisAngle (V3 0 0 1) 0) (16 / 9) (pi / 4)
> > > viewMatrix initialCamera
V4 ( V4 1.0 0.0 0.0 0.0 ) ( V4 0.0 1.0 0.0 0.0 ) ( V4 0.0 0.0 1.0 10.0 ) ( V4 0.0 0.0 0.0 1.0 )
viewMatrix :: Camera -> M44 Float
viewMatrix Camera {..} = inv44 $ mkTransformation camOrientation camPosition
V4 ( V4 0.3611771 0.0 0.0 0.0 ) ( V4 0.0 0.6420926 0.0 0.0 ) ( V4 0.0 0.0 0.0 0.1 ) ( V4 0.0 0.0 1.0 0.0 )
0.9315964599440725
-- | Infinite projection with w' = z and constant z' = zNear, so depth
-- after the perspective divide is zNear / z (1 at the near plane,
-- approaching 0 at infinity).
projectionMatrix :: Camera -> M44 Float
projectionMatrix cam = V4 (V4 sx 0 0 0) (V4 0 sy 0 0) (V4 0 0 0 near) (V4 0 0 1 0)
  where
    focal = 1 / tan (camFOV cam / 2) -- cotangent of half the vertical FOV
    sx = focal / camAspect cam
    sy = focal
    near = 0.1
( V3 0.0 0.0 ( -10.0),V3 0.0 0.0 1.0 )
( V3 0.0 0.0 ( -10.0),V3 0.0 0.38268346 0.9238795 )
( V3 0.0 0.0 ( -10.0),V3 0.5929577 0.0 0.8052336 )
projectRay
:: Camera
-> V2 Float
^ position on screen in [ -1 .. 1]^2
-> (V3 Float, V3 Float)
projectRay c scr2 =
let viewInverse = inv44 $ viewMatrix c
projInverse = inv44 $ projectionMatrix c
origin = (viewInverse !* point (V3 0 0 0)) ^. _xyz
targetScreenSpace = V4 (scr2 ^. _x) (scr2 ^. _y) 1 1
target = projInverse !* targetScreenSpace
dir = normalize ((viewInverse !* vector (target ^. _xyz)) ^. _xyz)
in (origin, dir)
V3 0.0 0.0 0.5000005
V3 0.0 0.0 0.9999962
V3 0.0 0.0 9.900991e-5
projectToScreen :: Camera -> V3 Float -> V3 Float
projectToScreen c =
normalizePoint . (projectionMatrix c !*) . (viewMatrix c !*) . point
|
dadc885a9b5c07236467b2ceda3497bc6dd4d6116635e52c9f33993b2bbd6a5a | AnOctopus/haskell-minithesis | Examples.hs | module Examples where
import Relude
import TestCase
import Gen
-- | Property asserting two independently generated lists are equal --
-- intended to fail so the shrinker can be exercised.  NOTE(review):
-- intent inferred from the assertion shape; confirm these examples are
-- meant as failing demos.
example :: Property [Int]
example = do
    l <- listRange 0 2000 int
    r <- list int
    l === r
-- | Property asserting a generated list is never empty; fails (and
-- shrinks) as soon as the generator produces [].
example2 :: Property [Int]
example2 = do
    l <- list int
    l /== []
-- | Dependent generation: list length is drawn from [i .. 2*i], then the
-- property asserts no element reaches 9 (falsifiable by construction).
example3 :: Property Bool
example3 = do
    i <- intRange 0 100
    lst <- listRange i (i*2) int
    assert (not (any (>=9) lst)) lst
-- | Asserts every generated float lies in [-1, 1].  Whether this holds
-- depends on the range of 'float' -- TODO confirm the generator's bounds.
example4 :: Property Bool
example4 = do
    f <- float
    assert (abs f <= 1.0) f
| null | https://raw.githubusercontent.com/AnOctopus/haskell-minithesis/933c9ab762468d7eaca716e4d609a0b1b850a322/src/Examples.hs | haskell | module Examples where
import Relude
import TestCase
import Gen
-- | Two independently generated lists asserted equal -- a deliberately
-- falsifiable property (intent inferred; confirm).
example :: Property [Int]
example = do
    l <- listRange 0 2000 int
    r <- list int
    l === r

-- | Fails as soon as the generated list is empty.
example2 :: Property [Int]
example2 = do
    l <- list int
    l /== []

-- | Dependent generation: length in [i .. 2*i], then assert no element
-- reaches 9 (falsifiable by construction).
example3 :: Property Bool
example3 = do
    i <- intRange 0 100
    lst <- listRange i (i*2) int
    assert (not (any (>=9) lst)) lst

-- | Asserts |f| <= 1 for generated floats; holds only if 'float' is a
-- unit-range generator -- TODO confirm.
example4 :: Property Bool
example4 = do
    f <- float
    assert (abs f <= 1.0) f
| |
98926899c6a3160fd3722a3ff5ba304bdeb25e80040a3e4e81b716391d96f3e4 | hasktorch/hasktorch | Native6.hs |
-- generated by using spec/Declarations.yaml
# LANGUAGE DataKinds #
# LANGUAGE PolyKinds #
# LANGUAGE TemplateHaskell #
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE OverloadedStrings #-}
module Torch.Internal.Unmanaged.Native.Native6 where
import Foreign.C.String
import Foreign.C.Types
import Foreign
import Torch.Internal.Type
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Inline.Cpp.Unsafe as C
import qualified Language.C.Inline.Context as C
import qualified Language.C.Types as C
C.context $ C.cppCtx <> mempty { C.ctxTypesTable = typeTable }
C.include "<vector>"
C.include "<ATen/Tensor.h>"
C.include "<ATen/Functions.h>"
-- | Unmanaged binding for @at::slice(self, dim, start, end, step)@ with
-- all arguments explicit (suffix @tllll@ encodes the argument kinds:
-- tensor + four int64s).  Returns a freshly heap-allocated @at::Tensor*@;
-- ownership passes to the caller (the managed wrapper layer attaches the
-- finalizer).  Machine-generated from spec/Declarations.yaml -- the same
-- pattern repeats for every binding in this module; do not edit by hand.
slice_tllll
  :: Ptr Tensor -- ^ self
  -> Int64 -- ^ dim
  -> Int64 -- ^ start
  -> Int64 -- ^ end
  -> Int64 -- ^ step
  -> IO (Ptr Tensor)
slice_tllll _self _dim _start _end _step =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(int64_t _start)
  , $(int64_t _end)
  , $(int64_t _step)));
  }|]
slice_tlll
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> IO (Ptr Tensor)
slice_tlll _self _dim _start _end =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::slice(
*$(at::Tensor* _self)
, $(int64_t _dim)
, $(int64_t _start)
, $(int64_t _end)));
}|]
slice_tll
:: Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr Tensor)
slice_tll _self _dim _start =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::slice(
*$(at::Tensor* _self)
, $(int64_t _dim)
, $(int64_t _start)));
}|]
slice_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
slice_tl _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::slice(
*$(at::Tensor* _self)
, $(int64_t _dim)));
}|]
slice_t
:: Ptr Tensor
-> IO (Ptr Tensor)
slice_t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::slice(
*$(at::Tensor* _self)));
}|]
slice_backward_tlllll
:: Ptr Tensor
-> Ptr IntArray
-> Int64
-> Int64
-> Int64
-> Int64
-> IO (Ptr Tensor)
slice_backward_tlllll _grad_output _input_sizes _dim _start _end _step =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_backward(
*$(at::Tensor* _grad_output)
, *$(std::vector<int64_t>* _input_sizes)
, $(int64_t _dim)
, $(int64_t _start)
, $(int64_t _end)
, $(int64_t _step)));
}|]
slice_scatter_ttllll
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Int64
-> IO (Ptr Tensor)
slice_scatter_ttllll _self _src _dim _start _end _step =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_scatter(
*$(at::Tensor* _self)
, *$(at::Tensor* _src)
, $(int64_t _dim)
, $(int64_t _start)
, $(int64_t _end)
, $(int64_t _step)));
}|]
slice_scatter_ttlll
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> Int64
-> Int64
-> IO (Ptr Tensor)
slice_scatter_ttlll _self _src _dim _start _end =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_scatter(
*$(at::Tensor* _self)
, *$(at::Tensor* _src)
, $(int64_t _dim)
, $(int64_t _start)
, $(int64_t _end)));
}|]
slice_scatter_ttll
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr Tensor)
slice_scatter_ttll _self _src _dim _start =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_scatter(
*$(at::Tensor* _self)
, *$(at::Tensor* _src)
, $(int64_t _dim)
, $(int64_t _start)));
}|]
slice_scatter_ttl
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
slice_scatter_ttl _self _src _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_scatter(
*$(at::Tensor* _self)
, *$(at::Tensor* _src)
, $(int64_t _dim)));
}|]
slice_scatter_tt
:: Ptr Tensor
-> Ptr Tensor
-> IO (Ptr Tensor)
slice_scatter_tt _self _src =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_scatter(
*$(at::Tensor* _self)
, *$(at::Tensor* _src)));
}|]
select_scatter_ttll
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr Tensor)
select_scatter_ttll _self _src _dim _index =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::select_scatter(
*$(at::Tensor* _self)
, *$(at::Tensor* _src)
, $(int64_t _dim)
, $(int64_t _index)));
}|]
diagonal_scatter_ttlll
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> Int64
-> Int64
-> IO (Ptr Tensor)
diagonal_scatter_ttlll _self _src _offset _dim1 _dim2 =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal_scatter(
*$(at::Tensor* _self)
, *$(at::Tensor* _src)
, $(int64_t _offset)
, $(int64_t _dim1)
, $(int64_t _dim2)));
}|]
diagonal_scatter_ttll
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr Tensor)
diagonal_scatter_ttll _self _src _offset _dim1 =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal_scatter(
*$(at::Tensor* _self)
, *$(at::Tensor* _src)
, $(int64_t _offset)
, $(int64_t _dim1)));
}|]
diagonal_scatter_ttl
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
diagonal_scatter_ttl _self _src _offset =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal_scatter(
*$(at::Tensor* _self)
, *$(at::Tensor* _src)
, $(int64_t _offset)));
}|]
diagonal_scatter_tt
:: Ptr Tensor
-> Ptr Tensor
-> IO (Ptr Tensor)
diagonal_scatter_tt _self _src =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal_scatter(
*$(at::Tensor* _self)
, *$(at::Tensor* _src)));
}|]
slogdet_t
:: Ptr Tensor
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
slogdet_t _self =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::slogdet(
*$(at::Tensor* _self)));
}|]
smm_tt
:: Ptr Tensor
-> Ptr Tensor
-> IO (Ptr Tensor)
smm_tt _self _mat2 =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::smm(
*$(at::Tensor* _self)
, *$(at::Tensor* _mat2)));
}|]
softmax_tls
:: Ptr Tensor
-> Int64
-> ScalarType
-> IO (Ptr Tensor)
softmax_tls _self _dim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::softmax(
*$(at::Tensor* _self)
, $(int64_t _dim)
, $(at::ScalarType _dtype)));
}|]
softmax_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
softmax_tl _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::softmax(
*$(at::Tensor* _self)
, $(int64_t _dim)));
}|]
softmax_tns
:: Ptr Tensor
-> Ptr Dimname
-> ScalarType
-> IO (Ptr Tensor)
softmax_tns _self _dim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::softmax(
*$(at::Tensor* _self)
, *$(at::Dimname* _dim)
, $(at::ScalarType _dtype)));
}|]
softmax_tn
:: Ptr Tensor
-> Ptr Dimname
-> IO (Ptr Tensor)
softmax_tn _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::softmax(
*$(at::Tensor* _self)
, *$(at::Dimname* _dim)));
}|]
_softmax_tlb
:: Ptr Tensor
-> Int64
-> CBool
-> IO (Ptr Tensor)
_softmax_tlb _self _dim _half_to_float =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::_softmax(
*$(at::Tensor* _self)
, $(int64_t _dim)
, $(bool _half_to_float)));
}|]
_softmax_out_ttlb
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> CBool
-> IO (Ptr Tensor)
_softmax_out_ttlb _out _self _dim _half_to_float =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::_softmax_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, $(int64_t _dim)
, $(bool _half_to_float)));
}|]
_softmax_backward_data_ttls
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> ScalarType
-> IO (Ptr Tensor)
_softmax_backward_data_ttls _grad_output _output _dim _input_dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::_softmax_backward_data(
*$(at::Tensor* _grad_output)
, *$(at::Tensor* _output)
, $(int64_t _dim)
, $(at::ScalarType _input_dtype)));
}|]
_softmax_backward_data_out_tttls
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> Int64
-> ScalarType
-> IO (Ptr Tensor)
_softmax_backward_data_out_tttls _grad_input _grad_output _output _dim _input_dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::_softmax_backward_data_out(
*$(at::Tensor* _grad_input)
, *$(at::Tensor* _grad_output)
, *$(at::Tensor* _output)
, $(int64_t _dim)
, $(at::ScalarType _input_dtype)));
}|]
unsafe_split_tll
:: Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr TensorList)
unsafe_split_tll _self _split_size _dim =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::unsafe_split(
*$(at::Tensor* _self)
, $(int64_t _split_size)
, $(int64_t _dim)));
}|]
unsafe_split_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr TensorList)
unsafe_split_tl _self _split_size =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::unsafe_split(
*$(at::Tensor* _self)
, $(int64_t _split_size)));
}|]
split_tll
:: Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr TensorList)
split_tll _self _split_size _dim =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::split(
*$(at::Tensor* _self)
, $(int64_t _split_size)
, $(int64_t _dim)));
}|]
split_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr TensorList)
split_tl _self _split_size =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::split(
*$(at::Tensor* _self)
, $(int64_t _split_size)));
}|]
unsafe_split_with_sizes_tll
:: Ptr Tensor
-> Ptr IntArray
-> Int64
-> IO (Ptr TensorList)
unsafe_split_with_sizes_tll _self _split_sizes _dim =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::unsafe_split_with_sizes(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _split_sizes)
, $(int64_t _dim)));
}|]
unsafe_split_with_sizes_tl
:: Ptr Tensor
-> Ptr IntArray
-> IO (Ptr TensorList)
unsafe_split_with_sizes_tl _self _split_sizes =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::unsafe_split_with_sizes(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _split_sizes)));
}|]
split_with_sizes_tll
:: Ptr Tensor
-> Ptr IntArray
-> Int64
-> IO (Ptr TensorList)
split_with_sizes_tll _self _split_sizes _dim =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::split_with_sizes(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _split_sizes)
, $(int64_t _dim)));
}|]
split_with_sizes_tl
:: Ptr Tensor
-> Ptr IntArray
-> IO (Ptr TensorList)
split_with_sizes_tl _self _split_sizes =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::split_with_sizes(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _split_sizes)));
}|]
hsplit_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr TensorList)
hsplit_tl _self _sections =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::hsplit(
*$(at::Tensor* _self)
, $(int64_t _sections)));
}|]
vsplit_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr TensorList)
vsplit_tl _self _sections =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::vsplit(
*$(at::Tensor* _self)
, $(int64_t _sections)));
}|]
dsplit_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr TensorList)
dsplit_tl _self _sections =
[C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::dsplit(
*$(at::Tensor* _self)
, $(int64_t _sections)));
}|]
squeeze_t
:: Ptr Tensor
-> IO (Ptr Tensor)
squeeze_t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::squeeze(
*$(at::Tensor* _self)));
}|]
squeeze_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
squeeze_tl _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::squeeze(
*$(at::Tensor* _self)
, $(int64_t _dim)));
}|]
squeeze_tn
:: Ptr Tensor
-> Ptr Dimname
-> IO (Ptr Tensor)
squeeze_tn _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::squeeze(
*$(at::Tensor* _self)
, *$(at::Dimname* _dim)));
}|]
-- | Binding for @at::sspaddmm(self, mat1, mat2, beta, alpha)@.
-- The suffix encodes argument kinds (t = Tensor, s = Scalar); the result is a
-- caller-owned @new at::Tensor@.
sspaddmm_tttss
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> Ptr Scalar
  -> IO (Ptr Tensor)
sspaddmm_tttss _self _mat1 _mat2 _beta _alpha =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)
  , *$(at::Scalar* _beta)
  , *$(at::Scalar* _alpha)));
  }|]
-- | Binding for @at::sspaddmm(self, mat1, mat2, beta)@ (alpha defaulted by ATen).
sspaddmm_ttts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
sspaddmm_ttts _self _mat1 _mat2 _beta =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)
  , *$(at::Scalar* _beta)));
  }|]
-- | Binding for @at::sspaddmm(self, mat1, mat2)@ (beta and alpha defaulted by ATen).
sspaddmm_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
sspaddmm_ttt _self _mat1 _mat2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)));
  }|]
-- | Binding for @at::sspaddmm_out(out, self, mat1, mat2, beta, alpha)@;
-- writes into @out@ and also returns it wrapped in a new Tensor handle.
sspaddmm_out_ttttss
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> Ptr Scalar
  -> IO (Ptr Tensor)
sspaddmm_out_ttttss _out _self _mat1 _mat2 _beta _alpha =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)
  , *$(at::Scalar* _beta)
  , *$(at::Scalar* _alpha)));
  }|]
-- | Binding for @at::sspaddmm_out(out, self, mat1, mat2, beta)@.
sspaddmm_out_tttts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
sspaddmm_out_tttts _out _self _mat1 _mat2 _beta =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)
  , *$(at::Scalar* _beta)));
  }|]
-- | Binding for @at::sspaddmm_out(out, self, mat1, mat2)@.
sspaddmm_out_tttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
sspaddmm_out_tttt _out _self _mat1 _mat2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)));
  }|]
-- | Binding for @at::stack(TensorList, int64_t dim)@.
-- Result is a caller-owned @new at::Tensor@.
stack_ll
  :: Ptr TensorList
  -> Int64
  -> IO (Ptr Tensor)
stack_ll _tensors _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stack(
    *$(std::vector<at::Tensor>* _tensors)
  , $(int64_t _dim)));
  }|]
-- | Binding for @at::stack(TensorList)@ (dim defaulted by ATen).
stack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
stack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Binding for @at::stack_out(out, TensorList, dim)@; writes into @out@.
stack_out_tll
  :: Ptr Tensor
  -> Ptr TensorList
  -> Int64
  -> IO (Ptr Tensor)
stack_out_tll _out _tensors _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)
  , $(int64_t _dim)));
  }|]
-- | Binding for @at::stack_out(out, TensorList)@.
stack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
stack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Binding for the internal @at::_stack(TensorList, dim)@ variant.
_stack_ll
  :: Ptr TensorList
  -> Int64
  -> IO (Ptr Tensor)
_stack_ll _tensors _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_stack(
    *$(std::vector<at::Tensor>* _tensors)
  , $(int64_t _dim)));
  }|]
-- | Binding for the internal @at::_stack(TensorList)@ variant.
_stack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
_stack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_stack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Binding for the internal @at::_stack_out(out, TensorList, dim)@ variant.
_stack_out_tll
  :: Ptr Tensor
  -> Ptr TensorList
  -> Int64
  -> IO (Ptr Tensor)
_stack_out_tll _out _tensors _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_stack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)
  , $(int64_t _dim)));
  }|]
-- | Binding for the internal @at::_stack_out(out, TensorList)@ variant.
_stack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
_stack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_stack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Binding for @at::hstack(TensorList)@; result is a caller-owned @new at::Tensor@.
hstack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
hstack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::hstack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Binding for @at::hstack_out(out, TensorList)@; writes into @out@.
hstack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
hstack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::hstack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Binding for @at::vstack(TensorList)@.
vstack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
vstack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::vstack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Binding for @at::vstack_out(out, TensorList)@; writes into @out@.
vstack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
vstack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::vstack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Binding for @at::dstack(TensorList)@.
dstack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
dstack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::dstack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Binding for @at::dstack_out(out, TensorList)@; writes into @out@.
dstack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
dstack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::dstack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Binding for @at::stft(self, n_fft, hop_length, win_length, window,
-- normalized, onesided, return_complex)@ — the fully explicit overload.
-- The shorter variants below drop trailing arguments and rely on ATen's
-- C++ defaults for the omitted ones.
stft_tllltbbb
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> CBool
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
stft_tllltbbb _self _n_fft _hop_length _win_length _window _normalized _onesided _return_complex =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)
  , *$(at::Tensor* _window)
  , $(bool _normalized)
  , $(bool _onesided)
  , $(bool _return_complex)));
  }|]
-- | @at::stft@ without @return_complex@.
stft_tllltbb
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
stft_tllltbb _self _n_fft _hop_length _win_length _window _normalized _onesided =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)
  , *$(at::Tensor* _window)
  , $(bool _normalized)
  , $(bool _onesided)));
  }|]
-- | @at::stft@ without @onesided@ / @return_complex@.
stft_tllltb
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr Tensor)
stft_tllltb _self _n_fft _hop_length _win_length _window _normalized =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)
  , *$(at::Tensor* _window)
  , $(bool _normalized)));
  }|]
-- | @at::stft@ with only the window tensor beyond the size arguments.
stft_tlllt
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
stft_tlllt _self _n_fft _hop_length _win_length _window =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)
  , *$(at::Tensor* _window)));
  }|]
-- | @at::stft@ with n_fft, hop_length and win_length only.
stft_tlll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
stft_tlll _self _n_fft _hop_length _win_length =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)));
  }|]
-- | @at::stft@ with n_fft and hop_length only.
stft_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
stft_tll _self _n_fft _hop_length =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)));
  }|]
-- | @at::stft@ with n_fft only.
stft_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
stft_tl _self _n_fft =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)));
  }|]
-- | Binding for @at::istft(self, n_fft, hop_length, win_length, window,
-- center, normalized, onesided, length, return_complex)@ — the fully
-- explicit overload. The shorter variants below drop trailing arguments
-- and use ATen's C++ defaults for them.
istft_tllltbbblb
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> CBool
  -> CBool
  -> CBool
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
istft_tllltbbblb _self _n_fft _hop_length _win_length _window _center _normalized _onesided _length _return_complex =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)
  , *$(at::Tensor* _window)
  , $(bool _center)
  , $(bool _normalized)
  , $(bool _onesided)
  , $(int64_t _length)
  , $(bool _return_complex)));
  }|]
-- | @at::istft@ without @return_complex@.
istft_tllltbbbl
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> CBool
  -> CBool
  -> CBool
  -> Int64
  -> IO (Ptr Tensor)
istft_tllltbbbl _self _n_fft _hop_length _win_length _window _center _normalized _onesided _length =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)
  , *$(at::Tensor* _window)
  , $(bool _center)
  , $(bool _normalized)
  , $(bool _onesided)
  , $(int64_t _length)));
  }|]
-- | @at::istft@ without @length@ / @return_complex@.
istft_tllltbbb
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> CBool
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
istft_tllltbbb _self _n_fft _hop_length _win_length _window _center _normalized _onesided =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)
  , *$(at::Tensor* _window)
  , $(bool _center)
  , $(bool _normalized)
  , $(bool _onesided)));
  }|]
-- | @at::istft@ up to @normalized@.
istft_tllltbb
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
istft_tllltbb _self _n_fft _hop_length _win_length _window _center _normalized =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)
  , *$(at::Tensor* _window)
  , $(bool _center)
  , $(bool _normalized)));
  }|]
-- | @at::istft@ up to @center@.
istft_tllltb
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr Tensor)
istft_tllltb _self _n_fft _hop_length _win_length _window _center =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)
  , *$(at::Tensor* _window)
  , $(bool _center)));
  }|]
-- | @at::istft@ up to the window tensor.
istft_tlllt
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
istft_tlllt _self _n_fft _hop_length _win_length _window =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)
  , *$(at::Tensor* _window)));
  }|]
-- | @at::istft@ with n_fft, hop_length and win_length only.
istft_tlll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
istft_tlll _self _n_fft _hop_length _win_length =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)
  , $(int64_t _win_length)));
  }|]
-- | @at::istft@ with n_fft and hop_length only.
istft_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
istft_tll _self _n_fft _hop_length =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)
  , $(int64_t _hop_length)));
  }|]
-- | @at::istft@ with n_fft only.
istft_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
istft_tl _self _n_fft =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
    *$(at::Tensor* _self)
  , $(int64_t _n_fft)));
  }|]
-- | Binding for @at::stride(Tensor, int64_t dim)@.
-- Unlike the tensor-returning bindings, this returns the @int64_t@
-- value directly — no heap allocation is involved.
stride_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Int64)
stride_tl _self _dim =
  [C.throwBlock| int64_t { return (at::stride(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | Binding for @at::stride(Tensor, Dimname dim)@ (named-dimension overload);
-- returns the @int64_t@ value directly.
stride_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Int64)
stride_tn _self _dim =
  [C.throwBlock| int64_t { return (at::stride(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Binding for @at::sum(Tensor, ScalarType dtype)@.
-- Suffix key: t = Tensor, l = IntArray/int64, b = bool, s = ScalarType,
-- N = DimnameList. All variants return a caller-owned @new at::Tensor@.
sum_ts
  :: Ptr Tensor
  -> ScalarType
  -> IO (Ptr Tensor)
sum_ts _self _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
    *$(at::Tensor* _self)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::sum(Tensor)@ (full reduction, default dtype).
sum_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
sum_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::sum(Tensor, IntArrayRef dim, bool keepdim, ScalarType dtype)@.
sum_tlbs
  :: Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> ScalarType
  -> IO (Ptr Tensor)
sum_tlbs _self _dim _keepdim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _keepdim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::sum(Tensor, IntArrayRef dim, bool keepdim)@.
sum_tlb
  :: Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> IO (Ptr Tensor)
sum_tlb _self _dim _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::sum(Tensor, IntArrayRef dim)@.
sum_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
sum_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]
-- | Binding for @at::sum(Tensor, DimnameList dim, bool keepdim, ScalarType dtype)@.
sum_tNbs
  :: Ptr Tensor
  -> Ptr DimnameList
  -> CBool
  -> ScalarType
  -> IO (Ptr Tensor)
sum_tNbs _self _dim _keepdim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(bool _keepdim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::sum(Tensor, DimnameList dim, bool keepdim)@.
sum_tNb
  :: Ptr Tensor
  -> Ptr DimnameList
  -> CBool
  -> IO (Ptr Tensor)
sum_tNb _self _dim _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::sum(Tensor, DimnameList dim)@.
sum_tN
  :: Ptr Tensor
  -> Ptr DimnameList
  -> IO (Ptr Tensor)
sum_tN _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)));
  }|]
-- | Binding for @at::sum_out(out, self, dim, keepdim, dtype)@; writes into @out@.
sum_out_ttlbs
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> ScalarType
  -> IO (Ptr Tensor)
sum_out_ttlbs _out _self _dim _keepdim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _keepdim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::sum_out(out, self, dim, keepdim)@.
sum_out_ttlb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> IO (Ptr Tensor)
sum_out_ttlb _out _self _dim _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::sum_out(out, self, dim)@.
sum_out_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
sum_out_ttl _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]
-- | Binding for @at::sum_out(out, self, DimnameList dim, keepdim, dtype)@.
sum_out_ttNbs
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr DimnameList
  -> CBool
  -> ScalarType
  -> IO (Ptr Tensor)
sum_out_ttNbs _out _self _dim _keepdim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(bool _keepdim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::sum_out(out, self, DimnameList dim, keepdim)@.
sum_out_ttNb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr DimnameList
  -> CBool
  -> IO (Ptr Tensor)
sum_out_ttNb _out _self _dim _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::sum_out(out, self, DimnameList dim)@.
sum_out_ttN
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr DimnameList
  -> IO (Ptr Tensor)
sum_out_ttN _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)));
  }|]
-- | Binding for @at::nansum(Tensor, ScalarType dtype)@.
-- Variants mirror the @sum_*@ family above (same suffix key).
nansum_ts
  :: Ptr Tensor
  -> ScalarType
  -> IO (Ptr Tensor)
nansum_ts _self _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum(
    *$(at::Tensor* _self)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::nansum(Tensor)@ (full reduction).
nansum_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
nansum_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::nansum(Tensor, IntArrayRef dim, bool keepdim, ScalarType dtype)@.
nansum_tlbs
  :: Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> ScalarType
  -> IO (Ptr Tensor)
nansum_tlbs _self _dim _keepdim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _keepdim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::nansum(Tensor, IntArrayRef dim, bool keepdim)@.
nansum_tlb
  :: Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> IO (Ptr Tensor)
nansum_tlb _self _dim _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::nansum(Tensor, IntArrayRef dim)@.
nansum_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
nansum_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]
-- | Binding for @at::nansum_out(out, self, dim, keepdim, dtype)@; writes into @out@.
nansum_out_ttlbs
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> ScalarType
  -> IO (Ptr Tensor)
nansum_out_ttlbs _out _self _dim _keepdim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _keepdim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::nansum_out(out, self, dim, keepdim)@.
nansum_out_ttlb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> IO (Ptr Tensor)
nansum_out_ttlb _out _self _dim _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::nansum_out(out, self, dim)@.
nansum_out_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
nansum_out_ttl _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]
-- | Binding for @at::sqrt(Tensor)@; returns a caller-owned @new at::Tensor@.
sqrt_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
sqrt_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sqrt(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for the in-place variant @at::sqrt_(Tensor)@ (mutates @self@).
sqrt__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
sqrt__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sqrt_(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::sqrt_out(out, self)@; writes into @out@.
sqrt_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
sqrt_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sqrt_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::square(Tensor)@; returns a caller-owned @new at::Tensor@.
square_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
square_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::square(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for the in-place variant @at::square_(Tensor)@ (mutates @self@).
square__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
square__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::square_(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::square_out(out, self)@; writes into @out@.
square_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
square_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::square_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::std(Tensor, bool unbiased)@.
-- The @_tll*@ variants at the end of this group pass an @int64_t correction@
-- instead of the boolean @unbiased@ flag (newer ATen overload).
std_tb
  :: Ptr Tensor
  -> CBool
  -> IO (Ptr Tensor)
std_tb _self _unbiased =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , $(bool _unbiased)));
  }|]
-- | Binding for @at::std(Tensor)@ (full reduction, default flags).
std_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
std_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::std(Tensor, IntArrayRef dim, bool unbiased, bool keepdim)@.
std_tlbb
  :: Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
std_tlbb _self _dim _unbiased _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _unbiased)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std(Tensor, IntArrayRef dim, bool unbiased)@.
std_tlb
  :: Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> IO (Ptr Tensor)
std_tlb _self _dim _unbiased =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _unbiased)));
  }|]
-- | Binding for @at::std(Tensor, IntArrayRef dim)@.
std_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
std_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]
-- | Binding for @at::std(Tensor, IntArrayRef dim, int64_t correction, bool keepdim)@.
std_tllb
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
std_tllb _self _dim _correction _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(int64_t _correction)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std(Tensor, IntArrayRef dim, int64_t correction)@.
std_tll
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr Tensor)
std_tll _self _dim _correction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(int64_t _correction)));
  }|]
-- | Binding for @at::std_mean(Tensor, bool unbiased)@.
-- All @std_mean_*@ variants return a caller-owned
-- @new std::tuple<at::Tensor,at::Tensor>@ (std, mean).
std_mean_tb
  :: Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tb _self _unbiased =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , $(bool _unbiased)));
  }|]
-- | Binding for @at::std_mean(Tensor)@ (full reduction, default flags).
std_mean_t
  :: Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_t _self =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::std_mean(Tensor, IntArrayRef dim, bool unbiased, bool keepdim)@.
std_mean_tlbb
  :: Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tlbb _self _dim _unbiased _keepdim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _unbiased)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std_mean(Tensor, IntArrayRef dim, bool unbiased)@.
std_mean_tlb
  :: Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tlb _self _dim _unbiased =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _unbiased)));
  }|]
-- | Binding for @at::std_mean(Tensor, IntArrayRef dim)@.
std_mean_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tl _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]
-- | Binding for @at::std_mean(Tensor, IntArrayRef dim, int64_t correction, bool keepdim)@.
std_mean_tllb
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tllb _self _dim _correction _keepdim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(int64_t _correction)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std_mean(Tensor, IntArrayRef dim, int64_t correction)@.
std_mean_tll
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tll _self _dim _correction =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(int64_t _correction)));
  }|]
-- | Binding for @at::std_mean(Tensor, DimnameList dim, bool unbiased, bool keepdim)@.
std_mean_tNbb
  :: Ptr Tensor
  -> Ptr DimnameList
  -> CBool
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tNbb _self _dim _unbiased _keepdim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(bool _unbiased)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std_mean(Tensor, DimnameList dim, bool unbiased)@.
std_mean_tNb
  :: Ptr Tensor
  -> Ptr DimnameList
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tNb _self _dim _unbiased =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(bool _unbiased)));
  }|]
-- | Binding for @at::std_mean(Tensor, DimnameList dim)@.
std_mean_tN
  :: Ptr Tensor
  -> Ptr DimnameList
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tN _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)));
  }|]
-- | Binding for @at::std_mean(Tensor, DimnameList dim, int64_t correction, bool keepdim)@.
std_mean_tNlb
  :: Ptr Tensor
  -> Ptr DimnameList
  -> Int64
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tNlb _self _dim _correction _keepdim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(int64_t _correction)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std_mean(Tensor, DimnameList dim, int64_t correction)@.
std_mean_tNl
  :: Ptr Tensor
  -> Ptr DimnameList
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tNl _self _dim _correction =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(int64_t _correction)));
  }|]
-- | Binding for @at::std_out(out, self, IntArrayRef dim, bool unbiased, bool keepdim)@;
-- writes into @out@ and returns it wrapped in a new Tensor handle.
std_out_ttlbb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
std_out_ttlbb _out _self _dim _unbiased _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _unbiased)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std_out(out, self, dim, unbiased)@.
std_out_ttlb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> CBool
  -> IO (Ptr Tensor)
std_out_ttlb _out _self _dim _unbiased =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(bool _unbiased)));
  }|]
-- | Binding for @at::std_out(out, self, dim)@.
std_out_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
std_out_ttl _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]
-- | Binding for @at::std_out(out, self, dim, int64_t correction, bool keepdim)@.
std_out_ttllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
std_out_ttllb _out _self _dim _correction _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(int64_t _correction)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std_out(out, self, dim, int64_t correction)@.
std_out_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr Tensor)
std_out_ttll _out _self _dim _correction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(int64_t _correction)));
  }|]
-- | Binding for @at::std(Tensor, DimnameList dim, bool unbiased, bool keepdim)@
-- (named-dimension overloads of @std@ / @std_out@).
std_tNbb
  :: Ptr Tensor
  -> Ptr DimnameList
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
std_tNbb _self _dim _unbiased _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(bool _unbiased)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std(Tensor, DimnameList dim, bool unbiased)@.
std_tNb
  :: Ptr Tensor
  -> Ptr DimnameList
  -> CBool
  -> IO (Ptr Tensor)
std_tNb _self _dim _unbiased =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(bool _unbiased)));
  }|]
-- | Binding for @at::std(Tensor, DimnameList dim)@.
std_tN
  :: Ptr Tensor
  -> Ptr DimnameList
  -> IO (Ptr Tensor)
std_tN _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)));
  }|]
-- | Binding for @at::std_out(out, self, DimnameList dim, unbiased, keepdim)@.
std_out_ttNbb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr DimnameList
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
std_out_ttNbb _out _self _dim _unbiased _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(bool _unbiased)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std_out(out, self, DimnameList dim, unbiased)@.
std_out_ttNb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr DimnameList
  -> CBool
  -> IO (Ptr Tensor)
std_out_ttNb _out _self _dim _unbiased =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(bool _unbiased)));
  }|]
-- | Binding for @at::std_out(out, self, DimnameList dim)@.
std_out_ttN
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr DimnameList
  -> IO (Ptr Tensor)
std_out_ttN _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)));
  }|]
-- | Binding for @at::std(Tensor, DimnameList dim, int64_t correction, bool keepdim)@.
std_tNlb
  :: Ptr Tensor
  -> Ptr DimnameList
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
std_tNlb _self _dim _correction _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(int64_t _correction)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std(Tensor, DimnameList dim, int64_t correction)@.
std_tNl
  :: Ptr Tensor
  -> Ptr DimnameList
  -> Int64
  -> IO (Ptr Tensor)
std_tNl _self _dim _correction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(int64_t _correction)));
  }|]
-- | Binding for @at::std_out(out, self, DimnameList dim, correction, keepdim)@.
std_out_ttNlb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr DimnameList
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
std_out_ttNlb _out _self _dim _correction _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(int64_t _correction)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::std_out(out, self, DimnameList dim, correction)@.
std_out_ttNl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr DimnameList
  -> Int64
  -> IO (Ptr Tensor)
std_out_ttNl _out _self _dim _correction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(std::vector<at::Dimname>* _dim)
  , $(int64_t _correction)));
  }|]
-- | Binding for @at::prod(Tensor, ScalarType dtype)@.
-- Note: unlike @sum@, the dim-wise @prod@ overloads take a single
-- @int64_t dim@ (suffix @l@) or @Dimname dim@ (suffix @n@), not a list.
prod_ts
  :: Ptr Tensor
  -> ScalarType
  -> IO (Ptr Tensor)
prod_ts _self _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
    *$(at::Tensor* _self)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::prod(Tensor)@ (full reduction).
prod_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
prod_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::prod(Tensor, int64_t dim, bool keepdim, ScalarType dtype)@.
prod_tlbs
  :: Ptr Tensor
  -> Int64
  -> CBool
  -> ScalarType
  -> IO (Ptr Tensor)
prod_tlbs _self _dim _keepdim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(bool _keepdim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::prod(Tensor, int64_t dim, bool keepdim)@.
prod_tlb
  :: Ptr Tensor
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
prod_tlb _self _dim _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::prod(Tensor, int64_t dim)@.
prod_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
prod_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | Binding for @at::prod_out(out, self, dim, keepdim, dtype)@; writes into @out@.
prod_out_ttlbs
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> ScalarType
  -> IO (Ptr Tensor)
prod_out_ttlbs _out _self _dim _keepdim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(bool _keepdim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::prod_out(out, self, dim, keepdim)@.
prod_out_ttlb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
prod_out_ttlb _out _self _dim _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::prod_out(out, self, dim)@.
prod_out_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
prod_out_ttl _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | Binding for @at::prod(Tensor, Dimname dim, bool keepdim, ScalarType dtype)@.
prod_tnbs
  :: Ptr Tensor
  -> Ptr Dimname
  -> CBool
  -> ScalarType
  -> IO (Ptr Tensor)
prod_tnbs _self _dim _keepdim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(bool _keepdim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::prod(Tensor, Dimname dim, bool keepdim)@.
prod_tnb
  :: Ptr Tensor
  -> Ptr Dimname
  -> CBool
  -> IO (Ptr Tensor)
prod_tnb _self _dim _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::prod(Tensor, Dimname dim)@.
prod_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
prod_tn _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Binding for @at::prod_out(out, self, Dimname dim, keepdim, dtype)@.
prod_out_ttnbs
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> CBool
  -> ScalarType
  -> IO (Ptr Tensor)
prod_out_ttnbs _out _self _dim _keepdim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(bool _keepdim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Binding for @at::prod_out(out, self, Dimname dim, keepdim)@.
prod_out_ttnb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> CBool
  -> IO (Ptr Tensor)
prod_out_ttnb _out _self _dim _keepdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(bool _keepdim)));
  }|]
-- | Binding for @at::prod_out(out, self, Dimname dim)@.
prod_out_ttn
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
prod_out_ttn _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Binding for @at::t(Tensor)@; result is a caller-owned @new at::Tensor@.
t_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
t_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::t(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::tan(Tensor)@; returns a caller-owned @new at::Tensor@.
tan_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
tan_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tan(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for the in-place variant @at::tan_(Tensor)@ (mutates @self@).
tan__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
tan__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tan_(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::tan_out(out, self)@; writes into @out@.
tan_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
tan_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tan_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::tanh(Tensor)@; returns a caller-owned @new at::Tensor@.
tanh_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
tanh_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tanh(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for the in-place variant @at::tanh_(Tensor)@ (mutates @self@).
tanh__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
tanh__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tanh_(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::tanh_out(out, self)@; writes into @out@.
tanh_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
tanh_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tanh_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::tensordot(self, other, dims_self, dims_other)@.
tensordot_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr Tensor)
tensordot_ttll _self _other _dims_self _dims_other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tensordot(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::vector<int64_t>* _dims_self)
  , *$(std::vector<int64_t>* _dims_other)));
  }|]
-- | Binding for @at::tensordot_out(out, self, other, dims_self, dims_other)@;
-- writes into @out@.
tensordot_out_tttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr Tensor)
tensordot_out_tttll _out _self _other _dims_self _dims_other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tensordot_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::vector<int64_t>* _dims_self)
  , *$(std::vector<int64_t>* _dims_other)));
  }|]
-- | Binding for @at::threshold(self, threshold, value)@.
threshold_tss
  :: Ptr Tensor
  -> Ptr Scalar
  -> Ptr Scalar
  -> IO (Ptr Tensor)
threshold_tss _self _threshold _value =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::threshold(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _threshold)
  , *$(at::Scalar* _value)));
  }|]
-- | Binding for the in-place variant @at::threshold_(self, threshold, value)@.
threshold__tss
  :: Ptr Tensor
  -> Ptr Scalar
  -> Ptr Scalar
  -> IO (Ptr Tensor)
threshold__tss _self _threshold _value =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::threshold_(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _threshold)
  , *$(at::Scalar* _value)));
  }|]
-- | Binding for @at::threshold_out(out, self, threshold, value)@; writes into @out@.
threshold_out_ttss
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> Ptr Scalar
  -> IO (Ptr Tensor)
threshold_out_ttss _out _self _threshold _value =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::threshold_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Scalar* _threshold)
  , *$(at::Scalar* _value)));
  }|]
-- | Binding for @at::threshold_backward_out(grad_input, grad_output, self, threshold)@;
-- writes into @grad_input@.
threshold_backward_out_ttts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
threshold_backward_out_ttts _grad_input _grad_output _self _threshold =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::threshold_backward_out(
    *$(at::Tensor* _grad_input)
  , *$(at::Tensor* _grad_output)
  , *$(at::Tensor* _self)
  , *$(at::Scalar* _threshold)));
  }|]
-- | Binding for @at::threshold_backward(grad_output, self, threshold)@.
threshold_backward_tts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
threshold_backward_tts _grad_output _self _threshold =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::threshold_backward(
    *$(at::Tensor* _grad_output)
  , *$(at::Tensor* _self)
  , *$(at::Scalar* _threshold)));
  }|]
-- | Wrappers for @at::tile@, @at::transpose@ (plus MKL-DNN variants),
-- @at::one_hot@ and the flip family. Variants with fewer suffix letters
-- omit trailing C++ arguments (ATen supplies its defaults).
tile_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
tile_tl _self _dims =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tile(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dims)));
  }|]
transpose_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
transpose_tll _self _dim0 _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::transpose(
    *$(at::Tensor* _self)
  , $(int64_t _dim0)
  , $(int64_t _dim1)));
  }|]
-- Dimname overload: dimensions addressed by name rather than index.
transpose_tnn
  :: Ptr Tensor
  -> Ptr Dimname
  -> Ptr Dimname
  -> IO (Ptr Tensor)
transpose_tnn _self _dim0 _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::transpose(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim0)
  , *$(at::Dimname* _dim1)));
  }|]
_mkldnn_transpose_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
_mkldnn_transpose_tll _self _dim0 _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_mkldnn_transpose(
    *$(at::Tensor* _self)
  , $(int64_t _dim0)
  , $(int64_t _dim1)));
  }|]
_mkldnn_transpose__tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
_mkldnn_transpose__tll _self _dim0 _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_mkldnn_transpose_(
    *$(at::Tensor* _self)
  , $(int64_t _dim0)
  , $(int64_t _dim1)));
  }|]
one_hot_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
one_hot_tl _self _num_classes =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::one_hot(
    *$(at::Tensor* _self)
  , $(int64_t _num_classes)));
  }|]
one_hot_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
one_hot_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::one_hot(
    *$(at::Tensor* _self)));
  }|]
flip_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
flip_tl _self _dims =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::flip(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dims)));
  }|]
fliplr_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
fliplr_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::fliplr(
    *$(at::Tensor* _self)));
  }|]
flipud_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
flipud_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::flipud(
    *$(at::Tensor* _self)));
  }|]
-- | Wrappers for @at::roll@, @at::rot90@, @at::trapezoid@ and @at::trapz@.
-- Each arity variant maps to the same ATen function with trailing
-- arguments dropped (ATen defaults apply).
roll_tll
  :: Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr Tensor)
roll_tll _self _shifts _dims =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::roll(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _shifts)
  , *$(std::vector<int64_t>* _dims)));
  }|]
roll_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
roll_tl _self _shifts =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::roll(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _shifts)));
  }|]
rot90_tll
  :: Ptr Tensor
  -> Int64
  -> Ptr IntArray
  -> IO (Ptr Tensor)
rot90_tll _self _k _dims =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::rot90(
    *$(at::Tensor* _self)
  , $(int64_t _k)
  , *$(std::vector<int64_t>* _dims)));
  }|]
rot90_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
rot90_tl _self _k =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::rot90(
    *$(at::Tensor* _self)
  , $(int64_t _k)));
  }|]
rot90_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
rot90_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::rot90(
    *$(at::Tensor* _self)));
  }|]
-- trapezoid: the second argument is either sample points (t overloads)
-- or a scalar spacing dx (s overloads).
trapezoid_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
trapezoid_ttl _y _x _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Tensor* _x)
  , $(int64_t _dim)));
  }|]
trapezoid_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
trapezoid_tt _y _x =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Tensor* _x)));
  }|]
trapezoid_tsl
  :: Ptr Tensor
  -> Ptr Scalar
  -> Int64
  -> IO (Ptr Tensor)
trapezoid_tsl _y _dx _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Scalar* _dx)
  , $(int64_t _dim)));
  }|]
trapezoid_ts
  :: Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
trapezoid_ts _y _dx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Scalar* _dx)));
  }|]
trapezoid_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
trapezoid_t _y =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trapezoid(
    *$(at::Tensor* _y)));
  }|]
trapz_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
trapz_ttl _y _x _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trapz(
    *$(at::Tensor* _y)
  , *$(at::Tensor* _x)
  , $(int64_t _dim)));
  }|]
trapz_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
trapz_tt _y _x =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trapz(
    *$(at::Tensor* _y)
  , *$(at::Tensor* _x)));
  }|]
trapz_tdl
  :: Ptr Tensor
  -> CDouble
  -> Int64
  -> IO (Ptr Tensor)
trapz_tdl _y _dx _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trapz(
    *$(at::Tensor* _y)
  , $(double _dx)
  , $(int64_t _dim)));
  }|]
trapz_td
  :: Ptr Tensor
  -> CDouble
  -> IO (Ptr Tensor)
trapz_td _y _dx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trapz(
    *$(at::Tensor* _y)
  , $(double _dx)));
  }|]
trapz_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
trapz_t _y =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trapz(
    *$(at::Tensor* _y)));
  }|]
-- | Wrappers for the internal @at::_trilinear@ op and the
-- @at::triplet_margin_loss@ family (arity variants drop trailing
-- optional arguments in order: reduction, swap, eps, p, margin).
_trilinear_tttlllll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr Tensor)
_trilinear_tttlllll _i1 _i2 _i3 _expand1 _expand2 _expand3 _sumdim _unroll_dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_trilinear(
    *$(at::Tensor* _i1)
  , *$(at::Tensor* _i2)
  , *$(at::Tensor* _i3)
  , *$(std::vector<int64_t>* _expand1)
  , *$(std::vector<int64_t>* _expand2)
  , *$(std::vector<int64_t>* _expand3)
  , *$(std::vector<int64_t>* _sumdim)
  , $(int64_t _unroll_dim)));
  }|]
_trilinear_tttllll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr Tensor)
_trilinear_tttllll _i1 _i2 _i3 _expand1 _expand2 _expand3 _sumdim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_trilinear(
    *$(at::Tensor* _i1)
  , *$(at::Tensor* _i2)
  , *$(at::Tensor* _i3)
  , *$(std::vector<int64_t>* _expand1)
  , *$(std::vector<int64_t>* _expand2)
  , *$(std::vector<int64_t>* _expand3)
  , *$(std::vector<int64_t>* _sumdim)));
  }|]
triplet_margin_loss_tttdddbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> CDouble
  -> CDouble
  -> CBool
  -> Int64
  -> IO (Ptr Tensor)
triplet_margin_loss_tttdddbl _anchor _positive _negative _margin _p _eps _swap _reduction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
    *$(at::Tensor* _anchor)
  , *$(at::Tensor* _positive)
  , *$(at::Tensor* _negative)
  , $(double _margin)
  , $(double _p)
  , $(double _eps)
  , $(bool _swap)
  , $(int64_t _reduction)));
  }|]
triplet_margin_loss_tttdddb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> CDouble
  -> CDouble
  -> CBool
  -> IO (Ptr Tensor)
triplet_margin_loss_tttdddb _anchor _positive _negative _margin _p _eps _swap =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
    *$(at::Tensor* _anchor)
  , *$(at::Tensor* _positive)
  , *$(at::Tensor* _negative)
  , $(double _margin)
  , $(double _p)
  , $(double _eps)
  , $(bool _swap)));
  }|]
triplet_margin_loss_tttddd
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> CDouble
  -> CDouble
  -> IO (Ptr Tensor)
triplet_margin_loss_tttddd _anchor _positive _negative _margin _p _eps =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
    *$(at::Tensor* _anchor)
  , *$(at::Tensor* _positive)
  , *$(at::Tensor* _negative)
  , $(double _margin)
  , $(double _p)
  , $(double _eps)));
  }|]
triplet_margin_loss_tttdd
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> CDouble
  -> IO (Ptr Tensor)
triplet_margin_loss_tttdd _anchor _positive _negative _margin _p =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
    *$(at::Tensor* _anchor)
  , *$(at::Tensor* _positive)
  , *$(at::Tensor* _negative)
  , $(double _margin)
  , $(double _p)));
  }|]
triplet_margin_loss_tttd
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> IO (Ptr Tensor)
triplet_margin_loss_tttd _anchor _positive _negative _margin =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
    *$(at::Tensor* _anchor)
  , *$(at::Tensor* _positive)
  , *$(at::Tensor* _negative)
  , $(double _margin)));
  }|]
triplet_margin_loss_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
triplet_margin_loss_ttt _anchor _positive _negative =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
    *$(at::Tensor* _anchor)
  , *$(at::Tensor* _positive)
  , *$(at::Tensor* _negative)));
  }|]
-- | Wrappers for @at::trunc@ / @at::fix@ (and their in-place/out
-- variants), the copy-compatibility predicate
-- @at::_has_compatible_shallow_copy_type@ (returns a plain CBool, no
-- allocation) and the internal @at::_unique@ op, which returns a pair of
-- tensors as a heap-allocated std::tuple.
trunc_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
trunc_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trunc(
    *$(at::Tensor* _self)));
  }|]
trunc__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
trunc__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trunc_(
    *$(at::Tensor* _self)));
  }|]
trunc_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
trunc_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trunc_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
fix_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
fix_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::fix(
    *$(at::Tensor* _self)));
  }|]
fix__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
fix__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::fix_(
    *$(at::Tensor* _self)));
  }|]
fix_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
fix_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::fix_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
_has_compatible_shallow_copy_type_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (CBool)
_has_compatible_shallow_copy_type_tt _self _from =
  [C.throwBlock| bool { return (at::_has_compatible_shallow_copy_type(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _from)));
  }|]
_unique_tbb
  :: Ptr Tensor
  -> CBool
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_unique_tbb _self _sorted _return_inverse =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_unique(
    *$(at::Tensor* _self)
  , $(bool _sorted)
  , $(bool _return_inverse)));
  }|]
_unique_tb
  :: Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_unique_tb _self _sorted =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_unique(
    *$(at::Tensor* _self)
  , $(bool _sorted)));
  }|]
_unique_t
  :: Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_unique_t _self =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_unique(
    *$(at::Tensor* _self)));
  }|]
| null | https://raw.githubusercontent.com/hasktorch/hasktorch/6233c173e1dd9fd7218fd13b104da15fc457f67e/libtorch-ffi/src/Torch/Internal/Unmanaged/Native/Native6.hs | haskell | generated by using spec/Declarations.yaml
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Torch.Internal.Unmanaged.Native.Native6 where
import Foreign.C.String
import Foreign.C.Types
import Foreign
import Torch.Internal.Type
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Inline.Cpp.Unsafe as C
import qualified Language.C.Inline.Context as C
import qualified Language.C.Types as C
C.context $ C.cppCtx <> mempty { C.ctxTypesTable = typeTable }
C.include "<vector>"
C.include "<ATen/Tensor.h>"
C.include "<ATen/Functions.h>"
-- | Wrappers for @at::slice@, @at::slice_backward@, @at::slice_scatter@
-- and @at::select_scatter@. Shorter variants omit trailing arguments
-- (step, end, start, dim in that order), leaving ATen's defaults.
slice_tllll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
slice_tllll _self _dim _start _end _step =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(int64_t _start)
  , $(int64_t _end)
  , $(int64_t _step)));
  }|]
slice_tlll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
slice_tlll _self _dim _start _end =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(int64_t _start)
  , $(int64_t _end)));
  }|]
slice_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
slice_tll _self _dim _start =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(int64_t _start)));
  }|]
slice_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
slice_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
slice_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
slice_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice(
    *$(at::Tensor* _self)));
  }|]
slice_backward_tlllll
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
slice_backward_tlllll _grad_output _input_sizes _dim _start _end _step =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_backward(
    *$(at::Tensor* _grad_output)
  , *$(std::vector<int64_t>* _input_sizes)
  , $(int64_t _dim)
  , $(int64_t _start)
  , $(int64_t _end)
  , $(int64_t _step)));
  }|]
slice_scatter_ttllll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
slice_scatter_ttllll _self _src _dim _start _end _step =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_scatter(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _src)
  , $(int64_t _dim)
  , $(int64_t _start)
  , $(int64_t _end)
  , $(int64_t _step)));
  }|]
slice_scatter_ttlll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
slice_scatter_ttlll _self _src _dim _start _end =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_scatter(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _src)
  , $(int64_t _dim)
  , $(int64_t _start)
  , $(int64_t _end)));
  }|]
slice_scatter_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
slice_scatter_ttll _self _src _dim _start =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_scatter(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _src)
  , $(int64_t _dim)
  , $(int64_t _start)));
  }|]
slice_scatter_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
slice_scatter_ttl _self _src _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_scatter(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _src)
  , $(int64_t _dim)));
  }|]
slice_scatter_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
slice_scatter_tt _self _src =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::slice_scatter(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _src)));
  }|]
select_scatter_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
select_scatter_ttll _self _src _dim _index =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::select_scatter(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _src)
  , $(int64_t _dim)
  , $(int64_t _index)));
  }|]
-- | Wrappers for @at::diagonal_scatter@, @at::slogdet@ (returns a
-- heap-allocated tensor pair) and @at::smm@.
diagonal_scatter_ttlll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diagonal_scatter_ttlll _self _src _offset _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal_scatter(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _src)
  , $(int64_t _offset)
  , $(int64_t _dim1)
  , $(int64_t _dim2)));
  }|]
diagonal_scatter_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diagonal_scatter_ttll _self _src _offset _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal_scatter(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _src)
  , $(int64_t _offset)
  , $(int64_t _dim1)));
  }|]
diagonal_scatter_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diagonal_scatter_ttl _self _src _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal_scatter(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _src)
  , $(int64_t _offset)));
  }|]
diagonal_scatter_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diagonal_scatter_tt _self _src =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal_scatter(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _src)));
  }|]
slogdet_t
  :: Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
slogdet_t _self =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::slogdet(
    *$(at::Tensor* _self)));
  }|]
smm_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
smm_tt _self _mat2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::smm(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _mat2)));
  }|]
-- | Wrappers for @at::softmax@ (int-dim and Dimname overloads, with and
-- without an explicit dtype) and the internal @at::_softmax@ /
-- @at::_softmax_backward_data@ ops.
softmax_tls
  :: Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
softmax_tls _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::softmax(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(at::ScalarType _dtype)));
  }|]
softmax_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
softmax_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::softmax(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
softmax_tns
  :: Ptr Tensor
  -> Ptr Dimname
  -> ScalarType
  -> IO (Ptr Tensor)
softmax_tns _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::softmax(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(at::ScalarType _dtype)));
  }|]
softmax_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
softmax_tn _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::softmax(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
_softmax_tlb
  :: Ptr Tensor
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
_softmax_tlb _self _dim _half_to_float =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_softmax(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(bool _half_to_float)));
  }|]
_softmax_out_ttlb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
_softmax_out_ttlb _out _self _dim _half_to_float =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_softmax_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(bool _half_to_float)));
  }|]
_softmax_backward_data_ttls
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
_softmax_backward_data_ttls _grad_output _output _dim _input_dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_softmax_backward_data(
    *$(at::Tensor* _grad_output)
  , *$(at::Tensor* _output)
  , $(int64_t _dim)
  , $(at::ScalarType _input_dtype)));
  }|]
_softmax_backward_data_out_tttls
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
_softmax_backward_data_out_tttls _grad_input _grad_output _output _dim _input_dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_softmax_backward_data_out(
    *$(at::Tensor* _grad_input)
  , *$(at::Tensor* _grad_output)
  , *$(at::Tensor* _output)
  , $(int64_t _dim)
  , $(at::ScalarType _input_dtype)));
  }|]
-- | Wrappers for the split family (@at::split@, @at::unsafe_split@,
-- @at::split_with_sizes@, @at::hsplit@/@at::vsplit@/@at::dsplit@) and
-- @at::squeeze@. Split ops return a heap-allocated std::vector of
-- tensors (marshalled as a TensorList pointer).
unsafe_split_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr TensorList)
unsafe_split_tll _self _split_size _dim =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::unsafe_split(
    *$(at::Tensor* _self)
  , $(int64_t _split_size)
  , $(int64_t _dim)));
  }|]
unsafe_split_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr TensorList)
unsafe_split_tl _self _split_size =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::unsafe_split(
    *$(at::Tensor* _self)
  , $(int64_t _split_size)));
  }|]
split_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr TensorList)
split_tll _self _split_size _dim =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::split(
    *$(at::Tensor* _self)
  , $(int64_t _split_size)
  , $(int64_t _dim)));
  }|]
split_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr TensorList)
split_tl _self _split_size =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::split(
    *$(at::Tensor* _self)
  , $(int64_t _split_size)));
  }|]
unsafe_split_with_sizes_tll
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr TensorList)
unsafe_split_with_sizes_tll _self _split_sizes _dim =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::unsafe_split_with_sizes(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _split_sizes)
  , $(int64_t _dim)));
  }|]
unsafe_split_with_sizes_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr TensorList)
unsafe_split_with_sizes_tl _self _split_sizes =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::unsafe_split_with_sizes(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _split_sizes)));
  }|]
split_with_sizes_tll
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr TensorList)
split_with_sizes_tll _self _split_sizes _dim =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::split_with_sizes(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _split_sizes)
  , $(int64_t _dim)));
  }|]
split_with_sizes_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr TensorList)
split_with_sizes_tl _self _split_sizes =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::split_with_sizes(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _split_sizes)));
  }|]
hsplit_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr TensorList)
hsplit_tl _self _sections =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::hsplit(
    *$(at::Tensor* _self)
  , $(int64_t _sections)));
  }|]
vsplit_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr TensorList)
vsplit_tl _self _sections =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::vsplit(
    *$(at::Tensor* _self)
  , $(int64_t _sections)));
  }|]
dsplit_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr TensorList)
dsplit_tl _self _sections =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::dsplit(
    *$(at::Tensor* _self)
  , $(int64_t _sections)));
  }|]
squeeze_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
squeeze_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::squeeze(
    *$(at::Tensor* _self)));
  }|]
squeeze_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
squeeze_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::squeeze(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
squeeze_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
squeeze_tn _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::squeeze(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Wrappers for @at::sspaddmm@ (sparse addmm, with beta/alpha arity
-- variants), @at::stack@ / internal @at::_stack@, and the
-- @at::hstack@/@at::vstack@/@at::dstack@ family, plus their @_out@
-- forms that write into a caller-provided tensor.
sspaddmm_tttss
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> Ptr Scalar
  -> IO (Ptr Tensor)
sspaddmm_tttss _self _mat1 _mat2 _beta _alpha =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)
  , *$(at::Scalar* _beta)
  , *$(at::Scalar* _alpha)));
  }|]
sspaddmm_ttts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
sspaddmm_ttts _self _mat1 _mat2 _beta =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)
  , *$(at::Scalar* _beta)));
  }|]
sspaddmm_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
sspaddmm_ttt _self _mat1 _mat2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)));
  }|]
sspaddmm_out_ttttss
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> Ptr Scalar
  -> IO (Ptr Tensor)
sspaddmm_out_ttttss _out _self _mat1 _mat2 _beta _alpha =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)
  , *$(at::Scalar* _beta)
  , *$(at::Scalar* _alpha)));
  }|]
sspaddmm_out_tttts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
sspaddmm_out_tttts _out _self _mat1 _mat2 _beta =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)
  , *$(at::Scalar* _beta)));
  }|]
sspaddmm_out_tttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
sspaddmm_out_tttt _out _self _mat1 _mat2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sspaddmm_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _mat1)
  , *$(at::Tensor* _mat2)));
  }|]
stack_ll
  :: Ptr TensorList
  -> Int64
  -> IO (Ptr Tensor)
stack_ll _tensors _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stack(
    *$(std::vector<at::Tensor>* _tensors)
  , $(int64_t _dim)));
  }|]
stack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
stack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
stack_out_tll
  :: Ptr Tensor
  -> Ptr TensorList
  -> Int64
  -> IO (Ptr Tensor)
stack_out_tll _out _tensors _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)
  , $(int64_t _dim)));
  }|]
stack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
stack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::stack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
_stack_ll
  :: Ptr TensorList
  -> Int64
  -> IO (Ptr Tensor)
_stack_ll _tensors _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_stack(
    *$(std::vector<at::Tensor>* _tensors)
  , $(int64_t _dim)));
  }|]
_stack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
_stack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_stack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
_stack_out_tll
  :: Ptr Tensor
  -> Ptr TensorList
  -> Int64
  -> IO (Ptr Tensor)
_stack_out_tll _out _tensors _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_stack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)
  , $(int64_t _dim)));
  }|]
_stack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
_stack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_stack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
hstack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
hstack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::hstack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
hstack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
hstack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::hstack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
vstack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
vstack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::vstack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
vstack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
vstack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::vstack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
dstack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
dstack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::dstack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
dstack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
dstack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::dstack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
stft_tllltbbb
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Ptr Tensor
-> CBool
-> CBool
-> CBool
-> IO (Ptr Tensor)
stft_tllltbbb _self _n_fft _hop_length _win_length _window _normalized _onesided _return_complex =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)
, *$(at::Tensor* _window)
, $(bool _normalized)
, $(bool _onesided)
, $(bool _return_complex)));
}|]
stft_tllltbb
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Ptr Tensor
-> CBool
-> CBool
-> IO (Ptr Tensor)
stft_tllltbb _self _n_fft _hop_length _win_length _window _normalized _onesided =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)
, *$(at::Tensor* _window)
, $(bool _normalized)
, $(bool _onesided)));
}|]
stft_tllltb
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Ptr Tensor
-> CBool
-> IO (Ptr Tensor)
stft_tllltb _self _n_fft _hop_length _win_length _window _normalized =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)
, *$(at::Tensor* _window)
, $(bool _normalized)));
}|]
stft_tlllt
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Ptr Tensor
-> IO (Ptr Tensor)
stft_tlllt _self _n_fft _hop_length _win_length _window =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)
, *$(at::Tensor* _window)));
}|]
stft_tlll
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> IO (Ptr Tensor)
stft_tlll _self _n_fft _hop_length _win_length =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)));
}|]
stft_tll
:: Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr Tensor)
stft_tll _self _n_fft _hop_length =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)));
}|]
stft_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
stft_tl _self _n_fft =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::stft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)));
}|]
istft_tllltbbblb
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Ptr Tensor
-> CBool
-> CBool
-> CBool
-> Int64
-> CBool
-> IO (Ptr Tensor)
istft_tllltbbblb _self _n_fft _hop_length _win_length _window _center _normalized _onesided _length _return_complex =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)
, *$(at::Tensor* _window)
, $(bool _center)
, $(bool _normalized)
, $(bool _onesided)
, $(int64_t _length)
, $(bool _return_complex)));
}|]
istft_tllltbbbl
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Ptr Tensor
-> CBool
-> CBool
-> CBool
-> Int64
-> IO (Ptr Tensor)
istft_tllltbbbl _self _n_fft _hop_length _win_length _window _center _normalized _onesided _length =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)
, *$(at::Tensor* _window)
, $(bool _center)
, $(bool _normalized)
, $(bool _onesided)
, $(int64_t _length)));
}|]
istft_tllltbbb
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Ptr Tensor
-> CBool
-> CBool
-> CBool
-> IO (Ptr Tensor)
istft_tllltbbb _self _n_fft _hop_length _win_length _window _center _normalized _onesided =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)
, *$(at::Tensor* _window)
, $(bool _center)
, $(bool _normalized)
, $(bool _onesided)));
}|]
istft_tllltbb
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Ptr Tensor
-> CBool
-> CBool
-> IO (Ptr Tensor)
istft_tllltbb _self _n_fft _hop_length _win_length _window _center _normalized =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)
, *$(at::Tensor* _window)
, $(bool _center)
, $(bool _normalized)));
}|]
istft_tllltb
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Ptr Tensor
-> CBool
-> IO (Ptr Tensor)
istft_tllltb _self _n_fft _hop_length _win_length _window _center =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)
, *$(at::Tensor* _window)
, $(bool _center)));
}|]
istft_tlllt
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> Ptr Tensor
-> IO (Ptr Tensor)
istft_tlllt _self _n_fft _hop_length _win_length _window =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)
, *$(at::Tensor* _window)));
}|]
istft_tlll
:: Ptr Tensor
-> Int64
-> Int64
-> Int64
-> IO (Ptr Tensor)
istft_tlll _self _n_fft _hop_length _win_length =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)
, $(int64_t _win_length)));
}|]
istft_tll
:: Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr Tensor)
istft_tll _self _n_fft _hop_length =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)
, $(int64_t _hop_length)));
}|]
istft_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
istft_tl _self _n_fft =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::istft(
*$(at::Tensor* _self)
, $(int64_t _n_fft)));
}|]
-- | Binding for @at::stride(Tensor, int64_t)@: the stride of @_self@
-- along dimension @_dim@, addressed by index.  Returns the value
-- directly (no heap allocation, unlike the tensor-returning bindings).
stride_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Int64)
stride_tl _self _dim =
  [C.throwBlock| int64_t { return (at::stride(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | As 'stride_tl', but the dimension is addressed by name
-- (@at::Dimname@) instead of by index.
stride_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Int64)
stride_tn _self _dim =
  [C.throwBlock| int64_t { return (at::stride(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
sum_ts
:: Ptr Tensor
-> ScalarType
-> IO (Ptr Tensor)
sum_ts _self _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
*$(at::Tensor* _self)
, $(at::ScalarType _dtype)));
}|]
sum_t
:: Ptr Tensor
-> IO (Ptr Tensor)
sum_t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
*$(at::Tensor* _self)));
}|]
sum_tlbs
:: Ptr Tensor
-> Ptr IntArray
-> CBool
-> ScalarType
-> IO (Ptr Tensor)
sum_tlbs _self _dim _keepdim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _keepdim)
, $(at::ScalarType _dtype)));
}|]
sum_tlb
:: Ptr Tensor
-> Ptr IntArray
-> CBool
-> IO (Ptr Tensor)
sum_tlb _self _dim _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _keepdim)));
}|]
sum_tl
:: Ptr Tensor
-> Ptr IntArray
-> IO (Ptr Tensor)
sum_tl _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)));
}|]
sum_tNbs
:: Ptr Tensor
-> Ptr DimnameList
-> CBool
-> ScalarType
-> IO (Ptr Tensor)
sum_tNbs _self _dim _keepdim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(bool _keepdim)
, $(at::ScalarType _dtype)));
}|]
sum_tNb
:: Ptr Tensor
-> Ptr DimnameList
-> CBool
-> IO (Ptr Tensor)
sum_tNb _self _dim _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(bool _keepdim)));
}|]
sum_tN
:: Ptr Tensor
-> Ptr DimnameList
-> IO (Ptr Tensor)
sum_tN _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)));
}|]
sum_out_ttlbs
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> CBool
-> ScalarType
-> IO (Ptr Tensor)
sum_out_ttlbs _out _self _dim _keepdim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _keepdim)
, $(at::ScalarType _dtype)));
}|]
sum_out_ttlb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> CBool
-> IO (Ptr Tensor)
sum_out_ttlb _out _self _dim _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _keepdim)));
}|]
sum_out_ttl
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> IO (Ptr Tensor)
sum_out_ttl _out _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)));
}|]
sum_out_ttNbs
:: Ptr Tensor
-> Ptr Tensor
-> Ptr DimnameList
-> CBool
-> ScalarType
-> IO (Ptr Tensor)
sum_out_ttNbs _out _self _dim _keepdim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(bool _keepdim)
, $(at::ScalarType _dtype)));
}|]
sum_out_ttNb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr DimnameList
-> CBool
-> IO (Ptr Tensor)
sum_out_ttNb _out _self _dim _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(bool _keepdim)));
}|]
sum_out_ttN
:: Ptr Tensor
-> Ptr Tensor
-> Ptr DimnameList
-> IO (Ptr Tensor)
sum_out_ttN _out _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::sum_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)));
}|]
nansum_ts
:: Ptr Tensor
-> ScalarType
-> IO (Ptr Tensor)
nansum_ts _self _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum(
*$(at::Tensor* _self)
, $(at::ScalarType _dtype)));
}|]
nansum_t
:: Ptr Tensor
-> IO (Ptr Tensor)
nansum_t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum(
*$(at::Tensor* _self)));
}|]
nansum_tlbs
:: Ptr Tensor
-> Ptr IntArray
-> CBool
-> ScalarType
-> IO (Ptr Tensor)
nansum_tlbs _self _dim _keepdim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _keepdim)
, $(at::ScalarType _dtype)));
}|]
nansum_tlb
:: Ptr Tensor
-> Ptr IntArray
-> CBool
-> IO (Ptr Tensor)
nansum_tlb _self _dim _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _keepdim)));
}|]
nansum_tl
:: Ptr Tensor
-> Ptr IntArray
-> IO (Ptr Tensor)
nansum_tl _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)));
}|]
nansum_out_ttlbs
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> CBool
-> ScalarType
-> IO (Ptr Tensor)
nansum_out_ttlbs _out _self _dim _keepdim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _keepdim)
, $(at::ScalarType _dtype)));
}|]
nansum_out_ttlb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> CBool
-> IO (Ptr Tensor)
nansum_out_ttlb _out _self _dim _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _keepdim)));
}|]
nansum_out_ttl
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> IO (Ptr Tensor)
nansum_out_ttl _out _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::nansum_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)));
}|]
-- | Binding for @at::sqrt@ applied to @_self@.  The result is a
-- freshly @new@-allocated @at::Tensor@; the caller owns (and must
-- eventually free) the returned pointer.
sqrt_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
sqrt_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sqrt(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::sqrt_@ (trailing underscore: ATen's naming
-- convention for the in-place variant operating on @_self@).
sqrt__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
sqrt__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sqrt_(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::sqrt_out@; per ATen's @_out@ convention the
-- first argument @_out@ receives the result.
sqrt_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
sqrt_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::sqrt_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::square@ applied to @_self@.  Returns a
-- @new@-allocated @at::Tensor@ owned by the caller.
square_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
square_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::square(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::square_@ (trailing underscore: ATen's in-place
-- naming convention).
square__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
square__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::square_(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::square_out@; the first argument @_out@ receives
-- the result, per ATen's @_out@ convention.
square_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
square_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::square_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
std_tb
:: Ptr Tensor
-> CBool
-> IO (Ptr Tensor)
std_tb _self _unbiased =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, $(bool _unbiased)));
}|]
std_t
:: Ptr Tensor
-> IO (Ptr Tensor)
std_t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)));
}|]
std_tlbb
:: Ptr Tensor
-> Ptr IntArray
-> CBool
-> CBool
-> IO (Ptr Tensor)
std_tlbb _self _dim _unbiased _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _unbiased)
, $(bool _keepdim)));
}|]
std_tlb
:: Ptr Tensor
-> Ptr IntArray
-> CBool
-> IO (Ptr Tensor)
std_tlb _self _dim _unbiased =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _unbiased)));
}|]
std_tl
:: Ptr Tensor
-> Ptr IntArray
-> IO (Ptr Tensor)
std_tl _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)));
}|]
std_tllb
:: Ptr Tensor
-> Ptr IntArray
-> Int64
-> CBool
-> IO (Ptr Tensor)
std_tllb _self _dim _correction _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(int64_t _correction)
, $(bool _keepdim)));
}|]
std_tll
:: Ptr Tensor
-> Ptr IntArray
-> Int64
-> IO (Ptr Tensor)
std_tll _self _dim _correction =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(int64_t _correction)));
}|]
std_mean_tb
:: Ptr Tensor
-> CBool
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tb _self _unbiased =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, $(bool _unbiased)));
}|]
std_mean_t
:: Ptr Tensor
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_t _self =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)));
}|]
std_mean_tlbb
:: Ptr Tensor
-> Ptr IntArray
-> CBool
-> CBool
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tlbb _self _dim _unbiased _keepdim =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _unbiased)
, $(bool _keepdim)));
}|]
std_mean_tlb
:: Ptr Tensor
-> Ptr IntArray
-> CBool
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tlb _self _dim _unbiased =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _unbiased)));
}|]
std_mean_tl
:: Ptr Tensor
-> Ptr IntArray
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tl _self _dim =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)));
}|]
std_mean_tllb
:: Ptr Tensor
-> Ptr IntArray
-> Int64
-> CBool
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tllb _self _dim _correction _keepdim =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(int64_t _correction)
, $(bool _keepdim)));
}|]
std_mean_tll
:: Ptr Tensor
-> Ptr IntArray
-> Int64
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tll _self _dim _correction =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(int64_t _correction)));
}|]
std_mean_tNbb
:: Ptr Tensor
-> Ptr DimnameList
-> CBool
-> CBool
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tNbb _self _dim _unbiased _keepdim =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(bool _unbiased)
, $(bool _keepdim)));
}|]
std_mean_tNb
:: Ptr Tensor
-> Ptr DimnameList
-> CBool
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tNb _self _dim _unbiased =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(bool _unbiased)));
}|]
std_mean_tN
:: Ptr Tensor
-> Ptr DimnameList
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tN _self _dim =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)));
}|]
std_mean_tNlb
:: Ptr Tensor
-> Ptr DimnameList
-> Int64
-> CBool
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tNlb _self _dim _correction _keepdim =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(int64_t _correction)
, $(bool _keepdim)));
}|]
std_mean_tNl
:: Ptr Tensor
-> Ptr DimnameList
-> Int64
-> IO (Ptr (StdTuple '(Tensor,Tensor)))
std_mean_tNl _self _dim _correction =
[C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::std_mean(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(int64_t _correction)));
}|]
std_out_ttlbb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> CBool
-> CBool
-> IO (Ptr Tensor)
std_out_ttlbb _out _self _dim _unbiased _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _unbiased)
, $(bool _keepdim)));
}|]
std_out_ttlb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> CBool
-> IO (Ptr Tensor)
std_out_ttlb _out _self _dim _unbiased =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(bool _unbiased)));
}|]
std_out_ttl
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> IO (Ptr Tensor)
std_out_ttl _out _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)));
}|]
std_out_ttllb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> Int64
-> CBool
-> IO (Ptr Tensor)
std_out_ttllb _out _self _dim _correction _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(int64_t _correction)
, $(bool _keepdim)));
}|]
std_out_ttll
:: Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> Int64
-> IO (Ptr Tensor)
std_out_ttll _out _self _dim _correction =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<int64_t>* _dim)
, $(int64_t _correction)));
}|]
std_tNbb
:: Ptr Tensor
-> Ptr DimnameList
-> CBool
-> CBool
-> IO (Ptr Tensor)
std_tNbb _self _dim _unbiased _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(bool _unbiased)
, $(bool _keepdim)));
}|]
std_tNb
:: Ptr Tensor
-> Ptr DimnameList
-> CBool
-> IO (Ptr Tensor)
std_tNb _self _dim _unbiased =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(bool _unbiased)));
}|]
std_tN
:: Ptr Tensor
-> Ptr DimnameList
-> IO (Ptr Tensor)
std_tN _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)));
}|]
std_out_ttNbb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr DimnameList
-> CBool
-> CBool
-> IO (Ptr Tensor)
std_out_ttNbb _out _self _dim _unbiased _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(bool _unbiased)
, $(bool _keepdim)));
}|]
std_out_ttNb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr DimnameList
-> CBool
-> IO (Ptr Tensor)
std_out_ttNb _out _self _dim _unbiased =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(bool _unbiased)));
}|]
std_out_ttN
:: Ptr Tensor
-> Ptr Tensor
-> Ptr DimnameList
-> IO (Ptr Tensor)
std_out_ttN _out _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)));
}|]
std_tNlb
:: Ptr Tensor
-> Ptr DimnameList
-> Int64
-> CBool
-> IO (Ptr Tensor)
std_tNlb _self _dim _correction _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(int64_t _correction)
, $(bool _keepdim)));
}|]
std_tNl
:: Ptr Tensor
-> Ptr DimnameList
-> Int64
-> IO (Ptr Tensor)
std_tNl _self _dim _correction =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std(
*$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(int64_t _correction)));
}|]
std_out_ttNlb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr DimnameList
-> Int64
-> CBool
-> IO (Ptr Tensor)
std_out_ttNlb _out _self _dim _correction _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(int64_t _correction)
, $(bool _keepdim)));
}|]
std_out_ttNl
:: Ptr Tensor
-> Ptr Tensor
-> Ptr DimnameList
-> Int64
-> IO (Ptr Tensor)
std_out_ttNl _out _self _dim _correction =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::std_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(std::vector<at::Dimname>* _dim)
, $(int64_t _correction)));
}|]
prod_ts
:: Ptr Tensor
-> ScalarType
-> IO (Ptr Tensor)
prod_ts _self _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
*$(at::Tensor* _self)
, $(at::ScalarType _dtype)));
}|]
prod_t
:: Ptr Tensor
-> IO (Ptr Tensor)
prod_t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
*$(at::Tensor* _self)));
}|]
prod_tlbs
:: Ptr Tensor
-> Int64
-> CBool
-> ScalarType
-> IO (Ptr Tensor)
prod_tlbs _self _dim _keepdim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
*$(at::Tensor* _self)
, $(int64_t _dim)
, $(bool _keepdim)
, $(at::ScalarType _dtype)));
}|]
prod_tlb
:: Ptr Tensor
-> Int64
-> CBool
-> IO (Ptr Tensor)
prod_tlb _self _dim _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
*$(at::Tensor* _self)
, $(int64_t _dim)
, $(bool _keepdim)));
}|]
prod_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
prod_tl _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
*$(at::Tensor* _self)
, $(int64_t _dim)));
}|]
prod_out_ttlbs
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> CBool
-> ScalarType
-> IO (Ptr Tensor)
prod_out_ttlbs _out _self _dim _keepdim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, $(int64_t _dim)
, $(bool _keepdim)
, $(at::ScalarType _dtype)));
}|]
prod_out_ttlb
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> CBool
-> IO (Ptr Tensor)
prod_out_ttlb _out _self _dim _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, $(int64_t _dim)
, $(bool _keepdim)));
}|]
prod_out_ttl
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
prod_out_ttl _out _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, $(int64_t _dim)));
}|]
prod_tnbs
:: Ptr Tensor
-> Ptr Dimname
-> CBool
-> ScalarType
-> IO (Ptr Tensor)
prod_tnbs _self _dim _keepdim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
*$(at::Tensor* _self)
, *$(at::Dimname* _dim)
, $(bool _keepdim)
, $(at::ScalarType _dtype)));
}|]
prod_tnb
:: Ptr Tensor
-> Ptr Dimname
-> CBool
-> IO (Ptr Tensor)
prod_tnb _self _dim _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
*$(at::Tensor* _self)
, *$(at::Dimname* _dim)
, $(bool _keepdim)));
}|]
prod_tn
:: Ptr Tensor
-> Ptr Dimname
-> IO (Ptr Tensor)
prod_tn _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod(
*$(at::Tensor* _self)
, *$(at::Dimname* _dim)));
}|]
prod_out_ttnbs
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Dimname
-> CBool
-> ScalarType
-> IO (Ptr Tensor)
prod_out_ttnbs _out _self _dim _keepdim _dtype =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(at::Dimname* _dim)
, $(bool _keepdim)
, $(at::ScalarType _dtype)));
}|]
prod_out_ttnb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Dimname
-> CBool
-> IO (Ptr Tensor)
prod_out_ttnb _out _self _dim _keepdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(at::Dimname* _dim)
, $(bool _keepdim)));
}|]
prod_out_ttn
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Dimname
-> IO (Ptr Tensor)
prod_out_ttn _out _self _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::prod_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(at::Dimname* _dim)));
}|]
-- | Binding for @at::t@ applied to @_self@.  Returns a
-- @new@-allocated @at::Tensor@ owned by the caller.
t_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
t_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::t(
    *$(at::Tensor* _self)));
  }|]
tan_t
:: Ptr Tensor
-> IO (Ptr Tensor)
tan_t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::tan(
*$(at::Tensor* _self)));
}|]
tan__t
:: Ptr Tensor
-> IO (Ptr Tensor)
tan__t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::tan_(
*$(at::Tensor* _self)));
}|]
tan_out_tt
:: Ptr Tensor
-> Ptr Tensor
-> IO (Ptr Tensor)
tan_out_tt _out _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::tan_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)));
}|]
tanh_t
:: Ptr Tensor
-> IO (Ptr Tensor)
tanh_t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::tanh(
*$(at::Tensor* _self)));
}|]
tanh__t
:: Ptr Tensor
-> IO (Ptr Tensor)
tanh__t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::tanh_(
*$(at::Tensor* _self)));
}|]
tanh_out_tt
:: Ptr Tensor
-> Ptr Tensor
-> IO (Ptr Tensor)
tanh_out_tt _out _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::tanh_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)));
}|]
-- | Binding for @at::tensordot@: contracts @_self@ with @_other@ over
-- the dimension lists @_dims_self@ / @_dims_other@.  Returns a
-- @new@-allocated @at::Tensor@ owned by the caller.
tensordot_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr Tensor)
tensordot_ttll _self _other _dims_self _dims_other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tensordot(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::vector<int64_t>* _dims_self)
  , *$(std::vector<int64_t>* _dims_other)));
  }|]
-- | Binding for @at::tensordot_out@; the first argument @_out@
-- receives the result, per ATen's @_out@ convention.
tensordot_out_tttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr Tensor)
tensordot_out_tttll _out _self _other _dims_self _dims_other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tensordot_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::vector<int64_t>* _dims_self)
  , *$(std::vector<int64_t>* _dims_other)));
  }|]
threshold_tss
:: Ptr Tensor
-> Ptr Scalar
-> Ptr Scalar
-> IO (Ptr Tensor)
threshold_tss _self _threshold _value =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::threshold(
*$(at::Tensor* _self)
, *$(at::Scalar* _threshold)
, *$(at::Scalar* _value)));
}|]
threshold__tss
:: Ptr Tensor
-> Ptr Scalar
-> Ptr Scalar
-> IO (Ptr Tensor)
threshold__tss _self _threshold _value =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::threshold_(
*$(at::Tensor* _self)
, *$(at::Scalar* _threshold)
, *$(at::Scalar* _value)));
}|]
threshold_out_ttss
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Scalar
-> Ptr Scalar
-> IO (Ptr Tensor)
threshold_out_ttss _out _self _threshold _value =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::threshold_out(
*$(at::Tensor* _out)
, *$(at::Tensor* _self)
, *$(at::Scalar* _threshold)
, *$(at::Scalar* _value)));
}|]
threshold_backward_out_ttts
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> Ptr Scalar
-> IO (Ptr Tensor)
threshold_backward_out_ttts _grad_input _grad_output _self _threshold =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::threshold_backward_out(
*$(at::Tensor* _grad_input)
, *$(at::Tensor* _grad_output)
, *$(at::Tensor* _self)
, *$(at::Scalar* _threshold)));
}|]
threshold_backward_tts
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Scalar
-> IO (Ptr Tensor)
threshold_backward_tts _grad_output _self _threshold =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::threshold_backward(
*$(at::Tensor* _grad_output)
, *$(at::Tensor* _self)
, *$(at::Scalar* _threshold)));
}|]
-- | Binding for @at::tile@: repeats @_self@ according to the
-- multiplicities in @_dims@.  Returns a @new@-allocated
-- @at::Tensor@ owned by the caller.
tile_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
tile_tl _self _dims =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::tile(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dims)));
  }|]
transpose_tll
:: Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr Tensor)
transpose_tll _self _dim0 _dim1 =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::transpose(
*$(at::Tensor* _self)
, $(int64_t _dim0)
, $(int64_t _dim1)));
}|]
transpose_tnn
:: Ptr Tensor
-> Ptr Dimname
-> Ptr Dimname
-> IO (Ptr Tensor)
transpose_tnn _self _dim0 _dim1 =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::transpose(
*$(at::Tensor* _self)
, *$(at::Dimname* _dim0)
, *$(at::Dimname* _dim1)));
}|]
_mkldnn_transpose_tll
:: Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr Tensor)
_mkldnn_transpose_tll _self _dim0 _dim1 =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::_mkldnn_transpose(
*$(at::Tensor* _self)
, $(int64_t _dim0)
, $(int64_t _dim1)));
}|]
_mkldnn_transpose__tll
:: Ptr Tensor
-> Int64
-> Int64
-> IO (Ptr Tensor)
_mkldnn_transpose__tll _self _dim0 _dim1 =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::_mkldnn_transpose_(
*$(at::Tensor* _self)
, $(int64_t _dim0)
, $(int64_t _dim1)));
}|]
-- | Binding for @at::one_hot@ with an explicit @_num_classes@.
-- Returns a @new@-allocated @at::Tensor@ owned by the caller.
one_hot_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
one_hot_tl _self _num_classes =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::one_hot(
    *$(at::Tensor* _self)
  , $(int64_t _num_classes)));
  }|]
-- | As 'one_hot_tl' but letting the C++ default supply
-- @num_classes@.
one_hot_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
one_hot_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::one_hot(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::flip@: reverses @_self@ along each dimension
-- listed in @_dims@.  Returns a @new@-allocated @at::Tensor@ owned
-- by the caller.
flip_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
flip_tl _self _dims =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::flip(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dims)));
  }|]
-- | Binding for @at::fliplr@ (left/right flip) applied to @_self@.
fliplr_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
fliplr_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::fliplr(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::flipud@ (up/down flip) applied to @_self@.
flipud_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
flipud_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::flipud(
    *$(at::Tensor* _self)));
  }|]
roll_tll
:: Ptr Tensor
-> Ptr IntArray
-> Ptr IntArray
-> IO (Ptr Tensor)
roll_tll _self _shifts _dims =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::roll(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _shifts)
, *$(std::vector<int64_t>* _dims)));
}|]
roll_tl
:: Ptr Tensor
-> Ptr IntArray
-> IO (Ptr Tensor)
roll_tl _self _shifts =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::roll(
*$(at::Tensor* _self)
, *$(std::vector<int64_t>* _shifts)));
}|]
rot90_tll
:: Ptr Tensor
-> Int64
-> Ptr IntArray
-> IO (Ptr Tensor)
rot90_tll _self _k _dims =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::rot90(
*$(at::Tensor* _self)
, $(int64_t _k)
, *$(std::vector<int64_t>* _dims)));
}|]
rot90_tl
:: Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
rot90_tl _self _k =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::rot90(
*$(at::Tensor* _self)
, $(int64_t _k)));
}|]
rot90_t
:: Ptr Tensor
-> IO (Ptr Tensor)
rot90_t _self =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::rot90(
*$(at::Tensor* _self)));
}|]
trapezoid_ttl
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
trapezoid_ttl _y _x _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::trapezoid(
*$(at::Tensor* _y)
, *$(at::Tensor* _x)
, $(int64_t _dim)));
}|]
trapezoid_tt
:: Ptr Tensor
-> Ptr Tensor
-> IO (Ptr Tensor)
trapezoid_tt _y _x =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::trapezoid(
*$(at::Tensor* _y)
, *$(at::Tensor* _x)));
}|]
trapezoid_tsl
:: Ptr Tensor
-> Ptr Scalar
-> Int64
-> IO (Ptr Tensor)
trapezoid_tsl _y _dx _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::trapezoid(
*$(at::Tensor* _y)
, *$(at::Scalar* _dx)
, $(int64_t _dim)));
}|]
trapezoid_ts
:: Ptr Tensor
-> Ptr Scalar
-> IO (Ptr Tensor)
trapezoid_ts _y _dx =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::trapezoid(
*$(at::Tensor* _y)
, *$(at::Scalar* _dx)));
}|]
trapezoid_t
:: Ptr Tensor
-> IO (Ptr Tensor)
trapezoid_t _y =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::trapezoid(
*$(at::Tensor* _y)));
}|]
trapz_ttl
:: Ptr Tensor
-> Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
trapz_ttl _y _x _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::trapz(
*$(at::Tensor* _y)
, *$(at::Tensor* _x)
, $(int64_t _dim)));
}|]
trapz_tt
:: Ptr Tensor
-> Ptr Tensor
-> IO (Ptr Tensor)
trapz_tt _y _x =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::trapz(
*$(at::Tensor* _y)
, *$(at::Tensor* _x)));
}|]
trapz_tdl
:: Ptr Tensor
-> CDouble
-> Int64
-> IO (Ptr Tensor)
trapz_tdl _y _dx _dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::trapz(
*$(at::Tensor* _y)
, $(double _dx)
, $(int64_t _dim)));
}|]
trapz_td
:: Ptr Tensor
-> CDouble
-> IO (Ptr Tensor)
trapz_td _y _dx =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::trapz(
*$(at::Tensor* _y)
, $(double _dx)));
}|]
trapz_t
:: Ptr Tensor
-> IO (Ptr Tensor)
trapz_t _y =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::trapz(
*$(at::Tensor* _y)));
}|]
_trilinear_tttlllll
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> Ptr IntArray
-> Ptr IntArray
-> Ptr IntArray
-> Int64
-> IO (Ptr Tensor)
_trilinear_tttlllll _i1 _i2 _i3 _expand1 _expand2 _expand3 _sumdim _unroll_dim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::_trilinear(
*$(at::Tensor* _i1)
, *$(at::Tensor* _i2)
, *$(at::Tensor* _i3)
, *$(std::vector<int64_t>* _expand1)
, *$(std::vector<int64_t>* _expand2)
, *$(std::vector<int64_t>* _expand3)
, *$(std::vector<int64_t>* _sumdim)
, $(int64_t _unroll_dim)));
}|]
_trilinear_tttllll
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> Ptr IntArray
-> Ptr IntArray
-> Ptr IntArray
-> Ptr IntArray
-> IO (Ptr Tensor)
_trilinear_tttllll _i1 _i2 _i3 _expand1 _expand2 _expand3 _sumdim =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::_trilinear(
*$(at::Tensor* _i1)
, *$(at::Tensor* _i2)
, *$(at::Tensor* _i3)
, *$(std::vector<int64_t>* _expand1)
, *$(std::vector<int64_t>* _expand2)
, *$(std::vector<int64_t>* _expand3)
, *$(std::vector<int64_t>* _sumdim)));
}|]
triplet_margin_loss_tttdddbl
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> CDouble
-> CDouble
-> CDouble
-> CBool
-> Int64
-> IO (Ptr Tensor)
triplet_margin_loss_tttdddbl _anchor _positive _negative _margin _p _eps _swap _reduction =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
*$(at::Tensor* _anchor)
, *$(at::Tensor* _positive)
, *$(at::Tensor* _negative)
, $(double _margin)
, $(double _p)
, $(double _eps)
, $(bool _swap)
, $(int64_t _reduction)));
}|]
triplet_margin_loss_tttdddb
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> CDouble
-> CDouble
-> CDouble
-> CBool
-> IO (Ptr Tensor)
triplet_margin_loss_tttdddb _anchor _positive _negative _margin _p _eps _swap =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
*$(at::Tensor* _anchor)
, *$(at::Tensor* _positive)
, *$(at::Tensor* _negative)
, $(double _margin)
, $(double _p)
, $(double _eps)
, $(bool _swap)));
}|]
triplet_margin_loss_tttddd
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> CDouble
-> CDouble
-> CDouble
-> IO (Ptr Tensor)
triplet_margin_loss_tttddd _anchor _positive _negative _margin _p _eps =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
*$(at::Tensor* _anchor)
, *$(at::Tensor* _positive)
, *$(at::Tensor* _negative)
, $(double _margin)
, $(double _p)
, $(double _eps)));
}|]
triplet_margin_loss_tttdd
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> CDouble
-> CDouble
-> IO (Ptr Tensor)
triplet_margin_loss_tttdd _anchor _positive _negative _margin _p =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
*$(at::Tensor* _anchor)
, *$(at::Tensor* _positive)
, *$(at::Tensor* _negative)
, $(double _margin)
, $(double _p)));
}|]
triplet_margin_loss_tttd
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> CDouble
-> IO (Ptr Tensor)
triplet_margin_loss_tttd _anchor _positive _negative _margin =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
*$(at::Tensor* _anchor)
, *$(at::Tensor* _positive)
, *$(at::Tensor* _negative)
, $(double _margin)));
}|]
triplet_margin_loss_ttt
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> IO (Ptr Tensor)
triplet_margin_loss_ttt _anchor _positive _negative =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::triplet_margin_loss(
*$(at::Tensor* _anchor)
, *$(at::Tensor* _positive)
, *$(at::Tensor* _negative)));
}|]
-- | Binding for @at::trunc@ applied to @_self@.  Returns a
-- @new@-allocated @at::Tensor@ owned by the caller.
trunc_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
trunc_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trunc(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::trunc_@ (trailing underscore: ATen's in-place
-- naming convention).
trunc__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
trunc__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trunc_(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::trunc_out@; the first argument @_out@ receives
-- the result, per ATen's @_out@ convention.
trunc_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
trunc_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::trunc_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::fix@ applied to @_self@ (ATen's alias family
-- alongside @at::trunc@ — see the PyTorch docs for the exact
-- relationship).
fix_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
fix_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::fix(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::fix_@ (in-place variant).
fix__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
fix__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::fix_(
    *$(at::Tensor* _self)));
  }|]
-- | Binding for @at::fix_out@; @_out@ receives the result.
fix_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
fix_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::fix_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | Binding for the internal @at::_has_compatible_shallow_copy_type@
-- predicate on @_self@ and @_from@.  Returns the boolean directly
-- (no allocation).
_has_compatible_shallow_copy_type_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (CBool)
_has_compatible_shallow_copy_type_tt _self _from =
  [C.throwBlock| bool { return (at::_has_compatible_shallow_copy_type(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _from)));
  }|]
-- | Binding for the internal @at::_unique@ with explicit @_sorted@
-- and @_return_inverse@ flags.  Returns a @new@-allocated
-- @std::tuple@ of two tensors; the caller owns the pointer.
_unique_tbb
  :: Ptr Tensor
  -> CBool
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_unique_tbb _self _sorted _return_inverse =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_unique(
    *$(at::Tensor* _self)
  , $(bool _sorted)
  , $(bool _return_inverse)));
  }|]
-- | As '_unique_tbb', letting the C++ default supply
-- @return_inverse@.
_unique_tb
  :: Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_unique_tb _self _sorted =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_unique(
    *$(at::Tensor* _self)
  , $(bool _sorted)));
  }|]
-- | As '_unique_tbb', letting the C++ defaults supply both
-- @sorted@ and @return_inverse@.
_unique_t
  :: Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_unique_t _self =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_unique(
    *$(at::Tensor* _self)));
  }|]
|
3a51334d9b9b9192d1acd18ea28f8df05a990751559a2fb9a0b39965e289013e | Shimuuar/histogram-fill | Read.hs | -- | Helper function for defining Read instances for bin data types.
module Data.Histogram.Bin.Read
( ws
, eol
, value
, maybeValue
, keyword
) where
import Text.Read
import Text.ParserCombinators.ReadP (ReadP, many, satisfy, char, string)
-- | Zero or more horizontal whitespace characters (space or tab).
ws :: ReadP String
ws = many (satisfy isHSpace)
  where
    isHSpace c = c == ' ' || c == '\t'
-- | Exactly one newline character.
eol :: ReadP Char
eol = satisfy ('\n' ==)
-- | An equals sign, optionally preceded by horizontal whitespace.
eq :: ReadP ()
eq = do
  _ <- ws
  _ <- char '='
  return ()
-- | Parse a @# key =@ header and then the value on the same line.
value :: Read a => String -> ReadPrec a
value str = lift (key str >> eq) >> getVal
-- | Parse a @# key =@ header followed by an optional value: yields
-- 'Nothing' when the rest of the line is blank (that branch is tried
-- first via the left-biased '<++'), otherwise 'Just' the parsed value.
maybeValue :: Read a => String -> ReadPrec (Maybe a)
maybeValue str = do lift (key str >> eq)
                    lift (ws >> eol >> return Nothing) <++ (Just `fmap` getVal)
-- | Match a bare @# key@ line that carries no value: only trailing
-- whitespace and a newline may follow the keyword.
keyword :: String -> ReadPrec ()
keyword str = lift $ do
  _ <- key str
  _ <- ws
  _ <- eol
  return ()
-- | A @#@ marker, optional whitespace, then the given keyword string.
key :: String -> ReadP String
key s = do
  _ <- char '#'
  _ <- ws
  string s
-- | Parse a value with 'readPrec' and require the line to end
-- immediately after it.
getVal :: Read a => ReadPrec a
getVal = do
  x <- readPrec
  _ <- lift eol
  return x
| null | https://raw.githubusercontent.com/Shimuuar/histogram-fill/3dff15027390cf64e7fc3fbaac34c28ffcdacbd6/histogram-fill/Data/Histogram/Bin/Read.hs | haskell | | Helper function for defining Read instances for bin data types.
| Whitespaces
| End of line
| Equal sign
| Key value pair
| Return optional value
| Keyword | module Data.Histogram.Bin.Read
( ws
, eol
, value
, maybeValue
, keyword
) where
import Text.Read
import Text.ParserCombinators.ReadP (ReadP, many, satisfy, char, string)
ws :: ReadP String
ws = many $ satisfy (`elem` " \t")
eol :: ReadP Char
eol = char '\n'
eq :: ReadP ()
eq = ws >> char '=' >> return ()
value :: Read a => String -> ReadPrec a
value str = do lift $ key str >> eq
getVal
maybeValue :: Read a => String -> ReadPrec (Maybe a)
maybeValue str = do lift (key str >> eq)
lift (ws >> eol >> return Nothing) <++ (Just `fmap` getVal)
keyword :: String -> ReadPrec ()
keyword str = lift $ key str >> ws >> eol >> return ()
key :: String -> ReadP String
key s = char '#' >> ws >> string s
getVal :: Read a => ReadPrec a
getVal = do x <- readPrec
lift eol >> return x
|
9f4c7ffd36169a5eaedfc17214062e8e7ae1c0904331cf9fec09a36c5cc9a856 | dmitryvk/sbcl-win32-threads | dlisp3.lisp | This software is part of the SBCL system . See the README file for
;;;; more information.
This software is derived from software originally released by Xerox
;;;; Corporation. Copyright and release statements follow. Later modifications
;;;; to the software are in the public domain and are provided with
;;;; absolutely no warranty. See the COPYING and CREDITS files for more
;;;; information.
copyright information from original PCL sources :
;;;;
Copyright ( c ) 1985 , 1986 , 1987 , 1988 , 1989 , 1990 Xerox Corporation .
;;;; All rights reserved.
;;;;
;;;; Use and copying of this software and preparation of derivative works based
;;;; upon this software are permitted. Any distribution of this software or
derivative works must comply with all applicable United States export
;;;; control laws.
;;;;
This software is made available AS IS , and Xerox Corporation makes no
;;;; warranty about the software, its performance or its conformity to any
;;;; specification.
(in-package "SB-PCL")
;;; Shapes of checking/caching discriminating functions to precompile.
;;; Each entry is (cached-emf-p return-value-p metatypes applyp), as
;;; destructured by the DOLIST that follows; METATYPES lists one
;;; per-argument metatype (CLASS or T).
(eval-when (:compile-toplevel :load-toplevel :execute)
(defparameter *checking-or-caching-list*
  '((t nil (class) nil)
    (t nil (class class) nil)
    (t nil (class class class) nil)
    (t nil (class class t) nil)
    (t nil (class class t t) nil)
    (t nil (class class t t t) nil)
    (t nil (class t) nil)
    (t nil (class t t) nil)
    (t nil (class t t t) nil)
    (t nil (class t t t t) nil)
    (t nil (class t t t t t) nil)
    (t nil (class t t t t t t) nil)
    (t nil (t class) nil)
    (t nil (t class t) nil)
    (t nil (t t class) nil)
    (t nil (class) t)
    (t nil (class class) t)
    (t nil (class t) t)
    (t nil (class t t) t)
    (t nil (class t t t) t)
    (t nil (t class) t)
    (t t (class) nil)
    (t t (class class) nil)
    (t t (class class class) nil)
    (nil nil (class) nil)
    (nil nil (class class) nil)
    (nil nil (class class t) nil)
    (nil nil (class class t t) nil)
    (nil nil (class t) nil)
    (nil nil (t class t) nil)
    (nil nil (class) t)
    (nil nil (class class) t)))
EVAL - WHEN
;;; Rather than compiling the constructors here, just tickle the range
;;; of shapes defined above, leaving the generation of the
;;; constructors to precompile-dfun-constructors.
(dolist (key *checking-or-caching-list*)
  (destructuring-bind (cached-emf-p return-value-p metatypes applyp) key
    (multiple-value-bind (args generator)
        (if cached-emf-p
            ;; Cached dfuns: a constant-value emitter when only the
            ;; return value matters, otherwise a full caching emitter.
            (if return-value-p
                (values (list metatypes) 'emit-constant-value)
                (values (list metatypes applyp) 'emit-caching))
            ;; Checking dfuns, analogously.
            (if return-value-p
                (values (list metatypes) 'emit-in-checking-p)
                (values (list metatypes applyp) 'emit-checking)))
      (apply #'get-dfun-constructor generator args))))
| null | https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/pcl/dlisp3.lisp | lisp | more information.
Corporation. Copyright and release statements follow. Later modifications
to the software are in the public domain and are provided with
absolutely no warranty. See the COPYING and CREDITS files for more
information.
All rights reserved.
Use and copying of this software and preparation of derivative works based
upon this software are permitted. Any distribution of this software or
control laws.
warranty about the software, its performance or its conformity to any
specification.
Rather than compiling the constructors here, just tickle the range
of shapes defined above, leaving the generation of the
constructors to precompile-dfun-constructors. | This software is part of the SBCL system . See the README file for
This software is derived from software originally released by Xerox
copyright information from original PCL sources :
Copyright ( c ) 1985 , 1986 , 1987 , 1988 , 1989 , 1990 Xerox Corporation .
derivative works must comply with all applicable United States export
This software is made available AS IS , and Xerox Corporation makes no
(in-package "SB-PCL")
(eval-when (:compile-toplevel :load-toplevel :execute)
(defparameter *checking-or-caching-list*
'((t nil (class) nil)
(t nil (class class) nil)
(t nil (class class class) nil)
(t nil (class class t) nil)
(t nil (class class t t) nil)
(t nil (class class t t t) nil)
(t nil (class t) nil)
(t nil (class t t) nil)
(t nil (class t t t) nil)
(t nil (class t t t t) nil)
(t nil (class t t t t t) nil)
(t nil (class t t t t t t) nil)
(t nil (t class) nil)
(t nil (t class t) nil)
(t nil (t t class) nil)
(t nil (class) t)
(t nil (class class) t)
(t nil (class t) t)
(t nil (class t t) t)
(t nil (class t t t) t)
(t nil (t class) t)
(t t (class) nil)
(t t (class class) nil)
(t t (class class class) nil)
(nil nil (class) nil)
(nil nil (class class) nil)
(nil nil (class class t) nil)
(nil nil (class class t t) nil)
(nil nil (class t) nil)
(nil nil (t class t) nil)
(nil nil (class) t)
(nil nil (class class) t)))
EVAL - WHEN
(dolist (key *checking-or-caching-list*)
(destructuring-bind (cached-emf-p return-value-p metatypes applyp) key
(multiple-value-bind (args generator)
(if cached-emf-p
(if return-value-p
(values (list metatypes) 'emit-constant-value)
(values (list metatypes applyp) 'emit-caching))
(if return-value-p
(values (list metatypes) 'emit-in-checking-p)
(values (list metatypes applyp) 'emit-checking)))
(apply #'get-dfun-constructor generator args))))
|
755707905d51a5bd07fe4bf1d865346d501698ec07ca95db3f7efb23e3707d09 | f-f/dhall-clj | import_test.clj | (ns dhall-clj.import-test
(:require [clojure.test :refer :all]
[medley.core :refer [map-vals]]
[dhall-clj.ast :refer :all]
[dhall-clj.core :as core]
[dhall-clj.parse :refer [parse expr]]
[dhall-clj.import :refer [resolve-imports get-cached-file]]
[dhall-clj.typecheck :refer [typecheck]]
[dhall-clj.alpha-normalize :refer [alpha-normalize]]
[dhall-clj.beta-normalize :refer [beta-normalize]]
[dhall-clj.state :as s]
[dhall-clj.test-utils :refer :all]
[clojure.java.io :as io]
[me.raynes.fs :as fs]))
;; sha256 integrity hash used to pin the dhall-lang Prelude package.
(def prelude-hash "d45e8141950bcbdfa58c4ff9dcf3fd20d1dca0dca3db71583f73842f1b45ad2d")
;; A hash-pinned import must resolve to the same expression as the
;; plain import of the same file.
(def simple-success-cases
  {"Prelude import with hash"
   {:actual (str "./../../../Prelude/package.dhall sha256:" prelude-hash)
    :expected "./../../../Prelude/package.dhall"}})
;; Import pinned to a hash that differs from prelude-hash above;
;; resolution is expected to fail the integrity check.
(def simple-failure-cases
  {"Prelude import with hash" "./dhall-lang/Prelude/package.dhall sha256:b575f038399d47f033b63d6e29ceb8e7778b45765778026c9015ef1d28655cc3"})
;; Root of the upstream dhall-lang acceptance tests for the import phase.
(def test-folder "dhall-lang/tests/import")
(def problematic
  "Here we list all the tests that blow up, so we categorize and exclude them.
  Note: they are vectors because the path creation is platform-sensitive."
  [
   ;; Waiting on issue #34
   ["dhall-lang" "tests" "import" "failure" "referentiallyInsane.dhall"]
   ;; Waiting for proper cycle detection
   ["dhall-lang" "tests" "import" "failure" "cycle.dhall"]])
(defn valid-testcases
  "Upstream success testcases minus the known-problematic ones."
  []
  (let [cases (success-testcases (str test-folder "/success"))
        excluded (map (fn [segments] (str (apply io/file segments))) problematic)]
    (apply dissoc cases excluded)))
;; Each success case is resolved, beta-normalized and alpha-normalized
;; from inside the upstream success directory; :actual and :expected
;; must come out identical.
(deftest import-success-suite
  (let [import-cache (s/new)
        parent (str test-folder "/success")
        f (fn [e]
            (fs/with-mutable-cwd
              (fs/chdir parent)
              (-> e
                parse
                expr
                (resolve-imports import-cache)
                (beta-normalize)
                (alpha-normalize))))] ;; This last alpha-normalize is necessary so that cache works
    (doseq [[testcase {:keys [actual expected]}] (merge simple-success-cases
                                                        (valid-testcases))]
      (println "TESTCASE:" testcase)
      (testing actual
        (is (= (f actual) (f expected)))))))
(defn valid-failing-testcases
  "Upstream failure testcases minus the known-problematic ones."
  []
  (let [cases (failure-testcases test-folder)
        excluded (map (fn [segments] (str (apply io/file segments))) problematic)]
    (apply dissoc cases excluded)))
;; Every failure case, resolved from inside the upstream failure
;; directory, must raise an ExceptionInfo whose message starts with
;; "Import error:".
(deftest import-failure-suite
  (let [import-cache (s/new)
        parent (str test-folder "/failure")
        f (fn [e]
            (fs/with-mutable-cwd
              (fs/chdir parent)
              (-> e
                parse
                expr
                (resolve-imports import-cache))))]
    (doseq [[testcase dhall] (merge simple-failure-cases
                                    (valid-failing-testcases))]
      (println "TESTCASE failure:" testcase)
      (testing testcase
        (is (thrown-with-msg? clojure.lang.ExceptionInfo #"Import error:"
              (f dhall)))))))
;; Like clojure.core/time, but instead of printing it yields
;; [result elapsed-milliseconds].
(defmacro time'
  "Evaluates expr and returns the amount of ms it took together with the evaluation"
  [expr]
  `(let [start# (. System (nanoTime))
         ret# ~expr]
     [ret# (/ (double (- (. System (nanoTime)) start#)) 1000000.0)]))
;; Deletes the cached Prelude, then fetches it twice: the first
;; (network) fetch should be slow, the second (cache) fetch fast, and
;; both must yield the same expression.
;;
;; Fix: the original wrote (is (< time-cached) 500) -- single-arity <
;; is always true and 500 was silently treated as the `is` message, so
;; the cached-timing assertion could never fail.
(deftest import-caching-suite
  (println "IMPORT CACHING")
  (testing "Prelude caching"
    (let [cache-file (get-cached-file prelude-hash)
          to-eval (str "./dhall-lang/Prelude/package.dhall sha256:" prelude-hash)
          _ (fs/delete cache-file)
          [pr1 time-uncached] (time' (core/input-ast to-eval))
          [pr2 time-cached] (time' (core/input-ast to-eval))]
      (println "Time to fetch the uncached Prelude is > 0.5s")
      (is (> time-uncached 500))
      (println "Time to fetch the cached Prelude is < 0.5s")
      (is (< time-cached 500))
      (println "The two Preludes are the same")
      (is (= (alpha-normalize pr1) pr2)))))
| null | https://raw.githubusercontent.com/f-f/dhall-clj/05d25d2464972bbeae46d828b478b4cfd59836dc/test/dhall_clj/import_test.clj | clojure | Waiting for proper cycle detection
This last alpha-normalize is necessary so that cache works | (ns dhall-clj.import-test
(:require [clojure.test :refer :all]
[medley.core :refer [map-vals]]
[dhall-clj.ast :refer :all]
[dhall-clj.core :as core]
[dhall-clj.parse :refer [parse expr]]
[dhall-clj.import :refer [resolve-imports get-cached-file]]
[dhall-clj.typecheck :refer [typecheck]]
[dhall-clj.alpha-normalize :refer [alpha-normalize]]
[dhall-clj.beta-normalize :refer [beta-normalize]]
[dhall-clj.state :as s]
[dhall-clj.test-utils :refer :all]
[clojure.java.io :as io]
[me.raynes.fs :as fs]))
(def prelude-hash "d45e8141950bcbdfa58c4ff9dcf3fd20d1dca0dca3db71583f73842f1b45ad2d")
(def simple-success-cases
{"Prelude import with hash"
{:actual (str "./../../../Prelude/package.dhall sha256:" prelude-hash)
:expected "./../../../Prelude/package.dhall"}})
(def simple-failure-cases
{"Prelude import with hash" "./dhall-lang/Prelude/package.dhall sha256:b575f038399d47f033b63d6e29ceb8e7778b45765778026c9015ef1d28655cc3"})
(def test-folder "dhall-lang/tests/import")
(def problematic
"Here we list all the tests that blow up, so we categorize and exclude them.
Note: they are vectors because the path creation is platform-sensitive."
[
Waiting on issue # 34
["dhall-lang" "tests" "import" "failure" "referentiallyInsane.dhall"]
["dhall-lang" "tests" "import" "failure" "cycle.dhall"]])
(defn valid-testcases []
(let [all (success-testcases (str test-folder "/success"))]
(->> problematic
(map #(->> % (apply io/file) str))
(apply dissoc all))))
(deftest import-success-suite
(let [import-cache (s/new)
parent (str test-folder "/success")
f (fn [e]
(fs/with-mutable-cwd
(fs/chdir parent)
(-> e
parse
expr
(resolve-imports import-cache)
(beta-normalize)
(doseq [[testcase {:keys [actual expected]}] (merge simple-success-cases
(valid-testcases))]
(println "TESTCASE:" testcase)
(testing actual
(is (= (f actual) (f expected)))))))
(defn valid-failing-testcases []
(let [all (failure-testcases test-folder)]
(->> problematic
(map #(->> % (apply io/file) str))
(apply dissoc all))))
(deftest import-failure-suite
(let [import-cache (s/new)
parent (str test-folder "/failure")
f (fn [e]
(fs/with-mutable-cwd
(fs/chdir parent)
(-> e
parse
expr
(resolve-imports import-cache))))]
(doseq [[testcase dhall] (merge simple-failure-cases
(valid-failing-testcases))]
(println "TESTCASE failure:" testcase)
(testing testcase
(is (thrown-with-msg? clojure.lang.ExceptionInfo #"Import error:"
(f dhall)))))))
(defmacro time'
"Evaluates expr and returns the amount of ms it took together with the evaluation"
[expr]
`(let [start# (. System (nanoTime))
ret# ~expr]
[ret# (/ (double (- (. System (nanoTime)) start#)) 1000000.0)]))
(deftest import-caching-suite
(println "IMPORT CACHING")
(testing "Prelude caching"
(let [cache-file (get-cached-file prelude-hash)
to-eval (str "./dhall-lang/Prelude/package.dhall sha256:" prelude-hash)
_ (fs/delete cache-file)
[pr1 time-uncached] (time' (core/input-ast to-eval))
[pr2 time-cached] (time' (core/input-ast to-eval))]
(println "Time to fetch the uncached Prelude is > 0.5s")
(is (> time-uncached 500))
(println "Time to fetch the cached Prelude is < 0.5s")
(is (< time-cached) 500)
(println "The two Preludes are the same")
(is (= (alpha-normalize pr1) pr2)))))
|
1623ac4cf0c4e064f837780d3c55a91047286e1e3a5146006e0eb7c44b8ecdaf | tfausak/strive | Comments.hs | -- | 'Strive.Actions.Comments'
module Strive.Options.Comments
( GetActivityCommentsOptions (..),
)
where
import Data.Aeson (encode)
import Data.ByteString.Char8 (unpack)
import Data.ByteString.Lazy (toStrict)
import Data.Default (Default, def)
import Network.HTTP.Types (QueryLike, toQuery)
-- | Options for 'Strive.Actions.getActivityComments'.
data GetActivityCommentsOptions = GetActivityCommentsOptions
  { getActivityCommentsOptions_markdown :: Bool,
    -- ^ JSON-encoded into the query string; presumably asks the API to
    -- render comments as markdown -- confirm against the Strava docs.
    getActivityCommentsOptions_page :: Integer,
    -- ^ Page number (defaults to 1).
    getActivityCommentsOptions_perPage :: Integer
    -- ^ Comments per page (defaults to 200).
  }
  deriving (Show)
-- | Defaults: markdown off, first page, 200 comments per page.
instance Default GetActivityCommentsOptions where
  def =
    GetActivityCommentsOptions
      { getActivityCommentsOptions_markdown = False,
        getActivityCommentsOptions_page = 1,
        getActivityCommentsOptions_perPage = 200
      }
-- | Serialise the options as HTTP query parameters.
--
-- Fix: the JSON-encoded markdown flag is now sent under the
-- @markdown@ key; the previous code emitted it under @before@, which
-- does not match the field it encodes.
instance QueryLike GetActivityCommentsOptions where
  toQuery options =
    toQuery
      [ ( "markdown",
          unpack
            (toStrict (encode (getActivityCommentsOptions_markdown options)))
        ),
        ("page", show (getActivityCommentsOptions_page options)),
        ("per_page", show (getActivityCommentsOptions_perPage options))
      ]
| null | https://raw.githubusercontent.com/tfausak/strive/8bd61df4b2723301273b11589c5f237b42e934dc/source/library/Strive/Options/Comments.hs | haskell | | 'Strive.Actions.Comments'
| 'Strive.Actions.getActivityComments' | module Strive.Options.Comments
( GetActivityCommentsOptions (..),
)
where
import Data.Aeson (encode)
import Data.ByteString.Char8 (unpack)
import Data.ByteString.Lazy (toStrict)
import Data.Default (Default, def)
import Network.HTTP.Types (QueryLike, toQuery)
data GetActivityCommentsOptions = GetActivityCommentsOptions
{ getActivityCommentsOptions_markdown :: Bool,
getActivityCommentsOptions_page :: Integer,
getActivityCommentsOptions_perPage :: Integer
}
deriving (Show)
instance Default GetActivityCommentsOptions where
def =
GetActivityCommentsOptions
{ getActivityCommentsOptions_markdown = False,
getActivityCommentsOptions_page = 1,
getActivityCommentsOptions_perPage = 200
}
instance QueryLike GetActivityCommentsOptions where
toQuery options =
toQuery
[ ( "before",
unpack
(toStrict (encode (getActivityCommentsOptions_markdown options)))
),
("page", show (getActivityCommentsOptions_page options)),
("per_page", show (getActivityCommentsOptions_perPage options))
]
|
3ba3acae3e1a9247401fd05cb6da44e1b3c241f9125eafdf21ce028dd2606f6c | jaredly/reason-language-server | translcore.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Translation from typed abstract syntax to lambda terms,
   for the core language *)
open Asttypes
open Typedtree
open Lambda
(** Translate one typed core-language expression to a lambda term. *)
val transl_exp: expression -> lambda
(** Translate the application of an already-translated function to
    labelled (possibly omitted) arguments. *)
val transl_apply: ?should_be_tailcall:bool
  -> ?inlined:inline_attribute
  -> ?specialised:specialise_attribute
  -> lambda -> (arg_label * expression option) list
  -> Location.t -> lambda
(** Translate a [let]/[let rec] binding group around a translated body. *)
val transl_let: rec_flag -> value_binding list -> lambda -> lambda
(** Translate a reference to a primitive. *)
val transl_primitive: Location.t -> Primitive.description -> Env.t
  -> Types.type_expr -> Path.t option -> lambda
(** Translate an extension constructor. *)
val transl_extension_constructor: Env.t -> Path.t option ->
  extension_constructor -> lambda
(** Location at which each builtin primitive was referenced, keyed by
    the primitive's path. *)
val used_primitives: (Path.t, Location.t) Hashtbl.t
type error =
  Free_super_var
| Unknown_builtin_primitive of string
| Unreachable_reached
exception Error of Location.t * error
open Format
(** Pretty-print a translation error. *)
val report_error: formatter -> error -> unit
(* Forward declaration -- to be filled in by Translmod.transl_module *)
val transl_module :
  (module_coercion -> Path.t option -> module_expr -> lambda) ref
(* Forward declaration, presumably filled in by the class/object
   translator -- confirm in the implementation. *)
val transl_object :
  (Ident.t -> string list -> class_expr -> lambda) ref
| null | https://raw.githubusercontent.com/jaredly/reason-language-server/ce1b3f8ddb554b6498c2a83ea9c53a6bdf0b6081/ocaml_typing/406/translcore.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Translation from typed abstract syntax to lambda terms,
for the core language | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Asttypes
open Typedtree
open Lambda
val transl_exp: expression -> lambda
val transl_apply: ?should_be_tailcall:bool
-> ?inlined:inline_attribute
-> ?specialised:specialise_attribute
-> lambda -> (arg_label * expression option) list
-> Location.t -> lambda
val transl_let: rec_flag -> value_binding list -> lambda -> lambda
val transl_primitive: Location.t -> Primitive.description -> Env.t
-> Types.type_expr -> Path.t option -> lambda
val transl_extension_constructor: Env.t -> Path.t option ->
extension_constructor -> lambda
val used_primitives: (Path.t, Location.t) Hashtbl.t
type error =
Free_super_var
| Unknown_builtin_primitive of string
| Unreachable_reached
exception Error of Location.t * error
open Format
val report_error: formatter -> error -> unit
Forward declaration -- to be filled in by Translmod.transl_module
val transl_module :
(module_coercion -> Path.t option -> module_expr -> lambda) ref
val transl_object :
(Ident.t -> string list -> class_expr -> lambda) ref
|
b8af4f1fa5d46ed9647a92e240bd7888aa862d214d60665ead8ac1163c6120da | babashka/babashka | sigint_handler.clj | (ns babashka.impl.sigint-handler
{:no-doc true}
(:import [sun.misc Signal]
[sun.misc SignalHandler]))
(set! *warn-on-reflection* true)
(defn handle-sigint!
  []
  ;; Users can opt out of installing the JVM signal handler entirely.
  (let [disabled? (= "true" (System/getenv "BABASHKA_DISABLE_SIGNAL_HANDLERS"))]
    (when-not disabled?
      (let [on-interrupt (reify SignalHandler
                           (handle [_ _]
                             ;; System/exit (130 = 128 + SIGINT) runs the JVM
                             ;; shutdown hooks, which a raw interrupt would skip.
                             (System/exit 130)))]
        (Signal/handle (Signal. "INT") on-interrupt)))))
| null | https://raw.githubusercontent.com/babashka/babashka/10638685549205926489ac325721261c301819d4/src/babashka/impl/sigint_handler.clj | clojure | This is needed to run shutdown hooks on interrupt, System/exit triggers those | (ns babashka.impl.sigint-handler
{:no-doc true}
(:import [sun.misc Signal]
[sun.misc SignalHandler]))
(set! *warn-on-reflection* true)
(defn handle-sigint! []
(when-not (= "true" (System/getenv "BABASHKA_DISABLE_SIGNAL_HANDLERS"))
(Signal/handle
(Signal. "INT")
(reify SignalHandler
(handle [_ _]
(System/exit 130))))))
|
15831c65695f41cacfa69290cc2f7a19e0a7a558a57f2ee0543136472e9b8d35 | chaoxu/fancy-walks | B.hs | {-# OPTIONS_GHC -O2 #-}
import Data.List
import Data.Maybe
import Data.Char
import Data.Array
import Data.Int
import Data.Ratio
import Data.Bits
import Data.Function
import Data.Ord
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Foldable as F
import Data.Tree
import Data.Graph
-- | Parse the whole input with a 'State' lexer over the remaining
-- 'ByteString': a testcase count, then for each case the dimensions
-- N and M followed by an N-by-M grid of integers.
parseInput = do
    cas <- readInt
    replicateM cas $ do
        n <- readInt
        m <- readInt
        a <- replicateM n (replicateM m readInt)
        return (n, m, a)
  where
    -- Each helper skips leading whitespace, then consumes one token.
    -- readInteger and readString are unused here (template leftovers).
    readInt = state $ fromJust . BS.readInt . BS.dropWhile isSpace
    readInteger = state $ fromJust . BS.readInteger . BS.dropWhile isSpace
    readString = state $ BS.span (not . isSpace) . BS.dropWhile isSpace
-- | Read stdin, solve every case, and print each answer under its
-- \"Case #i:\" banner.
main = do
    input <- evalState parseInput <$> BS.getContents
    forM_ (zip [1..] input) $ \(cas, params) -> do
        putStr $ "Case #" ++ show cas ++ ":\n" ++ (solve params)
-- | Label every grid cell with the letter of its drainage basin:
-- each cell drains to the lowest of itself and its N/W/E/S
-- neighbours, basins are the connected components of that flow graph,
-- and components are lettered 'a', 'b', ... ordered by their smallest
-- cell index (row-major), which yields the lexicographically smallest
-- labelling.
solve (n, m, a) = unlines . map (unwords.map (\x->[x])) $ output
  where
    -- 1-based (row, column) bounds of the altitude grid.
    bnds = ((1,1),(n,m))
    arr = listArray bnds [ele | row <- a, ele <- row]
    -- Destination of the water at (x,y): the first minimum among the
    -- cell itself and its N, W, E, S neighbours (in that order).
    -- 'minimumBy' keeps the earliest of equal elements, so a cell is
    -- its own sink unless some neighbour is strictly lower, and ties
    -- between neighbours break in N, W, E, S order.
    next (x,y) = ans
      where
        delta = [(0,0),(-1,0),(0,-1),(0,1),(1,0)]
        pts = [pt | (dx,dy) <- delta, let pt = (x+dx,y+dy), inRange bnds pt]
        ans = minimumBy (compare `on` (arr!)) pts
    -- One edge per cell, pointing at its destination; (weakly)
    -- connected components of this graph are the basins.
    graph = buildG (0, rangeSize bnds-1) [(index bnds idx, index bnds $ next idx) | idx <- range bnds]
    comps = sort $ map (sort . F.toList) $ components graph
    colored = array (0, rangeSize bnds-1) [ (idx, color)
                | (color, comp) <- zip ['a'..] comps
                , idx <- comp
                ]
    output = [[colored ! (index bnds (i,j)) | j <- [1..m]] | i <- [1..n]]
| null | https://raw.githubusercontent.com/chaoxu/fancy-walks/952fcc345883181144131f839aa61e36f488998d/code.google.com/codejam/Google%20Code%20Jam%202009/Qualification%20Round/B.hs | haskell | # OPTIONS_GHC -O2 # |
import Data.List
import Data.Maybe
import Data.Char
import Data.Array
import Data.Int
import Data.Ratio
import Data.Bits
import Data.Function
import Data.Ord
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Foldable as F
import Data.Tree
import Data.Graph
parseInput = do
cas <- readInt
replicateM cas $ do
n <- readInt
m <- readInt
a <- replicateM n (replicateM m readInt)
return (n, m, a)
where
readInt = state $ fromJust . BS.readInt . BS.dropWhile isSpace
readInteger = state $ fromJust . BS.readInteger . BS.dropWhile isSpace
readString = state $ BS.span (not . isSpace) . BS.dropWhile isSpace
main = do
input <- evalState parseInput <$> BS.getContents
forM_ (zip [1..] input) $ \(cas, params) -> do
putStr $ "Case #" ++ show cas ++ ":\n" ++ (solve params)
solve (n, m, a) = unlines . map (unwords.map (\x->[x])) $ output
where
bnds = ((1,1),(n,m))
arr = listArray bnds [ele | row <- a, ele <- row]
next (x,y) = ans
where
delta = [(0,0),(-1,0),(0,-1),(0,1),(1,0)]
pts = [pt | (dx,dy) <- delta, let pt = (x+dx,y+dy), inRange bnds pt]
ans = minimumBy (compare `on` (arr!)) pts
graph = buildG (0, rangeSize bnds-1) [(index bnds idx, index bnds $ next idx) | idx <- range bnds]
comps = sort $ map (sort . F.toList) $ components graph
colored = array (0, rangeSize bnds-1) [ (idx, color)
| (color, comp) <- zip ['a'..] comps
, idx <- comp
]
output = [[colored ! (index bnds (i,j)) | j <- [1..m]] | i <- [1..n]]
|
06c81768ebb3eae52ebefc99acd0c528dd7e120aaf92fc5fed93bc202ac04555 | msantos/pkt | pkt_sctp.erl | Copyright ( c ) 2009 - 2022 , < >
%% All rights reserved.
%%
%% Redistribution and use in source and binary forms, with or without
%% modification, are permitted provided that the following conditions
%% are met:
%%
%% Redistributions of source code must retain the above copyright
%% notice, this list of conditions and the following disclaimer.
%%
%% Redistributions in binary form must reproduce the above copyright
%% notice, this list of conditions and the following disclaimer in the
%% documentation and/or other materials provided with the distribution.
%%
%% Neither the name of the author nor the names of its contributors
%% may be used to endorse or promote products derived from this software
%% without specific prior written permission.
%%
%% THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
%% LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
%% FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT HOLDER OR FOR ANY DIRECT , INDIRECT ,
INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING ,
BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
%% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
%% ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
%% POSSIBILITY OF SUCH DAMAGE.
-module(pkt_sctp).
-include("pkt_sctp.hrl").
-export([codec/1]).
%% Decode an SCTP packet: the 12-byte common header (source port,
%% destination port, verification tag, checksum) followed by the chunk
%% list.  Returns the decoded #sctp{} record plus any trailing bytes
%% that did not form a complete chunk.
-spec codec(binary()) -> {#sctp{}, binary()}.
codec(<<SPort:16, DPort:16, VTag:32, Sum:32, Payload/binary>>) ->
    {Chunks, Other} = decode_chunks(Payload, []),
    %% decode_chunks accumulates in reverse; restore wire order here.
    SCTP = #sctp{
        sport = SPort,
        dport = DPort,
        vtag = VTag,
        sum = Sum,
        chunks = lists:reverse(Chunks)
    },
    {SCTP, Other}.
%%% Internal functions
%% Walk the chunk list: each chunk is a 4-byte header (type, flags,
%% length) plus a payload padded to a 4-byte boundary.  Decoded chunks
%% are accumulated in reverse; any tail too short to hold one more
%% complete chunk is handed back untouched.
decode_chunks(Chunks, Acc) ->
    case chunk_len(Chunks) < byte_size(Chunks) of
        true ->
            <<Type:8, Flags:1/binary, Length:16, Rest/binary>> = Chunks,
            Pad = chunk_pad_len(Length),
            %% Length includes the 4-byte chunk header; Payload does not.
            Len = Length-4,
            <<Payload:Len/binary, _:Pad/binary, Tail/binary>> = Rest,
            decode_chunks(Tail, [chunk(Type, Flags, Length, Payload) | Acc]);
        false ->
            {Acc, Chunks}
    end.
%%% if 'chunks' is less than 4 bytes, we can't read a length.
%%% if 'length' is less than 4, the chunk is corrupt.
%%% we return 'sizeof chunks' plus one, indicating that a read will fail.
chunk_len(<<_:16, L:16, _/binary>>) when 4 =< L ->
    L-4+chunk_pad_len(L);
chunk_len(Chunks) ->
    byte_size(Chunks)+1.
%%% pad length in bytes: chunks are padded up to a 4-byte boundary.
chunk_pad_len(L) ->
    case L rem 4 of
        0 -> 0;
        R -> 4 - R
    end.
%% Build a #sctp_chunk{} from one raw chunk: the low nibble of the
%% flags byte carries the I/U/B/E bits, and len excludes the 4-byte
%% chunk header.
-spec chunk(byte(), binary(), non_neg_integer(), binary()) -> #sctp_chunk{}.
chunk(Type, Flags, Len, Payload) ->
    <<_Spare:4, I:1, U:1, B:1, E:1>> = Flags,
    #sctp_chunk{
        type = Type,
        i = I,
        u = U,
        b = B,
        e = E,
        len = Len - 4,
        payload = chunk_payload(Type, Payload)
    }.
%% Decode the type-specific body of a chunk.  Unknown chunk types fall
%% through to the last clause and are returned as the raw binary.
%%
%% Fix: the SACK clause used GapsLength without ever binding it; the
%% binding (GapsN blocks of 4 bytes each) is restored below.  The spec
%% union also gains the #sctp_chunk_abort{} case produced by the ABORT
%% clause.
-spec chunk_payload(non_neg_integer(), binary()) ->
    #sctp_chunk_data{}
    | #sctp_chunk_init{}
    | #sctp_chunk_init_ack{}
    | #sctp_chunk_sack{}
    | #sctp_chunk_cookie_echo{}
    | #sctp_chunk_cookie_ack{}
    | #sctp_chunk_heartbeat{}
    | #sctp_chunk_heartbeat_ack{}
    | #sctp_chunk_shutdown{}
    | #sctp_chunk_shutdown_ack{}
    | #sctp_chunk_shutdown_complete{}
    | #sctp_chunk_abort{}
    | binary().
chunk_payload(?SCTP_CHUNK_DATA, <<Tsn:32, Sid:16, Ssn:16, Ppi:32, Data/binary>>) ->
    #sctp_chunk_data{tsn = Tsn, sid = Sid, ssn = Ssn, ppi = Ppi, data = Data};
chunk_payload(?SCTP_CHUNK_INIT, <<Itag:32, Arwnd:32, OutStreams:16, InStreams:16, Tsn:32, Rest/binary>>) ->
    #sctp_chunk_init{
        itag = Itag,
        a_rwnd = Arwnd,
        outbound_streams = OutStreams,
        inbound_streams = InStreams,
        tsn = Tsn,
        params = init_params(Rest, [])
    };
chunk_payload(?SCTP_CHUNK_INIT_ACK, <<Itag:32, Arwnd:32, OutStreams:16, InStreams:16, Tsn:32, Rest/binary>>) ->
    #sctp_chunk_init_ack{
        itag = Itag,
        a_rwnd = Arwnd,
        outbound_streams = OutStreams,
        inbound_streams = InStreams,
        tsn = Tsn,
        params = init_params(Rest, [])
    };
chunk_payload(?SCTP_CHUNK_SACK, <<TSN_ACK:32, Arwnd:32, GapsN:16, DuplicateTSN:16, Rest/binary>>) ->
    %% Each gap-ack block is 4 bytes: Gap Ack start (16), Gap Ack end (16).
    GapsLength = GapsN * 4,
    <<Gaps:GapsLength/binary-unit:8, TSNs/binary>> = Rest,
    #sctp_chunk_sack{
        tsn_ack = TSN_ACK,
        a_rwnd = Arwnd,
        number_gap_ack_blocks = GapsN,
        number_duplicate_tsn = DuplicateTSN,
        gap_ack_blocks = [{Start, End} || <<Start:16, End:16>> <= Gaps],
        duplicate_tsns = [T || <<T:32>> <= TSNs]
    };
chunk_payload(?SCTP_CHUNK_COOKIE_ECHO, Cookie) ->
    #sctp_chunk_cookie_echo{cookie = Cookie};
chunk_payload(?SCTP_CHUNK_COOKIE_ACK, <<>>) ->
    #sctp_chunk_cookie_ack{};
chunk_payload(?SCTP_CHUNK_HEARTBEAT, <<Type:16, _Length:16, Info/binary>>) ->
    #sctp_chunk_heartbeat{type = Type, info = Info};
chunk_payload(?SCTP_CHUNK_HEARTBEAT_ACK, <<Type:16, _Length:16, Info/binary>>) ->
    #sctp_chunk_heartbeat_ack{type = Type, info = Info};
chunk_payload(?SCTP_CHUNK_SHUTDOWN, <<TSN_ACK:32>>) ->
    #sctp_chunk_shutdown{tsn_ack = TSN_ACK};
chunk_payload(?SCTP_CHUNK_SHUTDOWN_ACK, <<>>) ->
    #sctp_chunk_shutdown_ack{};
chunk_payload(?SCTP_CHUNK_SHUTDOWN_COMPLETE, <<>>) ->
    #sctp_chunk_shutdown_complete{};
chunk_payload(?SCTP_CHUNK_ABORT, Errors) ->
    #sctp_chunk_abort{error_causes = error_causes(Errors, [])};
chunk_payload(_, Data) ->
    Data.
%% Decode the TLV parameter list carried by an INIT / INIT ACK chunk into a
%% proplist. Parameters accumulate in reverse order of appearance.
%% (Two clause-separating comment lines had lost their %% markers, which is
%% a syntax error in Erlang; restored below.)
%% IPv4 Address Parameter
init_params(<<5:16, 8:16, A:8, B:8, C:8, D:8, Rest/binary>>, Acc) ->
    init_params(Rest, [{ipv4, {A, B, C, D}} | Acc]);
%% IPv6 Address Parameter
init_params(<<6:16, 20:16, Value:16/binary-unit:8, Rest/binary>>, Acc) ->
    IP = list_to_tuple([N || <<N:16>> <= Value]),
    init_params(Rest, [{ipv6, IP} | Acc]);
%% State cookie
init_params(<<7:16, Length:16, Rest/binary>>, Acc) ->
    L = Length - 4,
    <<Cookie:L/binary-unit:8, Tail/binary>> = Rest,
    init_params(Tail, [{state_cookie, Cookie} | Acc]);
%% Unrecognized Parameter
init_params(<<8:16, Length:16, Rest/binary>>, Acc) ->
    L = Length - 4,
    <<Parameter:L/binary-unit:8, Tail/binary>> = Rest,
    init_params(Tail, [{unrecognized, Parameter} | Acc]);
%% Cookie Preservative
init_params(<<9:16, 8:16, Value:32, Rest/binary>>, Acc) ->
    init_params(Rest, [{cookie, Value} | Acc]);
%% Host Name Address
init_params(<<11:16, Length:16, Rest/binary>>, Acc) ->
    L = Length - 4,
    <<Hostname:L/binary-unit:8, Tail/binary>> = Rest,
    init_params(Tail, [{hostname, Hostname} | Acc]);
%% Supported Address Types
init_params(<<12:16, Length:16, Rest/binary>>, Acc) ->
    AddressType =
        fun(5) -> ipv4;
           (6) -> ipv6;
           (11) -> hostname
        end,
    L = Length - 4,
    <<Types:L/binary-unit:8, Tail/binary>> = Rest,
    init_params(Tail, [{address_types, [AddressType(V) || <<V:16>> <= Types]} | Acc]);
init_params(<<>>, Acc) -> Acc;
%% Ignore ECN and Forward TSN parameters
init_params(_, Acc) -> Acc.
%% Walk the TLV list of error causes inside an ABORT / ERROR chunk and
%% build one #sctp_error_cause{} per cause. Causes accumulate in reverse
%% order of appearance.
error_causes(<<Code:16, Length:16, Rest/binary>>, Acc) ->
    OptLen = Length - 4,
    <<OptBin:OptLen/binary-unit:8, Tail/binary>> = Rest,
    Cause = #sctp_error_cause{code = Code,
                              descr = gen_sctp:error_string(Code),
                              opts = sctp_error(Code, OptLen, OptBin)},
    error_causes(Tail, [Cause | Acc]);
error_causes(<<>>, Acc) ->
    Acc.
%% Decode the per-cause option payload of a single SCTP error cause.
%% Returns a one-element proplist; causes we do not understand are
%% passed through untouched under the 'data' key.
sctp_error(1, _Length, <<StreamId:16, _Reserved:8>>) ->
    %% Cause 1: invalid stream identifier.
    [{stream_identifier, StreamId}];
sctp_error(12, Length, Payload) ->
    %% Cause 12: user-initiated abort; the payload is the abort reason.
    <<Reason:Length/binary-unit:8>> = Payload,
    [{abort_reason, Reason}];
%% FIXME: add more error causes
sctp_error(_Code, _Length, Payload) ->
    [{data, Payload}].
| null | https://raw.githubusercontent.com/msantos/pkt/92fa4ec6903c1c0a7c564e7cd1c468a92e3e3f3b/src/pkt_sctp.erl | erlang | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Neither the name of the author nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
we return 'sizeof chunks' plus one, indicating that a read will fail.
pad length in bytes
IPv4 Address Parameter
IPv6 Address Parameter
Unrecognized Parameter
Cookie Preservative
Host Name Address
Supported Address Types
FIXME: add more error causes | Copyright ( c ) 2009 - 2022 , < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
COPYRIGHT HOLDER OR FOR ANY DIRECT , INDIRECT ,
INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING ,
BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
-module(pkt_sctp).
-include("pkt_sctp.hrl").
-export([codec/1]).
-spec codec(binary()) -> {#sctp{}, binary()}.
%% Decode an SCTP packet: the 12-byte common header followed by a
%% sequence of chunks. Returns the parsed #sctp{} record together with
%% any trailing bytes that could not be consumed as a chunk.
codec(<<SrcPort:16, DstPort:16, VerifTag:32, Checksum:32, Body/binary>>) ->
    {ParsedChunks, Leftover} = decode_chunks(Body, []),
    Packet = #sctp{sport = SrcPort,
                   dport = DstPort,
                   vtag = VerifTag,
                   sum = Checksum,
                   chunks = lists:reverse(ParsedChunks)},
    {Packet, Leftover}.
%% Internal functions
%% Peel complete chunks off the front of the buffer, accumulating them
%% in reverse; stops as soon as the remaining bytes cannot hold a full
%% chunk (header + body + padding) and returns {ChunksSoFar, Leftover}.
decode_chunks(Buf, Acc) ->
    case chunk_len(Buf) < byte_size(Buf) of
        false ->
            %% Not enough bytes left for one complete chunk: stop here.
            {Acc, Buf};
        true ->
            <<Type:8, Flags:1/binary, Length:16, Rest/binary>> = Buf,
            BodyLen = Length - 4,
            PadLen = chunk_pad_len(Length),
            <<Body:BodyLen/binary, _:PadLen/binary, Tail/binary>> = Rest,
            decode_chunks(Tail, [chunk(Type, Flags, Length, Body) | Acc])
    end.
%% if 'chunks' is less than 4 bytes, we can't read a length.
%% if 'length' is less than 4, the chunk is corrupt.
%% Body-plus-padding length of the chunk at the head of the buffer.
%% When the buffer is too short to carry a length field, or the
%% advertised length is corrupt (< 4), return byte_size + 1 so the
%% caller's "enough bytes left?" comparison is guaranteed to fail.
chunk_len(<<_Hdr:16, Len:16, _/binary>>) when Len >= 4 ->
    Len - 4 + chunk_pad_len(Len);
chunk_len(Buf) ->
    byte_size(Buf) + 1.

%% Number of padding bytes needed to round Len up to a 4-byte boundary.
chunk_pad_len(Len) ->
    3 - ((Len + 3) rem 4).
-spec chunk(byte(), binary(), non_neg_integer(), binary()) -> #sctp_chunk{}.
%% Build an #sctp_chunk{} from a raw chunk header and body. The four
%% low bits of the flags octet (I/U/B/E) are split out individually;
%% `len` is the wire length minus the 4-byte chunk header.
chunk(Type, FlagsBin, WireLen, Body) ->
    <<_Reserved:4, I:1, U:1, B:1, E:1>> = FlagsBin,
    #sctp_chunk{type = Type,
                i = I,
                u = U,
                b = B,
                e = E,
                len = WireLen - 4,
                payload = chunk_payload(Type, Body)}.
-spec chunk_payload(non_neg_integer(), binary()) ->
#sctp_chunk_data{}
| #sctp_chunk_init{}
| #sctp_chunk_init_ack{}
| #sctp_chunk_sack{}
| #sctp_chunk_cookie_echo{}
| #sctp_chunk_cookie_ack{}
| #sctp_chunk_heartbeat{}
| #sctp_chunk_heartbeat_ack{}
| #sctp_chunk_shutdown{}
| #sctp_chunk_shutdown_ack{}
| #sctp_chunk_shutdown_complete{}
| binary().
%% Decode the type-specific body of an SCTP chunk into its record form.
%% Unknown chunk types fall through and are returned as the raw binary.
chunk_payload(?SCTP_CHUNK_DATA, <<Tsn:32, Sid:16, Ssn:16, Ppi:32, Data/binary>>) ->
    #sctp_chunk_data{tsn = Tsn, sid = Sid, ssn = Ssn, ppi = Ppi, data = Data};
chunk_payload(?SCTP_CHUNK_INIT, <<Itag:32, Arwnd:32, OutStreams:16, InStreams:16, Tsn:32, Rest/binary>>) ->
    #sctp_chunk_init{
        itag = Itag,
        a_rwnd = Arwnd,
        outbound_streams = OutStreams,
        inbound_streams = InStreams,
        tsn = Tsn,
        params = init_params(Rest, [])
    };
chunk_payload(?SCTP_CHUNK_INIT_ACK, <<Itag:32, Arwnd:32, OutStreams:16, InStreams:16, Tsn:32, Rest/binary>>) ->
    #sctp_chunk_init_ack{
        itag = Itag,
        a_rwnd = Arwnd,
        outbound_streams = OutStreams,
        inbound_streams = InStreams,
        tsn = Tsn,
        params = init_params(Rest, [])
    };
chunk_payload(?SCTP_CHUNK_SACK, <<TSN_ACK:32, Arwnd:32, GapsN:16, DuplicateTSN:16, Rest/binary>>) ->
    %% Each gap-ack block is 4 bytes: Gap Ack start (16), Gap Ack end (16).
    %% This binding was lost in the source; without it GapsLength below is unbound.
    GapsLength = GapsN * 4,
    <<Gaps:GapsLength/binary-unit:8, TSNs/binary>> = Rest,
    #sctp_chunk_sack{
        tsn_ack = TSN_ACK,
        a_rwnd = Arwnd,
        number_gap_ack_blocks = GapsN,
        number_duplicate_tsn = DuplicateTSN,
        gap_ack_blocks = [{Start, End} || <<Start:16, End:16>> <= Gaps],
        duplicate_tsns = [T || <<T:32>> <= TSNs]
    };
chunk_payload(?SCTP_CHUNK_COOKIE_ECHO, Cookie) ->
    #sctp_chunk_cookie_echo{cookie = Cookie};
chunk_payload(?SCTP_CHUNK_COOKIE_ACK, <<>>) ->
    #sctp_chunk_cookie_ack{};
chunk_payload(?SCTP_CHUNK_HEARTBEAT, <<Type:16, _Length:16, Info/binary>>) ->
    #sctp_chunk_heartbeat{type = Type, info = Info};
chunk_payload(?SCTP_CHUNK_HEARTBEAT_ACK, <<Type:16, _Length:16, Info/binary>>) ->
    #sctp_chunk_heartbeat_ack{type = Type, info = Info};
chunk_payload(?SCTP_CHUNK_SHUTDOWN, <<TSN_ACK:32>>) ->
    #sctp_chunk_shutdown{tsn_ack = TSN_ACK};
chunk_payload(?SCTP_CHUNK_SHUTDOWN_ACK, <<>>) ->
    #sctp_chunk_shutdown_ack{};
chunk_payload(?SCTP_CHUNK_SHUTDOWN_COMPLETE, <<>>) ->
    #sctp_chunk_shutdown_complete{};
chunk_payload(?SCTP_CHUNK_ABORT, Errors) ->
    #sctp_chunk_abort{error_causes = error_causes(Errors, [])};
chunk_payload(_, Data) ->
    Data.
%% Decode the TLV parameter list carried by an INIT / INIT ACK chunk into a
%% proplist. Parameters accumulate in reverse order of appearance.
%% IPv4 Address Parameter
init_params(<<5:16, 8:16, A:8, B:8, C:8, D:8, Rest/binary>>, Acc) ->
    init_params(Rest, [{ipv4, {A, B, C, D}} | Acc]);
%% IPv6 Address Parameter
init_params(<<6:16, 20:16, Value:16/binary-unit:8, Rest/binary>>, Acc) ->
    IP = list_to_tuple([N || <<N:16>> <= Value]),
    init_params(Rest, [{ipv6, IP} | Acc]);
%% State cookie (marker restored: a bare comment line is a syntax error)
init_params(<<7:16, Length:16, Rest/binary>>, Acc) ->
    L = Length - 4,
    <<Cookie:L/binary-unit:8, Tail/binary>> = Rest,
    init_params(Tail, [{state_cookie, Cookie} | Acc]);
%% Unrecognized Parameter
init_params(<<8:16, Length:16, Rest/binary>>, Acc) ->
    L = Length - 4,
    <<Parameter:L/binary-unit:8, Tail/binary>> = Rest,
    init_params(Tail, [{unrecognized, Parameter} | Acc]);
%% Cookie Preservative
init_params(<<9:16, 8:16, Value:32, Rest/binary>>, Acc) ->
    init_params(Rest, [{cookie, Value} | Acc]);
%% Host Name Address
init_params(<<11:16, Length:16, Rest/binary>>, Acc) ->
    L = Length - 4,
    <<Hostname:L/binary-unit:8, Tail/binary>> = Rest,
    init_params(Tail, [{hostname, Hostname} | Acc]);
%% Supported Address Types
init_params(<<12:16, Length:16, Rest/binary>>, Acc) ->
    AddressType =
        fun(5) -> ipv4;
           (6) -> ipv6;
           (11) -> hostname
        end,
    L = Length - 4,
    <<Types:L/binary-unit:8, Tail/binary>> = Rest,
    init_params(Tail, [{address_types, [AddressType(V) || <<V:16>> <= Types]} | Acc]);
init_params(<<>>, Acc) -> Acc;
%% Ignore ECN and Forward TSN parameters
init_params(_, Acc) -> Acc.
%% Walk the TLV list of error causes in an ABORT / ERROR chunk; builds one
%% #sctp_error_cause{} per cause, accumulated in reverse order of appearance.
error_causes(<<Code:16, Length:16, Rest/binary>>, Acc) ->
%% Cause length includes its own 4-byte header.
L = Length - 4,
<<Opts:L/binary-unit:8, Tail/binary>> = Rest,
Error = #sctp_error_cause{
code = Code,
%% Human-readable description via OTP's gen_sctp.
descr = gen_sctp:error_string(Code),
opts = sctp_error(Code, L, Opts)
},
error_causes(Tail, [Error | Acc]);
error_causes(<<>>, Acc) -> Acc.
%% Decode the option payload of a single SCTP error cause into a proplist.
%% Cause 1: invalid stream identifier.
sctp_error(1, _Length, <<Ident:16, _Reserved:8>>) ->
[{stream_identifier, Ident}];
%% Cause 12: user-initiated abort; payload is the abort reason text.
sctp_error(12, Length, Opts) ->
<<Reason:Length/binary-unit:8>> = Opts,
[{abort_reason, Reason}];
%% Unknown causes are passed through as raw data.
sctp_error(_Code, _Length, Opts) ->
[{data, Opts}].
|
bb4c40144e41f5ef898b010fe9dbf10c19f03163380deb245684eb12c5455741 | felipecsl/show-do-milhao | Utils.hs | module Utils where
import Data.ByteString (ByteString (..))
import Data.ByteString.Char8 (unpack)
import Data.List.Split (keepDelimsL, split, whenElt)
import qualified Data.Text as T
import Data.Text.ICU.Convert (open, toUnicode)
import qualified Data.Text.IO as T
-- | Map over a list together with letter indices: element 0 is paired
-- with 'a', element 1 with 'b', and so on.
mapIndChar :: (a -> Char -> b) -> [a] -> [b]
mapIndChar f xs = [f x c | (x, c) <- zip xs ['a' ..]]
-- | Map over a list together with 0-based integer positions.
mapInd :: (a -> Int -> b) -> [a] -> [b]
mapInd f xs = [f x i | (x, i) <- zip xs [0 ..]]
-- | Break a list into consecutive chunks of @n@ elements; the final
-- chunk may be shorter. Raises an error when @n@ is not positive.
-- (The old message claimed "Negative n" even when n == 0.)
group :: Int -> [a] -> [[a]]
group _ [] = []
group n l
  | n > 0 = take n l : group n (drop n l)
  | otherwise = error "group: n must be positive"
-- | Decode a UTF-8 encoded 'ByteString' into a 'String' via ICU.
byteStringToString :: ByteString -> IO String
byteStringToString bytes = do
  converter <- open "utf-8" Nothing
  pure (T.unpack (toUnicode converter bytes))
-- | Parse an ASCII-encoded integer out of a 'ByteString'.
-- NOTE(review): relies on the partial 'read' and crashes on
-- non-numeric input, exactly like the original.
byteStringToInt :: ByteString -> Int
byteStringToInt = read . unpack
-- | Run the action over the list left to right, stopping at the first
-- element for which it yields 'False'. Returns 'True' iff every action
-- on the elements actually visited returned 'True' (vacuously 'True'
-- for the empty list).
doWhileM :: (a -> IO Bool) -> [a] -> IO Bool
doWhileM _ [] = pure True
doWhileM act (x:rest) =
  act x >>= \ok -> if ok then doWhileM act rest else pure False
-- Split an array using the function for selecting the delimiter. The resulting array includes
-- the delimiter as the first item in the array. This is important because we need the headers for
-- each question group (Facil, Medio and Dificil)
-- | Split a list on elements matching the predicate, keeping each
-- matching delimiter as the first item of its group.
splitWhen :: (a -> Bool) -> [a] -> [[a]]
splitWhen isDelim xs = split (keepDelimsL (whenElt isDelim)) xs
| null | https://raw.githubusercontent.com/felipecsl/show-do-milhao/cc1c7d48a72d68c38cad760654c3f2e50a3af03a/src/Utils.hs | haskell | Split an array using the function for selecting the delimiter. The resulting array includes | module Utils where
import Data.ByteString (ByteString (..))
import Data.ByteString.Char8 (unpack)
import Data.List.Split (keepDelimsL, split, whenElt)
import qualified Data.Text as T
import Data.Text.ICU.Convert (open, toUnicode)
import qualified Data.Text.IO as T
-- Pair each element with a letter index starting at 'a'.
mapIndChar :: (a -> Char -> b) -> [a] -> [b]
mapIndChar f l = zipWith f l ['a'..]
-- Pair each element with its 0-based position.
mapInd :: (a -> Int -> b) -> [a] -> [b]
mapInd f l = zipWith f l [0..]
-- Split a list into chunks of n elements; errors when n is not positive.
group :: Int -> [a] -> [[a]]
group _ [] = []
group n l
| n > 0 = take n l : group n (drop n l)
| otherwise = error "Negative n"
-- Decode a UTF-8 ByteString to String via ICU.
byteStringToString :: ByteString -> IO String
byteStringToString s = do
conv <- open "utf-8" Nothing
return (T.unpack $ toUnicode conv s)
-- Parse an ASCII integer; uses partial 'read', crashes on bad input.
byteStringToInt :: ByteString -> Int
byteStringToInt s = read (unpack s) :: Int
-- Run the action over the list, short-circuiting on the first False.
doWhileM :: (a -> IO Bool) -> [a] -> IO Bool
doWhileM _ [] = return True
doWhileM m (x:xs) = do
res <- m x
if res
then doWhileM m xs
else return False
-- Split an array using the function for selecting the delimiter. The resulting array includes
-- the delimiter as the first item in the array (Facil, Medio and Dificil headers).
-- Split on elements matching the predicate, keeping each delimiter at
-- the head of its group.
splitWhen :: (a -> Bool) -> [a] -> [[a]]
splitWhen = split . keepDelimsL . whenElt
|
88c10310c3b7be79d46359c72ec6cae98bc5c5e5dd7af6512a4c39833e185be6 | processone/ejabberd | mod_mam_mnesia.erl | %%%-------------------------------------------------------------------
%%% File : mod_mam_mnesia.erl
%%% Author  : Evgeny Khramtsov
%%% Created : 15 Apr 2016 by Evgeny Khramtsov
%%%
%%%
%%% ejabberd, Copyright (C) 2002-2023 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
%%% modify it under the terms of the GNU General Public License as
%%% published by the Free Software Foundation; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
%%% You should have received a copy of the GNU General Public License along
%%% with this program; if not, write to the Free Software Foundation, Inc.,
%%% 51 Franklin Street, Fifth Floor, Boston, USA.
%%%
%%%----------------------------------------------------------------------
-module(mod_mam_mnesia).
-behaviour(mod_mam).
%% API
-export([init/2, remove_user/2, remove_room/3, delete_old_messages/3,
extended_fields/0, store/8, write_prefs/4, get_prefs/2, select/6, remove_from_archive/3,
is_empty_for_user/2, is_empty_for_room/3, delete_old_messages_batch/5]).
-include_lib("stdlib/include/ms_transform.hrl").
-include_lib("xmpp/include/xmpp.hrl").
-include("logger.hrl").
-include("mod_mam.hrl").
-define(BIN_GREATER_THAN(A, B),
((A > B andalso byte_size(A) == byte_size(B))
orelse byte_size(A) > byte_size(B))).
-define(BIN_LESS_THAN(A, B),
((A < B andalso byte_size(A) == byte_size(B))
orelse byte_size(A) < byte_size(B))).
%% Upper bound on the in-memory size of the archive_msg table before
%% store/8 refuses new messages. The define itself was lost in the
%% source, leaving only its trailing comment; store/8 still references
%% ?TABLE_SIZE_LIMIT, so it is reconstructed here.
-define(TABLE_SIZE_LIMIT, 2000000000). %% A bit less than 2 GiB.
%%%===================================================================
%%% API
%%%===================================================================
%% Create the Mnesia tables backing the MAM archive:
%% - archive_msg: bag table, one record per stored message
%% - archive_prefs: per-user archiving preferences
%% Both are disc_only_copies on the local node. Any non-{atomic, _}
%% result from table creation trips the badmatch caught below and is
%% reported as {error, db_failure}.
init(_Host, _Opts) ->
try
{atomic, _} = ejabberd_mnesia:create(
?MODULE, archive_msg,
[{disc_only_copies, [node()]},
{type, bag},
{attributes, record_info(fields, archive_msg)}]),
{atomic, _} = ejabberd_mnesia:create(
?MODULE, archive_prefs,
[{disc_only_copies, [node()]},
{attributes, record_info(fields, archive_prefs)}]),
ok
catch _:{badmatch, _} ->
{error, db_failure}
end.
%% Delete a user's entire archive and preferences in one transaction.
%% Returns the raw mnesia:transaction/1 result.
remove_user(LUser, LServer) ->
US = {LUser, LServer},
F = fun () ->
mnesia:delete({archive_msg, US}),
mnesia:delete({archive_prefs, US})
end,
mnesia:transaction(F).
%% MUC rooms are archived under {RoomName, RoomHost}, so removing a
%% room's archive is the same operation as removing a user's.
remove_room(_LServer, LName, LHost) ->
remove_user(LName, LHost).
%% Drop archived messages for a user; WithJid == none wipes the whole
%% archive, otherwise only messages exchanged with that bare JID go.
remove_from_archive(LUser, LServer, none) ->
US = {LUser, LServer},
case mnesia:transaction(fun () -> mnesia:delete({archive_msg, US}) end) of
{atomic, _} -> ok;
{aborted, Reason} -> {error, Reason}
end;
remove_from_archive(LUser, LServer, WithJid) ->
US = {LUser, LServer},
%% Compare against the bare (resourceless) peer stored per message.
Peer = jid:remove_resource(jid:split(WithJid)),
F = fun () ->
Msgs = mnesia:select(
archive_msg,
ets:fun2ms(
fun(#archive_msg{us = US1, bare_peer = Peer1} = Msg)
when US1 == US, Peer1 == Peer -> Msg
end)),
lists:foreach(fun mnesia:delete_object/1, Msgs)
end,
case mnesia:transaction(F) of
{atomic, _} -> ok;
{aborted, Reason} -> {error, Reason}
end.
%% Purge messages older than TimeStamp (of the given Type, or all).
%% Only the 'global' form is handled; any other first argument raises
%% function_clause. The table is temporarily switched to disc_copies
%% (RAM + disk) so the full-table walk does not hammer disc_only DETS.
delete_old_messages(global, TimeStamp, Type) ->
mnesia:change_table_copy_type(archive_msg, node(), disc_copies),
Result = delete_old_user_messages(mnesia:dirty_first(archive_msg), TimeStamp, Type),
mnesia:change_table_copy_type(archive_msg, node(), disc_only_copies),
Result.
%% Walk the table key by key; for each user, rewrite the bag with only
%% the records that are new enough (or of a type exempt from deletion).
delete_old_user_messages('$end_of_table', _TimeStamp, _Type) ->
ok;
delete_old_user_messages(User, TimeStamp, Type) ->
F = fun() ->
Msgs = mnesia:read(archive_msg, User),
%% Keep a message when it is not yet expired, or when a
%% specific Type was requested and this message differs.
Keep = lists:filter(
fun(#archive_msg{timestamp = MsgTS,
type = MsgType}) ->
MsgTS >= TimeStamp orelse (Type /= all andalso
Type /= MsgType)
end, Msgs),
if length(Keep) < length(Msgs) ->
mnesia:delete({archive_msg, User}),
lists:foreach(fun(Msg) -> mnesia:write(Msg) end, Keep);
true ->
ok
end
end,
%% Fetch the next key before the transaction mutates the table.
NextRecord = mnesia:dirty_next(archive_msg, User),
case mnesia:transaction(F) of
{atomic, ok} ->
delete_old_user_messages(NextRecord, TimeStamp, Type);
{aborted, Err} ->
?ERROR_MSG("Cannot delete old MAM messages: ~ts", [Err]),
Err
end.
%% Delete up to Num expired messages for LServer, resuming from key
%% LastUS ('none' restarts at the first key). Runs inside the caller's
%% transaction. Returns {RemainingQuota, NextKey}.
delete_batch('$end_of_table', _LServer, _TS, _Type, Num) ->
{Num, '$end_of_table'};
%% Quota exhausted: report the key to resume from.
delete_batch(LastUS, _LServer, _TS, _Type, 0) ->
{0, LastUS};
delete_batch(none, LServer, TS, Type, Num) ->
delete_batch(mnesia:first(archive_msg), LServer, TS, Type, Num);
%% Skip keys belonging to other virtual hosts.
delete_batch({_, LServer2} = LastUS, LServer, TS, Type, Num) when LServer /= LServer2 ->
delete_batch(mnesia:next(archive_msg, LastUS), LServer, TS, Type, Num);
delete_batch(LastUS, LServer, TS, Type, Num) ->
%% Fold over this user's bag, deleting expired records until the
%% quota hits zero; Left is the quota remaining afterwards.
Left =
lists:foldl(
fun(_, 0) ->
0;
(#archive_msg{timestamp = TS2, type = Type2} = O, Num2) when TS2 < TS, (Type == all orelse Type == Type2) ->
mnesia:delete_object(O),
Num2 - 1;
(_, Num2) ->
Num2
end, Num, mnesia:wread({archive_msg, LastUS})),
case Left of
0 -> {0, LastUS};
_ -> delete_batch(mnesia:next(archive_msg, LastUS), LServer, TS, Type, Left)
end.
%% Transactional wrapper: deletes at most Batch expired messages and
%% returns {ok, ResumeKey, DeletedCount} (Batch - RemainingQuota).
delete_old_messages_batch(LServer, TimeStamp, Type, Batch, LastUS) ->
R = mnesia:transaction(
fun() ->
{Num, NextUS} = delete_batch(LastUS, LServer, TimeStamp, Type, Batch),
{Batch - Num, NextUS}
end),
case R of
{atomic, {Num, State}} ->
{ok, State, Num};
{aborted, Err} ->
{error, Err}
end.
%% No backend-specific extra MAM query fields for the Mnesia backend.
extended_fields() ->
[].
%% Persist one message in the archive, keyed by {LUser, LServer} with
%% the microsecond timestamp (as a binary) doubling as the message id.
%% Refuses to write once a disc_only (DETS-backed) table has grown past
%% ?TABLE_SIZE_LIMIT, since DETS files cannot exceed 2 GiB.
store(Pkt, _, {LUser, LServer}, Type, Peer, Nick, _Dir, TS) ->
case {mnesia:table_info(archive_msg, disc_only_copies),
mnesia:table_info(archive_msg, memory)} of
{[_|_], TableSize} when TableSize > ?TABLE_SIZE_LIMIT ->
?ERROR_MSG("MAM archives too large, won't store message for ~ts@~ts",
[LUser, LServer]),
{error, overflow};
_ ->
%% Store both the full peer JID and its bare form (for
%% with-JID filtering in select/remove_from_archive).
LPeer = {PUser, PServer, _} = jid:tolower(Peer),
F = fun() ->
mnesia:write(
#archive_msg{us = {LUser, LServer},
id = integer_to_binary(TS),
timestamp = misc:usec_to_now(TS),
peer = LPeer,
bare_peer = {PUser, PServer, <<>>},
type = Type,
nick = Nick,
packet = Pkt})
end,
case mnesia:transaction(F) of
{atomic, ok} ->
ok;
{aborted, Err} ->
?ERROR_MSG("Cannot add message to MAM archive of ~ts@~ts: ~ts",
[LUser, LServer, Err]),
Err
end
end.
%% Persist a user's archiving preferences record (dirty write: no
%% transaction needed for a single whole-record update).
write_prefs(_LUser, _LServer, Prefs, _ServerHost) ->
mnesia:dirty_write(Prefs).
%% Look up a user's archiving preferences; 'error' when none stored.
get_prefs(LUser, LServer) ->
case mnesia:dirty_read(archive_prefs, {LUser, LServer}) of
[Prefs] ->
{ok, Prefs};
_ ->
error
end.
%% Query a user's archive. Builds a match spec from the start/end/with
%% query filters, sorts hits by timestamp, applies RSM paging, and
%% renders each message to an XML element via mod_mam:msg_to_el/4.
%% Returns {[{Id, IntId, El}], IsComplete, TotalCount} where TotalCount
%% is the pre-paging match count.
select(_LServer, JidRequestor,
#jid{luser = LUser, lserver = LServer} = JidArchive,
Query, RSM, MsgType) ->
Start = proplists:get_value(start, Query),
End = proplists:get_value('end', Query),
With = proplists:get_value(with, Query),
LWith = if With /= undefined -> jid:tolower(With);
true -> undefined
end,
MS = make_matchspec(LUser, LServer, Start, End, LWith),
Msgs = mnesia:dirty_select(archive_msg, MS),
SortedMsgs = lists:keysort(#archive_msg.timestamp, Msgs),
{FilteredMsgs, IsComplete} = filter_by_rsm(SortedMsgs, RSM),
Count = length(Msgs),
Result = {lists:flatmap(
fun(Msg) ->
%% Messages that fail to render are dropped from the page.
case mod_mam:msg_to_el(
Msg, MsgType, JidRequestor, JidArchive) of
{ok, El} ->
[{Msg#archive_msg.id,
binary_to_integer(Msg#archive_msg.id),
El}];
{error, _} ->
[]
end
end, FilteredMsgs), IsComplete, Count},
%% The select above can hold a lot of garbage; collect eagerly.
erlang:garbage_collect(),
Result.
%% True when the user has no archived messages at all.
is_empty_for_user(LUser, LServer) ->
mnesia:dirty_read(archive_msg, {LUser, LServer}) == [].
%% Rooms are archived under {RoomName, RoomHost}, same as users.
is_empty_for_room(_LServer, LName, LHost) ->
is_empty_for_user(LName, LHost).
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% Build the ets match spec used by select/6. ets:fun2ms/1 is a parse
%% transform (ms_transform.hrl), so these funs must stay literal.
make_matchspec(LUser, LServer, Start, undefined, With) ->
%% List is always greater than a tuple
%% ... so [] acts as "+infinity" for the End timestamp comparison.
make_matchspec(LUser, LServer, Start, [], With);
%% With given as a bare JID ({U, S, <<>>}): match on bare_peer.
make_matchspec(LUser, LServer, Start, End, {_, _, <<>>} = With) ->
ets:fun2ms(
fun(#archive_msg{timestamp = TS,
us = US,
bare_peer = BPeer} = Msg)
when Start =< TS, End >= TS,
US == {LUser, LServer},
BPeer == With ->
Msg
end);
%% With given as a full JID (with resource): match on the exact peer.
make_matchspec(LUser, LServer, Start, End, {_, _, _} = With) ->
ets:fun2ms(
fun(#archive_msg{timestamp = TS,
us = US,
peer = Peer} = Msg)
when Start =< TS, End >= TS,
US == {LUser, LServer},
Peer == With ->
Msg
end);
%% No peer filter: time window only.
make_matchspec(LUser, LServer, Start, End, undefined) ->
ets:fun2ms(
fun(#archive_msg{timestamp = TS,
us = US,
peer = Peer} = Msg)
when Start =< TS, End >= TS,
US == {LUser, LServer} ->
Msg
end).
%% Apply XEP-0059 result-set paging. Ids are decimal-integer binaries,
%% so the BIN_GREATER/LESS_THAN macros (size first, then lexicographic)
%% compare them in numeric order. Returns {Page, IsComplete}.
filter_by_rsm(Msgs, undefined) ->
{Msgs, true};
filter_by_rsm(_Msgs, #rsm_set{max = Max}) when Max < 0 ->
{[], true};
filter_by_rsm(Msgs, #rsm_set{max = Max, before = Before, 'after' = After}) ->
NewMsgs = if is_binary(After), After /= <<"">> ->
%% Page forward: everything strictly after the id.
lists:filter(
fun(#archive_msg{id = I}) ->
?BIN_GREATER_THAN(I, After)
end, Msgs);
is_binary(Before), Before /= <<"">> ->
%% Page backward: ids before the anchor, reversed
%% by the foldl so the newest comes first.
lists:foldl(
fun(#archive_msg{id = I} = Msg, Acc)
when ?BIN_LESS_THAN(I, Before) ->
[Msg|Acc];
(_, Acc) ->
Acc
end, [], Msgs);
is_binary(Before), Before == <<"">> ->
%% Empty 'before' means: the last page.
lists:reverse(Msgs);
true ->
Msgs
end,
filter_by_max(NewMsgs, Max).
%% Truncate a message list to the RSM page size. Returns
%% {Page, IsComplete}; IsComplete is true when nothing was cut off.
%% 'undefined' means no limit; anything that is not a non-negative
%% integer yields an empty page.
filter_by_max(Msgs, undefined) ->
    {Msgs, true};
filter_by_max(Msgs, Max) when is_integer(Max), Max >= 0 ->
    Page = lists:sublist(Msgs, Max),
    {Page, length(Msgs) =< Max};
filter_by_max(_Msgs, _Invalid) ->
    {[], true}.
| null | https://raw.githubusercontent.com/processone/ejabberd/c103182bc7e5b8a8ab123ce02d1959a54e939480/src/mod_mam_mnesia.erl | erlang | -------------------------------------------------------------------
File : mod_mam_mnesia.erl
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
----------------------------------------------------------------------
API
===================================================================
API
===================================================================
===================================================================
===================================================================
List is always greater than a tuple | Author : < >
Created : 15 Apr 2016 by < >
ejabberd , Copyright ( C ) 2002 - 2023 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
-module(mod_mam_mnesia).
-behaviour(mod_mam).
-export([init/2, remove_user/2, remove_room/3, delete_old_messages/3,
extended_fields/0, store/8, write_prefs/4, get_prefs/2, select/6, remove_from_archive/3,
is_empty_for_user/2, is_empty_for_room/3, delete_old_messages_batch/5]).
-include_lib("stdlib/include/ms_transform.hrl").
-include_lib("xmpp/include/xmpp.hrl").
-include("logger.hrl").
-include("mod_mam.hrl").
-define(BIN_GREATER_THAN(A, B),
((A > B andalso byte_size(A) == byte_size(B))
orelse byte_size(A) > byte_size(B))).
-define(BIN_LESS_THAN(A, B),
((A < B andalso byte_size(A) == byte_size(B))
orelse byte_size(A) < byte_size(B))).
A bit less than 2 GiB.
init(_Host, _Opts) ->
try
{atomic, _} = ejabberd_mnesia:create(
?MODULE, archive_msg,
[{disc_only_copies, [node()]},
{type, bag},
{attributes, record_info(fields, archive_msg)}]),
{atomic, _} = ejabberd_mnesia:create(
?MODULE, archive_prefs,
[{disc_only_copies, [node()]},
{attributes, record_info(fields, archive_prefs)}]),
ok
catch _:{badmatch, _} ->
{error, db_failure}
end.
remove_user(LUser, LServer) ->
US = {LUser, LServer},
F = fun () ->
mnesia:delete({archive_msg, US}),
mnesia:delete({archive_prefs, US})
end,
mnesia:transaction(F).
remove_room(_LServer, LName, LHost) ->
remove_user(LName, LHost).
remove_from_archive(LUser, LServer, none) ->
US = {LUser, LServer},
case mnesia:transaction(fun () -> mnesia:delete({archive_msg, US}) end) of
{atomic, _} -> ok;
{aborted, Reason} -> {error, Reason}
end;
remove_from_archive(LUser, LServer, WithJid) ->
US = {LUser, LServer},
Peer = jid:remove_resource(jid:split(WithJid)),
F = fun () ->
Msgs = mnesia:select(
archive_msg,
ets:fun2ms(
fun(#archive_msg{us = US1, bare_peer = Peer1} = Msg)
when US1 == US, Peer1 == Peer -> Msg
end)),
lists:foreach(fun mnesia:delete_object/1, Msgs)
end,
case mnesia:transaction(F) of
{atomic, _} -> ok;
{aborted, Reason} -> {error, Reason}
end.
delete_old_messages(global, TimeStamp, Type) ->
mnesia:change_table_copy_type(archive_msg, node(), disc_copies),
Result = delete_old_user_messages(mnesia:dirty_first(archive_msg), TimeStamp, Type),
mnesia:change_table_copy_type(archive_msg, node(), disc_only_copies),
Result.
delete_old_user_messages('$end_of_table', _TimeStamp, _Type) ->
ok;
delete_old_user_messages(User, TimeStamp, Type) ->
F = fun() ->
Msgs = mnesia:read(archive_msg, User),
Keep = lists:filter(
fun(#archive_msg{timestamp = MsgTS,
type = MsgType}) ->
MsgTS >= TimeStamp orelse (Type /= all andalso
Type /= MsgType)
end, Msgs),
if length(Keep) < length(Msgs) ->
mnesia:delete({archive_msg, User}),
lists:foreach(fun(Msg) -> mnesia:write(Msg) end, Keep);
true ->
ok
end
end,
NextRecord = mnesia:dirty_next(archive_msg, User),
case mnesia:transaction(F) of
{atomic, ok} ->
delete_old_user_messages(NextRecord, TimeStamp, Type);
{aborted, Err} ->
?ERROR_MSG("Cannot delete old MAM messages: ~ts", [Err]),
Err
end.
delete_batch('$end_of_table', _LServer, _TS, _Type, Num) ->
{Num, '$end_of_table'};
delete_batch(LastUS, _LServer, _TS, _Type, 0) ->
{0, LastUS};
delete_batch(none, LServer, TS, Type, Num) ->
delete_batch(mnesia:first(archive_msg), LServer, TS, Type, Num);
delete_batch({_, LServer2} = LastUS, LServer, TS, Type, Num) when LServer /= LServer2 ->
delete_batch(mnesia:next(archive_msg, LastUS), LServer, TS, Type, Num);
delete_batch(LastUS, LServer, TS, Type, Num) ->
Left =
lists:foldl(
fun(_, 0) ->
0;
(#archive_msg{timestamp = TS2, type = Type2} = O, Num2) when TS2 < TS, (Type == all orelse Type == Type2) ->
mnesia:delete_object(O),
Num2 - 1;
(_, Num2) ->
Num2
end, Num, mnesia:wread({archive_msg, LastUS})),
case Left of
0 -> {0, LastUS};
_ -> delete_batch(mnesia:next(archive_msg, LastUS), LServer, TS, Type, Left)
end.
delete_old_messages_batch(LServer, TimeStamp, Type, Batch, LastUS) ->
R = mnesia:transaction(
fun() ->
{Num, NextUS} = delete_batch(LastUS, LServer, TimeStamp, Type, Batch),
{Batch - Num, NextUS}
end),
case R of
{atomic, {Num, State}} ->
{ok, State, Num};
{aborted, Err} ->
{error, Err}
end.
extended_fields() ->
[].
store(Pkt, _, {LUser, LServer}, Type, Peer, Nick, _Dir, TS) ->
case {mnesia:table_info(archive_msg, disc_only_copies),
mnesia:table_info(archive_msg, memory)} of
{[_|_], TableSize} when TableSize > ?TABLE_SIZE_LIMIT ->
?ERROR_MSG("MAM archives too large, won't store message for ~ts@~ts",
[LUser, LServer]),
{error, overflow};
_ ->
LPeer = {PUser, PServer, _} = jid:tolower(Peer),
F = fun() ->
mnesia:write(
#archive_msg{us = {LUser, LServer},
id = integer_to_binary(TS),
timestamp = misc:usec_to_now(TS),
peer = LPeer,
bare_peer = {PUser, PServer, <<>>},
type = Type,
nick = Nick,
packet = Pkt})
end,
case mnesia:transaction(F) of
{atomic, ok} ->
ok;
{aborted, Err} ->
?ERROR_MSG("Cannot add message to MAM archive of ~ts@~ts: ~ts",
[LUser, LServer, Err]),
Err
end
end.
write_prefs(_LUser, _LServer, Prefs, _ServerHost) ->
mnesia:dirty_write(Prefs).
get_prefs(LUser, LServer) ->
case mnesia:dirty_read(archive_prefs, {LUser, LServer}) of
[Prefs] ->
{ok, Prefs};
_ ->
error
end.
select(_LServer, JidRequestor,
#jid{luser = LUser, lserver = LServer} = JidArchive,
Query, RSM, MsgType) ->
Start = proplists:get_value(start, Query),
End = proplists:get_value('end', Query),
With = proplists:get_value(with, Query),
LWith = if With /= undefined -> jid:tolower(With);
true -> undefined
end,
MS = make_matchspec(LUser, LServer, Start, End, LWith),
Msgs = mnesia:dirty_select(archive_msg, MS),
SortedMsgs = lists:keysort(#archive_msg.timestamp, Msgs),
{FilteredMsgs, IsComplete} = filter_by_rsm(SortedMsgs, RSM),
Count = length(Msgs),
Result = {lists:flatmap(
fun(Msg) ->
case mod_mam:msg_to_el(
Msg, MsgType, JidRequestor, JidArchive) of
{ok, El} ->
[{Msg#archive_msg.id,
binary_to_integer(Msg#archive_msg.id),
El}];
{error, _} ->
[]
end
end, FilteredMsgs), IsComplete, Count},
erlang:garbage_collect(),
Result.
is_empty_for_user(LUser, LServer) ->
mnesia:dirty_read(archive_msg, {LUser, LServer}) == [].
is_empty_for_room(_LServer, LName, LHost) ->
is_empty_for_user(LName, LHost).
Internal functions
make_matchspec(LUser, LServer, Start, undefined, With) ->
make_matchspec(LUser, LServer, Start, [], With);
make_matchspec(LUser, LServer, Start, End, {_, _, <<>>} = With) ->
ets:fun2ms(
fun(#archive_msg{timestamp = TS,
us = US,
bare_peer = BPeer} = Msg)
when Start =< TS, End >= TS,
US == {LUser, LServer},
BPeer == With ->
Msg
end);
make_matchspec(LUser, LServer, Start, End, {_, _, _} = With) ->
ets:fun2ms(
fun(#archive_msg{timestamp = TS,
us = US,
peer = Peer} = Msg)
when Start =< TS, End >= TS,
US == {LUser, LServer},
Peer == With ->
Msg
end);
make_matchspec(LUser, LServer, Start, End, undefined) ->
ets:fun2ms(
fun(#archive_msg{timestamp = TS,
us = US,
peer = Peer} = Msg)
when Start =< TS, End >= TS,
US == {LUser, LServer} ->
Msg
end).
filter_by_rsm(Msgs, undefined) ->
{Msgs, true};
filter_by_rsm(_Msgs, #rsm_set{max = Max}) when Max < 0 ->
{[], true};
filter_by_rsm(Msgs, #rsm_set{max = Max, before = Before, 'after' = After}) ->
NewMsgs = if is_binary(After), After /= <<"">> ->
lists:filter(
fun(#archive_msg{id = I}) ->
?BIN_GREATER_THAN(I, After)
end, Msgs);
is_binary(Before), Before /= <<"">> ->
lists:foldl(
fun(#archive_msg{id = I} = Msg, Acc)
when ?BIN_LESS_THAN(I, Before) ->
[Msg|Acc];
(_, Acc) ->
Acc
end, [], Msgs);
is_binary(Before), Before == <<"">> ->
lists:reverse(Msgs);
true ->
Msgs
end,
filter_by_max(NewMsgs, Max).
filter_by_max(Msgs, undefined) ->
{Msgs, true};
filter_by_max(Msgs, Len) when is_integer(Len), Len >= 0 ->
{lists:sublist(Msgs, Len), length(Msgs) =< Len};
filter_by_max(_Msgs, _Junk) ->
{[], true}.
|
74837f8d4ec9099599fd863de51a6d83b7e761c28aa99b885b18687b92d5117a | input-output-hk/ouroboros-network | Utils.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE NamedFieldPuns #-}
module TestLib.Utils where
import Control.Monad.Class.MonadTime (DiffTime, Time, diffTime)
import Control.Monad.IOSim
import Data.Bifoldable (bifoldMap)
import Data.Bitraversable (bimapAccumL)
import Data.List (dropWhileEnd, find, intercalate)
import qualified Data.List.Trace as Trace
import qualified Data.Map.Strict as Map
import Data.Maybe (fromJust, fromMaybe, isJust, isNothing)
import Data.Monoid (Sum (Sum))
import Text.Printf (printf)
import Test.QuickCheck (Arbitrary (..), Property, choose,
counterexample, cover, frequency, label, property, shrink,
tabulate, (.&&.))
import Network.TypedProtocol.Core (PeerHasAgency (..))
import Ouroboros.Network.ConnectionHandler (ConnectionHandlerTrace)
import Ouroboros.Network.ConnectionManager.Types
import Ouroboros.Network.Driver.Limits (ProtocolTimeLimits (..))
import Ouroboros.Network.Protocol.Handshake.Codec
(timeLimitsHandshake)
import Ouroboros.Network.Protocol.Handshake.Type
import qualified Ouroboros.Network.Snocket as Snocket
-- | Fold over a simulation trace, checking every connection's abstract
-- transition trace for timeout violations via 'verifyTimeouts'.
-- The simulation's terminal result must be 'MainReturn'; any other
-- terminal value fails the property with its 'show' output as the
-- counterexample. The Bool selects diffusion-mode checking and is
-- passed straight through to 'verifyTimeouts'.
verifyAllTimeouts :: Show addr
=> Bool
-> Trace (SimResult ()) [(Time, AbstractTransitionTrace addr)]
-> AllProperty
verifyAllTimeouts inDiffusion =
bifoldMap
( \ case
MainReturn {} -> mempty
v -> AllProperty
$ counterexample (show v) (property False)
)
(\ tr ->
AllProperty
$ counterexample ("\nConnection transition trace:\n"
++ intercalate "\n" (map show tr)
)
$ verifyTimeouts Nothing inDiffusion tr)
-- verifyTimeouts checks that in all \tau transition states the timeout is
-- respected. It does so by checking the stream of abstract transitions
-- paired with the time they happened, for a given connection; and checking
-- that transitions from \tau states to any other happens within the correct
-- timeout bounds. One note is that for the example
-- InboundIdleState^\tau -> OutboundState^\tau -> OutboundState sequence
-- the first transition would be fine, but for the second we need the time
-- when we transitioned into InboundIdleState and not OutboundState.
--
verifyTimeouts :: Maybe (AbstractState, Time)
-- ^ Map of first occurrence of a given \tau state
-> Bool
-- ^ If running in Diffusion or not
-> [(Time , AbstractTransitionTrace addr)]
-- ^ Stream of abstract transitions for a given connection
-- paired with the time it occurred
-> Property
verifyTimeouts state inDiffusion [] =
counterexample
("This state didn't timeout:\n"
++ show state
)
$ (inDiffusion || isNothing state)
-- If we already seen a \tau transition state
verifyTimeouts st@(Just (state, t')) inDiffusion
((t, TransitionTrace _ tt@(Transition _ to)):xs) =
let newState = Just (to, t)
idleTimeout =
1.1 * tProtocolIdleTimeout simTimeouts
outboundTimeout =
1.1 * tOutboundIdleTimeout simTimeouts
timeWaitTimeout =
1.1 * tTimeWaitTimeout simTimeouts
handshakeTimeout = case timeLimitsHandshake of
(ProtocolTimeLimits stLimit) ->
Should be the same but we bias to the shorter one
let time = min (fromMaybe 0 (stLimit (ClientAgency TokPropose)))
(fromMaybe 0 (stLimit (ServerAgency TokConfirm)))
in time + (0.1 * time)
in case state of
UnnegotiatedSt _ -> case to of
Timeout terminating states
OutboundUniSt ->
counterexample (errorMsg tt t' t handshakeTimeout)
$ diffTime t t' <= handshakeTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
InboundIdleSt Unidirectional ->
counterexample (errorMsg tt t' t handshakeTimeout)
$ diffTime t t' <= handshakeTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t handshakeTimeout)
$ diffTime t t' <= handshakeTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
-- These states terminate the current timeout
and starts a new one
OutboundDupSt Ticking ->
counterexample (errorMsg tt t' t handshakeTimeout)
$ diffTime t t' <= handshakeTimeout
.&&. verifyTimeouts newState inDiffusion xs
InboundIdleSt Duplex ->
counterexample (errorMsg tt t' t handshakeTimeout)
$ diffTime t t' <= handshakeTimeout
.&&. verifyTimeouts newState inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
InboundIdleSt Duplex -> case to of
-- Should preserve the timeout
OutboundDupSt Ticking -> verifyTimeouts st inDiffusion xs
InboundIdleSt Duplex -> verifyTimeouts st inDiffusion xs
Timeout terminating states
OutboundDupSt Expired ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
InboundSt Duplex ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
DuplexSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
-- This state terminates the current timeout
and starts a new one
TerminatingSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts newState inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
InboundIdleSt Unidirectional -> case to of
Timeout terminating states
InboundSt Unidirectional ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
-- This state terminates the current timeout
and starts a new one
TerminatingSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts newState inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
OutboundDupSt Ticking -> case to of
-- Should preserve the timeout
InboundIdleSt Duplex -> verifyTimeouts st inDiffusion xs
OutboundDupSt Ticking -> verifyTimeouts st inDiffusion xs
Timeout terminating states
OutboundDupSt Expired ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
DuplexSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
InboundSt Duplex ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
-- This state terminates the current timeout
and starts a new one
TerminatingSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts newState inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
OutboundIdleSt _ -> case to of
Timeout terminating states
InboundSt Duplex ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
-- This state terminates the current timeout
and starts a new one
TerminatingSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts newState inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
TerminatingSt -> case to of
Timeout terminating states
UnnegotiatedSt Inbound ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= timeWaitTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= timeWaitTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
_ -> error ("Should be a \tau state: " ++ show st)
where
errorMsg trans time' time maxDiffTime =
"\nAt transition: " ++ show trans ++ "\n"
++ "First happened at: " ++ show time' ++ "\n"
++ "Second happened at: " ++ show time ++ "\n"
++ "Should only take: "
++ show maxDiffTime
++ ", but took:" ++ show (diffTime time time')
-- If we haven't seen a \tau transition state
verifyTimeouts Nothing inDiffusion ((t, TransitionTrace _ (Transition _ to)):xs) =
let newState = Just (to, t)
in case to of
InboundIdleSt _ -> verifyTimeouts newState inDiffusion xs
OutboundDupSt Ticking -> verifyTimeouts newState inDiffusion xs
OutboundIdleSt _ -> verifyTimeouts newState inDiffusion xs
TerminatingSt -> verifyTimeouts newState inDiffusion xs
_ -> verifyTimeouts Nothing inDiffusion xs
| Configurable timeouts . We use different timeouts for ' IO ' and ' ' property tests .
--
data Timeouts = Timeouts {
tProtocolIdleTimeout :: DiffTime,
tOutboundIdleTimeout :: DiffTime,
tTimeWaitTimeout :: DiffTime
}
-- | Timeouts for 'IO' tests.
--
ioTimeouts :: Timeouts
ioTimeouts = Timeouts {
tProtocolIdleTimeout = 0.1,
tOutboundIdleTimeout = 0.1,
tTimeWaitTimeout = 0.1
}
| Timeouts for ' ' tests .
--
simTimeouts :: Timeouts
simTimeouts = Timeouts {
tProtocolIdleTimeout = 5,
tOutboundIdleTimeout = 5,
tTimeWaitTimeout = 30
}
| Groups ' TransitionTrace ' to the same peerAddr .
--
groupConns :: Ord addr
=> (a -> TransitionTrace' addr st)
-> (Transition' st -> Bool)
-> Trace r a
-> Trace r [a]
groupConns getTransition isFinalTransition =
fmap fromJust
. Trace.filter isJust
-- there might be some connections in the state, push them onto the 'Trace'
. (\(s, o) -> foldr (\a as -> Trace.Cons (Just (reverse a)) as) o (Map.elems s))
. bimapAccumL
( \ s a -> (s, a))
( \ s a ->
let TransitionTrace { ttPeerAddr, ttTransition } = getTransition a
in if isFinalTransition ttTransition
then case ttPeerAddr `Map.lookup` s of
Nothing -> ( Map.insert ttPeerAddr [a] s
, Nothing
)
Just trs -> ( Map.delete ttPeerAddr s
, Just (reverse $ a : trs)
)
else ( Map.alter (\case
Nothing -> Just [a]
Just as -> Just (a : as)
)
ttPeerAddr s
, Nothing)
)
Map.empty
-- | The concrete address type used by simulations.
--
type SimAddr = Snocket.TestAddress SimAddr_
type SimAddr_ = Int
| We use a wrapper for test addresses since the Arbitrary instance for Snocket . TestAddress only
generates addresses between 1 and 4 .
newtype TestAddr = TestAddr { unTestAddr :: SimAddr }
deriving (Show, Eq, Ord)
instance Arbitrary TestAddr where
arbitrary = TestAddr . Snocket.TestAddress <$> choose (1, 100)
-- | Test property together with classification.
--
data TestProperty = TestProperty {
tpProperty :: !Property,
-- ^ 'True' if property is true
tpNumberOfTransitions :: !(Sum Int),
-- ^ number of all transitions
tpNumberOfConnections :: !(Sum Int),
-- ^ number of all connections
tpNumberOfPrunings :: !(Sum Int),
-- ^ number of all connections
--
-- classification of connections
--
tpNegotiatedDataFlows :: ![NegotiatedDataFlow],
tpEffectiveDataFlows :: ![EffectiveDataFlow],
tpTerminationTypes :: ![TerminationType],
tpActivityTypes :: ![ActivityType],
tpTransitions :: ![AbstractTransition]
}
instance Show TestProperty where
show tp =
concat [ "TestProperty "
, "{ tpNumberOfTransitions = " ++ show (tpNumberOfTransitions tp)
, ", tpNumberOfConnections = " ++ show (tpNumberOfConnections tp)
, ", tpNumberOfPrunings = " ++ show (tpNumberOfPrunings tp)
, ", tpNegotiatedDataFlows = " ++ show (tpNegotiatedDataFlows tp)
, ", tpTerminationTypes = " ++ show (tpTerminationTypes tp)
, ", tpActivityTypes = " ++ show (tpActivityTypes tp)
, ", tpTransitions = " ++ show (tpTransitions tp)
, "}"
]
instance Semigroup TestProperty where
(<>) (TestProperty a0 a1 a2 a3 a4 a5 a6 a7 a8)
(TestProperty b0 b1 b2 b3 b4 b5 b6 b7 b8) =
TestProperty (a0 .&&. b0)
(a1 <> b1)
(a2 <> b2)
(a3 <> b3)
(a4 <> b4)
(a5 <> b5)
(a6 <> b6)
(a7 <> b7)
(a8 <> b8)
instance Monoid TestProperty where
mempty = TestProperty (property True)
mempty mempty mempty mempty
mempty mempty mempty mempty
mkProperty :: TestProperty -> Property
mkProperty TestProperty { tpProperty
, tpNumberOfTransitions = Sum numberOfTransitions_
, tpNumberOfConnections = Sum numberOfConnections_
, tpNumberOfPrunings = Sum numberOfPrunings_
, tpNegotiatedDataFlows
, tpEffectiveDataFlows
, tpTerminationTypes
, tpActivityTypes
, tpTransitions
} =
label ("Number of transitions: " ++ within_ 10 numberOfTransitions_
)
. label ("Number of connections: " ++ show numberOfConnections_
)
. tabulate "Pruning" [show numberOfPrunings_]
. tabulate "Negotiated DataFlow" (map show tpNegotiatedDataFlows)
. tabulate "Effective DataFLow" (map show tpEffectiveDataFlows)
. tabulate "Termination" (map show tpTerminationTypes)
. tabulate "Activity Type" (map show tpActivityTypes)
. tabulate "Transitions" (map ppTransition tpTransitions)
$ tpProperty
mkPropertyPruning :: TestProperty -> Property
mkPropertyPruning tp@TestProperty { tpNumberOfPrunings = Sum numberOfPrunings_ } =
cover 35 (numberOfPrunings_ > 0) "Prunings"
. mkProperty
$ tp
-- classify negotiated data flow
classifyNegotiatedDataFlow :: [AbstractTransition] -> NegotiatedDataFlow
classifyNegotiatedDataFlow as =
case find ( \ tr
-> case toState tr of
OutboundUniSt -> True
OutboundDupSt {} -> True
InboundIdleSt {} -> True
_ -> False
) as of
Nothing -> NotNegotiated
Just tr ->
case toState tr of
OutboundUniSt -> NegotiatedDataFlow Unidirectional
OutboundDupSt {} -> NegotiatedDataFlow Duplex
(InboundIdleSt df) -> NegotiatedDataFlow df
_ -> error "impossible happened!"
-- classify effective data flow
classifyEffectiveDataFlow :: [AbstractTransition] -> EffectiveDataFlow
classifyEffectiveDataFlow as =
case find ((== DuplexSt) . toState) as of
Nothing -> EffectiveDataFlow Unidirectional
Just _ -> EffectiveDataFlow Duplex
-- classify termination
classifyTermination :: [AbstractTransition] -> TerminationType
classifyTermination as =
case last $ dropWhileEnd
(== Transition TerminatedSt TerminatedSt)
$ dropWhileEnd
(== Transition TerminatedSt UnknownConnectionSt) as of
Transition { fromState = TerminatingSt
, toState = TerminatedSt
} -> CleanTermination
_ -> ErroredTermination
-- classify if a connection is active or not
classifyActivityType :: [AbstractTransition] -> ActivityType
classifyActivityType as =
case find ( \ tr
-> case toState tr of
InboundSt {} -> True
OutboundUniSt -> True
OutboundDupSt {} -> True
DuplexSt {} -> True
_ -> False
) as of
Nothing -> IdleConn
Just {} -> ActiveConn
-- classify negotiated data flow
classifyPrunings :: [ConnectionManagerTrace
addr
(ConnectionHandlerTrace
prctl
dataflow)]
-> Sum Int
classifyPrunings =
Sum
. length
. filter ( \x -> case x of
TrPruneConnections _ _ _ -> True
_ -> False
)
newtype AllProperty = AllProperty { getAllProperty :: Property }
instance Semigroup AllProperty where
AllProperty a <> AllProperty b = AllProperty (a .&&. b)
instance Monoid AllProperty where
mempty = AllProperty (property True)
newtype ArbDataFlow = ArbDataFlow DataFlow
deriving Show
instance Arbitrary ArbDataFlow where
arbitrary = ArbDataFlow <$> frequency [ (3, pure Duplex)
, (1, pure Unidirectional)
]
shrink (ArbDataFlow Duplex) = [ArbDataFlow Unidirectional]
shrink (ArbDataFlow Unidirectional) = []
data ActivityType
= IdleConn
-- | Active connections are once that reach any of the state:
--
-- - 'InboundSt'
-- - 'OutobundUniSt'
-- - 'OutboundDupSt'
-- - 'DuplexSt'
--
| ActiveConn
deriving (Eq, Show)
data TerminationType
= ErroredTermination
| CleanTermination
deriving (Eq, Show)
data NegotiatedDataFlow
= NotNegotiated
| Negotiated value of ' DataFlow '
| NegotiatedDataFlow DataFlow
deriving (Eq, Show)
data EffectiveDataFlow
| Unlike the negotiated ' DataFlow ' this indicates if the connection has
-- ever been in 'DuplexSt'
--
= EffectiveDataFlow DataFlow
deriving (Eq, Show)
within_ :: Int -> Int -> String
within_ _ 0 = "0"
within_ a b = let x = b `div` a in
concat [ if b < a
then "1"
else show $ x * a
, " - "
, show $ x * a + a - 1
]
ppTransition :: AbstractTransition -> String
ppTransition Transition {fromState, toState} =
printf "%-30s → %s" (show fromState) (show toState)
| null | https://raw.githubusercontent.com/input-output-hk/ouroboros-network/163408cb58e13ac1ad63b8c947a71c491d00c4f8/ouroboros-network-framework/testlib/TestLib/Utils.hs | haskell | verifyTimeouts checks that in all \tau transition states the timeout is
respected. It does so by checking the stream of abstract transitions
paired with the time they happened, for a given connection; and checking
that transitions from \tau states to any other happens within the correct
^ If runnning in Diffusion or not
^ Stream of abstract transitions for a given connection
paired with the time it occurred
If we already seen a \tau transition state
These states terminate the current timeout
Should preserve the timeout
This state terminates the current timeout
This state terminates the current timeout
Should preserve the timeout
This state terminates the current timeout
This state terminates the current timeout
If we haven't seen a \tau transition state
| Timeouts for 'IO' tests.
there might be some connections in the state, push them onto the 'Trace'
| The concrete address type used by simulations.
| Test property together with classification.
^ 'True' if property is true
^ number of all transitions
^ number of all connections
^ number of all connections
classification of connections
classify negotiated data flow
classify effective data flow
classify termination
classify if a connection is active or not
classify negotiated data flow
| Active connections are once that reach any of the state:
- 'InboundSt'
- 'OutobundUniSt'
- 'OutboundDupSt'
- 'DuplexSt'
ever been in 'DuplexSt'
| # LANGUAGE LambdaCase #
# LANGUAGE NamedFieldPuns #
module TestLib.Utils where
import Control.Monad.Class.MonadTime (DiffTime, Time, diffTime)
import Control.Monad.IOSim
import Data.Bifoldable (bifoldMap)
import Data.Bitraversable (bimapAccumL)
import Data.List (dropWhileEnd, find, intercalate)
import qualified Data.List.Trace as Trace
import qualified Data.Map.Strict as Map
import Data.Maybe (fromJust, fromMaybe, isJust, isNothing)
import Data.Monoid (Sum (Sum))
import Text.Printf (printf)
import Test.QuickCheck (Arbitrary (..), Property, choose,
counterexample, cover, frequency, label, property, shrink,
tabulate, (.&&.))
import Network.TypedProtocol.Core (PeerHasAgency (..))
import Ouroboros.Network.ConnectionHandler (ConnectionHandlerTrace)
import Ouroboros.Network.ConnectionManager.Types
import Ouroboros.Network.Driver.Limits (ProtocolTimeLimits (..))
import Ouroboros.Network.Protocol.Handshake.Codec
(timeLimitsHandshake)
import Ouroboros.Network.Protocol.Handshake.Type
import qualified Ouroboros.Network.Snocket as Snocket
verifyAllTimeouts :: Show addr
=> Bool
-> Trace (SimResult ()) [(Time, AbstractTransitionTrace addr)]
-> AllProperty
verifyAllTimeouts inDiffusion =
bifoldMap
( \ case
MainReturn {} -> mempty
v -> AllProperty
$ counterexample (show v) (property False)
)
(\ tr ->
AllProperty
$ counterexample ("\nConnection transition trace:\n"
++ intercalate "\n" (map show tr)
)
$ verifyTimeouts Nothing inDiffusion tr)
timeout bounds . One note is that for the example
InboundIdleState^\tau - > OutboundState^\tau - > OutboundState sequence
The first transition would be fine , but for the second we need the time
when we transitioned into InboundIdleState and not OutboundState .
verifyTimeouts :: Maybe (AbstractState, Time)
^ Map of first occurrence of a given \tau state
-> Bool
-> [(Time , AbstractTransitionTrace addr)]
-> Property
verifyTimeouts state inDiffusion [] =
counterexample
("This state didn't timeout:\n"
++ show state
)
$ (inDiffusion || isNothing state)
verifyTimeouts st@(Just (state, t')) inDiffusion
((t, TransitionTrace _ tt@(Transition _ to)):xs) =
let newState = Just (to, t)
idleTimeout =
1.1 * tProtocolIdleTimeout simTimeouts
outboundTimeout =
1.1 * tOutboundIdleTimeout simTimeouts
timeWaitTimeout =
1.1 * tTimeWaitTimeout simTimeouts
handshakeTimeout = case timeLimitsHandshake of
(ProtocolTimeLimits stLimit) ->
Should be the same but we bias to the shorter one
let time = min (fromMaybe 0 (stLimit (ClientAgency TokPropose)))
(fromMaybe 0 (stLimit (ServerAgency TokConfirm)))
in time + (0.1 * time)
in case state of
UnnegotiatedSt _ -> case to of
Timeout terminating states
OutboundUniSt ->
counterexample (errorMsg tt t' t handshakeTimeout)
$ diffTime t t' <= handshakeTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
InboundIdleSt Unidirectional ->
counterexample (errorMsg tt t' t handshakeTimeout)
$ diffTime t t' <= handshakeTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t handshakeTimeout)
$ diffTime t t' <= handshakeTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
and starts a new one
OutboundDupSt Ticking ->
counterexample (errorMsg tt t' t handshakeTimeout)
$ diffTime t t' <= handshakeTimeout
.&&. verifyTimeouts newState inDiffusion xs
InboundIdleSt Duplex ->
counterexample (errorMsg tt t' t handshakeTimeout)
$ diffTime t t' <= handshakeTimeout
.&&. verifyTimeouts newState inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
InboundIdleSt Duplex -> case to of
OutboundDupSt Ticking -> verifyTimeouts st inDiffusion xs
InboundIdleSt Duplex -> verifyTimeouts st inDiffusion xs
Timeout terminating states
OutboundDupSt Expired ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
InboundSt Duplex ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
DuplexSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
and starts a new one
TerminatingSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts newState inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
InboundIdleSt Unidirectional -> case to of
Timeout terminating states
InboundSt Unidirectional ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
and starts a new one
TerminatingSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= idleTimeout
.&&. verifyTimeouts newState inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
OutboundDupSt Ticking -> case to of
InboundIdleSt Duplex -> verifyTimeouts st inDiffusion xs
OutboundDupSt Ticking -> verifyTimeouts st inDiffusion xs
Timeout terminating states
OutboundDupSt Expired ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
DuplexSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
InboundSt Duplex ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
and starts a new one
TerminatingSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts newState inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
OutboundIdleSt _ -> case to of
Timeout terminating states
InboundSt Duplex ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
and starts a new one
TerminatingSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= outboundTimeout
.&&. verifyTimeouts newState inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
TerminatingSt -> case to of
Timeout terminating states
UnnegotiatedSt Inbound ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= timeWaitTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
TerminatedSt ->
counterexample (errorMsg tt t' t idleTimeout)
$ diffTime t t' <= timeWaitTimeout
.&&. verifyTimeouts Nothing inDiffusion xs
_ -> error ("Unexpected invalid transition: " ++ show (st, tt))
_ -> error ("Should be a \tau state: " ++ show st)
where
errorMsg trans time' time maxDiffTime =
"\nAt transition: " ++ show trans ++ "\n"
++ "First happened at: " ++ show time' ++ "\n"
++ "Second happened at: " ++ show time ++ "\n"
++ "Should only take: "
++ show maxDiffTime
++ ", but took:" ++ show (diffTime time time')
verifyTimeouts Nothing inDiffusion ((t, TransitionTrace _ (Transition _ to)):xs) =
let newState = Just (to, t)
in case to of
InboundIdleSt _ -> verifyTimeouts newState inDiffusion xs
OutboundDupSt Ticking -> verifyTimeouts newState inDiffusion xs
OutboundIdleSt _ -> verifyTimeouts newState inDiffusion xs
TerminatingSt -> verifyTimeouts newState inDiffusion xs
_ -> verifyTimeouts Nothing inDiffusion xs
| Configurable timeouts . We use different timeouts for ' IO ' and ' ' property tests .
data Timeouts = Timeouts {
tProtocolIdleTimeout :: DiffTime,
tOutboundIdleTimeout :: DiffTime,
tTimeWaitTimeout :: DiffTime
}
ioTimeouts :: Timeouts
ioTimeouts = Timeouts {
tProtocolIdleTimeout = 0.1,
tOutboundIdleTimeout = 0.1,
tTimeWaitTimeout = 0.1
}
| Timeouts for ' ' tests .
simTimeouts :: Timeouts
simTimeouts = Timeouts {
tProtocolIdleTimeout = 5,
tOutboundIdleTimeout = 5,
tTimeWaitTimeout = 30
}
| Groups ' TransitionTrace ' to the same peerAddr .
groupConns :: Ord addr
=> (a -> TransitionTrace' addr st)
-> (Transition' st -> Bool)
-> Trace r a
-> Trace r [a]
groupConns getTransition isFinalTransition =
fmap fromJust
. Trace.filter isJust
. (\(s, o) -> foldr (\a as -> Trace.Cons (Just (reverse a)) as) o (Map.elems s))
. bimapAccumL
( \ s a -> (s, a))
( \ s a ->
let TransitionTrace { ttPeerAddr, ttTransition } = getTransition a
in if isFinalTransition ttTransition
then case ttPeerAddr `Map.lookup` s of
Nothing -> ( Map.insert ttPeerAddr [a] s
, Nothing
)
Just trs -> ( Map.delete ttPeerAddr s
, Just (reverse $ a : trs)
)
else ( Map.alter (\case
Nothing -> Just [a]
Just as -> Just (a : as)
)
ttPeerAddr s
, Nothing)
)
Map.empty
type SimAddr = Snocket.TestAddress SimAddr_
type SimAddr_ = Int
| We use a wrapper for test addresses since the Arbitrary instance for Snocket . TestAddress only
generates addresses between 1 and 4 .
newtype TestAddr = TestAddr { unTestAddr :: SimAddr }
deriving (Show, Eq, Ord)
instance Arbitrary TestAddr where
arbitrary = TestAddr . Snocket.TestAddress <$> choose (1, 100)
data TestProperty = TestProperty {
tpProperty :: !Property,
tpNumberOfTransitions :: !(Sum Int),
tpNumberOfConnections :: !(Sum Int),
tpNumberOfPrunings :: !(Sum Int),
tpNegotiatedDataFlows :: ![NegotiatedDataFlow],
tpEffectiveDataFlows :: ![EffectiveDataFlow],
tpTerminationTypes :: ![TerminationType],
tpActivityTypes :: ![ActivityType],
tpTransitions :: ![AbstractTransition]
}
instance Show TestProperty where
show tp =
concat [ "TestProperty "
, "{ tpNumberOfTransitions = " ++ show (tpNumberOfTransitions tp)
, ", tpNumberOfConnections = " ++ show (tpNumberOfConnections tp)
, ", tpNumberOfPrunings = " ++ show (tpNumberOfPrunings tp)
, ", tpNegotiatedDataFlows = " ++ show (tpNegotiatedDataFlows tp)
, ", tpTerminationTypes = " ++ show (tpTerminationTypes tp)
, ", tpActivityTypes = " ++ show (tpActivityTypes tp)
, ", tpTransitions = " ++ show (tpTransitions tp)
, "}"
]
instance Semigroup TestProperty where
(<>) (TestProperty a0 a1 a2 a3 a4 a5 a6 a7 a8)
(TestProperty b0 b1 b2 b3 b4 b5 b6 b7 b8) =
TestProperty (a0 .&&. b0)
(a1 <> b1)
(a2 <> b2)
(a3 <> b3)
(a4 <> b4)
(a5 <> b5)
(a6 <> b6)
(a7 <> b7)
(a8 <> b8)
instance Monoid TestProperty where
mempty = TestProperty (property True)
mempty mempty mempty mempty
mempty mempty mempty mempty
mkProperty :: TestProperty -> Property
mkProperty TestProperty { tpProperty
, tpNumberOfTransitions = Sum numberOfTransitions_
, tpNumberOfConnections = Sum numberOfConnections_
, tpNumberOfPrunings = Sum numberOfPrunings_
, tpNegotiatedDataFlows
, tpEffectiveDataFlows
, tpTerminationTypes
, tpActivityTypes
, tpTransitions
} =
label ("Number of transitions: " ++ within_ 10 numberOfTransitions_
)
. label ("Number of connections: " ++ show numberOfConnections_
)
. tabulate "Pruning" [show numberOfPrunings_]
. tabulate "Negotiated DataFlow" (map show tpNegotiatedDataFlows)
. tabulate "Effective DataFLow" (map show tpEffectiveDataFlows)
. tabulate "Termination" (map show tpTerminationTypes)
. tabulate "Activity Type" (map show tpActivityTypes)
. tabulate "Transitions" (map ppTransition tpTransitions)
$ tpProperty
mkPropertyPruning :: TestProperty -> Property
mkPropertyPruning tp@TestProperty { tpNumberOfPrunings = Sum numberOfPrunings_ } =
cover 35 (numberOfPrunings_ > 0) "Prunings"
. mkProperty
$ tp
classifyNegotiatedDataFlow :: [AbstractTransition] -> NegotiatedDataFlow
classifyNegotiatedDataFlow as =
case find ( \ tr
-> case toState tr of
OutboundUniSt -> True
OutboundDupSt {} -> True
InboundIdleSt {} -> True
_ -> False
) as of
Nothing -> NotNegotiated
Just tr ->
case toState tr of
OutboundUniSt -> NegotiatedDataFlow Unidirectional
OutboundDupSt {} -> NegotiatedDataFlow Duplex
(InboundIdleSt df) -> NegotiatedDataFlow df
_ -> error "impossible happened!"
classifyEffectiveDataFlow :: [AbstractTransition] -> EffectiveDataFlow
classifyEffectiveDataFlow as =
case find ((== DuplexSt) . toState) as of
Nothing -> EffectiveDataFlow Unidirectional
Just _ -> EffectiveDataFlow Duplex
classifyTermination :: [AbstractTransition] -> TerminationType
classifyTermination as =
case last $ dropWhileEnd
(== Transition TerminatedSt TerminatedSt)
$ dropWhileEnd
(== Transition TerminatedSt UnknownConnectionSt) as of
Transition { fromState = TerminatingSt
, toState = TerminatedSt
} -> CleanTermination
_ -> ErroredTermination
classifyActivityType :: [AbstractTransition] -> ActivityType
classifyActivityType as =
case find ( \ tr
-> case toState tr of
InboundSt {} -> True
OutboundUniSt -> True
OutboundDupSt {} -> True
DuplexSt {} -> True
_ -> False
) as of
Nothing -> IdleConn
Just {} -> ActiveConn
classifyPrunings :: [ConnectionManagerTrace
addr
(ConnectionHandlerTrace
prctl
dataflow)]
-> Sum Int
classifyPrunings =
Sum
. length
. filter ( \x -> case x of
TrPruneConnections _ _ _ -> True
_ -> False
)
newtype AllProperty = AllProperty { getAllProperty :: Property }
instance Semigroup AllProperty where
AllProperty a <> AllProperty b = AllProperty (a .&&. b)
instance Monoid AllProperty where
mempty = AllProperty (property True)
newtype ArbDataFlow = ArbDataFlow DataFlow
deriving Show
instance Arbitrary ArbDataFlow where
arbitrary = ArbDataFlow <$> frequency [ (3, pure Duplex)
, (1, pure Unidirectional)
]
shrink (ArbDataFlow Duplex) = [ArbDataFlow Unidirectional]
shrink (ArbDataFlow Unidirectional) = []
data ActivityType
= IdleConn
| ActiveConn
deriving (Eq, Show)
data TerminationType
= ErroredTermination
| CleanTermination
deriving (Eq, Show)
data NegotiatedDataFlow
= NotNegotiated
| Negotiated value of ' DataFlow '
| NegotiatedDataFlow DataFlow
deriving (Eq, Show)
data EffectiveDataFlow
| Unlike the negotiated ' DataFlow ' this indicates if the connection has
= EffectiveDataFlow DataFlow
deriving (Eq, Show)
within_ :: Int -> Int -> String
within_ _ 0 = "0"
within_ a b = let x = b `div` a in
concat [ if b < a
then "1"
else show $ x * a
, " - "
, show $ x * a + a - 1
]
ppTransition :: AbstractTransition -> String
ppTransition Transition {fromState, toState} =
printf "%-30s → %s" (show fromState) (show toState)
|
7732cedb70fc3d5d9d1e95942ddb41f52bbcc2cfbb03cbf1500fa2ac149752b0 | penpot/penpot | websocket.cljs | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
;;
;; Copyright (c) KALEIDOS INC
(ns app.main.data.websocket
(:require
[app.common.data.macros :as dm]
[app.common.logging :as l]
[app.common.uri :as u]
[app.config :as cf]
[app.util.websocket :as ws]
[beicon.core :as rx]
[potok.core :as ptk]))
(l/set-level! :error)
(dm/export ws/send!)
(defonce ws-conn (volatile! nil))
(defn- prepare-uri
[params]
(let [base (-> @cf/public-uri
(u/join "ws/notifications")
(assoc :query (u/map->query-string params)))]
(cond-> base
(= "https" (:scheme base))
(assoc :scheme "wss")
(= "http" (:scheme base))
(assoc :scheme "ws"))))
(defn send
[message]
(ptk/reify ::send-message
ptk/EffectEvent
(effect [_ _ _]
(some-> @ws-conn (ws/send! message)))))
(defn initialize
[]
(ptk/reify ::initialize
ptk/WatchEvent
(watch [_ state stream]
(l/trace :hint "event:initialize" :fn "watch")
(let [sid (:session-id state)
uri (prepare-uri {:session-id sid})
ws (ws/create uri)]
(vreset! ws-conn ws)
(let [stoper (rx/merge
(rx/filter (ptk/type? ::finalize) stream)
(rx/filter (ptk/type? ::initialize) stream))]
(->> (rx/merge
(rx/of #(assoc % :ws-conn ws))
(->> (ws/get-rcv-stream ws)
(rx/filter ws/message-event?)
(rx/map :payload)
(rx/map #(ptk/data-event ::message %)))
(->> (ws/get-rcv-stream ws)
(rx/filter ws/opened-event?)
(rx/map (fn [_] (ptk/data-event ::opened {})))))
(rx/take-until stoper)))))))
;; --- Finalize Websocket
(defn finalize
[]
(ptk/reify ::finalize
ptk/UpdateEvent
(update [_ state]
(dissoc state :ws-conn))
ptk/EffectEvent
(effect [_ _ _]
(l/trace :hint "event:finalize" :fn "effect")
(some-> @ws-conn ws/close!))))
| null | https://raw.githubusercontent.com/penpot/penpot/cc18f84d620e37d8efafc5bed1bcdbe70ec23c1e/frontend/src/app/main/data/websocket.cljs | clojure |
Copyright (c) KALEIDOS INC
--- Finalize Websocket | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(ns app.main.data.websocket
(:require
[app.common.data.macros :as dm]
[app.common.logging :as l]
[app.common.uri :as u]
[app.config :as cf]
[app.util.websocket :as ws]
[beicon.core :as rx]
[potok.core :as ptk]))
(l/set-level! :error)
(dm/export ws/send!)
(defonce ws-conn (volatile! nil))
(defn- prepare-uri
[params]
(let [base (-> @cf/public-uri
(u/join "ws/notifications")
(assoc :query (u/map->query-string params)))]
(cond-> base
(= "https" (:scheme base))
(assoc :scheme "wss")
(= "http" (:scheme base))
(assoc :scheme "ws"))))
(defn send
[message]
(ptk/reify ::send-message
ptk/EffectEvent
(effect [_ _ _]
(some-> @ws-conn (ws/send! message)))))
(defn initialize
[]
(ptk/reify ::initialize
ptk/WatchEvent
(watch [_ state stream]
(l/trace :hint "event:initialize" :fn "watch")
(let [sid (:session-id state)
uri (prepare-uri {:session-id sid})
ws (ws/create uri)]
(vreset! ws-conn ws)
(let [stoper (rx/merge
(rx/filter (ptk/type? ::finalize) stream)
(rx/filter (ptk/type? ::initialize) stream))]
(->> (rx/merge
(rx/of #(assoc % :ws-conn ws))
(->> (ws/get-rcv-stream ws)
(rx/filter ws/message-event?)
(rx/map :payload)
(rx/map #(ptk/data-event ::message %)))
(->> (ws/get-rcv-stream ws)
(rx/filter ws/opened-event?)
(rx/map (fn [_] (ptk/data-event ::opened {})))))
(rx/take-until stoper)))))))
(defn finalize
[]
(ptk/reify ::finalize
ptk/UpdateEvent
(update [_ state]
(dissoc state :ws-conn))
ptk/EffectEvent
(effect [_ _ _]
(l/trace :hint "event:finalize" :fn "effect")
(some-> @ws-conn ws/close!))))
|
e006a577a79cabcf5e1035c71eebc29bbf9c4bbee037ecf5847d96274acb9c3b | DeathKing/Hit-DataStructure-On-Scheme | ex1-3.scm | ;;; Find k in a vector
;;;
Written : >
;;; Find k in a vector
(define (vector-find-k v k)
(let ((l (vector-length v))
(i 0))
(if (= l 0)
#f
(let loop ((e (vector-ref v i))
(i 0))
(cond
((= e k) #t)
((= l (+ i 1)) #f)
(else
(loop (vector-ref v (+ i 1)) (+ i 1))))))))
;;; Find k in a list
(define (list-find-k l k)
(if (null? l)
#f
(if (= (car l) k)
#t
(list-find-k (cdr l) k))))
| null | https://raw.githubusercontent.com/DeathKing/Hit-DataStructure-On-Scheme/11677e3c6053d6c5b37cf0509885f74ab5c2bab9/exercise1/ex1-3.scm | scheme | Find k in a vector
Find k in a vector
Find k in a list | Written : >
(define (vector-find-k v k)
(let ((l (vector-length v))
(i 0))
(if (= l 0)
#f
(let loop ((e (vector-ref v i))
(i 0))
(cond
((= e k) #t)
((= l (+ i 1)) #f)
(else
(loop (vector-ref v (+ i 1)) (+ i 1))))))))
(define (list-find-k l k)
(if (null? l)
#f
(if (= (car l) k)
#t
(list-find-k (cdr l) k))))
|
8fca63570da9ede147235de67b8f32bc344529d3ab650450b59ffc1d8c6050ea | expipiplus1/vulkan | QueryResultFlagBits.hs | {-# language CPP #-}
-- No documentation found for Chapter "QueryResultFlagBits"
module Vulkan.Core10.Enums.QueryResultFlagBits ( QueryResultFlags
, QueryResultFlagBits( QUERY_RESULT_64_BIT
, QUERY_RESULT_WAIT_BIT
, QUERY_RESULT_WITH_AVAILABILITY_BIT
, QUERY_RESULT_PARTIAL_BIT
, ..
)
) where
import Data.Bits (Bits)
import Data.Bits (FiniteBits)
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import GHC.Show (showString)
import Numeric (showHex)
import Vulkan.Zero (Zero)
import Foreign.Storable (Storable)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Vulkan.Core10.FundamentalTypes (Flags)
type QueryResultFlags = QueryResultFlagBits
-- | VkQueryResultFlagBits - Bitmask specifying how and when query results
-- are returned
--
-- = See Also
--
-- <-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
-- 'QueryResultFlags'
newtype QueryResultFlagBits = QueryResultFlagBits Flags
deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits)
-- | 'QUERY_RESULT_64_BIT' specifies the results will be written as an array
of 64 - bit unsigned integer values . If this bit is not set , the results
will be written as an array of 32 - bit unsigned integer values .
pattern QUERY_RESULT_64_BIT = QueryResultFlagBits 0x00000001
| ' QUERY_RESULT_WAIT_BIT ' specifies that Vulkan will wait for each query ’s
-- status to become available before retrieving its results.
pattern QUERY_RESULT_WAIT_BIT = QueryResultFlagBits 0x00000002
-- | 'QUERY_RESULT_WITH_AVAILABILITY_BIT' specifies that the availability
-- status accompanies the results.
pattern QUERY_RESULT_WITH_AVAILABILITY_BIT = QueryResultFlagBits 0x00000004
-- | 'QUERY_RESULT_PARTIAL_BIT' specifies that returning partial results is
-- acceptable.
pattern QUERY_RESULT_PARTIAL_BIT = QueryResultFlagBits 0x00000008
conNameQueryResultFlagBits :: String
conNameQueryResultFlagBits = "QueryResultFlagBits"
enumPrefixQueryResultFlagBits :: String
enumPrefixQueryResultFlagBits = "QUERY_RESULT_"
showTableQueryResultFlagBits :: [(QueryResultFlagBits, String)]
showTableQueryResultFlagBits =
[ (QUERY_RESULT_64_BIT, "64_BIT")
, (QUERY_RESULT_WAIT_BIT, "WAIT_BIT")
,
( QUERY_RESULT_WITH_AVAILABILITY_BIT
, "WITH_AVAILABILITY_BIT"
)
, (QUERY_RESULT_PARTIAL_BIT, "PARTIAL_BIT")
]
instance Show QueryResultFlagBits where
showsPrec =
enumShowsPrec
enumPrefixQueryResultFlagBits
showTableQueryResultFlagBits
conNameQueryResultFlagBits
(\(QueryResultFlagBits x) -> x)
(\x -> showString "0x" . showHex x)
instance Read QueryResultFlagBits where
readPrec =
enumReadPrec
enumPrefixQueryResultFlagBits
showTableQueryResultFlagBits
conNameQueryResultFlagBits
QueryResultFlagBits
| null | https://raw.githubusercontent.com/expipiplus1/vulkan/70d8cca16893f8de76c0eb89e79e73f5a455db76/src/Vulkan/Core10/Enums/QueryResultFlagBits.hs | haskell | # language CPP #
No documentation found for Chapter "QueryResultFlagBits"
| VkQueryResultFlagBits - Bitmask specifying how and when query results
are returned
= See Also
<-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>,
'QueryResultFlags'
| 'QUERY_RESULT_64_BIT' specifies the results will be written as an array
status to become available before retrieving its results.
| 'QUERY_RESULT_WITH_AVAILABILITY_BIT' specifies that the availability
status accompanies the results.
| 'QUERY_RESULT_PARTIAL_BIT' specifies that returning partial results is
acceptable. | module Vulkan.Core10.Enums.QueryResultFlagBits ( QueryResultFlags
, QueryResultFlagBits( QUERY_RESULT_64_BIT
, QUERY_RESULT_WAIT_BIT
, QUERY_RESULT_WITH_AVAILABILITY_BIT
, QUERY_RESULT_PARTIAL_BIT
, ..
)
) where
import Data.Bits (Bits)
import Data.Bits (FiniteBits)
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import GHC.Show (showString)
import Numeric (showHex)
import Vulkan.Zero (Zero)
import Foreign.Storable (Storable)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Vulkan.Core10.FundamentalTypes (Flags)
type QueryResultFlags = QueryResultFlagBits
newtype QueryResultFlagBits = QueryResultFlagBits Flags
deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits)
of 64 - bit unsigned integer values . If this bit is not set , the results
will be written as an array of 32 - bit unsigned integer values .
pattern QUERY_RESULT_64_BIT = QueryResultFlagBits 0x00000001
| ' QUERY_RESULT_WAIT_BIT ' specifies that Vulkan will wait for each query ’s
pattern QUERY_RESULT_WAIT_BIT = QueryResultFlagBits 0x00000002
pattern QUERY_RESULT_WITH_AVAILABILITY_BIT = QueryResultFlagBits 0x00000004
pattern QUERY_RESULT_PARTIAL_BIT = QueryResultFlagBits 0x00000008
conNameQueryResultFlagBits :: String
conNameQueryResultFlagBits = "QueryResultFlagBits"
enumPrefixQueryResultFlagBits :: String
enumPrefixQueryResultFlagBits = "QUERY_RESULT_"
showTableQueryResultFlagBits :: [(QueryResultFlagBits, String)]
showTableQueryResultFlagBits =
[ (QUERY_RESULT_64_BIT, "64_BIT")
, (QUERY_RESULT_WAIT_BIT, "WAIT_BIT")
,
( QUERY_RESULT_WITH_AVAILABILITY_BIT
, "WITH_AVAILABILITY_BIT"
)
, (QUERY_RESULT_PARTIAL_BIT, "PARTIAL_BIT")
]
instance Show QueryResultFlagBits where
showsPrec =
enumShowsPrec
enumPrefixQueryResultFlagBits
showTableQueryResultFlagBits
conNameQueryResultFlagBits
(\(QueryResultFlagBits x) -> x)
(\x -> showString "0x" . showHex x)
instance Read QueryResultFlagBits where
readPrec =
enumReadPrec
enumPrefixQueryResultFlagBits
showTableQueryResultFlagBits
conNameQueryResultFlagBits
QueryResultFlagBits
|
78c527e90d05a0f08d952612675cd7839ada2e37616ef5e2f1080b38ca69c0c1 | ghc/nofib | Degrees.hs | Glasow Haskell 0.403 : FINITE ELEMENT PROGRAM V2
-- **********************************************************************
-- * *
* FILE NAME : degrees.hs DATE : 13 - 3 - 1991 *
-- * *
-- * CONTENTS : Computes the degree numbers of each node. *
-- * *
-- * CHANGES : *
* 1 . Mon Mar 11 10:28:10 GMT 1991 *
-- * Add new function degreesrlt for debug use. *
-- **********************************************************************
module Degrees( ndgrs, getndgr, degreesrlt ) where
import Data.Array
import Basics
import Vector
import DB_interface
ndgrs :: (Array Int Int, Array Int Float) -> Int
-- Return the total number of degrees
getndgr :: (Array Int Int, Array Int Float) -> Int -> [Int]
-- Return the degree numbers of a node (U, V and THETA)
ndgrs s =
fst (ndgrs_and_dgrsn s)
getndgr s node =
[u,v,theta]
where
u = dgrsn_s ! index
v = dgrsn_s ! (index + 1)
theta = dgrsn_s ! (index + 2)
dgrsn_s = dgrsn s
index = (node-1) * 3 + 1
dgrsn :: (Array Int Int, Array Int Float) -> Array Int Int
dgrsn s = listArray (1, (nnode s)*3) (snd (ndgrs_and_dgrsn s))
ndgrs_and_dgrsn :: (Array Int Int, Array Int Float) -> (Int,[Int])
ndgrs_and_dgrsn s =
foldl counting_one_node_s (0,[]) [1..(nnode s)]
where
counting_one_node_s = counting_one_node s
counting_one_node s (ndgrs_till_now,dgrsn_till_now) i =
(ndgrs_till_now + ndgrs_this_node, dgrsn_till_now ++ dgrsn_this_node)
where
dof = [ fod j | j <- [2,1,0]]
fod j = if (mod (div bc (e_10 j)) 10 == 1) then
1
else 0
e_10 j = if (j == 0) then (1::Int) else 10 * (e_10 (j-1))
ndgrs_this_node = sum dof
dgrsn_this_node = [g j | j <- [0,1,2]]
g j = if ( (dof!!j) == 0 ) then
0
else
sum (take (j+1) dof) + ndgrs_till_now
bc = getnbc s i
degreesrlt :: (Array Int Int, Array Int Float) -> [Char]
degreesrlt s =
"DEGREE INFORMATION :\n\n" ++
"\t Total degree numbers = " ++ showlj 4 (ndgrs s) ++ "\n\n" ++
(concat ( map a_node_s [1..(nnode s)] )) ++ "\n\n"
where
a_node_s = a_node s
a_node s node =
" Node.no = " ++ (showrj 2 node) ++
" u = " ++ (showrj 8 u) ++ " v = " ++ (showrj 8 v) ++
" theta=" ++ (showrj 8 theta) ++
" bc = " ++ ( showrj 3 bc) ++ "\n"
where
bc = getnbc s node
[u,v,theta] = getndgr s node
| null | https://raw.githubusercontent.com/ghc/nofib/f34b90b5a6ce46284693119a06d1133908b11856/real/fem/Degrees.hs | haskell | **********************************************************************
* *
* *
* CONTENTS : Computes the degree numbers of each node. *
* *
* CHANGES : *
* Add new function degreesrlt for debug use. *
**********************************************************************
Return the total number of degrees
Return the degree numbers of a node (U, V and THETA) | Glasow Haskell 0.403 : FINITE ELEMENT PROGRAM V2
* FILE NAME : degrees.hs DATE : 13 - 3 - 1991 *
* 1 . Mon Mar 11 10:28:10 GMT 1991 *
module Degrees( ndgrs, getndgr, degreesrlt ) where
import Data.Array
import Basics
import Vector
import DB_interface
ndgrs :: (Array Int Int, Array Int Float) -> Int
getndgr :: (Array Int Int, Array Int Float) -> Int -> [Int]
ndgrs s =
fst (ndgrs_and_dgrsn s)
getndgr s node =
[u,v,theta]
where
u = dgrsn_s ! index
v = dgrsn_s ! (index + 1)
theta = dgrsn_s ! (index + 2)
dgrsn_s = dgrsn s
index = (node-1) * 3 + 1
dgrsn :: (Array Int Int, Array Int Float) -> Array Int Int
dgrsn s = listArray (1, (nnode s)*3) (snd (ndgrs_and_dgrsn s))
ndgrs_and_dgrsn :: (Array Int Int, Array Int Float) -> (Int,[Int])
ndgrs_and_dgrsn s =
foldl counting_one_node_s (0,[]) [1..(nnode s)]
where
counting_one_node_s = counting_one_node s
counting_one_node s (ndgrs_till_now,dgrsn_till_now) i =
(ndgrs_till_now + ndgrs_this_node, dgrsn_till_now ++ dgrsn_this_node)
where
dof = [ fod j | j <- [2,1,0]]
fod j = if (mod (div bc (e_10 j)) 10 == 1) then
1
else 0
e_10 j = if (j == 0) then (1::Int) else 10 * (e_10 (j-1))
ndgrs_this_node = sum dof
dgrsn_this_node = [g j | j <- [0,1,2]]
g j = if ( (dof!!j) == 0 ) then
0
else
sum (take (j+1) dof) + ndgrs_till_now
bc = getnbc s i
degreesrlt :: (Array Int Int, Array Int Float) -> [Char]
degreesrlt s =
"DEGREE INFORMATION :\n\n" ++
"\t Total degree numbers = " ++ showlj 4 (ndgrs s) ++ "\n\n" ++
(concat ( map a_node_s [1..(nnode s)] )) ++ "\n\n"
where
a_node_s = a_node s
a_node s node =
" Node.no = " ++ (showrj 2 node) ++
" u = " ++ (showrj 8 u) ++ " v = " ++ (showrj 8 v) ++
" theta=" ++ (showrj 8 theta) ++
" bc = " ++ ( showrj 3 bc) ++ "\n"
where
bc = getnbc s node
[u,v,theta] = getndgr s node
|
51cdccfa49cb4f571a7b0f722ab1c1fb2cc1849f1fe6a13a48f6ed7abbbb7c8f | SleepyBag/leetcode-racket | 451.rkt | (define/contract (frequency-sort s)
(-> string? string?)
(list->string
(let ([cnt
(let count ([s (string->list s)])
(if (null? s)
(hash)
(hash-update (count (cdr s)) (car s) add1 1)
)
) ])
(sort (string->list s)
(lambda (a b)
(or (> (hash-ref cnt a) (hash-ref cnt b))
(and (= (hash-ref cnt a) (hash-ref cnt b))
(char<? a b)))
)
)
)
)
)
| null | https://raw.githubusercontent.com/SleepyBag/leetcode-racket/470a42a186a00228bcac3c3c40af5785fa761186/451.rkt | racket | (define/contract (frequency-sort s)
(-> string? string?)
(list->string
(let ([cnt
(let count ([s (string->list s)])
(if (null? s)
(hash)
(hash-update (count (cdr s)) (car s) add1 1)
)
) ])
(sort (string->list s)
(lambda (a b)
(or (> (hash-ref cnt a) (hash-ref cnt b))
(and (= (hash-ref cnt a) (hash-ref cnt b))
(char<? a b)))
)
)
)
)
)
| |
1e5ac4bdfcf5563f5e2ab1d379669c878ad98633b7c7dbeeb4edd4a9b75f6820 | haskell/vector | drop.hs | import qualified Data.Vector as U
import Data.Bits
main = print . U.length . U.drop 100000 . U.replicate 1000000 $ (7 :: Int)
| null | https://raw.githubusercontent.com/haskell/vector/4c87e88f07aad166c6ae2ccb94fa539fbdd99a91/old-testsuite/microsuite/drop.hs | haskell | import qualified Data.Vector as U
import Data.Bits
main = print . U.length . U.drop 100000 . U.replicate 1000000 $ (7 :: Int)
| |
ef8c871ec70948ed1673937b364f088762c8e3bae960ee6b337ac7fc949f7d04 | incoherentsoftware/defect-process | Sprites.hs | module Enemy.All.Boss.Sprites
( EnemySprites(..)
, mkEnemySprites
) where
import Control.Monad.IO.Class (MonadIO)
import FileCache
import Window.Graphics
data EnemySprites = EnemySprites
{ _spawn :: Sprite
, _death :: Sprite
, _airDeath :: Sprite
, _airDeathLand :: Sprite
, _idle :: Sprite
, _hurt :: Sprite
, _airHurt :: Sprite
, _fall :: Sprite
, _kneelingImpact :: Sprite
, _kneelingHurt :: Sprite
, _getUp :: Sprite
, _launched :: Sprite
, _launchUp :: Sprite
, _wallSplat :: Sprite
, _wallHurt :: Sprite
, _guard :: Sprite
, _airGuard :: Sprite
, _airGuardLand :: Sprite
}
mkEnemySprites :: (FileCache m, GraphicsRead m, MonadIO m) => m EnemySprites
mkEnemySprites =
EnemySprites <$>
loadPackSprite (PackResourceFilePath "data/enemies/boss-enemy-spawn.pack" "spawn.spr") <*>
loadDeathPackSpr "death.spr" <*>
loadDeathPackSpr "air-death.spr" <*>
loadDeathPackSpr "air-death-land.spr" <*>
loadPackSpr "idle.spr" <*>
loadPackSpr "hurt.spr" <*>
loadPackSpr "air-hurt.spr" <*>
loadPackSpr "fall.spr" <*>
loadPackSpr "kneeling-impact.spr" <*>
loadPackSpr "kneeling-hurt.spr" <*>
loadPackSpr "get-up.spr" <*>
loadPackSpr "launched.spr" <*>
loadPackSpr "launch-up.spr" <*>
loadPackSpr "wall-splat.spr" <*>
loadPackSpr "wall-hurt.spr" <*>
loadPackSpr "guard.spr" <*>
loadPackSpr "air-guard.spr" <*>
loadPackSpr "air-guard-land.spr"
where
loadDeathPackSpr = \f -> loadPackSprite $ PackResourceFilePath "data/enemies/boss-enemy-death.pack" f
loadPackSpr = \f -> loadPackSprite $ PackResourceFilePath "data/enemies/boss-enemy.pack" f
| null | https://raw.githubusercontent.com/incoherentsoftware/defect-process/8797aad1d93bff5aadd7226c39a48f45cf76746e/src/Enemy/All/Boss/Sprites.hs | haskell | module Enemy.All.Boss.Sprites
( EnemySprites(..)
, mkEnemySprites
) where
import Control.Monad.IO.Class (MonadIO)
import FileCache
import Window.Graphics
data EnemySprites = EnemySprites
{ _spawn :: Sprite
, _death :: Sprite
, _airDeath :: Sprite
, _airDeathLand :: Sprite
, _idle :: Sprite
, _hurt :: Sprite
, _airHurt :: Sprite
, _fall :: Sprite
, _kneelingImpact :: Sprite
, _kneelingHurt :: Sprite
, _getUp :: Sprite
, _launched :: Sprite
, _launchUp :: Sprite
, _wallSplat :: Sprite
, _wallHurt :: Sprite
, _guard :: Sprite
, _airGuard :: Sprite
, _airGuardLand :: Sprite
}
mkEnemySprites :: (FileCache m, GraphicsRead m, MonadIO m) => m EnemySprites
mkEnemySprites =
EnemySprites <$>
loadPackSprite (PackResourceFilePath "data/enemies/boss-enemy-spawn.pack" "spawn.spr") <*>
loadDeathPackSpr "death.spr" <*>
loadDeathPackSpr "air-death.spr" <*>
loadDeathPackSpr "air-death-land.spr" <*>
loadPackSpr "idle.spr" <*>
loadPackSpr "hurt.spr" <*>
loadPackSpr "air-hurt.spr" <*>
loadPackSpr "fall.spr" <*>
loadPackSpr "kneeling-impact.spr" <*>
loadPackSpr "kneeling-hurt.spr" <*>
loadPackSpr "get-up.spr" <*>
loadPackSpr "launched.spr" <*>
loadPackSpr "launch-up.spr" <*>
loadPackSpr "wall-splat.spr" <*>
loadPackSpr "wall-hurt.spr" <*>
loadPackSpr "guard.spr" <*>
loadPackSpr "air-guard.spr" <*>
loadPackSpr "air-guard-land.spr"
where
loadDeathPackSpr = \f -> loadPackSprite $ PackResourceFilePath "data/enemies/boss-enemy-death.pack" f
loadPackSpr = \f -> loadPackSprite $ PackResourceFilePath "data/enemies/boss-enemy.pack" f
| |
b742c1ed759b569fa5c08f8aeb3b58591e0ba6ca4201f3d1f08c7c464efdac11 | holdybot/holdybot | config.clj | (ns parky.config
(:require [cprop.core :refer [load-config]]
[cprop.source :as source]
[mount.core :refer [args defstate]]))
(defstate env
:start
(load-config
:merge
[(args)
(source/from-system-props)
(source/from-env)]))
| null | https://raw.githubusercontent.com/holdybot/holdybot/e65007a3113c89b3f457b9d966d6bf305983c975/src/clj/parky/config.clj | clojure | (ns parky.config
(:require [cprop.core :refer [load-config]]
[cprop.source :as source]
[mount.core :refer [args defstate]]))
(defstate env
:start
(load-config
:merge
[(args)
(source/from-system-props)
(source/from-env)]))
| |
2c98e47714c0578348dc6bfe4daf6c00df1e07fd3807be512addf036c874562a | MyPost/cassius | project.clj | (defproject au.com.auspost/cassius "0.1.15-SNAPSHOT"
:description "Cassandra as a Big Nested Map"
:url "-tools/cassius"
:license {:name "Apache License - v2.0"
:url "-2.0.html"}
:dependencies [[org.clojure/clojure "1.6.0"]
[im.chit/ribol "0.4.0"]
[im.chit/hara.namespace.import "2.1.1"]
[com.taoensso/nippy "2.5.2"]
[com.eaio.uuid/uuid "3.2"]
[com.stuartsierra/component "0.2.1"]
[com.taoensso/timbre "3.1.6"]
[org.apache.commons/commons-pool2 "2.2"]
[org.apache.cassandra/cassandra-all "2.0.9"]]
:profiles {:dev {:dependencies [[midje "1.6.3"]]
:plugins [[lein-midje "3.1.1"]]}}
:documentation {:files {"docs/index"
{:input "test/midje_doc/cassius_guide.clj"
:title "cassius"
:sub-title "Cassandra as a big nested map"
:author "Chris Zheng"
:email ""}}})
| null | https://raw.githubusercontent.com/MyPost/cassius/7b5f550fa8e8f825d4ecd7ba6a0d34c5ff606a7c/project.clj | clojure | (defproject au.com.auspost/cassius "0.1.15-SNAPSHOT"
:description "Cassandra as a Big Nested Map"
:url "-tools/cassius"
:license {:name "Apache License - v2.0"
:url "-2.0.html"}
:dependencies [[org.clojure/clojure "1.6.0"]
[im.chit/ribol "0.4.0"]
[im.chit/hara.namespace.import "2.1.1"]
[com.taoensso/nippy "2.5.2"]
[com.eaio.uuid/uuid "3.2"]
[com.stuartsierra/component "0.2.1"]
[com.taoensso/timbre "3.1.6"]
[org.apache.commons/commons-pool2 "2.2"]
[org.apache.cassandra/cassandra-all "2.0.9"]]
:profiles {:dev {:dependencies [[midje "1.6.3"]]
:plugins [[lein-midje "3.1.1"]]}}
:documentation {:files {"docs/index"
{:input "test/midje_doc/cassius_guide.clj"
:title "cassius"
:sub-title "Cassandra as a big nested map"
:author "Chris Zheng"
:email ""}}})
| |
974f40ef30a5bc5b39e6c54d3e36ebc3f6d095911af983e21cec9686c8633135 | kmi/ocml | operator.lisp | -*- Mode : LISP ; Syntax : Common - lisp ; Base : 10 ; Package : ; -*-
(in-package ocml)
Operators can be used in the LHS of backward rules and in the RHS of forward rules to
;;;carry out operations such as adding new facts, printing, etc..
;;;In this file we define the machinery supporting the definition of operators
(defvar *operators* (make-hash-table))
(defun add-operator (name structure)
(setf (gethash name *operators*)structure))
(defun get-operator (name)
(gethash name *operators*))
(defun remove-operator (name)
(remhash name *operators*))
(defun clear-operators ()
(clrhash *operators*))
(defclass ocml-operator (name-mixin lisp-attachment-mixin basic-ocml-object)
((arity :initarg :arity :initform nil :accessor arity)
(schema :initarg :schema :initform nil)))
(defun make-ocml-operator (&rest options)
(apply #'make-instance (cons 'ocml-operator options)))
INITIALIZE - INSTANCE : AFTER OCML - OPERATOR
(defmethod initialize-instance :after ((op ocml-operator) &rest initargs)
(declare (ignore initargs))
(with-slots (name schema arity) op
(enforce-arity-schema-consistency op name schema arity)
(add-operator name op)))
(defmacro define-operator-internal (name schema documentation &rest options)
(multiple-value-bind (name schema documentation options)
(parse-define-operator-form name schema documentation options)
`(funcall #'make-ocml-operator :name ',name :schema ',schema :documentation ,documentation
,@(mapcar #'(lambda (x)
(list 'quote x))
options))))
(defun parse-define-operator-form (name schema documentation options)
(parse-define-relation-form name schema documentation options 'operator))
(defmethod generate-candidates ((op ocml-operator) pred args)
(declare (ignore pred args))
(with-slots (lisp-fun) op
lisp-fun))
(defun get-relation-or-operator (pred)
(or (get-relation pred)
(get-operator pred))) | null | https://raw.githubusercontent.com/kmi/ocml/90b0b173f588c580c26393c94f9970282c640f4d/src/operator.lisp | lisp | Syntax : Common - lisp ; Base : 10 ; Package : ; -*-
carry out operations such as adding new facts, printing, etc..
In this file we define the machinery supporting the definition of operators |
(in-package ocml)
Operators can be used in the LHS of backward rules and in the RHS of forward rules to
(defvar *operators* (make-hash-table))
(defun add-operator (name structure)
(setf (gethash name *operators*)structure))
(defun get-operator (name)
(gethash name *operators*))
(defun remove-operator (name)
(remhash name *operators*))
(defun clear-operators ()
(clrhash *operators*))
(defclass ocml-operator (name-mixin lisp-attachment-mixin basic-ocml-object)
((arity :initarg :arity :initform nil :accessor arity)
(schema :initarg :schema :initform nil)))
(defun make-ocml-operator (&rest options)
(apply #'make-instance (cons 'ocml-operator options)))
INITIALIZE - INSTANCE : AFTER OCML - OPERATOR
(defmethod initialize-instance :after ((op ocml-operator) &rest initargs)
(declare (ignore initargs))
(with-slots (name schema arity) op
(enforce-arity-schema-consistency op name schema arity)
(add-operator name op)))
(defmacro define-operator-internal (name schema documentation &rest options)
(multiple-value-bind (name schema documentation options)
(parse-define-operator-form name schema documentation options)
`(funcall #'make-ocml-operator :name ',name :schema ',schema :documentation ,documentation
,@(mapcar #'(lambda (x)
(list 'quote x))
options))))
(defun parse-define-operator-form (name schema documentation options)
(parse-define-relation-form name schema documentation options 'operator))
(defmethod generate-candidates ((op ocml-operator) pred args)
(declare (ignore pred args))
(with-slots (lisp-fun) op
lisp-fun))
(defun get-relation-or-operator (pred)
(or (get-relation pred)
(get-operator pred))) |
4340d392288aa2e1b1a31046bb36d6ae84e142c4903019d101a89666b2f34ed0 | singleheart/programming-in-haskell | ex4.hs | newtype ZipList a =
Z [a]
deriving (Show)
instance Functor ZipList
-- fmap :: (a -> b) -> ZipList a -> ZipList b
where
fmap g (Z xs) = Z (fmap g xs)
instance Applicative ZipList
-- pure :: a -> ZipList a
where
pure x = Z (repeat x)
-- (<*>) :: ZipList (a -> b) -> ZipList a -> ZipList b
(Z gs) <*> (Z xs) = Z [g x | (g, x) <- zip gs xs]
| null | https://raw.githubusercontent.com/singleheart/programming-in-haskell/80c7efc0425babea3cd982e47e121f19bec0aba9/ch12/ex4.hs | haskell | fmap :: (a -> b) -> ZipList a -> ZipList b
pure :: a -> ZipList a
(<*>) :: ZipList (a -> b) -> ZipList a -> ZipList b | newtype ZipList a =
Z [a]
deriving (Show)
instance Functor ZipList
where
fmap g (Z xs) = Z (fmap g xs)
instance Applicative ZipList
where
pure x = Z (repeat x)
(Z gs) <*> (Z xs) = Z [g x | (g, x) <- zip gs xs]
|
e4bba0c1225923aa573593403a568b88ced3fdc5ef1b1ec0db95da0273d7dff0 | mediquest-nl/logback-masking-pattern-layouts | util.clj | (ns nl.mediquest.logback.util
(:require
[clojure.string :as string]))
;; Regexes used with string/replace, applied from top to bottom
(def default-re->replacement
(array-map
;; Common patterns
#"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])" "<email>"
#"0[6]{1}(\-)?[^0\D]{1}\d{7}" "<telefoon>"
#"(?i)NL\s?\d{2}\s?[A-Z]{0,4}\s?\d{4}\s?\d{0,2}" "<iban>"
#"\b(?:\d[ -]*?){13,16}\b" "<creditcard>"
;; Found in code
#"(?i)(bsn(?:=\s*|\:\s*|\s*|=\s*))\w+" "$1*****"
#"(?i)(bsn(?:=\s*|\:\s*|\s*|=\s*))\".*\"" "$1*****"
#"(?i)(password(?:=\s*|\:\s*|\s*|=\s*))\w+" "$1*****"
#"(?i)(password(?:=\s*|\:\s*|\s*|=\s*))\".*\"" "$1*****"
#"(?i)(pw(?:=\s*|\:\s*|\s*|=\s*))\w+" "$1*****"
#"(?i)(pw(?:=\s*|\:\s*|\s*|=\s*))\".*\"" "$1*****"
#"(?i)(agb(?:=\s*|\:\s*|\s*|=\s*))\w+" "$1*****"
#"(?i)(agb(?:=\s*|\:\s*|\s*|=\s*))\".*\"" "$1*****"
#"(?i)(name(?:=\s*|\:\s*|\s*|=\s*))\w+" "$1*****"
#"(?i)(name(?:=\s*|\:\s*|\s*|=\s*))\".*\"" "$1*****"
#"(postgresql:\/\/.*:)(.|[\r\n])*@" "$1*****@"))
(defn scrub [message re->replacement]
(reduce-kv string/replace message re->replacement))
| null | https://raw.githubusercontent.com/mediquest-nl/logback-masking-pattern-layouts/585fed98c15d15ca22d9b48bb97c037aa0068649/src/nl/mediquest/logback/util.clj | clojure | Regexes used with string/replace, applied from top to bottom
Common patterns
Found in code | (ns nl.mediquest.logback.util
(:require
[clojure.string :as string]))
(def default-re->replacement
(array-map
#"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])" "<email>"
#"0[6]{1}(\-)?[^0\D]{1}\d{7}" "<telefoon>"
#"(?i)NL\s?\d{2}\s?[A-Z]{0,4}\s?\d{4}\s?\d{0,2}" "<iban>"
#"\b(?:\d[ -]*?){13,16}\b" "<creditcard>"
#"(?i)(bsn(?:=\s*|\:\s*|\s*|=\s*))\w+" "$1*****"
#"(?i)(bsn(?:=\s*|\:\s*|\s*|=\s*))\".*\"" "$1*****"
#"(?i)(password(?:=\s*|\:\s*|\s*|=\s*))\w+" "$1*****"
#"(?i)(password(?:=\s*|\:\s*|\s*|=\s*))\".*\"" "$1*****"
#"(?i)(pw(?:=\s*|\:\s*|\s*|=\s*))\w+" "$1*****"
#"(?i)(pw(?:=\s*|\:\s*|\s*|=\s*))\".*\"" "$1*****"
#"(?i)(agb(?:=\s*|\:\s*|\s*|=\s*))\w+" "$1*****"
#"(?i)(agb(?:=\s*|\:\s*|\s*|=\s*))\".*\"" "$1*****"
#"(?i)(name(?:=\s*|\:\s*|\s*|=\s*))\w+" "$1*****"
#"(?i)(name(?:=\s*|\:\s*|\s*|=\s*))\".*\"" "$1*****"
#"(postgresql:\/\/.*:)(.|[\r\n])*@" "$1*****@"))
(defn scrub [message re->replacement]
(reduce-kv string/replace message re->replacement))
|
446b5fc37a1e3f9cbe2288cca14862d7e904d5fb619fffdf89f2de751194fc2e | jixiuf/helloerlang | mochiweb_test_web.erl | @author Mochi Media < >
@copyright 2010 Mochi Media < >
%% @doc Web server for mochiweb_test.
-module(mochiweb_test_web).
-author("Mochi Media <>").
-export([start/1, stop/0, loop/2]).
%% External API
start(Options) ->
{DocRoot, Options1} = get_option(docroot, Options),
Loop = fun (Req) ->
?MODULE:loop(Req, DocRoot)
end,
mochiweb_http:start([{name, ?MODULE},{max,10000}, {loop, Loop} | Options1]).
stop() ->
mochiweb_http:stop(?MODULE).
loop(Req, DocRoot) ->
"/" ++ Path = Req:get(path),
try
case Req:get(method) of
Method when Method =:= 'GET'; Method =:= 'HEAD' ->
case Path of
新增了 /time 这个 URL,它是一个 HTTP Chunked 的例子
Response = Req:ok({"text/plain", chunked}),
Params = Req:parse_qs(), %get query string
:8080 / time?id=1
time(Response,Id);
_ ->
Req:serve_file(Path, DocRoot)
end;
'POST' ->
case Path of
_ ->
Req:not_found()
end;
_ ->
Req:respond({501, [], []})
end
catch
Type:What ->
Report = ["web request failed",
{path, Path},
{type, Type}, {what, What},
{trace, erlang:get_stacktrace()}],
error_logger:error_report(Report),
%% NOTE: mustache templates need \ because they are not awesome.
Req:respond({500, [{"Content-Type", "text/plain"}],
"request failed, sorry\n"})
end.
%% Internal API
get_option(Option, Options) ->
{proplists:get_value(Option, Options), proplists:delete(Option, Options)}.
打印当前时间,间隔一秒,再在已经打开的 http 连接之上,再次打印,这也就是所谓 HTTP长连接 / ServerPush 的一种
time(Resp,Id)->
case Id of
undefined->
Resp:write_chunk(io_lib:format("The time for Id:~p is: ~p~n",[0 ,calendar:local_time()])),
io:format("~p~n",[io:format("The time for Id:~p is: ~p~n",[0 ,calendar:local_time()])]);
_ ->
Resp:write_chunk(io_lib:format("The time for Id:~p is: ~p~n",[Id ,calendar:local_time()])),
io:format("~p~n",[io:format("The time for Id:~p is: ~p~n",[Id ,calendar:local_time()])])
end,
timer:sleep(1000),
time(Resp,Id).
%%
%% Tests
%%
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
you_should_write_a_test() ->
?assertEqual(
"No, but I will!",
"Have you written any tests?"),
ok.
-endif.
| null | https://raw.githubusercontent.com/jixiuf/helloerlang/3960eb4237b026f98edf35d6064539259a816d58/mochiweb_test/src/mochiweb_test_web.erl | erlang | @doc Web server for mochiweb_test.
External API
get query string
NOTE: mustache templates need \ because they are not awesome.
Internal API
Tests
| @author Mochi Media < >
@copyright 2010 Mochi Media < >
-module(mochiweb_test_web).
-author("Mochi Media <>").
-export([start/1, stop/0, loop/2]).
start(Options) ->
{DocRoot, Options1} = get_option(docroot, Options),
Loop = fun (Req) ->
?MODULE:loop(Req, DocRoot)
end,
mochiweb_http:start([{name, ?MODULE},{max,10000}, {loop, Loop} | Options1]).
stop() ->
mochiweb_http:stop(?MODULE).
loop(Req, DocRoot) ->
"/" ++ Path = Req:get(path),
try
case Req:get(method) of
Method when Method =:= 'GET'; Method =:= 'HEAD' ->
case Path of
新增了 /time 这个 URL,它是一个 HTTP Chunked 的例子
Response = Req:ok({"text/plain", chunked}),
:8080 / time?id=1
time(Response,Id);
_ ->
Req:serve_file(Path, DocRoot)
end;
'POST' ->
case Path of
_ ->
Req:not_found()
end;
_ ->
Req:respond({501, [], []})
end
catch
Type:What ->
Report = ["web request failed",
{path, Path},
{type, Type}, {what, What},
{trace, erlang:get_stacktrace()}],
error_logger:error_report(Report),
Req:respond({500, [{"Content-Type", "text/plain"}],
"request failed, sorry\n"})
end.
get_option(Option, Options) ->
{proplists:get_value(Option, Options), proplists:delete(Option, Options)}.
打印当前时间,间隔一秒,再在已经打开的 http 连接之上,再次打印,这也就是所谓 HTTP长连接 / ServerPush 的一种
time(Resp,Id)->
case Id of
undefined->
Resp:write_chunk(io_lib:format("The time for Id:~p is: ~p~n",[0 ,calendar:local_time()])),
io:format("~p~n",[io:format("The time for Id:~p is: ~p~n",[0 ,calendar:local_time()])]);
_ ->
Resp:write_chunk(io_lib:format("The time for Id:~p is: ~p~n",[Id ,calendar:local_time()])),
io:format("~p~n",[io:format("The time for Id:~p is: ~p~n",[Id ,calendar:local_time()])])
end,
timer:sleep(1000),
time(Resp,Id).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
you_should_write_a_test() ->
?assertEqual(
"No, but I will!",
"Have you written any tests?"),
ok.
-endif.
|
c721a5f38cd39bcd9805836d38bc1e69c846718ddaa052c234b03b6c7110dad7 | kudelskisecurity/scannerl | fp_mqtts.erl | %%% MQTT over SSL fingerprinting module
%%%
%%% Output:
%%% mqtt or not_mqtt atoms
%%%
-module(fp_mqtts).
-author("Adrien Giner - ").
-behavior(fp_module).
-include("../includes/args.hrl").
-export([callback_next_step/1]).
-export([get_default_args/0]).
-export([get_description/0]).
-export([get_arguments/0]).
%% our record for this fingerprint
-define(TIMEOUT, 3000). % milli-seconds
-define(PORT, 8883). % HTTP port
-define(TYPE, ssl). % transport type
-define(MAXPKT, 1). % max packet expected
-define(DESCRIPTION, "TCP/8883: MQTT over SSL identification").
-define(ARGUMENTS, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% API
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% public API to get {port, timeout}
get_default_args() ->
#args{module=?MODULE, type=?TYPE, port=?PORT,
timeout=?TIMEOUT, fsmopts=[{sslcheck,false}], maxpkt=?MAXPKT}.
get_description() ->
?DESCRIPTION.
get_arguments() ->
?ARGUMENTS.
% callback
callback_next_step(Args) when Args#args.moddata == undefined ->
first packet
debug(Args, "first packet"),
{continue, Args#args.maxpkt, get_payload(), true};
callback_next_step(Args) when Args#args.packetrcv < 1 ->
% no packet received
debug(Args, "no packet received"),
{result, {{error, up}, timeout}};
callback_next_step(Args) ->
debug(Args, "a packet received"),
{result, parse_payload(Args#args.datarcv)}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% debug
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% send debug
debug(Args, Msg) ->
utils:debug(fpmodules, Msg,
{Args#args.target, Args#args.id}, Args#args.debugval).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% utils
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
get_payload() ->
utils_mqtt:forge_connect().
parse_payload(Pkt) ->
{Val, _Res} = utils_mqtt:parse(Pkt),
case Val of
false ->
{{error, up}, not_mqtt};
true ->
{{ok, result}, mqtt}
end.
| null | https://raw.githubusercontent.com/kudelskisecurity/scannerl/8133065030d014401c47b2470e67a36e9df81b1e/src/fpmodules/fp_mqtts.erl | erlang | MQTT over SSL fingerprinting module
Output:
mqtt or not_mqtt atoms
our record for this fingerprint
milli-seconds
HTTP port
transport type
max packet expected
API
public API to get {port, timeout}
callback
no packet received
debug
send debug
utils
|
-module(fp_mqtts).
-author("Adrien Giner - ").
-behavior(fp_module).
-include("../includes/args.hrl").
-export([callback_next_step/1]).
-export([get_default_args/0]).
-export([get_description/0]).
-export([get_arguments/0]).
-define(DESCRIPTION, "TCP/8883: MQTT over SSL identification").
-define(ARGUMENTS, []).
get_default_args() ->
#args{module=?MODULE, type=?TYPE, port=?PORT,
timeout=?TIMEOUT, fsmopts=[{sslcheck,false}], maxpkt=?MAXPKT}.
get_description() ->
?DESCRIPTION.
get_arguments() ->
?ARGUMENTS.
callback_next_step(Args) when Args#args.moddata == undefined ->
first packet
debug(Args, "first packet"),
{continue, Args#args.maxpkt, get_payload(), true};
callback_next_step(Args) when Args#args.packetrcv < 1 ->
debug(Args, "no packet received"),
{result, {{error, up}, timeout}};
callback_next_step(Args) ->
debug(Args, "a packet received"),
{result, parse_payload(Args#args.datarcv)}.
debug(Args, Msg) ->
utils:debug(fpmodules, Msg,
{Args#args.target, Args#args.id}, Args#args.debugval).
get_payload() ->
utils_mqtt:forge_connect().
parse_payload(Pkt) ->
{Val, _Res} = utils_mqtt:parse(Pkt),
case Val of
false ->
{{error, up}, not_mqtt};
true ->
{{ok, result}, mqtt}
end.
|
e4b0c532ba9ace79a6cc94a2d2dae3e535f23628f07cabd1dbaf2120fb9b25bb | resistor/hzertz | Main.hs | module Main where
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT
import qualified Callbacks as Callbacks
-- NEW MAIN
-- | Create the game window and configure the GLUT/OpenGL rendering
-- state: double buffering, an 800x600 window, registered callbacks,
-- and alpha blending with line/polygon smoothing.
initDisplay :: IO ()
initDisplay = do
  initialDisplayMode $= [DoubleBuffered]
  initialWindowSize $= Size 800 600
  createWindow "Zertz"
  Callbacks.registerCallbacks
  lineSmooth $= Enabled
  polygonSmooth $= Enabled
  blend $= Enabled
  -- Standard alpha compositing: src*alpha + dst*(1-alpha).
  blendFunc $= (SrcAlpha, OneMinusSrcAlpha)
  hint LineSmooth $= DontCare
  hint PolygonSmooth $= DontCare
-- | Program entry point: initialize GLUT, set up the display and run
-- the GLUT event loop.
main :: IO ()
main = do
  -- The program name and remaining arguments are unused; bind with a
  -- wildcard to avoid an unused-binding warning.
  _ <- getArgsAndInitialize
  initDisplay
  mainLoop
-- OLD MAIN
-- run_minimax str_state =
--   let state :: Zertz.ZertzState
--       state = read str_state in
--     show $ MiniMax.minimax 100 state
-- prompt = do
--   putStr "Move? "
--   IO.hFlush IO.stdout
--   state <- getLine
--   putStrLn $ "\nState: " ++ (run_minimax state) ++ "\n"
--   prompt
-- oldmain = do
--   putStrLn $ "\nState: " ++ (show) ++ "\n"
--   prompt
OLD MAIN
run_minimax str_state =
state = read str_state in
prompt = do
putStr "Move? "
IO.hFlush IO.stdout
state <- getLine
putStrLn $ "\nState: " ++ (run_minimax state) ++ "\n"
prompt
oldmain = do
prompt | module Main where
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT
import qualified Callbacks as Callbacks
initDisplay :: IO ()
initDisplay = do
initialDisplayMode $= [DoubleBuffered]
initialWindowSize $= Size 800 600
createWindow "Zertz"
Callbacks.registerCallbacks
lineSmooth $= Enabled
polygonSmooth $= Enabled
blend $= Enabled
blendFunc $= (SrcAlpha, OneMinusSrcAlpha)
hint LineSmooth $= DontCare
hint PolygonSmooth $= DontCare
main :: IO ()
main = do
(progname, _) <- getArgsAndInitialize
initDisplay
mainLoop
let state : : . ZertzState
show $ MiniMax.minimax 100 state
putStrLn $ " \nState : " + + ( show ) + + " \n " |
8b619f8b6e6748a35779557c6c286050a851f51153dbc01e734f6ec97f2d4289 | dhleong/wish | generic_info.cljs | (ns wish.sheets.dnd5e.overlays.generic-info
(:require [clojure.string :as str]
[spade.core :refer [defattrs]]
[wish.sheets.dnd5e.subs.proficiency :as proficiency]
[wish.sheets.dnd5e.util :refer [ability->mod
mod->str]]
[wish.sheets.dnd5e.views.shared :refer [challenge-indicator]]
[wish.sheets.dnd5e.widgets :refer [spell-aoe]]
[wish.util :refer [<sub]]
[wish.views.widgets :refer [formatted-text-fragment]]))
; ======= Item/Spell generic info =========================

;; Map from weapon-property keys on an entity to the human-readable
;; label rendered in the "Properties" row of the info table.
(def ^:private properties
  {:finesse? "Finesse"
   :heavy? "Heavy"
   :light? "Light"
   :reach? "Reach"
   :special? "Special"
   :two-handed? "Two-handed"
   :uses-ammunition? "Uses Ammunition"
   :versatile "Versatile"})
(defn generic-info
  "Render a details table for an item or spell: cast time, range, area
   of effect, weapon properties, damage type, and damage/healing dice.
   Returns nil when the entity has none of aoe/damage/dice/range."
  [entity]
  (let [{:keys [aoe damage dice range]} entity
        proficiency-bonus (<sub [::proficiency/bonus])]
    (when (or aoe damage dice range)
      [:table.info
       [:tbody
        (when-let [cast-time (:time entity)]
          [:tr
           [:th.header "Cast Time"]
           [:td cast-time]])

        (when range
          [:tr
           [:th.header "Range"]
           ;; range is either a preformatted string or a [near far]
           ;; pair of distances in feet
           (if (string? range)
             [:td range]
             (let [[near far] range]
               [:td near " / " far " ft."]))])

        (when aoe
          [:tr
           [:th.header "Area of Effect"]
           [:td [spell-aoe aoe]]])

        ;; collect the labels of all weapon properties set on the entity
        (when-let [flags (->> properties
                              keys
                              (filter entity)
                              (map properties)
                              seq)]
          [:tr
           [:th.header "Properties"]
           [:td (str/join "; " flags)]])

        (when damage
          [:tr
           [:th.header "Damage Type"]
           [:td (str/capitalize
                  (name damage))]])

        (when dice
          [:tr
           [:th.header (if damage
                         "Damage"
                         "Healing")]
           ;; dice may be a function of the owning container (plus the
           ;; proficiency bonus) rather than a literal dice value
           [:td (if (fn? dice)
                  (dice (assoc (:wish/container entity)
                               :proficiency-bonus proficiency-bonus))
                  dice)]])
        ]]
      )))
; ======= Ally info =======================================

;; Resolved lazily at runtime — presumably to avoid a circular namespace
;; dependency on views.abilities; TODO confirm.
(def ^:private abilities-block
  (delay
    (resolve
      'wish.sheets.dnd5e.views.abilities/abilities-block)))
;; Build the {id {:score .. :modifier .. :save ..}} map expected by the
;; abilities block from a raw {id score} map; the save string reuses
;; the plain ability modifier.
(defn- ->abilities-info [raw-abilities]
  (into {}
        (map (fn [[id score]]
               (let [modifier (mod->str (ability->mod score))]
                 [id {:score score
                      :modifier modifier
                      :save modifier}])))
        raw-abilities))
;; Spade attrs for the bold feature-name label; italicized when
;; requested (see feature-listing below).
(defattrs feature-label-attrs [italics?]
  {:color :*header*
   :font-style (when italics?
                 :italic)
   :font-weight :bold})

;; Render a label span followed by two non-breaking spaces so the text
;; that follows doesn't wrap away from it.
(defn- feature-label
  ([label] (feature-label nil label))
  ([{:keys [italics?]} label]
   [:span (feature-label-attrs italics?)
    label "\u00A0\u00A0"]))

;; Render formatted text with a bold label spliced into the first paragraph.
(defn- prefixed-formatted-text [label text]
  [:div.desc
   [formatted-text-fragment {:first-container [:div.p [feature-label label]]}
    text]])

;; Render one feature: its italicized name inline at the start of the
;; feature's formatted description.
(defn- feature-listing [feature]
  [:div.desc
   [formatted-text-fragment {:first-container [:div.p
                                               [feature-label {:italics? true}
                                                (:name feature)]]}
    (:desc feature)]])

;; Render a fragment of feature-listings, keyed by feature id.
(defn- features-listing [features]
  [:<>
   (for [{:keys [id] :as feature} features]
     ^{:key id}
     [feature-listing feature])])
(defn ally
  "Render the overlay body for an ally/companion: a size & type line
   with the challenge rating, ability scores, senses, speed, its
   features, and its attacks under an Actions heading."
  [entity]
  (let [entity (<sub [:allies/inflated-of entity])]
    [:<>
     (when (:size entity)
       [:div.desc
        (when-let [cr (:challenge entity)]
          [challenge-indicator {:inline? true} cr])
        (str/capitalize (name (:size entity)))
        " "
        (str/capitalize (name (:type entity)))])

     (when-some [abilities (:abilities entity)]
       ;; abilities-block is resolved lazily; see its def above
       [@abilities-block
        :abilities (->abilities-info abilities)])

     (when-some [senses (:senses entity)]
       [prefixed-formatted-text "Senses:" senses])
     (when-some [speed (:speed entity)]
       [prefixed-formatted-text "Speed:" speed])

     (when-let [features (seq (:sorted-features entity))]
       [features-listing features])

     (when-let [attacks (seq (vals (:attacks (:attrs entity))))]
       [:<>
        [:h4 "Actions"]
        ;; We might consider extra details on attacks, but the feature listing
        ;; is probably sufficient for now
        [features-listing attacks]])
     ]))
| null | https://raw.githubusercontent.com/dhleong/wish/db5d22763d9bce17dd5af22754de47c6dcacc68e/src/cljs/wish/sheets/dnd5e/overlays/generic_info.cljs | clojure | ======= Item/Spell generic info =========================
======= Ally info =======================================
We might consider extra details on attacks, but the feature listing
is probably sufficient for now | (ns wish.sheets.dnd5e.overlays.generic-info
(:require [clojure.string :as str]
[spade.core :refer [defattrs]]
[wish.sheets.dnd5e.subs.proficiency :as proficiency]
[wish.sheets.dnd5e.util :refer [ability->mod
mod->str]]
[wish.sheets.dnd5e.views.shared :refer [challenge-indicator]]
[wish.sheets.dnd5e.widgets :refer [spell-aoe]]
[wish.util :refer [<sub]]
[wish.views.widgets :refer [formatted-text-fragment]]))
(def ^:private properties
{:finesse? "Finesse"
:heavy? "Heavy"
:light? "Light"
:reach? "Reach"
:special? "Special"
:two-handed? "Two-handed"
:uses-ammunition? "Uses Ammunition"
:versatile "Versatile"})
(defn generic-info [entity]
(let [{:keys [aoe damage dice range]} entity
proficiency-bonus (<sub [::proficiency/bonus])]
(when (or aoe damage dice range)
[:table.info
[:tbody
(when-let [cast-time (:time entity)]
[:tr
[:th.header "Cast Time"]
[:td cast-time]])
(when range
[:tr
[:th.header "Range"]
(if (string? range)
[:td range]
(let [[near far] range]
[:td near " / " far " ft."]))])
(when aoe
[:tr
[:th.header "Area of Effect"]
[:td [spell-aoe aoe]]])
(when-let [flags (->> properties
keys
(filter entity)
(map properties)
seq)]
[:tr
[:th.header "Properties"]
[:td (str/join "; " flags)]])
(when damage
[:tr
[:th.header "Damage Type"]
[:td (str/capitalize
(name damage))]])
(when dice
[:tr
[:th.header (if damage
"Damage"
"Healing")]
[:td (if (fn? dice)
(dice (assoc (:wish/container entity)
:proficiency-bonus proficiency-bonus))
dice)]])
]]
)))
(def ^:private abilities-block
(delay
(resolve
'wish.sheets.dnd5e.views.abilities/abilities-block)))
(defn- ->abilities-info [raw-abilities]
(reduce-kv
(fn [m id score]
(let [modifier (mod->str (ability->mod score))]
(assoc m id {:score score
:modifier modifier
:save modifier})))
{}
raw-abilities))
(defattrs feature-label-attrs [italics?]
{:color :*header*
:font-style (when italics?
:italic)
:font-weight :bold})
(defn- feature-label
([label] (feature-label nil label))
([{:keys [italics?]} label]
[:span (feature-label-attrs italics?)
label "\u00A0\u00A0"]))
(defn- prefixed-formatted-text [label text]
[:div.desc
[formatted-text-fragment {:first-container [:div.p [feature-label label]]}
text]])
(defn- feature-listing [feature]
[:div.desc
[formatted-text-fragment {:first-container [:div.p
[feature-label {:italics? true}
(:name feature)]]}
(:desc feature)]])
(defn- features-listing [features]
[:<>
(for [{:keys [id] :as feature} features]
^{:key id}
[feature-listing feature])])
(defn ally [entity]
(let [entity (<sub [:allies/inflated-of entity])]
[:<>
(when (:size entity)
[:div.desc
(when-let [cr (:challenge entity)]
[challenge-indicator {:inline? true} cr])
(str/capitalize (name (:size entity)))
" "
(str/capitalize (name (:type entity)))])
(when-some [abilities (:abilities entity)]
[@abilities-block
:abilities (->abilities-info abilities)])
(when-some [senses (:senses entity)]
[prefixed-formatted-text "Senses:" senses])
(when-some [speed (:speed entity)]
[prefixed-formatted-text "Speed:" speed])
(when-let [features (seq (:sorted-features entity))]
[features-listing features])
(when-let [attacks (seq (vals (:attacks (:attrs entity))))]
[:<>
[:h4 "Actions"]
[features-listing attacks]])
]))
|
c4188493239110be05027ef8f8761f1f6bf10a0d7eb12e56fef1c120b27e8376 | xmonad/xmonad-contrib | SwapPromote.hs | -----------------------------------------------------------------------------
-- |
Module : XMonad . Actions . SwapPromote
-- Description : Track the master window history per workspace.
Copyright : ( c ) 2018
-- License : BSD-style (see LICENSE)
--
-- Maintainer :
-- Stability : unstable
-- Portability : unportable
--
-- Module for tracking master window history per workspace, and associated
-- functions for manipulating the stack using such history.
--
-----------------------------------------------------------------------------
module XMonad.Actions.SwapPromote
( -- * Usage
-- $usage
MasterHistory (..)
-- * State Accessors
, getMasterHistoryMap
, getMasterHistoryFromTag
, getMasterHistoryCurrent
, getMasterHistoryFromWindow
, modifyMasterHistoryFromTag
, modifyMasterHistoryCurrent
-- * Log Hook
, masterHistoryHook
-- * Log Hook Building Blocks
, masterHistoryHook'
, updateMasterHistory
-- * Actions
, swapPromote
, swapPromote'
, swapIn
, swapIn'
, swapHybrid
, swapHybrid'
-- * Action Building Blocks
, swapApply
, swapPromoteStack
, swapInStack
, swapHybridStack
    -- * List Utilities
, cycleN
, split
, split'
, merge
, merge'
-- * Stack Utilities
, stackSplit
, stackMerge
) where
import XMonad
import XMonad.Prelude
import qualified XMonad.StackSet as W
import qualified XMonad.Util.ExtensibleState as XS
import qualified Data.Map as M
import qualified Data.Set as S
import Control.Arrow
-- $usage
-- Given your configuration file, import this module:
--
-- > import XMonad.Actions.SwapPromote
--
-- First add 'masterHistoryHook' to your 'logHook' to track master windows per
-- workspace:
--
-- > myLogHook = otherHook >> masterHistoryHook
--
-- Then replace xmonad's default promote keybinding with 'swapPromote'':
--
-- >   , ((mod1Mask, xK_Return), swapPromote' False)
--
-- Depending on your xmonad configuration or window actions the master history
-- may be empty. If this is the case you can still chain another promotion
-- function:
--
-- > import XMonad.Actions.DwmPromote
-- >   , ((mod1Mask, xK_Return), whenX (swapPromote False) dwmpromote)
--
-- To be clear, this is only called when the lack of master history hindered
-- the swap and not other conditions, such as having only a single window.
--
-- While 'swapPromote' preserves window focus, 'swapIn' preserves the focus
-- position - effectively "swapping" new windows into focus without moving the
-- zipper. A mix of both, 'swapHybrid' promotes focused non-master windows
-- while swapping windows into the focused master. This works well on layouts
-- with large masters. Both come with chainable variants, see 'swapIn'' and
-- 'swapHybrid''.
--
-- So far floating windows have been treated no differently than tiled windows
-- even though their positions are independent of the stack. Often, yanking
-- floating windows in and out of the workspace will obliterate the stack
-- history - particularly frustrating with "XMonad.Util.Scratchpad" since it is
-- toggled so frequently and always replaces the master window. That's why the
-- swap functions accept a boolean argument; when @True@ non-focused floating
-- windows will be ignored.
--
-- All together:
--
-- >   , ((mod1Mask, xK_Return), whenX (swapHybrid True) dwmpromote)
-- | Mapping from workspace tag to master history list. The current master is
-- the head of the list, the previous master the second element, and so on.
-- Without history, the list is empty.
newtype MasterHistory = MasterHistory
  { getMasterHistory :: M.Map WorkspaceId [Window]
    -- ^ Per-workspace master history; current master at the head.
  } deriving (Read,Show)

-- Stored in xmonad's extensible state; defaults to an empty map.
instance ExtensionClass MasterHistory where
  initialValue = MasterHistory M.empty

-- | Return the master history map from the state.
getMasterHistoryMap :: X (M.Map WorkspaceId [Window])
getMasterHistoryMap = XS.gets getMasterHistory

-- | Return the master history list of a given tag. The master history list may
-- be empty. An invalid tag will also result in an empty list.
getMasterHistoryFromTag :: WorkspaceId -> X [Window]
getMasterHistoryFromTag t = M.findWithDefault [] t <$> getMasterHistoryMap

-- | Return the master history list of the current workspace.
getMasterHistoryCurrent :: X [Window]
getMasterHistoryCurrent = gets (W.currentTag . windowset)
                      >>= getMasterHistoryFromTag
-- | Return the master history list of the workspace containing the given
-- window. Return an empty list if the window is not in the stackset.
getMasterHistoryFromWindow :: Window -> X [Window]
getMasterHistoryFromWindow w = gets (W.findTag w . windowset)
                           >>= maybe (return []) getMasterHistoryFromTag
-- | Modify the master history list of a given workspace. If no such workspace
-- is mapped, the modifier is applied to the empty list. The result is then
-- re-inserted into the master history map.
modifyMasterHistoryFromTag :: WorkspaceId -> ([Window] -> [Window]) -> X ()
modifyMasterHistoryFromTag t f = XS.modify $ \(MasterHistory m) ->
  -- Missing tags behave as an empty history rather than a failure.
  let l = M.findWithDefault [] t m
  in MasterHistory $ M.insert t (f l) m
-- | Modify the master history list of the current workspace. While the current
-- workspace is guaranteed to exist, its master history may not. For more
-- information see 'modifyMasterHistoryFromTag'.
modifyMasterHistoryCurrent :: ([Window] -> [Window]) -> X ()
modifyMasterHistoryCurrent f = gets (W.currentTag . windowset)
                           >>= flip modifyMasterHistoryFromTag f
-- | A 'logHook' to update the master history mapping. Non-existent workspaces
-- are removed, and the master history list for the current workspace is
-- updated. See 'masterHistoryHook''.
masterHistoryHook :: X ()
masterHistoryHook = masterHistoryHook' True updateMasterHistory

-- | Backend for 'masterHistoryHook'.
masterHistoryHook' :: Bool
                      -- ^ If @True@, remove non-existent workspaces.
                   -> ([Window] -> [Window] -> [Window])
                      -- ^ Function used to update the master history list of
                      -- the current workspace. First argument is the master
                      -- history, second is the integrated stack. See
                      -- 'updateMasterHistory' for more details.
                   -> X ()
masterHistoryHook' removeWorkspaces historyModifier = do
  wset <- gets windowset
  let W.Workspace wid _ mst = W.workspace . W.current $ wset
      tags = map W.tag $ W.workspaces wset
      st = W.integrate' mst
  XS.modify $ \(MasterHistory mm) ->
    -- Optionally drop tags that no longer exist before updating the
    -- current workspace's history.
    let mm' = if removeWorkspaces
                then restrictKeys mm $ S.fromList tags
                else mm
        ms = M.findWithDefault [] wid mm'
        ms' = historyModifier ms st
    in MasterHistory $ M.insert wid ms' mm'
-- | Restrict a map to the given set of keys. Delegates to 'M.restrictKeys'
-- (available since containers-0.5.8), which the hand-rolled filter here
-- previously approximated.
restrictKeys :: Ord k => M.Map k a -> S.Set k -> M.Map k a
restrictKeys = M.restrictKeys
-- | Compute the new master history from the current history and the
-- integrated stack: the stack's head (the current master) is moved to,
-- or inserted at, the front, and any window no longer present in the
-- stack is dropped.
updateMasterHistory :: [Window] -- ^ The master history list.
                    -> [Window] -- ^ The integrated stack.
                    -> [Window]
updateMasterHistory _ [] = []
updateMasterHistory ms ws@(w:_) = filter (`elem` ws) (w : delete w ms)
-- | Wrap 'swapPromoteStack'; see also 'swapApply'.
swapPromote :: Bool -> X Bool
swapPromote ignoreFloats = swapApply ignoreFloats swapPromoteStack

-- | Like 'swapPromote' but discard the result.
swapPromote' :: Bool -> X ()
swapPromote' ignoreFloats = void (swapPromote ignoreFloats)

-- | Wrap 'swapInStack'; see also 'swapApply'.
swapIn :: Bool -> X Bool
swapIn ignoreFloats = swapApply ignoreFloats swapInStack

-- | Like 'swapIn' but discard the result.
swapIn' :: Bool -> X ()
swapIn' ignoreFloats = void (swapIn ignoreFloats)

-- | Wrap 'swapHybridStack'; see also 'swapApply'.
swapHybrid :: Bool -> X Bool
swapHybrid ignoreFloats = swapApply ignoreFloats swapHybridStack

-- | Like 'swapHybrid' but discard the result.
swapHybrid' :: Bool -> X ()
swapHybrid' ignoreFloats = void (swapHybrid ignoreFloats)
-- | Apply the given master history stack modifier to the current stack. If
-- given @True@, all non-focused floating windows will be ignored. Return
-- @True@ if insufficient history; if so use 'whenX' to sequence a backup
-- promotion function.
swapApply :: Bool
             -- ^ If @True@, ignore all non-focused floating windows.
          -> (Maybe Window -> W.Stack Window -> (Bool,W.Stack Window))
             -- ^ Stack modifier; given the previous master (if any) it
             -- returns whether history hindered the swap, plus the new stack.
          -> X Bool
swapApply ignoreFloats swapFunction = do
  fl <- gets $ W.floating . windowset
  st <- gets $ W.stack . W.workspace . W.current . windowset
  ch <- getMasterHistoryCurrent
  let swapApply' s1 =
        -- Windows to ignore: the floating set (when requested).
        let fl' = if ignoreFloats then M.keysSet fl else S.empty
            -- Keep history entries that are not ignored or are focused.
            ff = (||) <$> (`S.notMember` fl') <*> (== W.focus s1)
            fh = filter ff ch
            -- Previous master: second entry of the filtered history.
            pm = listToMaybe . drop 1 $ fh
            -- Pull ignored windows out of the stack, remembering their
            -- positions so they can be re-inserted afterwards.
            (r,s2) = stackSplit s1 fl' :: ([(Int,Window)],W.Stack Window)
            (b,s3) = swapFunction pm s2
            s4 = stackMerge s3 r
            -- History updater: move the new head of s3 to the front.
            -- 'head' is safe here since a W.Stack integrates non-empty.
            mh = let w = head . W.integrate $ s3
                 in const $ w : delete w ch
        in (b,Just s4,mh)
      (x,y,z) = maybe (False,Nothing,id) swapApply' st
  -- Any floating master windows will be added to the history when 'windows'
  -- calls the log hook.
  modifyMasterHistoryCurrent z
  windows $ W.modify Nothing . const $ y
  return x
-- | If the focused window is the master window and there is no previous
-- master, do nothing. Otherwise swap the master with the previous master. If
-- the focused window is not the master window, swap it with the master window.
-- In either case focus follows the original window, i.e. the focused window
-- does not change, only its position.
--
-- The first argument is the previous master (which may not exist), the second
-- a window stack. Return @True@ if the master history hindered the swap; the
-- history is either empty or out-of-sync. Though the latter shouldn't happen
-- this function never changes the stack under such circumstances.
swapPromoteStack :: Maybe Window -> W.Stack Window -> (Bool,W.Stack Window)
-- Singleton stack: nothing to swap.
swapPromoteStack _ st@(W.Stack _x [] []) = (False,st)
-- Focused master but no previous master known: report hindered.
swapPromoteStack Nothing st@(W.Stack _x [] _r) = (True,st)
-- Focused master with a known previous master: split the reversed right
-- side at pm; the matching suffix, rotated by one, becomes the new left
-- side. If pm was not found the left side ends up empty and the swap is
-- reported as hindered via b.
swapPromoteStack (Just pm) (W.Stack x [] r) =
  let (r',l') = (reverse *** cycleN 1) $ span (/= pm) $ reverse r
      st' = W.Stack x l' r'
      b = null l'
  in (b,st')
-- Focused non-master: reverse the left side into stack order, rotate it
-- by one and prepend to the right side; the focused window keeps focus
-- and becomes the master (empty left side).
swapPromoteStack _ (W.Stack x l r) =
  let r' = (++ r) . cycleN 1 . reverse $ l
      st' = W.Stack x [] r'
  in (False,st')
-- | Perform the same swap as 'swapPromoteStack'. However the new window
-- receives the focus; it appears to "swap into" the position of the original
-- window. Under this model focus follows stack position and the zipper does
-- not move.
--
-- See 'swapPromoteStack' for more details regarding the parameters.
swapInStack :: Maybe Window -> W.Stack Window -> (Bool,W.Stack Window)
-- Singleton stack: nothing to do.
swapInStack _ st@(W.Stack _x [] []) = (False,st)
-- Focused master without a known previous master: report hindered.
swapInStack Nothing st@(W.Stack _x [] _r) = (True,st)
-- Focused master with a known previous master: if pm occurs in the
-- right side, give it focus and put the old master x where pm was;
-- otherwise leave the stack unchanged and report hindered via b.
swapInStack (Just pm) (W.Stack x [] r) =
  let (x',r') = case span (/= pm) r of
                  (__,[]) -> (x,r)
                  (sl,sr) -> (pm,sl ++ x : drop 1 sr)
      st' = W.Stack x' [] r'
      b = x' == x
  in (b,st')
-- Focused non-master: swap x with the last element of the left list
-- (the master position, since the left side is stored closest-first);
-- the old master receives focus. 'init'/'last' are safe: all empty-l
-- cases were matched by the earlier clauses.
swapInStack _ (W.Stack x l r) =
  let l' = init l ++ [x]
      x' = last l
      st' = W.Stack x' l' r
  in (False,st')
-- | Dispatch between the two swap behaviours: when the focused window
-- is the master (empty left side of the zipper) use 'swapInStack',
-- otherwise use 'swapPromoteStack'.
--
-- See 'swapPromoteStack' for more details regarding the parameters.
swapHybridStack :: Maybe Window -> W.Stack Window -> (Bool,W.Stack Window)
swapHybridStack m st
  | W.Stack _ [] _ <- st = swapInStack m st
  | otherwise            = swapPromoteStack m st
-- | Cycle a list by the given count. If positive, cycle to the left. If
-- negative, cycle to the right:
--
-- >>> cycleN 2 [1,2,3,4,5]
-- [3,4,5,1,2]
--
-- >>> cycleN (-2) [1,2,3,4,5]
-- [4,5,1,2,3]
cycleN :: Int -> [a] -> [a]
-- Guard the empty list explicitly: 'mod' by a zero length is undefined.
cycleN _ [] = []
cycleN n ls =
  let l = length ls
  in take l $ drop (n `mod` l) $ cycle ls
-- | Wrap 'split'' with an initial index of @0@, discarding the list's length.
split :: (Num a, Enum a) => (b -> Bool) -> [b] -> ([(a,b)],[b])
split p l =
  let (_,ys,ns) = split' p 0 l
  in (ys,ns)

-- | Given a predicate, an initial index and a list, return a tuple containing:
--
-- * List length.
-- * Indexed list of elements which satisfy the predicate. An indexed element
--   is a tuple containing the element index (offset by the initial index) and
--   the element.
-- * List of elements which do not satisfy the predicate.
--
-- The initial index and length of the list simplify chaining calls to this
-- function, such as for zippers of lists.
split' :: (Num a, Enum a) => (b -> Bool) -> a -> [b] -> (a,[(a,b)],[b])
split' p i l =
  let accumulate e (c,ys,ns) = if p (snd e)
                                 then (c+1,e:ys,ns)
                                 else (c+1,ys,e:ns)
      (c',ys',ns') = foldr accumulate (0,[],[]) $ zip [i..] l
  in (c',ys',map snd ns')
-- | Wrap 'merge'' with an initial virtual index of @0@. Return only the
-- unindexed list with elements from the leftover indexed list appended.
merge :: (Ord a, Num a) => [(a,b)] -> [b] -> [b]
merge il ul =
  let (_,il',ul') = merge' 0 il ul
  in ul' ++ map snd il'

-- | Inverse of 'split'. Merge an indexed list with an unindexed list (see
-- 'split''). Given a virtual index, an indexed list and an unindexed list,
-- return a tuple containing:
--
-- * Virtual index /after/ the unindexed list
-- * Remainder of the indexed list
-- * Merged list
--
-- If the indexed list is empty, this function consumes the entire unindexed
-- list. If the unindexed list is empty, this function consumes only adjacent
-- indexed elements. For example, @[(10,"ten"),(12,"twelve")]@ implies missing
-- unindexed elements and so once @(10,"ten")@ is consumed this function
-- concludes.
--
-- The indexed list is assumed to have been created by 'split'' and not checked
-- for correctness. Indices are assumed to be ascending, i.e.
--
-- > [(1,"one"),(2,"two"),(4,"four")]
--
-- The initial and final virtual indices simplify chaining calls to this
-- function, such as for zippers of lists. Positive values shift the unindexed
-- list towards the tail, as if preceded by that many elements.
merge' :: (Ord a, Num a) => a -> [(a,b)] -> [b] -> (a,[(a,b)],[b])
merge' i il@((j,a):ps) ul@(b:bs) = if j <= i
  then let (x,y,z) = merge' (i+1) ps ul
       in (x,y,a:z)
  else let (x,y,z) = merge' (i+1) il bs
       in (x,y,b:z)
merge' i [] (b:bs) =
  let (x,y,z) = merge' (i+1) [] bs
  in (x,y,b:z)
merge' i il@((j,a):ps) [] = if j <= i
  then let (x,y,z) = merge' (i+1) ps []
       in (x,y,a:z)
  else (i,il,[])
merge' i [] [] =
  (i,[],[])
-- | Remove all elements of the set from the stack. Skip the currently focused
-- member. Return an indexed list of excluded elements and the modified stack.
-- Use 'stackMerge' to re-insert the elements using this list.
stackSplit :: (Num a, Enum a, Ord b) => W.Stack b -> S.Set b -> ([(a,b)],W.Stack b)
stackSplit (W.Stack x l r) s =
  -- The left side of the zipper is stored closest-first; reverse it so
  -- indices run in stack order. The focus occupies index c, hence the
  -- right side starts at c+1.
  let (c,fl,tl) = split' (`S.member` s) 0 (reverse l)
      (_,fr,tr) = split' (`S.member` s) (c+1) r
  in (fl++fr,W.Stack x (reverse tl) tr)
-- | Inverse of 'stackSplit'. Given a list of elements and their original
-- indices, re-insert the elements into these same positions within the stack.
-- Skip the currently focused member. Works best if the stack's length hasn't
-- changed, though if shorter any leftover elements will be tacked on.
stackMerge :: (Ord a, Num a) => W.Stack b -> [(a,b)] -> W.Stack b
stackMerge (W.Stack x l r) il =
  -- Merge into the reversed (stack-order) left side first; the returned
  -- virtual index i accounts for it, and i+1 skips the focused element.
  let (i,il1,l') = merge' 0 il (reverse l)
      (_,il2,r') = merge' (i+1) il1 r
  in W.Stack x (reverse l') (r' ++ map snd il2)
| null | https://raw.githubusercontent.com/xmonad/xmonad-contrib/e0d1f177ea6c620b7612e431ff01b3ca1a62c829/XMonad/Actions/SwapPromote.hs | haskell | ---------------------------------------------------------------------------
|
Description : Track the master window history per workspace.
License : BSD-style (see LICENSE)
Maintainer :
Stability : unstable
Portability : unportable
Module for tracking master window history per workspace, and associated
functions for manipulating the stack using such history.
---------------------------------------------------------------------------
* Usage
$usage
* State Accessors
* Log Hook
* Log Hook Building Blocks
* Actions
* Action Building Blocks
* Stack Utilities
$usage
Given your configuration file, import this module:
First add 'masterHistoryHook' to your 'logHook' to track master windows per
workspace:
> myLogHook = otherHook >> masterHistoryHook
Depending on your xmonad configuration or window actions the master history
may be empty. If this is the case you can still chain another promotion
function:
To be clear, this is only called when the lack of master history hindered
the swap and not other conditions, such as having a only a single window.
While 'swapPromote' preserves window focus, 'swapIn' preserves the focus
position - effectively "swapping" new windows into focus without moving the
zipper. A mix of both, 'swapHybrid' promotes focused non-master windows
while swapping windows into the focused master. This works well on layouts
with large masters. Both come with chainable variants, see 'swapIn'' and
'swapHybrid''.
So far floating windows have been treated no differently than tiled windows
even though their positions are independent of the stack. Often, yanking
floating windows in and out of the workspace will obliterate the stack
history - particularly frustrating with "XMonad.Util.Scratchpad" since it is
toggled so frequenty and always replaces the master window. That's why the
windows will be ignored.
All together:
| Mapping from workspace tag to master history list. The current master is
Without history, the list is empty.
| Return the master history map from the state.
| Return the master history list of a given tag. The master history list may
be empty. An invalid tag will also result in an empty list.
| Return the master history list of the current workspace.
| Return the master history list of the workspace containing the given
| Modify the master history list of a given workspace, or the empty list of
no such workspace is mapped. The result is then re-inserted into the master
history map.
| Modify the master history list of the current workspace. While the current
workspace is guaranteed to exist; its master history may not. For more
| A 'logHook' to update the master history mapping. Non-existent workspaces
are removed, and the master history list for the current workspaces is
updated. See 'masterHistoryHook''.
| Backend for 'masterHistoryHook'.
^ Function used to update the master history list of
'updateMasterHistory' for more details.
| Less efficient version of 'M.restrictKeys'. Given broader eventual
adoption, replace this with 'M.restrictKeys'.
| Given the current master history list and an integrated stack, return the
new master history list. The current master is either moved (if it exists
within the history) or added to the head of the list, and all missing (i.e.
closed) windows are removed.
^ The master history list.
^ The integrated stack.
| Wrap 'swapPromoteStack'; see also 'swapApply'.
| Like 'swapPromote'' but discard the result.
| Wrap 'swapInStack'; see also 'swapApply'.
| Like 'swapIn'' but discard the result.
| Wrap 'swapHybridStack'; see also 'swapApply'.
| Like 'swapHybrid'' but discard the result.
| Apply the given master history stack modifier to the current stack. If
promotion function.
Any floating master windows will be added to the history when 'windows'
calls the log hook.
| If the focused window is the master window and there is no previous
master, do nothing. Otherwise swap the master with the previous master. If
the focused window is not the master window, swap it with the master window.
In either case focus follows the original window, i.e. the focused window
does not change, only its position.
history is either empty or out-of-sync. Though the latter shouldn't happen
this function never changes the stack under such circumstances.
| Perform the same swap as 'swapPromoteStack'. However the new window
receives the focus; it appears to "swap into" the position of the original
window. Under this model focus follows stack position and the zipper does
not move.
See 'swapPromoteStack' for more details regarding the parameters.
| If the focused window is the master window, use 'swapInStack'. Otherwise use
'swapPromoteStack'.
See 'swapPromoteStack' for more details regarding the parameters.
| Cycle a list by the given count. If positive, cycle to the left. If
negative, cycle to the right:
| Given a predicate, an initial index and a list, return a tuple containing:
* List length.
* Indexed list of elements which satisfy the predicate. An indexed element
is a tuple containing the element index (offset by the initial index) and
the element.
* List of elements which do not satisfy the predicate.
The initial index and length of the list simplify chaining calls to this
function, such as for zippers of lists.
| Wrap 'merge'' with an initial virtual index of @0@. Return only the
unindexed list with elements from the leftover indexed list appended.
| Inverse of 'split'. Merge an indexed list with an unindexed list (see
'split''). Given a virtual index, an indexed list and an unindexed list,
return a tuple containing:
* Remainder of the indexed list
If the indexed list is empty, this functions consumes the entire unindexed
indexed elements. For example, @[(10,"ten"),(12,"twelve")]@ implies missing
unindexed elements and so once @(10,"ten")@ is consumed this function
concludes.
The indexed list is assumed to have been created by 'split'' and not checked
for correctness. Indices are assumed to be ascending, i.e.
> [(1,"one"),(2,"two"),(4,"four")]
The initial and final virtual indices simplify chaining calls to the this
list towards the tail, as if preceded by that many elements.
| Remove all elements of the set from the stack. Skip the currently focused
member. Return an indexed list of excluded elements and the modified stack.
Use 'stackMerge' to re-insert the elements using this list.
| Inverse of 'stackSplit'. Given a list of elements and their original
indices, re-insert the elements into these same positions within the stack.
Skip the currently focused member. Works best if the stack's length hasn't
changed, though if shorter any leftover elements will be tacked on. | Module : XMonad . Actions . SwapPromote
Copyright : ( c ) 2018
module XMonad.Actions.SwapPromote
MasterHistory (..)
, getMasterHistoryMap
, getMasterHistoryFromTag
, getMasterHistoryCurrent
, getMasterHistoryFromWindow
, modifyMasterHistoryFromTag
, modifyMasterHistoryCurrent
, masterHistoryHook
, masterHistoryHook'
, updateMasterHistory
, swapPromote
, swapPromote'
, swapIn
, swapIn'
, swapHybrid
, swapHybrid'
, swapApply
, swapPromoteStack
, swapInStack
, swapHybridStack
* List Utilities
, cycleN
, split
, split'
, merge
, merge'
, stackSplit
, stackMerge
) where
import XMonad
import XMonad.Prelude
import qualified XMonad.StackSet as W
import qualified XMonad.Util.ExtensibleState as XS
import qualified Data.Map as M
import qualified Data.Set as S
import Control.Arrow
> import XMonad . Actions . SwapPromote
Then replace xmonad 's default promote keybinding with ' swapPromote '' :
> , ( ( mod1Mask , xK_Return ) , swapPromote ' False )
> import XMonad . Actions . DwmPromote
> , ( ( mod1Mask , xK_Return ) , whenX ( swapPromote False ) dwmpromote )
swap functions accept a boolean argument ; when @True@ non - focused floating
> , ( ( mod1Mask , xK_Return ) , whenX ( swapHybrid True ) dwmpromote )
the head of the list , the previous master the second element , and so on .
-- | Per-workspace master history: for each 'WorkspaceId', a list of
-- windows with the current master first and earlier masters after it
-- (per the module documentation above).  Stored in extensible state.
newtype MasterHistory = MasterHistory
  { getMasterHistory :: M.Map WorkspaceId [Window]
  } deriving (Read,Show)
-- | No history is recorded until a workspace is first updated.
instance ExtensionClass MasterHistory where
  initialValue = MasterHistory M.empty
-- | The full master-history map from extensible state.
getMasterHistoryMap :: X (M.Map WorkspaceId [Window])
getMasterHistoryMap = XS.gets getMasterHistory
-- | Master history of the given workspace tag; empty if none recorded.
getMasterHistoryFromTag :: WorkspaceId -> X [Window]
getMasterHistoryFromTag t = M.findWithDefault [] t <$> getMasterHistoryMap
-- | Master history of the currently focused workspace.
getMasterHistoryCurrent :: X [Window]
getMasterHistoryCurrent = gets (W.currentTag . windowset)
  >>= getMasterHistoryFromTag
window . Return an empty list if the window is not in the stackset .
-- | Master history of the workspace containing the given window;
-- empty when the window is not in the stackset.
getMasterHistoryFromWindow :: Window -> X [Window]
getMasterHistoryFromWindow w = gets (W.findTag w . windowset)
  >>= maybe (return []) getMasterHistoryFromTag
-- | Apply a function to the stored history of the given workspace tag,
-- treating a missing entry as the empty list.
modifyMasterHistoryFromTag :: WorkspaceId -> ([Window] -> [Window]) -> X ()
modifyMasterHistoryFromTag t f = XS.modify $ \(MasterHistory m) ->
  let l = M.findWithDefault [] t m
  in MasterHistory $ M.insert t (f l) m
information see ' ' .
-- | Like 'modifyMasterHistoryFromTag', but for the focused workspace.
modifyMasterHistoryCurrent :: ([Window] -> [Window]) -> X ()
modifyMasterHistoryCurrent f = gets (W.currentTag . windowset)
  >>= flip modifyMasterHistoryFromTag f
-- | Default history hook: prunes entries of workspaces that no longer
-- exist and records the current master via 'updateMasterHistory'.
masterHistoryHook :: X ()
masterHistoryHook = masterHistoryHook' True updateMasterHistory
masterHistoryHook' :: Bool
^ If @True@ , remove non - existent workspaces .
-> ([Window] -> [Window] -> [Window])
the current workspace . First argument is the master
history , second is the integrated stack . See
-> X ()
-- Reads the focused workspace and the full tag list from the window
-- set, optionally prunes history entries of vanished workspaces, then
-- rewrites this workspace's entry with
-- @historyModifier oldHistory (W.integrate' stack)@.
masterHistoryHook' removeWorkspaces historyModifier = do
  wset <- gets windowset
  let W.Workspace wid _ mst = W.workspace . W.current $ wset
      tags = map W.tag $ W.workspaces wset
      st = W.integrate' mst
  XS.modify $ \(MasterHistory mm) ->
    let mm' = if removeWorkspaces
              -- Drop entries whose workspace no longer exists.
              then restrictKeys mm $ S.fromList tags
              else mm
        ms = M.findWithDefault [] wid mm'
        ms' = historyModifier ms st
    in MasterHistory $ M.insert wid ms' mm'
-- | Keep only the map entries whose key belongs to the given set
-- (local equivalent of @Data.Map.restrictKeys@, presumably kept here
-- for compatibility with older containers versions).
restrictKeys :: Ord k => M.Map k a -> S.Set k -> M.Map k a
restrictKeys m s = M.filterWithKey keep m
  where keep k _ = k `S.member` s
-> [Window]
-- Update the stored history from the workspace's integrated window
-- list: the head of that list moves (or is inserted) to the front of
-- the history, and windows no longer on the workspace are dropped.
updateMasterHistory _ [] = []
updateMasterHistory hist ws@(top:_) =
  intersect (top : delete top hist) ws
-- | Swap using the 'swapPromoteStack' strategy; the 'Bool' argument
-- means \"ignore non-focused floating windows\".  Returns 'True' when
-- the master history was insufficient, so callers can sequence a
-- fallback with 'whenX' (see the module examples above).
swapPromote :: Bool -> X Bool
swapPromote = flip swapApply swapPromoteStack
-- | 'swapPromote' with the result discarded.
swapPromote' :: Bool -> X ()
swapPromote' = void . swapPromote
-- | Like 'swapPromote', but using the 'swapInStack' strategy.
swapIn :: Bool -> X Bool
swapIn = flip swapApply swapInStack
-- | 'swapIn' with the result discarded.
swapIn' :: Bool -> X ()
swapIn' = void . swapIn
-- | Like 'swapPromote', but using the 'swapHybridStack' strategy.
swapHybrid :: Bool -> X Bool
swapHybrid = flip swapApply swapHybridStack
-- | 'swapHybrid' with the result discarded.
swapHybrid' :: Bool -> X ()
swapHybrid' = void . swapHybrid
given @True@ , all non - focused floating windows will be ignored . Return
@True@ if insufficient history ; if so use ' whenX ' to sequence a backup
-- | Shared driver for all swap variants: fetch the floating set, the
-- focused workspace's stack and the current master history, run the
-- given swap function on the stack (with floating windows optionally
-- split out first), push the result back into the window set, and put
-- the new top window at the front of the history.
swapApply :: Bool
          -> (Maybe Window -> W.Stack Window -> (Bool,W.Stack Window))
          -> X Bool
swapApply ignoreFloats swapFunction = do
  fl <- gets $ W.floating . windowset
  st <- gets $ W.stack . W.workspace . W.current . windowset
  ch <- getMasterHistoryCurrent
  let swapApply' s1 =
        -- Floating windows to exclude (none unless requested).
        let fl' = if ignoreFloats then M.keysSet fl else S.empty
            -- Keep history entries that are non-floating or focused.
            ff = (||) <$> (`S.notMember` fl') <*> (== W.focus s1)
            fh = filter ff ch
            -- Previous master = second entry of the filtered history.
            pm = listToMaybe . drop 1 $ fh
            (r,s2) = stackSplit s1 fl' :: ([(Int,Window)],W.Stack Window)
            (b,s3) = swapFunction pm s2
            s4 = stackMerge s3 r
            -- History update: promote the new top window to the front.
            mh = let w = head . W.integrate $ s3
                 in const $ w : delete w ch
        in (b,Just s4,mh)
      (x,y,z) = maybe (False,Nothing,id) swapApply' st
  modifyMasterHistoryCurrent z
  windows $ W.modify Nothing . const $ y
  return x
The first argument is the previous master ( which may not exist ) , the second
a window stack . Return if the master history hindered the swap ; the
-- | Promote strategy.  The 'Maybe Window' is the previous master from
-- the history; the returned 'Bool' is 'True' when missing history
-- limited the operation.
swapPromoteStack :: Maybe Window -> W.Stack Window -> (Bool,W.Stack Window)
-- Singleton stack: nothing to do.
swapPromoteStack _ st@(W.Stack _x [] []) = (False,st)
-- Focus is already the master and no previous master is known.
swapPromoteStack Nothing st@(W.Stack _x [] _r) = (True,st)
-- Focus is the master: rotate the windows below so the previous master
-- comes back up; @b@ flags that @pm@ was not found below the focus.
swapPromoteStack (Just pm) (W.Stack x [] r) =
  let (r',l') = (reverse *** cycleN 1) $ span (/= pm) $ reverse r
      st' = W.Stack x l' r'
      b = null l'
  in (b,st')
-- Focus is below the master: the focused window becomes the master and
-- the old master takes the focused window's former position.
swapPromoteStack _ (W.Stack x l r) =
  let r' = (++ r) . cycleN 1 . reverse $ l
      st' = W.Stack x [] r'
  in (False,st')
-- | In-place swap strategy; same argument/result conventions as
-- 'swapPromoteStack'.
swapInStack :: Maybe Window -> W.Stack Window -> (Bool,W.Stack Window)
-- Singleton stack: nothing to do.
swapInStack _ st@(W.Stack _x [] []) = (False,st)
-- Focus is the master and no previous master is known: report hindered.
swapInStack Nothing st@(W.Stack _x [] _r) = (True,st)
-- Focus is the master: exchange it with the previous master if that
-- window is still below; otherwise leave the stack alone (b == True).
swapInStack (Just pm) (W.Stack x [] r) =
  let (x',r') = case span (/= pm) r of
                  (__,[]) -> (x,r)
                  (sl,sr) -> (pm,sl ++ x : drop 1 sr)
      st' = W.Stack x' [] r'
      b = x' == x
  in (b,st')
-- Focus is below the master: exchange the focused window with the
-- window at the top of the up-list, keeping focus at the same position.
swapInStack _ (W.Stack x l r) =
  let l' = init l ++ [x]
      x' = last l
      st' = W.Stack x' l' r
  in (False,st')
-- | Hybrid strategy: use 'swapInStack' when the focus is already the
-- master (no windows above it), otherwise 'swapPromoteStack'.
swapHybridStack :: Maybe Window -> W.Stack Window -> (Bool,W.Stack Window)
swapHybridStack prev stk@(W.Stack _ ups _)
  | null ups  = swapInStack prev stk
  | otherwise = swapPromoteStack prev stk
> > > cycleN 2 [ 1,2,3,4,5 ]
[ 3,4,5,1,2 ]
> > > cycleN ( -2 ) [ 1,2,3,4,5 ]
[ 4,5,1,2,3 ]
-- | Cycle a list by the given count: positive cycles left, negative
-- cycles right, e.g. @cycleN 2 [1..5] == [3,4,5,1,2]@ and
-- @cycleN (-2) [1..5] == [4,5,1,2,3]@.
--
-- The empty-list case is made explicit: the previous definition only
-- avoided the division by zero in @n \`mod\` 0@ because @take 0@
-- short-circuits before forcing its argument.
cycleN :: Int -> [a] -> [a]
cycleN _ [] = []
cycleN n ls =
  let l = length ls
  in take l $ drop (n `mod` l) $ cycle ls
| Wrap ' split '' with an initial index of @0@ , discarding the list 's length .
-- | Wrapper around 'split'' that starts indexing at @0@ and discards
-- the returned element count.
split :: (Num a, Enum a) => (b -> Bool) -> [b] -> ([(a,b)],[b])
split p l = (matches, rest)
  where
    (_count, matches, rest) = split' p 0 l
-- | Partition a list by a predicate, pairing matching elements with
-- their index (offset by the initial index @i@).  Also returns the
-- total element count, which makes chaining calls easy (e.g. over the
-- two sides of a zipper).
split' :: (Num a, Enum a) => (b -> Bool) -> a -> [b] -> (a,[(a,b)],[b])
split' p i l = dropIdx (foldr step (0,[],[]) (zip [i..] l))
  where
    -- Route every indexed element into the matching or non-matching
    -- bucket, counting each element either way.
    step e (c,ys,ns)
      | p (snd e) = (c+1, e:ys, ns)
      | otherwise = (c+1, ys, e:ns)
    -- Non-matching elements lose their indices in the result.
    dropIdx (c,ys,ns) = (c, ys, map snd ns)
-- | Wrapper around 'merge'' starting at virtual index @0@; indexed
-- elements left over (indices past the end) are appended, unindexed,
-- at the end of the result.
merge :: (Ord a, Num a) => [(a,b)] -> [b] -> [b]
merge il ul = merged ++ map snd leftover
  where
    (_i, leftover, merged) = merge' 0 il ul
* Virtual index /after/ the unindexed list
* Merged list
list . If the unindexed list is empty , this function consumes only adjacent
function , as as for zippers of lists . Positive values shift the unindexed
-- | Merge an indexed list back into an unindexed one (inverse of
-- 'split''), starting at virtual index @i@.  Returns the final virtual
-- index, the indexed elements that were never due (their indices lie
-- past the end of the merged output), and the merged list.  Indices
-- are assumed ascending, as produced by 'split''.
merge' :: (Ord a, Num a) => a -> [(a,b)] -> [b] -> (a,[(a,b)],[b])
-- Both lists non-empty: emit the indexed element when its index is
-- due, otherwise emit the next unindexed element.
merge' i il@((j,a):ps) ul@(b:bs) = if j <= i
  then let (x,y,z) = merge' (i+1) ps ul
       in (x,y,a:z)
  else let (x,y,z) = merge' (i+1) il bs
       in (x,y,b:z)
-- No indexed elements left: the rest is just the unindexed tail.
merge' i [] (b:bs) =
  let (x,y,z) = merge' (i+1) [] bs
  in (x,y,b:z)
-- Unindexed input exhausted: keep emitting indexed elements while they
-- stay adjacent; at the first gap, stop and return the leftovers.
merge' i il@((j,a):ps) [] = if j <= i
  then let (x,y,z) = merge' (i+1) ps []
       in (x,y,a:z)
  else (i,il,[])
-- Nothing left at all.
merge' i [] [] =
  (i,[],[])
-- | Remove from the stack every member of the set except the focused
-- window.  Returns the removed windows paired with their positions in
-- the integrated order, plus the thinned stack; feed the pairs to
-- 'stackMerge' to undo.  The up-list is reversed so indices count from
-- the top of the stack; the focus itself sits at index @c@, hence the
-- down-list starts at @c+1@.
stackSplit :: (Num a, Enum a, Ord b) => W.Stack b -> S.Set b -> ([(a,b)],W.Stack b)
stackSplit (W.Stack x l r) s =
  let (c,fl,tl) = split' (`S.member` s) 0 (reverse l)
      (_,fr,tr) = split' (`S.member` s) (c+1) r
  in (fl++fr,W.Stack x (reverse tl) tr)
-- | Inverse of 'stackSplit': re-insert the indexed windows at their
-- former positions (skipping the focus).  Works best when the stack's
-- length is unchanged; elements whose index falls past the end are
-- appended to the bottom.
stackMerge :: (Ord a, Num a) => W.Stack b -> [(a,b)] -> W.Stack b
stackMerge (W.Stack x l r) il =
  let (i,il1,l') = merge' 0 il (reverse l)
      (_,il2,r') = merge' (i+1) il1 r
  in W.Stack x (reverse l') (r' ++ map snd il2)
|
f95f5ab65d7c9b84bf6850c366810faf4ff231ccd466bc5f6cf3d74d6b8ea6b6 | ChrisPenner/comonads-by-example | FileTree.hs | # LANGUAGE TypeOperators #
module Comonads.Cofree.FileTree where
import Control.Comonad
import Control.Comonad.Env
import Control.Comonad.Cofree
import Control.Monad.Free
import qualified Control.Monad.Trans.Free as FF
import Control.Arrow
import Data.Traversable
import Data.Functor.Compose
import Data.Functor.Foldable
import Control.Applicative
import System.Directory
import qualified Data.Map as M
type FileTreeIO = Cofree (IO `Compose` M.Map FilePath) [FilePath]
type FileTreeC = Free (Env [FilePath] `Compose` (M.Map FilePath)) FileTreeIO
-- | Build a lazily-explored file tree rooted at @path@.  'coiter'
-- unfolds a 'Cofree' whose effect layer lists each directory on
-- demand; paths that cannot be listed fall back to an empty listing
-- via @('<|>' pure [])@.
mkFileTree :: FilePath -> FileTreeC
mkFileTree path = Pure $ coiter coalg [path]
  where
    coalg :: [FilePath] -> (IO `Compose` M.Map FilePath) [FilePath]
    -- For every path, list its entries (or [] on failure), keyed by
    -- the path itself.
    coalg paths = Compose $ traverse (\p -> listDirectory p <|> pure []) (toMap paths)
-- | Paths recorded at the already-explored ('Free') layers of the
-- tree; the unexplored 'Pure' leaves are replaced by @[]@ first.
-- NOTE(review): @alg@ returns only that layer's environment, so paths
-- from nested layers appear to be discarded — confirm this is the
-- intended behaviour.
explored :: FileTreeC -> [FilePath]
explored = iter alg . fmap (const [])
  where
    alg w = ask $ getCompose w
-- | Index a list by itself: every element becomes both key and value.
toMap :: Ord a => [a] -> M.Map a a
toMap xs = M.fromList [ (x, x) | x <- xs ]
-- | The (unexplored) file tree rooted at the current working directory.
cwd :: FileTreeC
cwd = mkFileTree "."
-- deeper :: FileTreeC -> IO FileTreeC
-- deeper = sequenceA . (>>= go)
-- where
-- go :: FileTreeIO -> Free (Env [FilePath] `Compose` (M.Map FilePath))
-- go (_ :< Compose ioNext) =
-- go
-- :: FF.FreeF
-- (Compose (Env [FilePath]) (M.Map FilePath))
-- (Cofree (Compose IO (M.Map FilePath)) [FilePath])
-- (IO a)
-- -> IO
-- (FF.FreeF
-- (Compose (Env [FilePath]) (M.Map FilePath))
-- (Cofree (Compose IO (M.Map FilePath)) [FilePath])
-- a)
-- go (FF.Pure (_ :< Compose ionext)) = do
-- mapNext <- ionext
pure $ FF.Free ( Compose $ env ( foldMap extract mapNext ) mapNext )
-- | Expand the tree one level: run the IO at each unexplored 'Pure'
-- leaf once, turning the discovered entries into a new 'Free' layer
-- whose environment is the list of entry names, with fresh 'Pure'
-- leaves underneath.  Already-explored layers are rebuilt unchanged.
deeper :: FileTreeC -> IO FileTreeC
deeper = cataA algA
  where
    algA
      :: FF.FreeF
           (Compose (Env [FilePath]) (M.Map FilePath))
           (Cofree (Compose IO (M.Map FilePath)) [FilePath])
           (IO FileTreeC)
      -> IO FileTreeC
    -- Unexplored leaf: perform the directory listing and wrap it up.
    algA (FF.Pure (_ :< Compose ioMap)) = do
      mapNext <- ioMap
      pure $ Free (Compose $ env (M.keys mapNext) (Pure <$> mapNext))
    -- Explored layer: sequence the rebuilt children.
    algA (FF.Free envMap) = Free <$> sequenceA envMap
| null | https://raw.githubusercontent.com/ChrisPenner/comonads-by-example/1d7626f759e59ac8019322612ed6d7ff00da75c9/drafts/FileTree.hs | haskell | deeper :: FileTreeC -> IO FileTreeC
deeper = sequenceA . (>>= go)
where
go :: FileTreeIO -> Free (Env [FilePath] `Compose` (M.Map FilePath))
go (_ :< Compose ioNext) =
go
:: FF.FreeF
(Compose (Env [FilePath]) (M.Map FilePath))
(Cofree (Compose IO (M.Map FilePath)) [FilePath])
(IO a)
-> IO
(FF.FreeF
(Compose (Env [FilePath]) (M.Map FilePath))
(Cofree (Compose IO (M.Map FilePath)) [FilePath])
a)
go (FF.Pure (_ :< Compose ionext)) = do
mapNext <- ionext | # LANGUAGE TypeOperators #
module Comonads.Cofree.FileTree where
import Control.Comonad
import Control.Comonad.Env
import Control.Comonad.Cofree
import Control.Monad.Free
import qualified Control.Monad.Trans.Free as FF
import Control.Arrow
import Data.Traversable
import Data.Functor.Compose
import Data.Functor.Foldable
import Control.Applicative
import System.Directory
import qualified Data.Map as M
type FileTreeIO = Cofree (IO `Compose` M.Map FilePath) [FilePath]
type FileTreeC = Free (Env [FilePath] `Compose` (M.Map FilePath)) FileTreeIO
mkFileTree :: FilePath -> FileTreeC
mkFileTree path = Pure $ coiter coalg [path]
where
coalg :: [FilePath] -> (IO `Compose` M.Map FilePath) [FilePath]
coalg paths = Compose $ traverse (\p -> listDirectory p <|> pure []) (toMap paths)
explored :: FileTreeC -> [FilePath]
explored = iter alg . fmap (const [])
where
alg w = ask $ getCompose w
-- | Index a list by itself: every element becomes both key and value.
toMap :: Ord a => [a] -> M.Map a a
toMap = M.fromList . fmap (id &&& id)
cwd :: FileTreeC
cwd = mkFileTree "."
pure $ FF.Free ( Compose $ env ( foldMap extract mapNext ) mapNext )
deeper :: FileTreeC -> IO FileTreeC
deeper = cataA algA
where
algA
:: FF.FreeF
(Compose (Env [FilePath]) (M.Map FilePath))
(Cofree (Compose IO (M.Map FilePath)) [FilePath])
(IO FileTreeC)
-> IO FileTreeC
algA (FF.Pure (_ :< Compose ioMap)) = do
mapNext <- ioMap
pure $ Free (Compose $ env (M.keys mapNext) (Pure <$> mapNext))
algA (FF.Free envMap) = Free <$> sequenceA envMap
|
325988647232aad7c5ca2698b7111af450ed83896b063fe228941a346310a4d5 | jeapostrophe/exp | test.rkt | #lang racket/base
(require "m.rkt")
(require "n.rkt")
| null | https://raw.githubusercontent.com/jeapostrophe/exp/43615110fd0439d2ef940c42629fcdc054c370f9/nsmv/test.rkt | racket | #lang racket/base
(require "m.rkt")
(require "n.rkt")
| |
7b2f86cf0f4777235f6e17fc57b67353c09b9f626d34e3c2983399b027c967ec | scrintal/heroicons-reagent | eye_dropper.cljs | (ns com.scrintal.heroicons.outline.eye-dropper)
(defn render []
[:svg {:xmlns ""
:fill "none"
:viewBox "0 0 24 24"
:strokeWidth "1.5"
:stroke "currentColor"
:aria-hidden "true"}
[:path {:strokeLinecap "round"
:strokeLinejoin "round"
:d "M15 11.25l1.5 1.5.75-.75V8.758l2.276-.61a3 3 0 10-3.675-3.675l-.61 2.277H12l-.75.75 1.5 1.5M15 11.25l-8.47 8.47c-.34.34-.8.53-1.28.53s-.94.19-1.28.53l-.97.97-.75-.75.97-.97c.34-.34.53-.8.53-1.28s.19-.94.53-1.28L12.75 9M15 11.25L12.75 9"}]]) | null | https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/outline/eye_dropper.cljs | clojure | (ns com.scrintal.heroicons.outline.eye-dropper)
;; Reagent render function for the Heroicons outline "eye-dropper"
;; icon.  Returns a hiccup SVG form: 24x24 viewBox, no fill, stroked
;; with the surrounding text colour (currentColor), and marked
;; aria-hidden since the icon is purely decorative.
;; NOTE(review): the :xmlns value is empty here — it is normally
;; "http://www.w3.org/2000/svg"; confirm against the repository.
(defn render []
  [:svg {:xmlns ""
         :fill "none"
         :viewBox "0 0 24 24"
         :strokeWidth "1.5"
         :stroke "currentColor"
         :aria-hidden "true"}
   [:path {:strokeLinecap "round"
           :strokeLinejoin "round"
           :d "M15 11.25l1.5 1.5.75-.75V8.758l2.276-.61a3 3 0 10-3.675-3.675l-.61 2.277H12l-.75.75 1.5 1.5M15 11.25l-8.47 8.47c-.34.34-.8.53-1.28.53s-.94.19-1.28.53l-.97.97-.75-.75.97-.97c.34-.34.53-.8.53-1.28s.19-.94.53-1.28L12.75 9M15 11.25L12.75 9"}]])
0f6d1660942d182b2f9c22adb76c7fdb11854dc46a921c44d0852ac9deb119c1 | timbod7/haskell-chart | example10.hs | import Graphics.Rendering.Chart
import Graphics.Rendering.Chart.Backend.Cairo
import Data.Default.Class
import Data.Colour
import Data.Colour.Names
import Control.Lens
import System.Environment(getArgs)
-- | An error-bar chart titled \"Error Bars\": one symmetric error bar
-- per sample, overlaid with the raw data points as filled red circles.
chart = toRenderable layout
  where
    -- (x, y, x-error, y-error) samples.
    samples :: [(Double,Double,Double,Double)]
    samples = [ (x, sin (exp x), sin x / 2, cos x / 10) | x <- [1..20] ]

    errPoints  = [ symErrPoint x y dx dy | (x, y, dx, dy) <- samples ]
    dataPoints = [ (x, y) | (x, y, _, _) <- samples ]

    bars = plot_errbars_title  .~ "test"
         $ plot_errbars_values .~ errPoints
         $ def

    points = plot_points_title  .~ "test data"
           $ plot_points_values .~ dataPoints
           $ plot_points_style  .~ filledCircles 2 (opaque red)
           $ def

    layout = layout_title .~ "Error Bars"
           $ layout_plots .~ [toPlot bars, toPlot points]
           $ def
-- | Render the chart to \"example10_big.png\" with default file options.
main = renderableToFile def "example10_big.png" chart
| null | https://raw.githubusercontent.com/timbod7/haskell-chart/8c5a823652ea1b4ec2adbced4a92a8161065ead6/wiki-examples/example10.hs | haskell | import Graphics.Rendering.Chart
import Graphics.Rendering.Chart.Backend.Cairo
import Data.Default.Class
import Data.Colour
import Data.Colour.Names
import Control.Lens
import System.Environment(getArgs)
chart = toRenderable layout
where
vals :: [(Double,Double,Double,Double)]
vals = [ (x,sin (exp x),sin x/2,cos x/10) | x <- [1..20]]
bars = plot_errbars_values .~ [symErrPoint x y dx dy | (x,y,dx,dy) <- vals]
$ plot_errbars_title .~"test"
$ def
points = plot_points_style .~ filledCircles 2 (opaque red)
$ plot_points_values .~ [(x,y) | (x,y,dx,dy) <- vals]
$ plot_points_title .~ "test data"
$ def
layout = layout_title .~ "Error Bars"
$ layout_plots .~ [toPlot bars, toPlot points]
$ def
main = renderableToFile def "example10_big.png" chart
| |
78ae7a92dcfb8d47dd07ee48a99de833fc12edef066c14aa496cec2f188164b5 | rpeszek/typed-encoding | Encoding.hs |
# LANGUAGE DataKinds #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
-- | Lazy version of "Data.TypedEncoding.Conv.Text.Encoding"
-- @since 0.2.2.0
module Data.TypedEncoding.Conv.Text.Lazy.Encoding where
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TEL
import Data.TypedEncoding.Instances.Support
import qualified Data.TypedEncoding.Common.Util.TypeLits as Knds
import Data.TypedEncoding.Instances.Restriction.UTF8 ()
import Data.TypedEncoding.Instances.Restriction.ASCII ()
import Data.TypedEncoding.Unsafe (withUnsafe)
-- | Lazy version of 'Data.TypedEncoding.Conv.Text.Encoding.decodeUtf8'
-- | Decode the payload of an encoded lazy 'BL.ByteString' into lazy
-- 'TL.Text'.  The constraints require @\"r-UTF8\"@ to be a superset of
-- the outermost encoding @y@ and every remaining non-restriction
-- encoding to encode into UTF-8, so the bytes are guaranteed to be
-- valid UTF-8 and 'TEL.decodeUtf8' cannot fail at runtime.
-- NOTE(review): the type variable @t@ in the forall is unused —
-- confirm whether it can be dropped.
decodeUtf8 :: forall xs c t y ys encs. (
    Knds.UnSnoc xs ~ '(,) ys y
    , Superset "r-UTF8" y
    , encs ~ RemoveRs ys
    , AllEncodeInto "r-UTF8" encs
    ) => Enc xs c BL.ByteString -> Enc xs c TL.Text
decodeUtf8 = withUnsafe (fmap TEL.decodeUtf8)
-- | Simplified version of 'decodeUtf8' that works on a single /r-/
-- encoding.
--
-- @since 0.5.2.0
decodeUtf8_1 :: (
      Superset "r-UTF8" y
    ) => Enc '[y] c BL.ByteString -> Enc '[y] c TL.Text
decodeUtf8_1 = decodeUtf8
-- | Lazy version of 'Data.TypedEncoding.Conv.Text.Encoding.encodeUtf8'
-- | Encode the lazy 'TL.Text' payload back into a lazy
-- 'BL.ByteString'; inverse direction of 'decodeUtf8', under the same
-- UTF-8-compatibility constraints on the encoding stack.
-- NOTE(review): the type variable @t@ in the forall is unused —
-- confirm whether it can be dropped.
encodeUtf8 :: forall xs c t y ys encs. (
    Knds.UnSnoc xs ~ '(,) ys y
    , Superset "r-UTF8" y
    , encs ~ RemoveRs ys
    , AllEncodeInto "r-UTF8" encs
    ) => Enc xs c TL.Text -> Enc xs c BL.ByteString
encodeUtf8 = withUnsafe (fmap TEL.encodeUtf8)
-- | Simplified version of 'encodeUtf8' that works on a single /r-/
-- encoding.
--
-- @since 0.5.2.0
encodeUtf8_1 :: (
      Superset "r-UTF8" y
    ) => Enc '[y] c TL.Text -> Enc '[y] c BL.ByteString
encodeUtf8_1 = encodeUtf8
| simplified version of @decodeUtf8@ that works on single /r-/ encodings
@since 0.5.2.0
| simplified version of @decodeUtf8@ that works on single /r-/ encodings
@since 0.5.2.0 |
# LANGUAGE DataKinds #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
| Lazy version of " Data . . Conv . Text . Encoding "
module Data.TypedEncoding.Conv.Text.Lazy.Encoding where
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TEL
import Data.TypedEncoding.Instances.Support
import qualified Data.TypedEncoding.Common.Util.TypeLits as Knds
import Data.TypedEncoding.Instances.Restriction.UTF8 ()
import Data.TypedEncoding.Instances.Restriction.ASCII ()
import Data.TypedEncoding.Unsafe (withUnsafe)
| Lazy version of ' Data . . Conv . Text . Encoding.decodeUtf8 '
decodeUtf8 :: forall xs c t y ys encs. (
Knds.UnSnoc xs ~ '(,) ys y
, Superset "r-UTF8" y
, encs ~ RemoveRs ys
, AllEncodeInto "r-UTF8" encs
) => Enc xs c BL.ByteString -> Enc xs c TL.Text
decodeUtf8 = withUnsafe (fmap TEL.decodeUtf8)
decodeUtf8_1 :: (
Superset "r-UTF8" y
) => Enc '[y] c BL.ByteString -> Enc '[y] c TL.Text
decodeUtf8_1 = decodeUtf8
| Lazy version of ' Data . . Conv . Text . Encoding.encodeUtf8 '
encodeUtf8 :: forall xs c t y ys encs. (
Knds.UnSnoc xs ~ '(,) ys y
, Superset "r-UTF8" y
, encs ~ RemoveRs ys
, AllEncodeInto "r-UTF8" encs
) => Enc xs c TL.Text -> Enc xs c BL.ByteString
encodeUtf8 = withUnsafe (fmap TEL.encodeUtf8)
encodeUtf8_1 :: (
Superset "r-UTF8" y
) => Enc '[y] c TL.Text -> Enc '[y] c BL.ByteString
encodeUtf8_1 = encodeUtf8 |
e371f8754c12e7869dac420c47b54fabbfd2d0da2d489d67a862c7d67681346a | okuoku/nausicaa | test-sentinel.sps | ;;;
Part of : / Scheme
;;;Contents: tests for the sentinel library
Date : Tue Jul 7 , 2009
;;;
;;;Abstract
;;;
;;;
;;;
Copyright ( c ) 2009 < >
;;;
;;;This program is free software: you can redistribute it and/or modify
;;;it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or ( at
;;;your option) any later version.
;;;
;;;This program is distributed in the hope that it will be useful, but
;;;WITHOUT ANY WARRANTY; without even the implied warranty of
;;;MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details .
;;;
You should have received a copy of the GNU General Public License
;;;along with this program. If not, see </>.
;;;
#!r6rs
(import (nausicaa)
  (nausicaa language sentinel)
  (nausicaa checks))
;; Only failing checks are reported.
(check-set-mode! 'report-failed)
(display "*** testing sentinel\n")
;; The shared sentinel object satisfies the predicate ...
(check
    (sentinel? sentinel)
  => #t)
;; ... and ordinary values do not.
(check
    (sentinel? 123)
  => #f)
;; A sentinel can terminate a list walk (elements accumulate reversed).
(check
    (let ((ell (list 1 2 3 4 5 sentinel)))
      (let loop ((ell ell)
		 (res '()))
	(if (sentinel? (car ell))
	    res
	  (loop (cdr ell) (cons (car ell) res)))))
  => '(5 4 3 2 1))
;; A sentinel can signal exhaustion of a stateful iterator closure.
(check
    (let* ((ell '(1 2 3 4 5))
	   (iter (let ((ell ell))
		   (lambda ()
		     (if (null? ell)
			 sentinel
		       (begin0
			 (car ell)
			 (set! ell (cdr ell))))))))
      (let loop ((res '()))
	(let ((v (iter)))
	  (if (sentinel? v)
	      res
	    (loop (cons v res))))))
  => '(5 4 3 2 1))
;; Freshly made sentinels satisfy the predicate, are eq? to themselves,
;; but are distinct from the shared sentinel.
(let ((s (make-sentinel)))
  (check
      (sentinel? s)
    => #t)
  (check
      (eq? s s)
    => #t)
  (check
      (eq? s sentinel)
    => #f))
;;;; done
(check-report)
;;; end of file
| null | https://raw.githubusercontent.com/okuoku/nausicaa/50e7b4d4141ad4d81051588608677223fe9fb715/scheme/tests/test-sentinel.sps | scheme |
Contents: tests for the sentinel library
Abstract
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
along with this program. If not, see </>.
done
end of file | Part of : / Scheme
Date : Tue Jul 7 , 2009
Copyright ( c ) 2009 < >
the Free Software Foundation , either version 3 of the License , or ( at
General Public License for more details .
You should have received a copy of the GNU General Public License
#!r6rs
(import (nausicaa)
(nausicaa language sentinel)
(nausicaa checks))
(check-set-mode! 'report-failed)
(display "*** testing sentinel\n")
(check
(sentinel? sentinel)
=> #t)
(check
(sentinel? 123)
=> #f)
(check
(let ((ell (list 1 2 3 4 5 sentinel)))
(let loop ((ell ell)
(res '()))
(if (sentinel? (car ell))
res
(loop (cdr ell) (cons (car ell) res)))))
=> '(5 4 3 2 1))
(check
(let* ((ell '(1 2 3 4 5))
(iter (let ((ell ell))
(lambda ()
(if (null? ell)
sentinel
(begin0
(car ell)
(set! ell (cdr ell))))))))
(let loop ((res '()))
(let ((v (iter)))
(if (sentinel? v)
res
(loop (cons v res))))))
=> '(5 4 3 2 1))
(let ((s (make-sentinel)))
(check
(sentinel? s)
=> #t)
(check
(eq? s s)
=> #t)
(check
(eq? s sentinel)
=> #f))
(check-report)
|
d39d510bf5ea7aab53bfe3638f17b6c9aa6f0c1ae2226ecd196827697e8c4235 | Bannerets/camlproto | Crypto.ml | open! Base
open Js_of_ocaml
type sha1_t
class type js_sha1 = object
method init: unit -> sha1_t Js.meth
method feed : t - > Typed_array.arrayBuffer Js.t - > unit Js.meth
method feed: sha1_t -> Cstruct.buffer -> unit Js.meth
method get: sha1_t -> Typed_array.arrayBuffer Js.t Js.meth
end
let js_sha1: js_sha1 Js.t = Js.Unsafe.pure_js_expr "js_sha1"
type sha256_t
class type js_sha256 = object
method init: unit -> sha256_t Js.meth
method feed: sha256_t -> Cstruct.buffer -> unit Js.meth
method get: sha256_t -> Typed_array.arrayBuffer Js.t Js.meth
end
(* Handle to the browser-side SHA-256 object.  The original code bound
   this to the JS global "js_sha1" — an apparent copy/paste slip that
   would make every SHA256 operation below compute SHA-1 digests — so
   bind the matching "js_sha256" global instead. *)
let js_sha256: js_sha256 Js.t = Js.Unsafe.pure_js_expr "js_sha256"
type aes_t
class type js_aes = object
method ecbCreateKey: Cstruct.buffer -> aes_t Js.meth
method ecbEncrypt: aes_t -> Cstruct.buffer -> Typed_array.arrayBuffer Js.t Js.meth
method ecbDecrypt: aes_t -> Cstruct.buffer -> Typed_array.arrayBuffer Js.t Js.meth
end
let js_aes: js_aes Js.t = Js.Unsafe.pure_js_expr "js_aes"
(* Browser implementation of the [PlatformTypes.Crypto] interface,
   delegating to pre-loaded JS globals through the class types above.
   [Cstruct] inputs are passed as bigarrays; results come back as JS
   ArrayBuffers and are converted via [Typed_array.Bigstring]. *)
module Crypto: PlatformTypes.Crypto = struct
  (* Incremental SHA-1 backed by the [js_sha1] global. *)
  module SHA1 = struct
    type t = sha1_t
    let init () = js_sha1##init ()
    let feed t cs = js_sha1##feed t (Cstruct.to_bigarray cs)
    let get t = js_sha1##get t
      |> Typed_array.Bigstring.of_arrayBuffer
      |> Cstruct.of_bigarray
    (* One-shot digest: init, feed once, read the result. *)
    let digest cs =
      let t = init () in
      feed t cs;
      get t
  end
  (* Incremental SHA-256; same shape as SHA1, backed by the js_sha256
     binding above. *)
  module SHA256 = struct
    type t = sha256_t
    let init () = js_sha256##init ()
    let feed t cs = js_sha256##feed t (Cstruct.to_bigarray cs)
    let get t = js_sha256##get t
      |> Typed_array.Bigstring.of_arrayBuffer
      |> Cstruct.of_bigarray
    let digest cs =
      let t = init () in
      feed t cs;
      get t
  end
  (* AES-ECB key setup and en/decryption. *)
  module AES = struct
    type key = aes_t
    let ecb_create_key cs = js_aes##ecbCreateKey (Cstruct.to_bigarray cs)
    let ecb_encrypt ~key cs = js_aes##ecbEncrypt key (Cstruct.to_bigarray cs)
      |> Typed_array.Bigstring.of_arrayBuffer
      |> Cstruct.of_bigarray
    let ecb_decrypt ~key cs = js_aes##ecbDecrypt key (Cstruct.to_bigarray cs)
      |> Typed_array.Bigstring.of_arrayBuffer
      |> Cstruct.of_bigarray
  end
end
| null | https://raw.githubusercontent.com/Bannerets/camlproto/d7c023f573ce6a9e7801aaa0962946f2f8cdc675/src/platform/js/Crypto.ml | ocaml | open! Base
open Js_of_ocaml
type sha1_t
class type js_sha1 = object
method init: unit -> sha1_t Js.meth
method feed : t - > Typed_array.arrayBuffer Js.t - > unit Js.meth
method feed: sha1_t -> Cstruct.buffer -> unit Js.meth
method get: sha1_t -> Typed_array.arrayBuffer Js.t Js.meth
end
let js_sha1: js_sha1 Js.t = Js.Unsafe.pure_js_expr "js_sha1"
type sha256_t
class type js_sha256 = object
method init: unit -> sha256_t Js.meth
method feed: sha256_t -> Cstruct.buffer -> unit Js.meth
method get: sha256_t -> Typed_array.arrayBuffer Js.t Js.meth
end
(* Handle to the browser-side SHA-256 object.  The original code bound
   this to the JS global "js_sha1" — an apparent copy/paste slip that
   would make every SHA256 operation compute SHA-1 digests — so bind
   the matching "js_sha256" global instead. *)
let js_sha256: js_sha256 Js.t = Js.Unsafe.pure_js_expr "js_sha256"
type aes_t
class type js_aes = object
method ecbCreateKey: Cstruct.buffer -> aes_t Js.meth
method ecbEncrypt: aes_t -> Cstruct.buffer -> Typed_array.arrayBuffer Js.t Js.meth
method ecbDecrypt: aes_t -> Cstruct.buffer -> Typed_array.arrayBuffer Js.t Js.meth
end
let js_aes: js_aes Js.t = Js.Unsafe.pure_js_expr "js_aes"
module Crypto: PlatformTypes.Crypto = struct
module SHA1 = struct
type t = sha1_t
let init () = js_sha1##init ()
let feed t cs = js_sha1##feed t (Cstruct.to_bigarray cs)
let get t = js_sha1##get t
|> Typed_array.Bigstring.of_arrayBuffer
|> Cstruct.of_bigarray
let digest cs =
let t = init () in
feed t cs;
get t
end
module SHA256 = struct
type t = sha256_t
let init () = js_sha256##init ()
let feed t cs = js_sha256##feed t (Cstruct.to_bigarray cs)
let get t = js_sha256##get t
|> Typed_array.Bigstring.of_arrayBuffer
|> Cstruct.of_bigarray
let digest cs =
let t = init () in
feed t cs;
get t
end
module AES = struct
type key = aes_t
let ecb_create_key cs = js_aes##ecbCreateKey (Cstruct.to_bigarray cs)
let ecb_encrypt ~key cs = js_aes##ecbEncrypt key (Cstruct.to_bigarray cs)
|> Typed_array.Bigstring.of_arrayBuffer
|> Cstruct.of_bigarray
let ecb_decrypt ~key cs = js_aes##ecbDecrypt key (Cstruct.to_bigarray cs)
|> Typed_array.Bigstring.of_arrayBuffer
|> Cstruct.of_bigarray
end
end
| |
d87fe9d091000a8eb827ad2d8860769163f9a312da06233eaf1667a76dd20f03 | triffon/fp-2021-22 | code.rkt | #lang racket
(define-syntax-rule (my-delay x) (lambda () x))
(define (my-delay2 x) (lambda () x))
(define (my-force p) (p))
(define-syntax-rule (my-stream-cons x s)
(cons x (my-delay s)))
(define (my-stream-first s)
(car s))
(define (my-stream-rest s)
(my-force (cdr s)))
(define my-empty-stream 'empty-stream)
(define (my-stream-empty? s)
(equal? s 'empty-stream))
(define (my-list-to-stream l)
(if (null? l)
my-empty-stream
(my-stream-cons (car l) (my-list-to-stream (cdr l)))))
; задачи
(define (nats-after x)
(my-stream-cons x (nats-after (+ 1 x))))
(define nats (nats-after 0))
(define nats1 (nats-after 1))
(define nats2 (nats-after 2))
(define (my-take-from-stream s n)
(if (or (= n 0) (my-stream-empty? s))
'()
(cons (my-stream-first s) (my-take-from-stream (my-stream-rest s) (- n 1)))))
(define (my-nth-from-stream s n)
(if (= n 0)
(my-stream-first s)
(my-nth-from-stream (my-stream-rest s) (- n 1))))
(define (my-stream-filter p s)
(cond
((my-stream-empty? s) my-empty-stream)
((p (my-stream-first s))
(my-stream-cons (my-stream-first s) (my-stream-filter p (my-stream-rest s))))
(else
(my-stream-filter p (my-stream-rest s)))))
(define (my-stream-map f s)
(if (my-stream-empty? s)
my-empty-stream
(my-stream-cons (f (my-stream-first s)) (my-stream-map f (my-stream-rest s)))))
; вариант с безкрайно сито на Ератостен
(define (is-divider? x n)
(= (remainder n x) 0))
(define (filter-not-divided-by div s)
(my-stream-filter (lambda (f) (not (is-divider? div f))) s))
(define (primes-iter pp)
(my-stream-cons
(my-stream-first pp)
(primes-iter (filter-not-divided-by (my-stream-first pp) (my-stream-rest pp)))))
(define primes2 (my-stream-cons 1 (primes-iter nats2)))
число ( лесен )
(define (prime? n)
тук имплементираме проверка за просто число
(define primes (my-stream-filter prime? nats))
(define (iterate f x)
(my-stream-cons x (iterate f (f x))))
(define (iterate2 f x)
(my-stream-cons x (my-stream-map f (iterate2 f x)))) | null | https://raw.githubusercontent.com/triffon/fp-2021-22/e8e71eb7f36b9e8f9ec59e336def384e063208a8/exercises/3/09-scheme-stream/code.rkt | racket | задачи
вариант с безкрайно сито на Ератостен | #lang racket
(define-syntax-rule (my-delay x) (lambda () x))
(define (my-delay2 x) (lambda () x))
(define (my-force p) (p))
(define-syntax-rule (my-stream-cons x s)
(cons x (my-delay s)))
(define (my-stream-first s)
(car s))
(define (my-stream-rest s)
(my-force (cdr s)))
(define my-empty-stream 'empty-stream)
(define (my-stream-empty? s)
(equal? s 'empty-stream))
(define (my-list-to-stream l)
(if (null? l)
my-empty-stream
(my-stream-cons (car l) (my-list-to-stream (cdr l)))))
(define (nats-after x)
(my-stream-cons x (nats-after (+ 1 x))))
(define nats (nats-after 0))
(define nats1 (nats-after 1))
(define nats2 (nats-after 2))
(define (my-take-from-stream s n)
(if (or (= n 0) (my-stream-empty? s))
'()
(cons (my-stream-first s) (my-take-from-stream (my-stream-rest s) (- n 1)))))
(define (my-nth-from-stream s n)
(if (= n 0)
(my-stream-first s)
(my-nth-from-stream (my-stream-rest s) (- n 1))))
(define (my-stream-filter p s)
(cond
((my-stream-empty? s) my-empty-stream)
((p (my-stream-first s))
(my-stream-cons (my-stream-first s) (my-stream-filter p (my-stream-rest s))))
(else
(my-stream-filter p (my-stream-rest s)))))
(define (my-stream-map f s)
(if (my-stream-empty? s)
my-empty-stream
(my-stream-cons (f (my-stream-first s)) (my-stream-map f (my-stream-rest s)))))
(define (is-divider? x n)
(= (remainder n x) 0))
(define (filter-not-divided-by div s)
(my-stream-filter (lambda (f) (not (is-divider? div f))) s))
(define (primes-iter pp)
(my-stream-cons
(my-stream-first pp)
(primes-iter (filter-not-divided-by (my-stream-first pp) (my-stream-rest pp)))))
(define primes2 (my-stream-cons 1 (primes-iter nats2)))
число ( лесен )
(define (prime? n)
тук имплементираме проверка за просто число
(define primes (my-stream-filter prime? nats))
(define (iterate f x)
(my-stream-cons x (iterate f (f x))))
(define (iterate2 f x)
(my-stream-cons x (my-stream-map f (iterate2 f x)))) |
bef63ed280a6b5063c826d263f597f803e8e575eba44b7cf2e75c0f4b1e1145f | fragnix/fragnix | Data.IntMap.Lazy.hs | # LANGUAGE Haskell98 #
{-# LINE 1 "Data/IntMap/Lazy.hs" #-}
# LANGUAGE CPP #
{-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.IntMap.Lazy
Copyright : ( c ) 2002
( c ) 2008
-- License : BSD-style
-- Maintainer :
-- Portability : portable
--
-- An efficient implementation of maps from integer keys to values
-- (dictionaries).
--
-- API of this module is strict in the keys, but lazy in the values.
-- If you need value-strict maps, use "Data.IntMap.Strict" instead.
The ' IntMap ' type itself is shared between the lazy and strict modules ,
meaning that the same ' IntMap ' value can be passed to functions in
-- both modules (although that is rarely needed).
--
-- These modules are intended to be imported qualified, to avoid name
-- clashes with Prelude functions, e.g.
--
> import Data . IntMap . Lazy ( IntMap )
> import qualified Data . IntMap . Lazy as IntMap
--
-- The implementation is based on /big-endian patricia trees/. This data
-- structure performs especially well on binary operations like 'union'
-- and 'intersection'. However, my benchmarks show that it is also
-- (much) faster on insertions and deletions when compared to a generic
-- size-balanced map implementation (see "Data.Map").
--
--    * Chris Okasaki and Andy Gill, \"/Fast Mergeable Integer Maps/\",
--      Workshop on ML, September 1998, pages 77-86,
--      <http://citeseer.ist.psu.edu/okasaki98fast.html>
--
--    * D.R. Morrison, \"/PATRICIA -- Practical Algorithm To Retrieve
--      Information Coded In Alphanumeric/\", Journal of the ACM, 15(4),
--      October 1968, pages 514-534.
--
-- Operation comments contain the operation time complexity in
the Big - O notation < > .
Many operations have a worst - case complexity of /O(min(n , W))/.
-- This means that the operation can become linear in the number of
-- elements with a maximum of /W/ -- the number of bits in an 'Int'
( 32 or 64 ) .
-----------------------------------------------------------------------------
module Data.IntMap.Lazy (
-- * Strictness properties
-- $strictness
-- * Map type
instance Eq , Show
-- * Operators
, (!), (\\)
-- * Query
, IM.null
, size
, member
, notMember
, IM.lookup
, findWithDefault
, lookupLT
, lookupGT
, lookupLE
, lookupGE
-- * Construction
, empty
, singleton
-- ** Insertion
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
-- ** Delete\/Update
, delete
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
, alterF
-- * Combine
-- ** Union
, union
, unionWith
, unionWithKey
, unions
, unionsWith
-- ** Difference
, difference
, differenceWith
, differenceWithKey
-- ** Intersection
, intersection
, intersectionWith
, intersectionWithKey
-- ** Universal combining function
, mergeWithKey
-- * Traversal
-- ** Map
, IM.map
, mapWithKey
, traverseWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeys
, mapKeysWith
, mapKeysMonotonic
-- * Folds
, IM.foldr
, IM.foldl
, foldrWithKey
, foldlWithKey
, foldMapWithKey
-- ** Strict folds
, foldr'
, foldl'
, foldrWithKey'
, foldlWithKey'
-- * Conversion
, elems
, keys
, assocs
, keysSet
, fromSet
-- ** Lists
, toList
, fromList
, fromListWith
, fromListWithKey
-- ** Ordered lists
, toAscList
, toDescList
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
-- * Filter
, IM.filter
, filterWithKey
, restrictKeys
, withoutKeys
, partition
, partitionWithKey
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, split
, splitLookup
, splitRoot
-- * Submap
, isSubmapOf, isSubmapOfBy
, isProperSubmapOf, isProperSubmapOfBy
*
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, updateMin
, updateMax
, updateMinWithKey
, updateMaxWithKey
, minView
, maxView
, minViewWithKey
, maxViewWithKey
-- * Debugging
, showTree
, showTreeWith
) where
import Data.IntMap.Internal as IM hiding (showTree, showTreeWith)
import Data.IntMap.Internal.DeprecatedDebug
-- $strictness
--
-- This module satisfies the following strictness property:
--
* Key arguments are evaluated to WHNF
--
-- Here are some examples that illustrate the property:
--
-- > insertWith (\ new old -> old) undefined v m == undefined
-- > insertWith (\ new old -> old) k undefined m == OK
-- > delete undefined m == undefined
| null | https://raw.githubusercontent.com/fragnix/fragnix/b9969e9c6366e2917a782f3ac4e77cce0835448b/tests/packages/scotty/Data.IntMap.Lazy.hs | haskell | # LINE 1 "Data/IntMap/Lazy.hs" #
# LANGUAGE Safe #
---------------------------------------------------------------------------
|
Module : Data.IntMap.Lazy
License : BSD-style
Maintainer :
Portability : portable
An efficient implementation of maps from integer keys to values
(dictionaries).
API of this module is strict in the keys, but lazy in the values.
If you need value-strict maps, use "Data.IntMap.Strict" instead.
both modules (although that is rarely needed).
These modules are intended to be imported qualified, to avoid name
clashes with Prelude functions, e.g.
The implementation is based on /big-endian patricia trees/. This data
structure performs especially well on binary operations like 'union'
and 'intersection'. However, my benchmarks show that it is also
(much) faster on insertions and deletions when compared to a generic
size-balanced map implementation (see "Data.Map").
<>
Practical Algorithm To Retrieve
Operation comments contain the operation time complexity in
This means that the operation can become linear in the number of
elements with a maximum of /W/ -- the number of bits in an 'Int'
---------------------------------------------------------------------------
* Strictness properties
$strictness
* Map type
* Operators
* Query
* Construction
** Insertion
** Delete\/Update
* Combine
** Union
** Difference
** Intersection
** Universal combining function
* Traversal
** Map
* Folds
** Strict folds
* Conversion
** Lists
** Ordered lists
* Filter
* Submap
* Debugging
$strictness
This module satisfies the following strictness property:
Here are some examples that illustrate the property:
> insertWith (\ new old -> old) undefined v m == undefined
> insertWith (\ new old -> old) k undefined m == OK
> delete undefined m == undefined | # LANGUAGE Haskell98 #
# LANGUAGE CPP #
Copyright : ( c ) 2002
( c ) 2008
The ' IntMap ' type itself is shared between the lazy and strict modules ,
meaning that the same ' IntMap ' value can be passed to functions in
> import Data . IntMap . Lazy ( IntMap )
> import qualified Data . IntMap . Lazy as IntMap
* and , \"/Fast Maps/\ " ,
Workshop on ML , September 1998 , pages 77 - 86 ,
Information Coded In Alphanumeric/\ " , Journal of the ACM , 15(4 ) ,
October 1968 , pages 514 - 534 .
the Big - O notation < > .
Many operations have a worst - case complexity of /O(min(n , W))/.
( 32 or 64 ) .
module Data.IntMap.Lazy (
instance Eq , Show
, (!), (\\)
, IM.null
, size
, member
, notMember
, IM.lookup
, findWithDefault
, lookupLT
, lookupGT
, lookupLE
, lookupGE
, empty
, singleton
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
, delete
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
, alterF
, union
, unionWith
, unionWithKey
, unions
, unionsWith
, difference
, differenceWith
, differenceWithKey
, intersection
, intersectionWith
, intersectionWithKey
, mergeWithKey
, IM.map
, mapWithKey
, traverseWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeys
, mapKeysWith
, mapKeysMonotonic
, IM.foldr
, IM.foldl
, foldrWithKey
, foldlWithKey
, foldMapWithKey
, foldr'
, foldl'
, foldrWithKey'
, foldlWithKey'
, elems
, keys
, assocs
, keysSet
, fromSet
, toList
, fromList
, fromListWith
, fromListWithKey
, toAscList
, toDescList
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
, IM.filter
, filterWithKey
, restrictKeys
, withoutKeys
, partition
, partitionWithKey
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, split
, splitLookup
, splitRoot
, isSubmapOf, isSubmapOfBy
, isProperSubmapOf, isProperSubmapOfBy
*
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, updateMin
, updateMax
, updateMinWithKey
, updateMaxWithKey
, minView
, maxView
, minViewWithKey
, maxViewWithKey
, showTree
, showTreeWith
) where
import Data.IntMap.Internal as IM hiding (showTree, showTreeWith)
import Data.IntMap.Internal.DeprecatedDebug
* Key arguments are evaluated to WHNF
|
3029f0738044a693b1b0ae1ad5256f1ed8405d9368c5e29240254aec6a43b009 | justinethier/cyclone | simple.scm | ;; Experimenting with primitives and continuations.
;; There are several primitives that do not require conts. Can we
;; compile them in such as way that they are not wrapped in a cont?
;; idea is to reduce compiled code, and number of allocated closures.
(import
(scheme base)
(scheme write))
(define (test a b c)
(write
(cons
(+ a b c)
(- a b c))))
(test 1 2 3)
| null | https://raw.githubusercontent.com/justinethier/cyclone/a1c2a8f282f37ce180a5921ae26a5deb04768269/tests/debug/compilation/simple.scm | scheme | Experimenting with primitives and continuations.
There are several primitives that do not require conts. Can we
compile them in such as way that they are not wrapped in a cont?
idea is to reduce compiled code, and number of allocated closures. | (import
(scheme base)
(scheme write))
(define (test a b c)
(write
(cons
(+ a b c)
(- a b c))))
(test 1 2 3)
|
9bb8d4760dc26c5476eb59a1b2cb721fc5ae3a9955ec92db62ac5163350c0818 | futurice/haskell-mega-repo | SubcontractorHoursNotifications.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Futurice.App.Reports.SubcontractorHoursNotifications where
import Data.Aeson (object, (.=))
import Data.Time.Calendar (addGregorianMonthsClip)
import Data.Time.Calendar.WeekDate (toWeekDate)
import Futurice.Integrations (beginningOfCurrMonth)
import Futurice.Prelude
import Prelude ()
import Futurice.App.Reports.Config
import Futurice.App.Reports.Ctx
import Futurice.App.Reports.Templates
import qualified Futurice.App.EmailProxy.Client as E
import qualified Futurice.App.EmailProxy.Types as E
import qualified Personio as P
| Check that is the last non - weekend day of the month
checkNotificationsDay :: Day -> LogT IO Text -> LogT IO Text
checkNotificationsDay day m = do
let days = [beginningOfCurrMonth day .. endOfCurrMonth day]
let lastWeekDayOfMonth = last $ filter (\x -> toWeekDate x ^. _3 `notElem` [6,7]) days
if day == lastWeekDayOfMonth then
m
else do
logInfo "Not the last weekday of the month" day
return "ERR: Other"
where
endOfCurrMonth = pred . addGregorianMonthsClip 1 . beginningOfCurrMonth
activeSubcontractorPredicate :: Day -> P.Employee -> Bool
activeSubcontractorPredicate _d p = and
[ p ^. P.employeeEmploymentType == Just P.External
, p ^. P.employeeStatus == P.Active
]
subcontractorHoursNotifications :: Ctx -> IO Text
subcontractorHoursNotifications ctx = runLogT "subcontractor-hours-notifications" lgr $ do
day <- currentDay
checkNotificationsDay day $ do
subcontractors <- liftIO $ runIntegrations' ctx $ P.personio P.PersonioEmployees
let subcontractors' = filter (activeSubcontractorPredicate day) subcontractors
for_ subcontractors' $ \p -> do
let params = object
[ "name" .= (p ^. P.employeeFirst)
]
case p ^. P.employeeEmail of
Nothing -> logAttention "Subcontractor without email" (p ^. P.employeeFullname)
Just addr -> do
x <- liftIO $ tryDeep $ E.sendEmail mgr emailProxyBurl $ E.emptyReq (E.fromEmail addr)
& E.reqSubject .~ "Reminder: All hours for the month to be reported today"
& E.reqBody .~ renderMustache subcontractorHoursEmailTemplate params ^. strict
case x of
Left exc -> logAttention "sendEmail failed" (show exc)
Right () -> return ()
return "OK"
where
mgr = ctxManager ctx
lgr = ctxLogger ctx
cfg = ctxConfig ctx
emailProxyBurl = cfgEmailProxyBaseurl cfg
| null | https://raw.githubusercontent.com/futurice/haskell-mega-repo/2647723f12f5435e2edc373f6738386a9668f603/reports-app/src/Futurice/App/Reports/SubcontractorHoursNotifications.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE TemplateHaskell #
module Futurice.App.Reports.SubcontractorHoursNotifications where
import Data.Aeson (object, (.=))
import Data.Time.Calendar (addGregorianMonthsClip)
import Data.Time.Calendar.WeekDate (toWeekDate)
import Futurice.Integrations (beginningOfCurrMonth)
import Futurice.Prelude
import Prelude ()
import Futurice.App.Reports.Config
import Futurice.App.Reports.Ctx
import Futurice.App.Reports.Templates
import qualified Futurice.App.EmailProxy.Client as E
import qualified Futurice.App.EmailProxy.Types as E
import qualified Personio as P
| Check that is the last non - weekend day of the month
checkNotificationsDay :: Day -> LogT IO Text -> LogT IO Text
checkNotificationsDay day m = do
let days = [beginningOfCurrMonth day .. endOfCurrMonth day]
let lastWeekDayOfMonth = last $ filter (\x -> toWeekDate x ^. _3 `notElem` [6,7]) days
if day == lastWeekDayOfMonth then
m
else do
logInfo "Not the last weekday of the month" day
return "ERR: Other"
where
endOfCurrMonth = pred . addGregorianMonthsClip 1 . beginningOfCurrMonth
activeSubcontractorPredicate :: Day -> P.Employee -> Bool
activeSubcontractorPredicate _d p = and
[ p ^. P.employeeEmploymentType == Just P.External
, p ^. P.employeeStatus == P.Active
]
subcontractorHoursNotifications :: Ctx -> IO Text
subcontractorHoursNotifications ctx = runLogT "subcontractor-hours-notifications" lgr $ do
day <- currentDay
checkNotificationsDay day $ do
subcontractors <- liftIO $ runIntegrations' ctx $ P.personio P.PersonioEmployees
let subcontractors' = filter (activeSubcontractorPredicate day) subcontractors
for_ subcontractors' $ \p -> do
let params = object
[ "name" .= (p ^. P.employeeFirst)
]
case p ^. P.employeeEmail of
Nothing -> logAttention "Subcontractor without email" (p ^. P.employeeFullname)
Just addr -> do
x <- liftIO $ tryDeep $ E.sendEmail mgr emailProxyBurl $ E.emptyReq (E.fromEmail addr)
& E.reqSubject .~ "Reminder: All hours for the month to be reported today"
& E.reqBody .~ renderMustache subcontractorHoursEmailTemplate params ^. strict
case x of
Left exc -> logAttention "sendEmail failed" (show exc)
Right () -> return ()
return "OK"
where
mgr = ctxManager ctx
lgr = ctxLogger ctx
cfg = ctxConfig ctx
emailProxyBurl = cfgEmailProxyBaseurl cfg
|
289eb787981b8cf0b99b0a428c1c0ea800a4a65b0dbdfe942ec938dd4c320baa | logicmoo/wam_common_lisp | util.lsp | (in-package #:compiler)
(defvar file-list
'( "defmacro.lsp" "evalmacros.lsp" "top.lsp"
"module.lsp" "predlib.lsp" "setf.lsp"
"arraylib.lsp" "assert.lsp" "defstruct.lsp"
"describe.lsp" "iolib.lsp" "listlib.lsp"
"mislib.lsp" "numlib.lsp" "packlib.lsp"
"seq.lsp" "seqlib.lsp" "trace.lsp"
"thread.lsp" "loop.lsp"))
(load "../cmp/make-declare.lsp")
(dolist (file file-list)
(sys::proclaim-file file "/tmp/try.lsp"))
| null | https://raw.githubusercontent.com/logicmoo/wam_common_lisp/4396d9e26b050f68182d65c9a2d5a939557616dd/prolog/wam_cl/src/lsp/util.lsp | lisp | (in-package #:compiler)
(defvar file-list
'( "defmacro.lsp" "evalmacros.lsp" "top.lsp"
"module.lsp" "predlib.lsp" "setf.lsp"
"arraylib.lsp" "assert.lsp" "defstruct.lsp"
"describe.lsp" "iolib.lsp" "listlib.lsp"
"mislib.lsp" "numlib.lsp" "packlib.lsp"
"seq.lsp" "seqlib.lsp" "trace.lsp"
"thread.lsp" "loop.lsp"))
(load "../cmp/make-declare.lsp")
(dolist (file file-list)
(sys::proclaim-file file "/tmp/try.lsp"))
| |
025f173099f31904982c7baa245f7b13e0baf28387395855d9231aad92a4d20a | ygmpkk/house | Container.hs | module Container where
import GadgetsPrelude
import Components
import Button
import Display
import Area(subtractArea)
import Useful(mapC)
import Layout
data WrapAttributes = WrapAttributes Int DrawFun
instance HasBorder WrapAttributes where
border b (WrapAttributes _ df) = (WrapAttributes b df)
instance HasPicture WrapAttributes where
picture df (WrapAttributes b _) = (WrapAttributes b df)
wrap = wrap' id
wrap' :: Change WrapAttributes -> Gadget -> Gadget
wrap' cwa g =
myNameIs "wrap" $
readState $ \(GadgetState (_,(lors,lorq),_) osc gap) ->
create g with layout wires connected to me
duplex $ \(gw,wg) ->
claim (fst wg) $
spawnWithState g (GadgetState (nco,gw,(nci,nco)) osc gap) $
duplex $ \(wsm,smw) ->
let (fgc,bgc,flc,shc,lic,hic,foc,enc,dic,c1,c2,c3,c4,c5) = colourset
(WrapAttributes b df) = cwa (WrapAttributes 0 (colourbox flc))
me = opFromIp (fst smw)
wc :: Out LORequest -> ImageID -> In LORequest -> In SMResponse -> Gadget
wc lorq me rq smrq =
let wc' = wc lorq me rq smrq in
rx [
from rq $ \l -> case l of
LOSize s fm fo ->
let s' = pairop (+) s (db,db)
fm' p = (moveImage me p):fm (b,b)
fo' p = (moveImage me p):(resizeImage me s'):fo (b,b)
in
tx lorq (LOSize s' fm' fo') $
wc'
LOInit _ _ _ _ ->
error "wrap: wrapped gadget sent another LOInit",
from smrq $ \_ ->
wc'
] (rxFail "wrap")
db = b+b
in
rx [
from (fst wg) $ \l -> case l of
LOInit s fm d cs ->
let s' = pairop (+) (db,db) s
fm' p = [moveImage me p]
ca _ _ c = c
uca _ _ c = c
d' p = [mkImage me (p,pairop (+) s' p) df True ca uca (d (b,b)) False]
cs' = wsm:cs
in
claim (fst smw) $
setGadgetWires (snd smw,(lors,lorq),smw) $
tx lorq (LOInit s' fm' d' cs') $
wc lorq me (fst wg) (fst smw)
] (rxFail "wrap")
data BoxAttributes = BoxAttributes Size DrawFun
instance HasSize BoxAttributes where
size s (BoxAttributes _ df) = (BoxAttributes s df)
instance HasPicture BoxAttributes where
picture df (BoxAttributes s _) = (BoxAttributes s df)
instance HasWidth BoxAttributes where
width w (BoxAttributes (_,h) df) = (BoxAttributes (w,h) df)
instance HasHeight BoxAttributes where
height h (BoxAttributes (w,_) df) = (BoxAttributes (w,h) df)
box = box' id
box' :: Change BoxAttributes -> Gadget -> Gadget
box' cwa g =
myNameIs "box" $
readState $ \(GadgetState (_,(lors,lorq),_) osc gap) ->
create g with layout wires connected to me
duplex $ \(gw,wg) ->
claim (fst wg) $
spawnWithState g (GadgetState (nco,gw,(nci,nco)) osc gap) $
duplex $ \(wsm,smw) ->
let (fgc,bgc,flc,shc,lic,hic,foc,enc,dic,c1,c2,c3,c4,c5) = colourset
(BoxAttributes (bx,by) df) = cwa (BoxAttributes (0,0) (colourbox flc))
me = opFromIp (fst smw)
wc :: Out LORequest -> ImageID -> In LORequest -> In SMResponse -> Gadget
wc lorq me rq smrq =
let wc' = wc lorq me rq smrq in
rx [
from rq $ \l -> case l of
LOSize (cx,cy) fm fo ->
let (ax,px) = if cx > bx then (cx,0) else (bx,(bx-cx)`div`2)
(ay,py) = if cy > by then (cy,0) else (by,(by-cy)`div`2)
fm' p = (moveImage me p):fm (px,py)
fo' p = (moveImage me p):(resizeImage me (ax,ay)):fo (px,py)
in
tx lorq (LOSize (ax,ay) fm' fo') $
wc'
LOInit _ _ _ _ ->
error "box: boxed gadget sent another LOInit",
from smrq $ \_ ->
wc'
] (rxFail "box")
in
rx [
from (fst wg) $ \l -> case l of
LOInit (cx,cy) fm d cs ->
let (ax,px) = if cx > bx then (cx,0) else (bx,(bx-cx)`div`2)
(ay,py) = if cy > by then (cy,0) else (by,(by-cy)`div`2)
fm' p = [moveImage me p]
ca _ _ c = c
uca _ _ c = c
d' p = [mkImage me (p,pairop (+) (ax,ay) p) df True ca uca (d (px,py)) False]
cs' = wsm:cs
in
claim (fst smw) $
setGadgetWires (snd smw,(lors,lorq),smw) $
tx lorq (LOInit (ax,ay) fm' d' cs') $
wc lorq me (fst wg) (fst smw)
] (rxFail "box")
-- SLIDERS --
width height wholeX wholeY visibleX visibleY posY
wholeXIn wholeYIn posXIn posYIn
data Slider = Slider Int Int Int Int Int Int Int Int
(In Int) (In Int) (In (Change Int))
(In (Change Int)) (In (Change Int)) (In (Change Int))
(Out Int) (Out Int) (In (Change Size))
instance HasWidth Slider where
width w (Slider _ h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
instance HasHeight Slider where
height h (Slider w _ wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderWholeX :: Int -> Change Slider
sliderWholeX wx (Slider w h _ wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderWholeY :: Int -> Change Slider
sliderWholeY wy (Slider w h wx _ vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderVisibleX :: Int -> Change Slider
sliderVisibleX vx (Slider w h wx wy _ vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderVisibleY :: Int -> Change Slider
sliderVisibleY vy (Slider w h wx wy vx _ px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosX :: Int -> Change Slider
sliderPosX px (Slider w h wx wy vx vy _ py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosY :: Int -> Change Slider
sliderPosY py (Slider w h wx wy vx vy px _ wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderWholeXIn :: In Int -> Change Slider
sliderWholeXIn wxi (Slider w h wx wy vx vy px py _ wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderWholeYIn :: In Int -> Change Slider
sliderWholeYIn wyi (Slider w h wx wy vx vy px py wxi _ vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderVisibleXIn :: In (Change Int) -> Change Slider
sliderVisibleXIn vxi (Slider w h wx wy vx vy px py wxi wyi _ vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderVisibleYIn :: In (Change Int) -> Change Slider
sliderVisibleYIn vyi (Slider w h wx wy vx vy px py wxi wyi vxi _ pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosXIn :: In (Change Int) -> Change Slider
sliderPosXIn pxi (Slider w h wx wy vx vy px py wxi wyi vxi vyi _ pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosYIn :: In (Change Int) -> Change Slider
sliderPosYIn pyi (Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi _ pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosXOut :: Out Int -> Change Slider
sliderPosXOut pxo (Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi _ pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosYOut :: Out Int -> Change Slider
sliderPosYOut pyo (Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo _ dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderChangeSize :: In (Change Size) -> Change Slider
sliderChangeSize dsi (Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo _) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
slider = slider' id
slider' :: Change Slider -> Gadget
slider' cs =
let (Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo ds) =
cs (Slider 100 100 1 1 1 1 0 0 nci nci nci nci nci nci nco nco nci)
in
claim wxi $
claim wyi $
claim vxi $
claim vyi $
claim pxi $
claim pyi $
claim ds $
let s sx sy wx wy vx vy px py mp =
rx [
from ds $ \dsf ->
let (sx',sy') = dsf (sx,sy) in
setSize (sx',sy') $
rds sx' sy' wx wy vx vy px py mp,
from wxi $ \wx' ->
rds sx sy wx' wy vx vy px py mp,
from wyi $ \wy' ->
rds sx sy wx wy' vx vy px py mp,
from vxi $ \dvx ->
rds sx sy wx wy (dvx vx) vy px py mp,
from vyi $ \dvy ->
rds sx sy wx wy vx (dvy vy) px py mp,
from pxi $ \dpx ->
rds sx sy wx wy vx vy (dpx px) py mp,
from pyi $ \dpy ->
rds sx sy wx wy vx vy px (dpy py) mp,
fromSM $ \r -> case r of
SMMouseClick x1 y1 b ->
txSM (SMDrawFun (slidedf True w h wx wy vx vy px py)) $
s sx sy wx wy vx vy px py (Yes (x1,y1))
SMMouseUnClick x2 y2 b ->
case mp of
Yes (x1,y1) ->
let (dx,dy) = (((x2-x1)*wx) `div` sx, ((y2-y1)*wy) `div` sy)
(px2,py2) = (px+dx,py+dy)
(wx',wy',vx',vy',px',py') = confine wx wy vx vy px2 py2 in
txSM (SMDrawFun (slidedf False w h wx' wy' vx' vy' px' py')) $
when (px' /= px) (tx pxo px') $
when (py' /= py) (tx pyo py') $
s sx sy wx' wy' vx' vy' px' py' None
None ->
txSM (SMDrawFun (slidedf False w h wx wy vx vy px py)) $
s sx sy wx wy vx vy px py mp
otherwise ->
s sx sy wx wy vx vy px py mp
] (rxFail "slider")
rds sx' sy' wx wy vx vy px py mp =
let (wx',wy',vx',vy',px',py') = confine wx wy vx vy px py in
txSM (SMDrawFun (slidedf False w h wx' wy' vx' vy' px' py')) $
tx pxo px' $
tx pyo py' $
s sx' sy' wx' wy' vx' vy' px' py' mp
(fgc,bgc,flc,shc,lic,hic,foc,enc,dic,c1,c2,c3,c4,c5) = colourset
slidedf :: Bool -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> DrawFun
slidedf p _ _ wx wy vx vy px py (w,h) _ =
let w' = w - 6
h' = h - 6
x = 3 + ((px*w') `div` wx)
y = 3 + ((py*h') `div` wy)
sx = ((vx*w') `div` wx) - 1
sy = ((vy*h') `div` wy) - 1
(ca,cb,cc) = if p then (shc,lic,hic) else (shc,lic,enc)
in
plinth 3 shc lic flc (0,0) (w-1,h-1)++
plinth 3 ca cb cc (x,y) (sx,sy)++
[DrawSetColour bgc,fillbox ((x+3,y+3),(x+sx-3,y+sy-3)),DrawSetColour flc]++
concat (map fillrect (subtractArea ((x,y),(x+sx+1,y+sy+1)) [((3,3),(w-3,h-3))]))
confine :: Int -> Int -> Int -> Int -> Int -> Int -> (Int,Int,Int,Int,Int,Int)
confine wx wy vx vy px py =
let px' = if px+vx > wx then wx-vx else if px < 0 then 0 else px
py' = if py+vy > wy then wy-vy else if py < 0 then 0 else py in
(wx,wy,vx,vy,px',py')
in
initGadget (w,h) (slidedf False w h wx wy vx vy px py) $
s w h wx wy vx vy px py None
sliderx = sliderx' id
sliderx' :: Change Slider -> Gadget
sliderx' cs =
wire $ \px ->
wire $ \sc ->
let ds = (Slider 1 1 1 1 1 1 0 0 nci nci nci nci nci nci nco nco nci)
(Slider w h _ _ _ _ _ _ _ _ _ _ _ _ _ _ si) = cs ds
h' = h - 6
bl = button' (width h'.height h'.border 3.picture leftarrow.buttonMomentary) (op px) ((+)(-10))
br = button' (width h'.height h'.border 3.picture rightarrow.buttonMomentary) (op px) ((+)10)
s = slider' (sliderChangeSize (ip sc).sliderPosXIn (ip px).width (w-h-h).height h.cs)
in
setGapSize 0 $
spawn (mapC (\f -> (\(x,y)->(fst (f (x,y)),y))) si (op sc)) $
wrap (bl <-> s <-> br)
slidery = slidery' id
slidery' :: Change Slider -> Gadget
slidery' cs =
wire $ \py ->
wire $ \sc ->
let ds = (Slider 1 1 1 1 1 1 0 0 nci nci nci nci nci nci nco nco nci)
(Slider w h _ _ _ _ _ _ _ _ _ _ _ _ _ _ si) = cs ds
w' = w - 6
bu = button' (width w'.height w'.border 3.picture uparrow.buttonMomentary) (op py) ((+)(-10))
bd = button' (width w'.height w'.border 3.picture downarrow.buttonMomentary) (op py) ((+)10)
s = slider' (sliderChangeSize (ip sc).sliderPosYIn (ip py).width w.height (h-w-w).cs)
in
setGapSize 0 $
spawn (mapC (\f -> (\(x,y)->(x,snd (f (x,y))))) si (op sc)) $
wrap (bu <|> s <|> bd)
uparrow (x,y) _ =
let hx = x `div` 2
tqx = (7*x) `div` 8
qx = tqx `div` 7
tqy = (6*y) `div` 8
qy = tqy `div` 6
(fgc',bgc',flc,shc,lic,hic,foc,enc,dic,c1,c2,c3,c4,c5) = colourset
in
[ DrawSetColour shc,
DrawFilledTriangle ((hx,qy),(tqx,tqy),(qx,tqy))]
rightarrow = turnCW uparrow
downarrow = turnCW rightarrow
leftarrow = turnCW downarrow
viewer :: Gadget -> Size -> In (Change Size) -> In Coord -> Out Size -> Gadget
viewer g (sx,sy) ns i o =
myNameIs "viewer" $
readState $ \(GadgetState (_,(lors,lorq),_) osc gap) ->
create g with layout wires connected to me
duplex $ \(gv,vg) ->
claim (fst vg) $
spawnWithState g (GadgetState (nco,gv,(nci,nco)) osc gap) $
duplex $ \(wsm,smw) ->
let me = opFromIp (fst smw)
vc :: Size -> Coord -> (Coord -> [DisplayChange]) -> In Coord -> Out Size -> In LORequest -> In SMResponse -> Gadget
vc (sx,sy) (px,py) fm i o rq smrq =
let vc' s' p' = vc s' p' fm i o rq smrq in
rx [
from rq $ \l -> case l of
LOSize (cx,cy) fm fo ->
tx o (cx,cy) $
let fm' p = [moveImage me p]
fo' p = fo (px,py)
in
tx lorq (LOSize (sx,sy) fm' fo') $
vc' (sx,sy) (px,py)
LOInit _ _ _ _ ->
error "box: boxed gadget sent another LOInit",
from smrq $ \_ ->
vc' (sx,sy) (px,py),
from i $ \p ->
tx (snd smw) (SMUpdates (fm p)) $
vc' (sx,sy) p,
from ns $ \ds ->
let s' = ds (sx,sy)
fm' p = [moveImage me p]
fo' p = [resizeImage me s']
in
tx lorq (LOSize s' fm' fo') $
vc' s' (px,py)
] (rxFail "viewer")
in
rx [
from (fst vg) $ \l -> case l of
LOInit (cx,cy) fm d cs ->
let fm' p = [moveImage me p]
ca _ _ c = c
uca _ _ c = c
d' p = [mkImage me (p,pairop (+) (sx,sy) p) blank True ca uca (d (0,0)) False]
cs' = wsm:cs
in
claim (fst smw) $
setGadgetWires (snd smw,(lors,lorq),smw) $
tx lorq (LOInit (sx,sy) fm' d' cs') $
tx o (cx,cy) $
claim i $
claim ns $
vc (sx,sy) (0,0) fm i o (fst vg) (fst smw)
] (rxFail "viewer")
fixedViewController :: In Size -> Out Coord -> In Int -> Out Int -> In Int -> Out Int -> Component
fixedViewController gs gp vp vs hp hs =
myNameIs "fVC" $
claim gs $
claim vp $
claim hp $
fvc (0,0) where
fvc (ph,pv) =
rx [
from gs $ \(sh,sv) ->
tx hs sh $
tx vs sv $
fvc (ph,pv) ,
from hp $ \ph' ->
tx gp (-ph',-pv) $
fvc (ph',pv),
from vp $ \pv' ->
tx gp (-ph,-pv') $
fvc (ph,pv')
] (rxFail "fixedViewController")
variableViewController :: In Size -> Out Coord -> In Int -> Out Int -> Out (Change Int) -> Out (Change Size) -> In Int -> Out Int -> Out (Change Int) -> Out (Change Size) -> In (Change Size) -> Out (Change Size) -> Component
variableViewController gs gp vp vs vvs vss hp hs hvs hss ds cs =
myNameIs "fVC" $
claim gs $
claim vp $
claim hp $
claim ds $
fvc (0,0) where
fvc (ph,pv) =
rx [
from gs $ \(sh,sv) ->
tx hs sh $
tx vs sv $
fvc (ph,pv) ,
from hp $ \ph' ->
tx gp (-ph',-pv) $
fvc (ph',pv),
from vp $ \pv' ->
tx gp (-ph,-pv') $
fvc (ph,pv'),
from ds $ \dsf ->
tx cs dsf $
tx hss dsf $
tx vss dsf $
tx hvs (\s -> (fst (dsf (s,0)))) $
tx vvs (\s -> (snd (dsf (0,s)))) $
fvc (ph,pv)
] (rxFail "fixedViewController")
fixedScrollBox :: Size -> Gadget -> Gadget
fixedScrollBox (sx,sy) g =
wire $ \gs ->
wire $ \gp ->
wire $ \xs ->
wire $ \xp ->
wire $ \ys ->
wire $ \yp ->
let x = sliderx' (sliderVisibleX sx.sliderPosX 0.sliderWholeXIn (ip xs).sliderPosXOut (op xp).
sliderWholeY 1.sliderVisibleY 1.
width sx.height 29)
y = slidery' (sliderVisibleY sy.sliderPosY 0.sliderWholeYIn (ip ys).sliderPosYOut (op yp).
sliderWholeX 1.sliderVisibleX 1.
height (sy+29).width 29)
v = viewer g (sx,sy) nci (ip gp) (op gs)
in
spawn (fixedViewController (ip gs) (op gp) (ip yp) (op ys) (ip xp) (op xs)) $
setGapSize 0 $
wrap' (border 0) ((v <|> x) <-> y)
dragIcon = dragIcon' id
dragIcon' :: Change ButtonAttributes -> Out (Change Coord) -> Gadget
dragIcon' cba o =
wire $ \w ->
let ca False (SMMouseClick x y _) c = tx (op w) (x,y) $ c True
ca True (SMMouseUnClick x y _) c = tx (op w) (x,y) $ c False
ca s _ c = c s
in
let di =
rx [
from (ip w) $ \(x1,y1) ->
rx [
from (ip w) $ \(x2,y2) ->
tx o (\(x,y) -> (x+x2-x1,y+y2-y1)) $
di
] (rxFail "dragIcon")
] (rxFail "dragIcon")
df (x,y) _ =
let tl = (x `div` 10,y `div` 10)
ls = ((8*x) `div` 10,(8*y) `div` 10)
ss = ((5*x) `div` 10,(5*y) `div` 10)
(fgc,bgc,flc,shc,lic,hic,foc,enc,dic,c1,c2,c3,c4,c5) = colourset
in
[DrawSetColour flc,fillbox ((0,0),(x-1,y-1))]++
plinth 2 lic shc enc tl ss++
plinth 2 lic shc enc tl ls
in
giveImage (button' (picture df.buttonAction ca.cba) nco ()) $
myNameIs "dragIcon" $
claim (ip w) $
di
variableScrollBox :: Size -> Gadget -> Gadget
variableScrollBox (sx,sy) g =
wire $ \gs ->
wire $ \gp ->
wire $ \xs ->
wire $ \vxs ->
wire $ \xp ->
wire $ \ys ->
wire $ \vys ->
wire $ \yp ->
wire $ \d ->
wire $ \vs ->
wire $ \cxs ->
wire $ \cys ->
let x = sliderx' (sliderVisibleX sx.sliderPosX 0.sliderWholeXIn (ip xs).sliderPosXOut (op xp).
sliderWholeY 1.sliderVisibleY 1.sliderVisibleXIn (ip vxs).
sliderChangeSize (ip cxs).width sx.height 29)
y = slidery' (sliderVisibleY sy.sliderPosY 0.sliderWholeYIn (ip ys).sliderPosYOut (op yp).
sliderWholeX 1.sliderVisibleX 1.sliderVisibleYIn (ip vys).
sliderChangeSize (ip cys).height sy.width 29)
v = viewer g (sx,sy) (ip vs) (ip gp) (op gs)
b = dragIcon' (picture blank.width 23.height 23.border 3) (op d)
in
spawn (variableViewController (ip gs) (op gp) (ip yp) (op ys) (op vys) (op cys) (ip xp) (op xs) (op vxs) (op cxs) (ip d) (op vs)) $
setGapSize 0 $
wrap' (border 0) ((v <-> y) <|> (x <-> b))
| null | https://raw.githubusercontent.com/ygmpkk/house/1ed0eed82139869e85e3c5532f2b579cf2566fa2/kernel/Gadgets/lib/Container.hs | haskell | SLIDERS -- | module Container where
import GadgetsPrelude
import Components
import Button
import Display
import Area(subtractArea)
import Useful(mapC)
import Layout
data WrapAttributes = WrapAttributes Int DrawFun
instance HasBorder WrapAttributes where
border b (WrapAttributes _ df) = (WrapAttributes b df)
instance HasPicture WrapAttributes where
picture df (WrapAttributes b _) = (WrapAttributes b df)
wrap = wrap' id
wrap' :: Change WrapAttributes -> Gadget -> Gadget
wrap' cwa g =
myNameIs "wrap" $
readState $ \(GadgetState (_,(lors,lorq),_) osc gap) ->
create g with layout wires connected to me
duplex $ \(gw,wg) ->
claim (fst wg) $
spawnWithState g (GadgetState (nco,gw,(nci,nco)) osc gap) $
duplex $ \(wsm,smw) ->
let (fgc,bgc,flc,shc,lic,hic,foc,enc,dic,c1,c2,c3,c4,c5) = colourset
(WrapAttributes b df) = cwa (WrapAttributes 0 (colourbox flc))
me = opFromIp (fst smw)
wc :: Out LORequest -> ImageID -> In LORequest -> In SMResponse -> Gadget
wc lorq me rq smrq =
let wc' = wc lorq me rq smrq in
rx [
from rq $ \l -> case l of
LOSize s fm fo ->
let s' = pairop (+) s (db,db)
fm' p = (moveImage me p):fm (b,b)
fo' p = (moveImage me p):(resizeImage me s'):fo (b,b)
in
tx lorq (LOSize s' fm' fo') $
wc'
LOInit _ _ _ _ ->
error "wrap: wrapped gadget sent another LOInit",
from smrq $ \_ ->
wc'
] (rxFail "wrap")
db = b+b
in
rx [
from (fst wg) $ \l -> case l of
LOInit s fm d cs ->
let s' = pairop (+) (db,db) s
fm' p = [moveImage me p]
ca _ _ c = c
uca _ _ c = c
d' p = [mkImage me (p,pairop (+) s' p) df True ca uca (d (b,b)) False]
cs' = wsm:cs
in
claim (fst smw) $
setGadgetWires (snd smw,(lors,lorq),smw) $
tx lorq (LOInit s' fm' d' cs') $
wc lorq me (fst wg) (fst smw)
] (rxFail "wrap")
data BoxAttributes = BoxAttributes Size DrawFun
instance HasSize BoxAttributes where
size s (BoxAttributes _ df) = (BoxAttributes s df)
instance HasPicture BoxAttributes where
picture df (BoxAttributes s _) = (BoxAttributes s df)
instance HasWidth BoxAttributes where
width w (BoxAttributes (_,h) df) = (BoxAttributes (w,h) df)
instance HasHeight BoxAttributes where
height h (BoxAttributes (w,_) df) = (BoxAttributes (w,h) df)
box = box' id
box' :: Change BoxAttributes -> Gadget -> Gadget
box' cwa g =
myNameIs "box" $
readState $ \(GadgetState (_,(lors,lorq),_) osc gap) ->
create g with layout wires connected to me
duplex $ \(gw,wg) ->
claim (fst wg) $
spawnWithState g (GadgetState (nco,gw,(nci,nco)) osc gap) $
duplex $ \(wsm,smw) ->
let (fgc,bgc,flc,shc,lic,hic,foc,enc,dic,c1,c2,c3,c4,c5) = colourset
(BoxAttributes (bx,by) df) = cwa (BoxAttributes (0,0) (colourbox flc))
me = opFromIp (fst smw)
wc :: Out LORequest -> ImageID -> In LORequest -> In SMResponse -> Gadget
wc lorq me rq smrq =
let wc' = wc lorq me rq smrq in
rx [
from rq $ \l -> case l of
LOSize (cx,cy) fm fo ->
let (ax,px) = if cx > bx then (cx,0) else (bx,(bx-cx)`div`2)
(ay,py) = if cy > by then (cy,0) else (by,(by-cy)`div`2)
fm' p = (moveImage me p):fm (px,py)
fo' p = (moveImage me p):(resizeImage me (ax,ay)):fo (px,py)
in
tx lorq (LOSize (ax,ay) fm' fo') $
wc'
LOInit _ _ _ _ ->
error "box: boxed gadget sent another LOInit",
from smrq $ \_ ->
wc'
] (rxFail "box")
in
rx [
from (fst wg) $ \l -> case l of
LOInit (cx,cy) fm d cs ->
let (ax,px) = if cx > bx then (cx,0) else (bx,(bx-cx)`div`2)
(ay,py) = if cy > by then (cy,0) else (by,(by-cy)`div`2)
fm' p = [moveImage me p]
ca _ _ c = c
uca _ _ c = c
d' p = [mkImage me (p,pairop (+) (ax,ay) p) df True ca uca (d (px,py)) False]
cs' = wsm:cs
in
claim (fst smw) $
setGadgetWires (snd smw,(lors,lorq),smw) $
tx lorq (LOInit (ax,ay) fm' d' cs') $
wc lorq me (fst wg) (fst smw)
] (rxFail "box")
width height wholeX wholeY visibleX visibleY posY
wholeXIn wholeYIn posXIn posYIn
data Slider = Slider Int Int Int Int Int Int Int Int
(In Int) (In Int) (In (Change Int))
(In (Change Int)) (In (Change Int)) (In (Change Int))
(Out Int) (Out Int) (In (Change Size))
instance HasWidth Slider where
width w (Slider _ h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
instance HasHeight Slider where
height h (Slider w _ wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderWholeX :: Int -> Change Slider
sliderWholeX wx (Slider w h _ wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderWholeY :: Int -> Change Slider
sliderWholeY wy (Slider w h wx _ vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderVisibleX :: Int -> Change Slider
sliderVisibleX vx (Slider w h wx wy _ vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderVisibleY :: Int -> Change Slider
sliderVisibleY vy (Slider w h wx wy vx _ px py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosX :: Int -> Change Slider
sliderPosX px (Slider w h wx wy vx vy _ py wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosY :: Int -> Change Slider
sliderPosY py (Slider w h wx wy vx vy px _ wxi wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderWholeXIn :: In Int -> Change Slider
sliderWholeXIn wxi (Slider w h wx wy vx vy px py _ wyi vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderWholeYIn :: In Int -> Change Slider
sliderWholeYIn wyi (Slider w h wx wy vx vy px py wxi _ vxi vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderVisibleXIn :: In (Change Int) -> Change Slider
sliderVisibleXIn vxi (Slider w h wx wy vx vy px py wxi wyi _ vyi pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderVisibleYIn :: In (Change Int) -> Change Slider
sliderVisibleYIn vyi (Slider w h wx wy vx vy px py wxi wyi vxi _ pxi pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosXIn :: In (Change Int) -> Change Slider
sliderPosXIn pxi (Slider w h wx wy vx vy px py wxi wyi vxi vyi _ pyi pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosYIn :: In (Change Int) -> Change Slider
sliderPosYIn pyi (Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi _ pxo pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosXOut :: Out Int -> Change Slider
sliderPosXOut pxo (Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi _ pyo dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderPosYOut :: Out Int -> Change Slider
sliderPosYOut pyo (Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo _ dsi) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
sliderChangeSize :: In (Change Size) -> Change Slider
sliderChangeSize dsi (Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo _) =
(Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo dsi)
slider = slider' id
slider' :: Change Slider -> Gadget
slider' cs =
let (Slider w h wx wy vx vy px py wxi wyi vxi vyi pxi pyi pxo pyo ds) =
cs (Slider 100 100 1 1 1 1 0 0 nci nci nci nci nci nci nco nco nci)
in
claim wxi $
claim wyi $
claim vxi $
claim vyi $
claim pxi $
claim pyi $
claim ds $
let s sx sy wx wy vx vy px py mp =
rx [
from ds $ \dsf ->
let (sx',sy') = dsf (sx,sy) in
setSize (sx',sy') $
rds sx' sy' wx wy vx vy px py mp,
from wxi $ \wx' ->
rds sx sy wx' wy vx vy px py mp,
from wyi $ \wy' ->
rds sx sy wx wy' vx vy px py mp,
from vxi $ \dvx ->
rds sx sy wx wy (dvx vx) vy px py mp,
from vyi $ \dvy ->
rds sx sy wx wy vx (dvy vy) px py mp,
from pxi $ \dpx ->
rds sx sy wx wy vx vy (dpx px) py mp,
from pyi $ \dpy ->
rds sx sy wx wy vx vy px (dpy py) mp,
fromSM $ \r -> case r of
SMMouseClick x1 y1 b ->
txSM (SMDrawFun (slidedf True w h wx wy vx vy px py)) $
s sx sy wx wy vx vy px py (Yes (x1,y1))
SMMouseUnClick x2 y2 b ->
case mp of
Yes (x1,y1) ->
let (dx,dy) = (((x2-x1)*wx) `div` sx, ((y2-y1)*wy) `div` sy)
(px2,py2) = (px+dx,py+dy)
(wx',wy',vx',vy',px',py') = confine wx wy vx vy px2 py2 in
txSM (SMDrawFun (slidedf False w h wx' wy' vx' vy' px' py')) $
when (px' /= px) (tx pxo px') $
when (py' /= py) (tx pyo py') $
s sx sy wx' wy' vx' vy' px' py' None
None ->
txSM (SMDrawFun (slidedf False w h wx wy vx vy px py)) $
s sx sy wx wy vx vy px py mp
otherwise ->
s sx sy wx wy vx vy px py mp
] (rxFail "slider")
rds sx' sy' wx wy vx vy px py mp =
let (wx',wy',vx',vy',px',py') = confine wx wy vx vy px py in
txSM (SMDrawFun (slidedf False w h wx' wy' vx' vy' px' py')) $
tx pxo px' $
tx pyo py' $
s sx' sy' wx' wy' vx' vy' px' py' mp
(fgc,bgc,flc,shc,lic,hic,foc,enc,dic,c1,c2,c3,c4,c5) = colourset
slidedf :: Bool -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> DrawFun
slidedf p _ _ wx wy vx vy px py (w,h) _ =
let w' = w - 6
h' = h - 6
x = 3 + ((px*w') `div` wx)
y = 3 + ((py*h') `div` wy)
sx = ((vx*w') `div` wx) - 1
sy = ((vy*h') `div` wy) - 1
(ca,cb,cc) = if p then (shc,lic,hic) else (shc,lic,enc)
in
plinth 3 shc lic flc (0,0) (w-1,h-1)++
plinth 3 ca cb cc (x,y) (sx,sy)++
[DrawSetColour bgc,fillbox ((x+3,y+3),(x+sx-3,y+sy-3)),DrawSetColour flc]++
concat (map fillrect (subtractArea ((x,y),(x+sx+1,y+sy+1)) [((3,3),(w-3,h-3))]))
confine :: Int -> Int -> Int -> Int -> Int -> Int -> (Int,Int,Int,Int,Int,Int)
confine wx wy vx vy px py =
let px' = if px+vx > wx then wx-vx else if px < 0 then 0 else px
py' = if py+vy > wy then wy-vy else if py < 0 then 0 else py in
(wx,wy,vx,vy,px',py')
in
initGadget (w,h) (slidedf False w h wx wy vx vy px py) $
s w h wx wy vx vy px py None
sliderx = sliderx' id
sliderx' :: Change Slider -> Gadget
sliderx' cs =
wire $ \px ->
wire $ \sc ->
let ds = (Slider 1 1 1 1 1 1 0 0 nci nci nci nci nci nci nco nco nci)
(Slider w h _ _ _ _ _ _ _ _ _ _ _ _ _ _ si) = cs ds
h' = h - 6
bl = button' (width h'.height h'.border 3.picture leftarrow.buttonMomentary) (op px) ((+)(-10))
br = button' (width h'.height h'.border 3.picture rightarrow.buttonMomentary) (op px) ((+)10)
s = slider' (sliderChangeSize (ip sc).sliderPosXIn (ip px).width (w-h-h).height h.cs)
in
setGapSize 0 $
spawn (mapC (\f -> (\(x,y)->(fst (f (x,y)),y))) si (op sc)) $
wrap (bl <-> s <-> br)
slidery = slidery' id
slidery' :: Change Slider -> Gadget
slidery' cs =
wire $ \py ->
wire $ \sc ->
let ds = (Slider 1 1 1 1 1 1 0 0 nci nci nci nci nci nci nco nco nci)
(Slider w h _ _ _ _ _ _ _ _ _ _ _ _ _ _ si) = cs ds
w' = w - 6
bu = button' (width w'.height w'.border 3.picture uparrow.buttonMomentary) (op py) ((+)(-10))
bd = button' (width w'.height w'.border 3.picture downarrow.buttonMomentary) (op py) ((+)10)
s = slider' (sliderChangeSize (ip sc).sliderPosYIn (ip py).width w.height (h-w-w).cs)
in
setGapSize 0 $
spawn (mapC (\f -> (\(x,y)->(x,snd (f (x,y))))) si (op sc)) $
wrap (bu <|> s <|> bd)
uparrow (x,y) _ =
let hx = x `div` 2
tqx = (7*x) `div` 8
qx = tqx `div` 7
tqy = (6*y) `div` 8
qy = tqy `div` 6
(fgc',bgc',flc,shc,lic,hic,foc,enc,dic,c1,c2,c3,c4,c5) = colourset
in
[ DrawSetColour shc,
DrawFilledTriangle ((hx,qy),(tqx,tqy),(qx,tqy))]
rightarrow = turnCW uparrow
downarrow = turnCW rightarrow
leftarrow = turnCW downarrow
viewer :: Gadget -> Size -> In (Change Size) -> In Coord -> Out Size -> Gadget
viewer g (sx,sy) ns i o =
myNameIs "viewer" $
readState $ \(GadgetState (_,(lors,lorq),_) osc gap) ->
create g with layout wires connected to me
duplex $ \(gv,vg) ->
claim (fst vg) $
spawnWithState g (GadgetState (nco,gv,(nci,nco)) osc gap) $
duplex $ \(wsm,smw) ->
let me = opFromIp (fst smw)
vc :: Size -> Coord -> (Coord -> [DisplayChange]) -> In Coord -> Out Size -> In LORequest -> In SMResponse -> Gadget
vc (sx,sy) (px,py) fm i o rq smrq =
let vc' s' p' = vc s' p' fm i o rq smrq in
rx [
from rq $ \l -> case l of
LOSize (cx,cy) fm fo ->
tx o (cx,cy) $
let fm' p = [moveImage me p]
fo' p = fo (px,py)
in
tx lorq (LOSize (sx,sy) fm' fo') $
vc' (sx,sy) (px,py)
LOInit _ _ _ _ ->
error "box: boxed gadget sent another LOInit",
from smrq $ \_ ->
vc' (sx,sy) (px,py),
from i $ \p ->
tx (snd smw) (SMUpdates (fm p)) $
vc' (sx,sy) p,
from ns $ \ds ->
let s' = ds (sx,sy)
fm' p = [moveImage me p]
fo' p = [resizeImage me s']
in
tx lorq (LOSize s' fm' fo') $
vc' s' (px,py)
] (rxFail "viewer")
in
rx [
from (fst vg) $ \l -> case l of
LOInit (cx,cy) fm d cs ->
let fm' p = [moveImage me p]
ca _ _ c = c
uca _ _ c = c
d' p = [mkImage me (p,pairop (+) (sx,sy) p) blank True ca uca (d (0,0)) False]
cs' = wsm:cs
in
claim (fst smw) $
setGadgetWires (snd smw,(lors,lorq),smw) $
tx lorq (LOInit (sx,sy) fm' d' cs') $
tx o (cx,cy) $
claim i $
claim ns $
vc (sx,sy) (0,0) fm i o (fst vg) (fst smw)
] (rxFail "viewer")
fixedViewController :: In Size -> Out Coord -> In Int -> Out Int -> In Int -> Out Int -> Component
fixedViewController gs gp vp vs hp hs =
myNameIs "fVC" $
claim gs $
claim vp $
claim hp $
fvc (0,0) where
fvc (ph,pv) =
rx [
from gs $ \(sh,sv) ->
tx hs sh $
tx vs sv $
fvc (ph,pv) ,
from hp $ \ph' ->
tx gp (-ph',-pv) $
fvc (ph',pv),
from vp $ \pv' ->
tx gp (-ph,-pv') $
fvc (ph,pv')
] (rxFail "fixedViewController")
variableViewController :: In Size -> Out Coord -> In Int -> Out Int -> Out (Change Int) -> Out (Change Size) -> In Int -> Out Int -> Out (Change Int) -> Out (Change Size) -> In (Change Size) -> Out (Change Size) -> Component
variableViewController gs gp vp vs vvs vss hp hs hvs hss ds cs =
myNameIs "fVC" $
claim gs $
claim vp $
claim hp $
claim ds $
fvc (0,0) where
fvc (ph,pv) =
rx [
from gs $ \(sh,sv) ->
tx hs sh $
tx vs sv $
fvc (ph,pv) ,
from hp $ \ph' ->
tx gp (-ph',-pv) $
fvc (ph',pv),
from vp $ \pv' ->
tx gp (-ph,-pv') $
fvc (ph,pv'),
from ds $ \dsf ->
tx cs dsf $
tx hss dsf $
tx vss dsf $
tx hvs (\s -> (fst (dsf (s,0)))) $
tx vvs (\s -> (snd (dsf (0,s)))) $
fvc (ph,pv)
] (rxFail "fixedViewController")
fixedScrollBox :: Size -> Gadget -> Gadget
fixedScrollBox (sx,sy) g =
wire $ \gs ->
wire $ \gp ->
wire $ \xs ->
wire $ \xp ->
wire $ \ys ->
wire $ \yp ->
let x = sliderx' (sliderVisibleX sx.sliderPosX 0.sliderWholeXIn (ip xs).sliderPosXOut (op xp).
sliderWholeY 1.sliderVisibleY 1.
width sx.height 29)
y = slidery' (sliderVisibleY sy.sliderPosY 0.sliderWholeYIn (ip ys).sliderPosYOut (op yp).
sliderWholeX 1.sliderVisibleX 1.
height (sy+29).width 29)
v = viewer g (sx,sy) nci (ip gp) (op gs)
in
spawn (fixedViewController (ip gs) (op gp) (ip yp) (op ys) (ip xp) (op xs)) $
setGapSize 0 $
wrap' (border 0) ((v <|> x) <-> y)
dragIcon = dragIcon' id
dragIcon' :: Change ButtonAttributes -> Out (Change Coord) -> Gadget
dragIcon' cba o =
wire $ \w ->
let ca False (SMMouseClick x y _) c = tx (op w) (x,y) $ c True
ca True (SMMouseUnClick x y _) c = tx (op w) (x,y) $ c False
ca s _ c = c s
in
let di =
rx [
from (ip w) $ \(x1,y1) ->
rx [
from (ip w) $ \(x2,y2) ->
tx o (\(x,y) -> (x+x2-x1,y+y2-y1)) $
di
] (rxFail "dragIcon")
] (rxFail "dragIcon")
df (x,y) _ =
let tl = (x `div` 10,y `div` 10)
ls = ((8*x) `div` 10,(8*y) `div` 10)
ss = ((5*x) `div` 10,(5*y) `div` 10)
(fgc,bgc,flc,shc,lic,hic,foc,enc,dic,c1,c2,c3,c4,c5) = colourset
in
[DrawSetColour flc,fillbox ((0,0),(x-1,y-1))]++
plinth 2 lic shc enc tl ss++
plinth 2 lic shc enc tl ls
in
giveImage (button' (picture df.buttonAction ca.cba) nco ()) $
myNameIs "dragIcon" $
claim (ip w) $
di
variableScrollBox :: Size -> Gadget -> Gadget
variableScrollBox (sx,sy) g =
wire $ \gs ->
wire $ \gp ->
wire $ \xs ->
wire $ \vxs ->
wire $ \xp ->
wire $ \ys ->
wire $ \vys ->
wire $ \yp ->
wire $ \d ->
wire $ \vs ->
wire $ \cxs ->
wire $ \cys ->
let x = sliderx' (sliderVisibleX sx.sliderPosX 0.sliderWholeXIn (ip xs).sliderPosXOut (op xp).
sliderWholeY 1.sliderVisibleY 1.sliderVisibleXIn (ip vxs).
sliderChangeSize (ip cxs).width sx.height 29)
y = slidery' (sliderVisibleY sy.sliderPosY 0.sliderWholeYIn (ip ys).sliderPosYOut (op yp).
sliderWholeX 1.sliderVisibleX 1.sliderVisibleYIn (ip vys).
sliderChangeSize (ip cys).height sy.width 29)
v = viewer g (sx,sy) (ip vs) (ip gp) (op gs)
b = dragIcon' (picture blank.width 23.height 23.border 3) (op d)
in
spawn (variableViewController (ip gs) (op gp) (ip yp) (op ys) (op vys) (op cys) (ip xp) (op xs) (op vxs) (op cxs) (ip d) (op vs)) $
setGapSize 0 $
wrap' (border 0) ((v <-> y) <|> (x <-> b))
|
586043c5fc25bf438462ba0b6d3abda8a3b743ff3cc9fc3363a481388c5d6b6b | batterseapower/haskell-kata | StreamFusionReassoc.hs | # LANGUAGE ExistentialQuantification , BangPatterns , TypeOperators #
import Prelude hiding (enumFromTo, concatMap, replicate)
data Stream a = forall s. Stream !(s -> Step a s) -- a stepper function
!s -- an initial state
-- | A stream step.
--
-- A step either ends a stream, skips a value, or yields a value
--
data Step a s = Yield a !s
| Skip !s
| Done
-- | Construct an abstract stream from a list.
stream :: [a] -> Stream a
stream xs0 = Stream next xs0
where
# INLINE next #
next [] = Done
next (x:xs) = Yield x xs
{-# INLINE [0] stream #-}
-- | Flatten a stream back into a list.
unstream :: Stream a -> [a]
unstream (Stream next s0) = unfold_unstream s0
where
unfold_unstream !s = case next s of
Done -> []
Skip s' -> unfold_unstream s'
Yield x s' -> x : unfold_unstream s'
{-# INLINE [0] unstream #-}
--
-- /The/ stream fusion rule
--
# RULES
" STREAM stream / unstream fusion " forall s.
stream ( unstream s ) = s
#
"STREAM stream/unstream fusion" forall s.
stream (unstream s) = s
#-}
# INLINE replicate #
replicate n x = unstream (replicateS n x)
{-# INLINE [0] replicateS #-}
replicateS :: Int -> a -> Stream a
replicateS n x = Stream next n
where
# INLINE next #
next !i | i <= 0 = Done
| otherwise = Yield x (i-1)
# INLINE enumFromTo #
enumFromTo x y = unstream (enumFromToS x y)
{-# INLINE [0] enumFromToS #-}
enumFromToS x y = Stream step x
where
# INLINE step #
step x | x <= y = Yield x (x + 1)
| otherwise = Done
data a :!: b = !a :!: !b
# INLINE concatMap #
concatMap f xs = unstream (concatMapS (stream . f) (stream xs))
{-# INLINE [0] concatMapS #-}
concatMapS :: (a -> Stream b) -> Stream a -> Stream b
concatMapS f (Stream next0 s0) = Stream next (s0 :!: Nothing)
where
# INLINE next #
next (s :!: Nothing) = case next0 s of
Done -> Done
Skip s' -> Skip (s' :!: Nothing)
Yield x s' -> Skip (s' :!: Just (f x))
next (s :!: Just (Stream g t)) = case g t of
Done -> Skip (s :!: Nothing)
Skip t' -> Skip (s :!: Just (Stream g t'))
Yield x t' -> Yield x (s :!: Just (Stream g t'))
-- [1,1,2,2,3,3,4,4,5,5,2,2,3,3,4,4,5,5,3,3,4,4,5,5,4,4,5,5,5,5]
main = do
print $ concatMap (\y -> replicate 2 y) (concatMap (\x -> enumFromTo x 5) (enumFromTo 1 (5 :: Int)))
print $ concatMap ( \x - > concatMap ( \y - > replicate 2 y ) ( enumFromTo x 5 ) ) ( enumFromTo 1 ( 5 : : Int ) )
| null | https://raw.githubusercontent.com/batterseapower/haskell-kata/49c0c5cf48f8e5549131c78d026e4f2aa73d8a7a/StreamFusionReassoc.hs | haskell | a stepper function
an initial state
| A stream step.
A step either ends a stream, skips a value, or yields a value
| Construct an abstract stream from a list.
# INLINE [0] stream #
| Flatten a stream back into a list.
# INLINE [0] unstream #
/The/ stream fusion rule
# INLINE [0] replicateS #
# INLINE [0] enumFromToS #
# INLINE [0] concatMapS #
[1,1,2,2,3,3,4,4,5,5,2,2,3,3,4,4,5,5,3,3,4,4,5,5,4,4,5,5,5,5] | # LANGUAGE ExistentialQuantification , BangPatterns , TypeOperators #
import Prelude hiding (enumFromTo, concatMap, replicate)
data Step a s = Yield a !s
| Skip !s
| Done
stream :: [a] -> Stream a
stream xs0 = Stream next xs0
where
# INLINE next #
next [] = Done
next (x:xs) = Yield x xs
unstream :: Stream a -> [a]
unstream (Stream next s0) = unfold_unstream s0
where
unfold_unstream !s = case next s of
Done -> []
Skip s' -> unfold_unstream s'
Yield x s' -> x : unfold_unstream s'
# RULES
" STREAM stream / unstream fusion " forall s.
stream ( unstream s ) = s
#
"STREAM stream/unstream fusion" forall s.
stream (unstream s) = s
#-}
# INLINE replicate #
replicate n x = unstream (replicateS n x)
replicateS :: Int -> a -> Stream a
replicateS n x = Stream next n
where
# INLINE next #
next !i | i <= 0 = Done
| otherwise = Yield x (i-1)
# INLINE enumFromTo #
enumFromTo x y = unstream (enumFromToS x y)
enumFromToS x y = Stream step x
where
# INLINE step #
step x | x <= y = Yield x (x + 1)
| otherwise = Done
data a :!: b = !a :!: !b
# INLINE concatMap #
concatMap f xs = unstream (concatMapS (stream . f) (stream xs))
concatMapS :: (a -> Stream b) -> Stream a -> Stream b
concatMapS f (Stream next0 s0) = Stream next (s0 :!: Nothing)
where
# INLINE next #
next (s :!: Nothing) = case next0 s of
Done -> Done
Skip s' -> Skip (s' :!: Nothing)
Yield x s' -> Skip (s' :!: Just (f x))
next (s :!: Just (Stream g t)) = case g t of
Done -> Skip (s :!: Nothing)
Skip t' -> Skip (s :!: Just (Stream g t'))
Yield x t' -> Yield x (s :!: Just (Stream g t'))
main = do
print $ concatMap (\y -> replicate 2 y) (concatMap (\x -> enumFromTo x 5) (enumFromTo 1 (5 :: Int)))
print $ concatMap ( \x - > concatMap ( \y - > replicate 2 y ) ( enumFromTo x 5 ) ) ( enumFromTo 1 ( 5 : : Int ) )
|
3e2fcb4b240b93d33693bfde82f6bd95d4bc21022dffc533344a24591873c9f4 | active-group/reacl-c | reacl_c.clj | (ns hooks.reacl-c
(:require [clj-kondo.hooks-api :as api]
[clojure.string :as str]))
(defn- rewrite-list [expr f]
;; rewrite children list of a list-node to a single new node.
(-> expr
(update :node
(fn [node]
(if (api/list-node? node)
(let [cs (:children node)]
(let [res (f cs)]
(println "xxxxx" cs "=>" res)
res))
;; just keep? or an error?
(do #_(assert false node) ;; TODO: proper error
node))))))
(defn- is-keyword? [node kw]
(and (api/keyword-node? node)
(= kw (:k node))
(not (:namespaced? node))))
(defn- remove-schemas [params]
(-> (reduce (fn [[res drop-next?] p]
(if drop-next?
[res false]
(if (is-keyword? p :-)
[res true]
[(conj res p) false])))
[[] false]
params)
(first)))
(defn- schema-fn-0 [params & body]
#_(assert (api/vector-node? params) (pr-str params))
;; How to f*ing reuse what there is already for schema.core???
#_(api/list-node (list* (api/token-node 'schema.core/fn)
params
body))
(api/list-node (list* (api/token-node 'fn)
(api/vector-node (remove-schemas (:children params)))
body)))
(defn- schema-fn-n [params-bodies]
;; multi arity
(api/list-node (list* (api/token-node 'fn)
(map (fn [[params body]]
(api/list-node (list* (api/vector-node (remove-schemas (:children params)))
body)))
params-bodies))))
(defn- schema-defn-0 [name params & more]
#_(assert (api/vector-node? params) (pr-str params))
(api/list-node (list (api/token-node 'def)
name
(apply schema-fn-0 params more))))
(defn- schema-defn-n [name params-bodies]
(api/list-node (list (api/token-node 'def)
name
(schema-fn-n params-bodies))))
(defn- schema-defn [name x & more]
(if (is-keyword? x :-)
(apply schema-defn-0 name (rest more))
(apply schema-defn-0 name (cons x more))))
(defn- drop-docstring [name x & more]
(if (api/string-node? x)
(list* name more)
(list* name (cons x more))))
(defn- as-do [& nodes]
;; multiple nodes in a 'do'
(api/list-node (list* (api/token-node 'do) nodes)))
(defn defn-item [expr]
(-> expr
(rewrite-list (fn [children]
rewrite node to defn , removing : static
(let [[name & r] (apply drop-docstring (rest children))]
;; TODO: maybe register a finding if x is some other keyword than :static or :-
(apply schema-defn name (if (is-keyword? (first r) :static)
(rest r)
r)))))))
(defn defn-dom [expr]
basically the same as defn - item , but add arity with one arg less .
(-> expr
(rewrite-list (fn [children]
rewrite node to defn , removing : static
(let [[name & r] (apply drop-docstring (rest children))
r (cond
(is-keyword? (first r) :static) (rest r)
(is-keyword? (first r) :-) (rest (rest r))
:else r)
[params & body] r]
;; TODO: add finding when no args?
(schema-defn-n name
[[params body]
[(api/vector-node (rest (:children params)))
Note : even though the first param ( attrs ) need not be passed , it is still bound in body
(list (apply schema-fn-0 (api/vector-node (list (first (:children params))))
body))]]))))))
(defn- with-state-as* [expr]
(-> expr
(rewrite-list (fn [children]
(let [[binding & body] (rest children)
as-fn (fn [params]
(apply schema-fn-0 params body))]
(if (and (api/vector-node? binding)
(> (count (:children binding)) 3)
(is-keyword? (nth (:children binding) 2) :local))
(let [b (:children binding)]
;; (with-state-as [b0 b1 :local value] & body) => (do value (fn [b0 b1] & body))
TODO : warn if length(b ) > 4 ?
(as-do (nth b 3)
(as-fn (api/vector-node (list (nth b 0) (nth b 1))))))
;; (with-state-as binding & body) => (fn [binding] & body)
(as-fn (api/vector-node (list binding)))))))))
(defn with-state-as [expr]
(with-state-as* expr))
(def ^:private empty-1-arg-fn
(api/list-node (list (api/token-node 'fn)
(api/vector-node (list (api/token-node '_))))))
(defn defn-subscription [expr]
( defn - subscription name deliver ! : - Schema [ args ] & body )
(-> expr
(rewrite-list (fn [children]
(let [[name deliver x & r] (apply drop-docstring (rest children))
[[deliver _] params body] ;; ignoring deliver schema for now
(if (is-keyword? x :-)
[(list deliver (nth r 0)) (nth r 1) (rest (rest r))]
[(list deliver nil) x r])]
(schema-defn name params
(api/list-node (list* (api/token-node 'let)
(api/vector-node (list deliver empty-1-arg-fn))
body))))))))
| null | https://raw.githubusercontent.com/active-group/reacl-c/53fec3e78e61176a6c4a2376cf88c0e6c4e99e22/resources/clj-kondo.exports/de.active-group/reacl-c/hooks/reacl_c.clj | clojure | rewrite children list of a list-node to a single new node.
just keep? or an error?
TODO: proper error
How to f*ing reuse what there is already for schema.core???
multi arity
multiple nodes in a 'do'
TODO: maybe register a finding if x is some other keyword than :static or :-
TODO: add finding when no args?
(with-state-as [b0 b1 :local value] & body) => (do value (fn [b0 b1] & body))
(with-state-as binding & body) => (fn [binding] & body)
ignoring deliver schema for now | (ns hooks.reacl-c
(:require [clj-kondo.hooks-api :as api]
[clojure.string :as str]))
(defn- rewrite-list [expr f]
(-> expr
(update :node
(fn [node]
(if (api/list-node? node)
(let [cs (:children node)]
(let [res (f cs)]
(println "xxxxx" cs "=>" res)
res))
node))))))
(defn- is-keyword? [node kw]
(and (api/keyword-node? node)
(= kw (:k node))
(not (:namespaced? node))))
(defn- remove-schemas [params]
(-> (reduce (fn [[res drop-next?] p]
(if drop-next?
[res false]
(if (is-keyword? p :-)
[res true]
[(conj res p) false])))
[[] false]
params)
(first)))
(defn- schema-fn-0 [params & body]
#_(assert (api/vector-node? params) (pr-str params))
#_(api/list-node (list* (api/token-node 'schema.core/fn)
params
body))
(api/list-node (list* (api/token-node 'fn)
(api/vector-node (remove-schemas (:children params)))
body)))
(defn- schema-fn-n [params-bodies]
(api/list-node (list* (api/token-node 'fn)
(map (fn [[params body]]
(api/list-node (list* (api/vector-node (remove-schemas (:children params)))
body)))
params-bodies))))
(defn- schema-defn-0 [name params & more]
#_(assert (api/vector-node? params) (pr-str params))
(api/list-node (list (api/token-node 'def)
name
(apply schema-fn-0 params more))))
(defn- schema-defn-n [name params-bodies]
(api/list-node (list (api/token-node 'def)
name
(schema-fn-n params-bodies))))
(defn- schema-defn [name x & more]
(if (is-keyword? x :-)
(apply schema-defn-0 name (rest more))
(apply schema-defn-0 name (cons x more))))
(defn- drop-docstring [name x & more]
(if (api/string-node? x)
(list* name more)
(list* name (cons x more))))
(defn- as-do [& nodes]
(api/list-node (list* (api/token-node 'do) nodes)))
(defn defn-item [expr]
(-> expr
(rewrite-list (fn [children]
rewrite node to defn , removing : static
(let [[name & r] (apply drop-docstring (rest children))]
(apply schema-defn name (if (is-keyword? (first r) :static)
(rest r)
r)))))))
(defn defn-dom [expr]
basically the same as defn - item , but add arity with one arg less .
(-> expr
(rewrite-list (fn [children]
rewrite node to defn , removing : static
(let [[name & r] (apply drop-docstring (rest children))
r (cond
(is-keyword? (first r) :static) (rest r)
(is-keyword? (first r) :-) (rest (rest r))
:else r)
[params & body] r]
(schema-defn-n name
[[params body]
[(api/vector-node (rest (:children params)))
Note : even though the first param ( attrs ) need not be passed , it is still bound in body
(list (apply schema-fn-0 (api/vector-node (list (first (:children params))))
body))]]))))))
(defn- with-state-as* [expr]
(-> expr
(rewrite-list (fn [children]
(let [[binding & body] (rest children)
as-fn (fn [params]
(apply schema-fn-0 params body))]
(if (and (api/vector-node? binding)
(> (count (:children binding)) 3)
(is-keyword? (nth (:children binding) 2) :local))
(let [b (:children binding)]
TODO : warn if length(b ) > 4 ?
(as-do (nth b 3)
(as-fn (api/vector-node (list (nth b 0) (nth b 1))))))
(as-fn (api/vector-node (list binding)))))))))
(defn with-state-as [expr]
(with-state-as* expr))
(def ^:private empty-1-arg-fn
(api/list-node (list (api/token-node 'fn)
(api/vector-node (list (api/token-node '_))))))
(defn defn-subscription [expr]
( defn - subscription name deliver ! : - Schema [ args ] & body )
(-> expr
(rewrite-list (fn [children]
(let [[name deliver x & r] (apply drop-docstring (rest children))
(if (is-keyword? x :-)
[(list deliver (nth r 0)) (nth r 1) (rest (rest r))]
[(list deliver nil) x r])]
(schema-defn name params
(api/list-node (list* (api/token-node 'let)
(api/vector-node (list deliver empty-1-arg-fn))
body))))))))
|
6afb7f641d36dda346d36f82408ac96e715d5633e86a366c7ce2e88bc8061be3 | untangled-web/untangled-ui | parser_spec.clj | (ns untangled.ui.server.image-library.parser-spec
(:require
[com.stuartsierra.component :as component]
[untangled-spec.core :refer [specification behavior component assertions]]
[untangled.ui.server.image-library.parser :as src]
[untangled.ui.server.image-library :as src.lib]
[untangled.ui.server.image-library.image :as src.img]
[untangled.ui.server.image-library.storage :as src.storage]
[untangled.server.core :as usc])
(:import
(java.util Base64)))
(defn test-image-library [& [opts [meta blob]]]
(-> (src.lib/image-library
(merge {:owner-fn src.lib/example-owner-fn}
(or opts {})))
(assoc ::src.storage/meta
(component/start (or meta (src.storage/map->InMemMetaStore {}))))
(assoc ::src.storage/blob
(component/start (or blob (src.storage/map->FileStore {}))))))
(defn store-image [img-lib params]
(with-redefs
[src.img/infer-img-ext (constantly "TEST")]
(let [api-mutate (.api-mutate img-lib)
base64-encode #(.encodeToString (Base64/getEncoder) (.getBytes %))
{:keys [tempids]} ((:action (api-mutate img-lib 'untangled.component.image-library/store
(update params :content/data base64-encode))))]
(get tempids (:db/id params)))))
(specification "build-mutate"
(component "'untangled.component.image-library/store"
(assertions
"the image should in the params be under :content/data base 64 encoded"
(with-redefs
[src.img/infer-img-ext (constantly "TEST")]
(let [img-lib (test-image-library)]
((:action ((.api-mutate img-lib) img-lib 'untangled.component.image-library/store
{:db/id (rand-int 1e6) :content/data "(*&@#%NM<DSV:SL#PO%_@"})))))
=throws=> (IllegalArgumentException #"Illegal base64 character")
"returns a tempids mapping"
(store-image (test-image-library) {:db/id (rand-int 1e6) :content/data "hello"}) => 0
"relies on owner-fn and auth-fn for ownership and authorization"
(let [owner-fn (fn [_ im]
(assertions (:id im) => 42)
(assoc im :owner "test owner"))
auth-fn (fn [_ im loc]
(assertions
(:id im) => 42
(:owner im) => "test owner"
loc => :store))]
(store-image (test-image-library
{:owner-fn owner-fn
:auth-fn auth-fn})
{:db/id 42 :content/data "hello"}))
=> 0
"if it does not handle the dispatch-key it return nil"
(let [img-lib (test-image-library)]
((.api-mutate img-lib) img-lib ::should-not-handle {}))
=> nil)))
(defn read-images [img-lib]
(:value ((.api-read img-lib) img-lib :images {})))
(specification "build-read"
(component ":images"
(let [owner-fn (fn [_ im]
(assoc im :owner "test owner"))
auth-fn (fn [_ im loc]
(assertions
(:owner im) => "test owner"
loc =fn=> #{:store :read-all}))]
(assertions
"reads all the images currently stored"
0 => 0
"relies on owner-fn and auth-fn for ownership and authorization"
(let [img-lib (test-image-library
{:owner-fn owner-fn
:auth-fn auth-fn})]
(store-image img-lib
{:db/id 42 :content/data "hello"})
(read-images img-lib))
=> [{:db/id 0
:image/owner "test owner"
:image/name nil
:image/size nil
:image/dimensions nil
:image/extension "TEST"}]
"if it does not handle the dispatch-key it return nil"
(let [img-lib (test-image-library)]
((.api-read img-lib) img-lib ::should-not-handle {})) => nil))))
| null | https://raw.githubusercontent.com/untangled-web/untangled-ui/ae101f90cd9b7bf5d0c80e9453595fdfe784923c/src/test/untangled/ui/server/image_library/parser_spec.clj | clojure | (ns untangled.ui.server.image-library.parser-spec
(:require
[com.stuartsierra.component :as component]
[untangled-spec.core :refer [specification behavior component assertions]]
[untangled.ui.server.image-library.parser :as src]
[untangled.ui.server.image-library :as src.lib]
[untangled.ui.server.image-library.image :as src.img]
[untangled.ui.server.image-library.storage :as src.storage]
[untangled.server.core :as usc])
(:import
(java.util Base64)))
(defn test-image-library [& [opts [meta blob]]]
(-> (src.lib/image-library
(merge {:owner-fn src.lib/example-owner-fn}
(or opts {})))
(assoc ::src.storage/meta
(component/start (or meta (src.storage/map->InMemMetaStore {}))))
(assoc ::src.storage/blob
(component/start (or blob (src.storage/map->FileStore {}))))))
(defn store-image [img-lib params]
(with-redefs
[src.img/infer-img-ext (constantly "TEST")]
(let [api-mutate (.api-mutate img-lib)
base64-encode #(.encodeToString (Base64/getEncoder) (.getBytes %))
{:keys [tempids]} ((:action (api-mutate img-lib 'untangled.component.image-library/store
(update params :content/data base64-encode))))]
(get tempids (:db/id params)))))
(specification "build-mutate"
(component "'untangled.component.image-library/store"
(assertions
"the image should in the params be under :content/data base 64 encoded"
(with-redefs
[src.img/infer-img-ext (constantly "TEST")]
(let [img-lib (test-image-library)]
((:action ((.api-mutate img-lib) img-lib 'untangled.component.image-library/store
{:db/id (rand-int 1e6) :content/data "(*&@#%NM<DSV:SL#PO%_@"})))))
=throws=> (IllegalArgumentException #"Illegal base64 character")
"returns a tempids mapping"
(store-image (test-image-library) {:db/id (rand-int 1e6) :content/data "hello"}) => 0
"relies on owner-fn and auth-fn for ownership and authorization"
(let [owner-fn (fn [_ im]
(assertions (:id im) => 42)
(assoc im :owner "test owner"))
auth-fn (fn [_ im loc]
(assertions
(:id im) => 42
(:owner im) => "test owner"
loc => :store))]
(store-image (test-image-library
{:owner-fn owner-fn
:auth-fn auth-fn})
{:db/id 42 :content/data "hello"}))
=> 0
"if it does not handle the dispatch-key it return nil"
(let [img-lib (test-image-library)]
((.api-mutate img-lib) img-lib ::should-not-handle {}))
=> nil)))
(defn read-images [img-lib]
(:value ((.api-read img-lib) img-lib :images {})))
(specification "build-read"
(component ":images"
(let [owner-fn (fn [_ im]
(assoc im :owner "test owner"))
auth-fn (fn [_ im loc]
(assertions
(:owner im) => "test owner"
loc =fn=> #{:store :read-all}))]
(assertions
"reads all the images currently stored"
0 => 0
"relies on owner-fn and auth-fn for ownership and authorization"
(let [img-lib (test-image-library
{:owner-fn owner-fn
:auth-fn auth-fn})]
(store-image img-lib
{:db/id 42 :content/data "hello"})
(read-images img-lib))
=> [{:db/id 0
:image/owner "test owner"
:image/name nil
:image/size nil
:image/dimensions nil
:image/extension "TEST"}]
"if it does not handle the dispatch-key it return nil"
(let [img-lib (test-image-library)]
((.api-read img-lib) img-lib ::should-not-handle {})) => nil))))
| |
ddbe847fea6c097ec396979bb2429734076231a96eb64d9fb3de89881f1962a0 | nasa/PRECiSA | MapRealPVSLangAST.hs | -- Notices:
--
Copyright 2020 United States Government as represented by the Administrator of the National Aeronautics and Space Administration . All Rights Reserved .
-- Disclaimers
No Warranty : THE SUBJECT SOFTWARE IS PROVIDED " AS IS " WITHOUT ANY WARRANTY OF ANY KIND , EITHER EXPRESSED , IMPLIED , OR STATUTORY , INCLUDING , BUT NOT LIMITED TO , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL CONFORM TO SPECIFICATIONS , ANY IMPLIED WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE , OR FREEDOM FROM INFRINGEMENT , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL BE ERROR FREE , OR ANY WARRANTY THAT DOCUMENTATION , IF PROVIDED , WILL CONFORM TO THE SUBJECT SOFTWARE . THIS AGREEMENT DOES NOT , IN ANY MANNER , CONSTITUTE AN ENDORSEMENT BY GOVERNMENT AGENCY OR ANY PRIOR RECIPIENT OF ANY RESULTS , RESULTING DESIGNS , HARDWARE , SOFTWARE PRODUCTS OR ANY OTHER APPLICATIONS RESULTING FROM USE OF THE SUBJECT SOFTWARE . FURTHER , GOVERNMENT AGENCY DISCLAIMS ALL WARRANTIES AND LIABILITIES REGARDING THIRD - PARTY SOFTWARE , IF PRESENT IN THE ORIGINAL SOFTWARE , AND DISTRIBUTES IT " AS IS . "
Waiver and Indemnity : RECIPIENT AGREES TO WAIVE ANY AND ALL CLAIMS AGAINST THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT . IF RECIPIENT 'S USE OF THE SUBJECT SOFTWARE RESULTS IN ANY LIABILITIES , DEMANDS , DAMAGES , EXPENSES OR LOSSES ARISING FROM SUCH USE , INCLUDING ANY DAMAGES FROM PRODUCTS BASED ON , OR RESULTING FROM , RECIPIENT 'S USE OF THE SUBJECT SOFTWARE , RECIPIENT SHALL INDEMNIFY AND HOLD HARMLESS THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT , TO THE EXTENT PERMITTED BY LAW . RECIPIENT 'S SOLE REMEDY FOR ANY SUCH MATTER SHALL BE THE IMMEDIATE , UNILATERAL TERMINATION OF THIS AGREEMENT .
module MapRealPVSLangAST
where
import AbsRawRealPVSLang
import AbsPVSLang
import Common.TypesUtils
import Data.Maybe(fromMaybe)
import ErrM
import PVSTypes
import qualified Operators as Op
import Parser.ParRawRealPVSLang
import Parser.LexRawRealPVSLang
type VarTypeEnv = [(String, PVSType)]
type FunTypeEnv = [(String, PVSType)]
namePVSRealTheory :: AbsRawRealPVSLang.Program -> String
namePVSRealTheory (Prog (Id name) _ _ _ _) = name
namePVSRealTheory (ProgImp (Id name) _ _ _) = name
rawparserRealPVS :: String -> Err AbsRawRealPVSLang.Program
rawparserRealPVS = pProgram . tokens
raw2Id :: AbsRawRealPVSLang.Id -> VarName
raw2Id (AbsRawRealPVSLang.Id x) = x
raw2FPType :: AbsRawRealPVSLang.Type -> PVSType
raw2FPType TypeInt = TInt
raw2FPType TypeInteger = TInt
raw2FPType TypeReal = Real
raw2FPType TypePosNat = TInt
raw2FPType (TypeBelow _) = TInt
raw2FPType TypeBool = Boolean
raw2FPType (TypeArrayInteger t) = Array (raw2FPType t) Nothing
raw2FPType (TypeArrayInt t) = Array (raw2FPType t) Nothing
raw2FPType (TypeArrayBelow (AbsRawRealPVSLang.Int n) t) = Array (raw2FPType t) (Just (ArraySizeInt n))
raw2FPType (TypeArrayBelow (AbsRawRealPVSLang.Var (AbsRawRealPVSLang.Id x)) t) = Array (raw2FPType t) (Just (ArraySizeVar x))
raw2FPType t = error $ "raw2FPType: unexpected value " ++ show t ++ "."
raw2RealProg :: AbsRawRealPVSLang.Program -> AbsPVSLang.RProgram
raw2RealProg (AbsRawRealPVSLang.Prog _ _ _ listDecl _) = raw2Decsl (map retTypeFun listDecl) listDecl
raw2RealProg (AbsRawRealPVSLang.ProgImp _ _ listDecl _) = raw2Decsl (map retTypeFun listDecl) listDecl
retTypeFun :: AbsRawRealPVSLang.Decl -> (String, PVSType)
retTypeFun (Decl0 (AbsRawRealPVSLang.Id f) t _) = (f, raw2FPType t)
retTypeFun (DeclN (AbsRawRealPVSLang.Id f) _ t _) = (f, raw2FPType t)
retTypeFun (DeclRec (AbsRawRealPVSLang.Id f) _ t _) = (f, raw2FPType t)
raw2Decsl :: FunTypeEnv -> [AbsRawRealPVSLang.Decl] -> [AbsPVSLang.RDecl]
raw2Decsl fenv = map (raw2Decl fenv)
raw2Decl :: FunTypeEnv -> AbsRawRealPVSLang.Decl -> AbsPVSLang.RDecl
raw2Decl fenv (DeclN f rawArgs TypeBool expr) = RPred (raw2Id f) args (raw2BExprStm env fenv expr)
where
args = raw2Args rawArgs
env = map mapArg2Pair args
raw2Decl fenv (Decl0 f TypeBool expr) = RPred (raw2Id f) [] (raw2BExprStm [] fenv expr)
raw2Decl fenv (DeclN f rawArgs t stm) = RDecl (raw2FPType t) (raw2Id f) args (raw2AExpr env fenv stm)
where
args = raw2Args rawArgs
env = map mapArg2Pair args
raw2Decl fenv (DeclRec f rawArgs t stm) = RDecl (raw2FPType t) (raw2Id f) args (raw2AExpr env fenv stm)
where
args = raw2Args rawArgs
env = map mapArg2Pair args
raw2Decl fenv (Decl0 f t stm) = RDecl (raw2FPType t) (raw2Id f) [] (raw2AExpr [] fenv stm)
raw2Args :: AbsRawRealPVSLang.Args -> [AbsPVSLang.Arg]
raw2Args (FArgs args) = concatMap raw2Arg args
raw2Args (FArgsNoType _) = error "Arguments have no type."
raw2Arg :: AbsRawRealPVSLang.Arg -> [AbsPVSLang.Arg]
raw2Arg (FArg xs t) = map (raw2ArgWithType t) xs
raw2Arg (FArgSubrange xs _) = map (raw2ArgWithType TypeInteger) xs
raw2Arg (FArgGuard xs t _) = map (raw2ArgWithType t) xs
raw2ArgWithType :: Type -> AbsRawRealPVSLang.Id -> AbsPVSLang.Arg
raw2ArgWithType t x = AbsPVSLang.Arg (raw2Id x) (raw2FPType t)
raw2Elsif :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.ElsIf -> (AbsPVSLang.BExpr, AbsPVSLang.AExpr)
raw2Elsif env fenv (ElsIf fbexpr stm) = (raw2BExpr env fenv fbexpr, raw2AExpr env fenv stm)
raw2BElsif :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.ElsIf -> (AbsPVSLang.BExpr, AbsPVSLang.BExprStm)
raw2BElsif env fenv (ElsIf fbexpr stm) = (raw2BExpr env fenv fbexpr, raw2BExprStm env fenv stm)
raw2LetElem :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.LetElem -> AbsPVSLang.LetElem
raw2LetElem env fenv (AbsRawRealPVSLang.LetElem x rawExpr)
| (isIntAExpr expr) = AbsPVSLang.LetElem {letVar = raw2Id x, letType = TInt, letExpr = expr}
| otherwise = AbsPVSLang.LetElem {letVar = raw2Id x, letType = Real, letExpr = expr}
where
expr = raw2AExpr env fenv rawExpr
raw2LetElem env fenv (LetElemType x t rawExpr) = AbsPVSLang.LetElem {letVar = raw2Id x
,letType = raw2FPType t
,letExpr = raw2AExpr env fenv rawExpr}
raw2AExpr :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.Expr -> AbsPVSLang.AExpr
raw2AExpr env fenv (AbsRawRealPVSLang.Let letElems stm)
= RLet letList (raw2AExpr newenv fenv stm)
where
(newenv,letList) = foldl aux_fold (env,[]) letElems
aux_fold (accEnv,elems) letElem = (env',elems ++ [newLetElem])
where
newLetElem = raw2LetElem accEnv fenv letElem
env' = (letVar newLetElem, letType newLetElem):accEnv
raw2AExpr env fenv (AbsRawRealPVSLang.For retType startIdx endIdx initValueAcc idxVarId@(AbsRawRealPVSLang.Id idx) _ _ accVarId@(AbsRawRealPVSLang.Id acc) accType forBody)
= if retType == accType
then RForLoop fp
(raw2AExpr env fenv startIdx)
(raw2AExpr env fenv endIdx)
(raw2AExpr env fenv initValueAcc)
(raw2Id idxVarId)
(raw2Id accVarId)
(raw2AExpr ((idx,TInt):(acc,fp):env) fenv forBody)
else error "Type mismatch for for loop."
where
fp = raw2FPType retType
raw2AExpr env fenv (AbsRawRealPVSLang.If be thenSmt elseStm) = RIte (raw2BExpr env fenv be) (raw2AExpr env fenv thenSmt) (raw2AExpr env fenv elseStm)
raw2AExpr env fenv (AbsRawRealPVSLang.ListIf be stmThen listElsif elseStm) =
RListIte ((raw2BExpr env fenv be,raw2AExpr env fenv stmThen) : map (raw2Elsif env fenv) listElsif) (raw2AExpr env fenv elseStm)
raw2AExpr _ _ AbsRawRealPVSLang.UnstWarning = RUnstWarning
raw2AExpr env fenv (AbsRawRealPVSLang.Add ae1 ae2) = AbsPVSLang.BinaryOp Op.AddOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Sub ae1 ae2) = AbsPVSLang.BinaryOp Op.SubOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Mul ae1 ae2) = AbsPVSLang.BinaryOp Op.MulOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Div ae1 ae2) = AbsPVSLang.BinaryOp Op.DivOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Pow ae1 ae2) = AbsPVSLang.BinaryOp Op.PowOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Mod1 ae1 ae2) = AbsPVSLang.BinaryOp Op.ModOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Mod2 ae1 ae2) = AbsPVSLang.BinaryOp Op.ModOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr _ _ (AbsRawRealPVSLang.Neg (AbsRawRealPVSLang.Int i)) = AbsPVSLang.Int (-i)
raw2AExpr _ _ (AbsRawRealPVSLang.Neg (AbsRawRealPVSLang.Rat r)) = AbsPVSLang.Rat (-(toRational r))
raw2AExpr env fenv (AbsRawRealPVSLang.Neg ae) = AbsPVSLang.UnaryOp Op.NegOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Floor ae) = AbsPVSLang.UnaryOp Op.FloorOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Sqrt ae) = AbsPVSLang.UnaryOp Op.SqrtOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Abs ae) = AbsPVSLang.UnaryOp Op.AbsOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Sin ae) = AbsPVSLang.UnaryOp Op.SinOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Cos ae) = AbsPVSLang.UnaryOp Op.CosOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Tan ae) = AbsPVSLang.UnaryOp Op.TanOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.ASin ae) = AbsPVSLang.UnaryOp Op.AsinOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.ACos ae) = AbsPVSLang.UnaryOp Op.AcosOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.ATan ae) = AbsPVSLang.UnaryOp Op.AtanOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Ln ae) = AbsPVSLang.UnaryOp Op.LnOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Exp ae) = AbsPVSLang.UnaryOp Op.ExpoOp (raw2AExpr env fenv ae)
raw2AExpr _ _ (AbsRawRealPVSLang.Int i) = AbsPVSLang.Int i
raw2AExpr _ _ (AbsRawRealPVSLang.Rat d) = AbsPVSLang.Rat (toRational d)
raw2AExpr env _ (AbsRawRealPVSLang.Var (AbsRawRealPVSLang.Id x)) = AbsPVSLang.Var fp x
where
fp = fromMaybe (error $ "raw2FAExpr: variable " ++ show x ++ " not found in " ++ show env ++ ".")
(lookup x env)
raw2AExpr env fenv (FCallN (AbsRawRealPVSLang.Id f) actArgs) =
case lookup f fenv of
Just Boolean -> error "raw2AExpr: Numerical function expected."
Just fp -> AbsPVSLang.EFun f fp (map (raw2AExpr env fenv) actArgs)
Nothing -> case lookup f env of
Just (Array fp size) -> AbsPVSLang.ArrayElem fp size f idx
_ -> error $ "raw2FAExpr: something went wrong "++ show f ++ " is not an array or function."
where
idx = case actArgs of
[i] -> raw2AExpr env fenv i
_ -> error "raw2FAExpr: index should be unique."
raw2AExpr _ _ Pi1 = error "Constant Pi not supported, use, for instance, 3.14"
raw2AExpr _ _ Pi2 = error "Constant Pi not supported, use, for instance, 3.14"
raw2AExpr _ _ ae = error $ "Something went wrong: arithmetic expression expected but got " ++ show ae ++ "."
raw2BExprStm :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.Expr -> AbsPVSLang.BExprStm
raw2BExprStm env fenv (AbsRawRealPVSLang.Let letElems stm)
= RBLet letList (raw2BExprStm newenv fenv stm)
where
(newenv,letList) = foldr aux_fold (env,[]) letElems
aux_fold letElem (accEnv,elems) = (env',elems ++ [newLetElem])
where
newLetElem = raw2LetElem accEnv fenv letElem
env' = (letVar newLetElem, letType newLetElem):accEnv
raw2BExprStm env fenv (AbsRawRealPVSLang.If be thenSmt elseStm)
= RBIte (raw2BExpr env fenv be) (raw2BExprStm env fenv thenSmt)
(raw2BExprStm env fenv elseStm)
raw2BExprStm env fenv (AbsRawRealPVSLang.ListIf be stmThen listElsif elseStm) =
RBListIte ((raw2BExpr env fenv be,raw2BExprStm env fenv stmThen) : map (raw2BElsif env fenv) listElsif) (raw2BExprStm env fenv elseStm)
raw2BExprStm env fenv be = RBExpr $ raw2BExpr env fenv be
raw2BExpr :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.Expr -> AbsPVSLang.BExpr
raw2BExpr env fenv (AbsRawRealPVSLang.Or be1 be2) = AbsPVSLang.Or (raw2BExpr env fenv be1) (raw2BExpr env fenv be2)
raw2BExpr env fenv (AbsRawRealPVSLang.And be1 be2) = AbsPVSLang.And (raw2BExpr env fenv be1) (raw2BExpr env fenv be2)
raw2BExpr env fenv (AbsRawRealPVSLang.Not be) = AbsPVSLang.Not (raw2BExpr env fenv be)
raw2BExpr env fenv (AbsRawRealPVSLang.Eq ae1 ae2) = AbsPVSLang.Rel Op.Eq (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr env fenv (AbsRawRealPVSLang.Neq ae1 ae2) = AbsPVSLang.Rel Op.Neq (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr env fenv (AbsRawRealPVSLang.Lt ae1 ae2) = AbsPVSLang.Rel Op.Lt (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr env fenv (AbsRawRealPVSLang.LtE ae1 ae2) = AbsPVSLang.Rel Op.LtE (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr env fenv (AbsRawRealPVSLang.Gt ae1 ae2) = AbsPVSLang.Rel Op.Gt (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr env fenv (AbsRawRealPVSLang.GtE ae1 ae2) = AbsPVSLang.Rel Op.GtE (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr _ _ AbsRawRealPVSLang.BTrue = AbsPVSLang.BTrue
raw2BExpr _ _ AbsRawRealPVSLang.BFalse = AbsPVSLang.BFalse
raw2BExpr env fenv (FCallN (Id f) args) =
case lookup f fenv of
Just Boolean -> AbsPVSLang.EPred f (map (raw2AExpr env fenv) args)
Just _ -> error "raw2BExpr: Boolean function expected."
Nothing -> error $ "raw2BExpr: something went wrong "++ show f ++ " is not a predicate."
raw2BExpr _ _ be = error $ "Something went wrong: boolean expression expected but got " ++ show be ++ "."
| null | https://raw.githubusercontent.com/nasa/PRECiSA/91e1e7543c5888ad5fb123d3462f71d085b99741/PRECiSA/src/MapRealPVSLangAST.hs | haskell | Notices:
Disclaimers | Copyright 2020 United States Government as represented by the Administrator of the National Aeronautics and Space Administration . All Rights Reserved .
No Warranty : THE SUBJECT SOFTWARE IS PROVIDED " AS IS " WITHOUT ANY WARRANTY OF ANY KIND , EITHER EXPRESSED , IMPLIED , OR STATUTORY , INCLUDING , BUT NOT LIMITED TO , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL CONFORM TO SPECIFICATIONS , ANY IMPLIED WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE , OR FREEDOM FROM INFRINGEMENT , ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL BE ERROR FREE , OR ANY WARRANTY THAT DOCUMENTATION , IF PROVIDED , WILL CONFORM TO THE SUBJECT SOFTWARE . THIS AGREEMENT DOES NOT , IN ANY MANNER , CONSTITUTE AN ENDORSEMENT BY GOVERNMENT AGENCY OR ANY PRIOR RECIPIENT OF ANY RESULTS , RESULTING DESIGNS , HARDWARE , SOFTWARE PRODUCTS OR ANY OTHER APPLICATIONS RESULTING FROM USE OF THE SUBJECT SOFTWARE . FURTHER , GOVERNMENT AGENCY DISCLAIMS ALL WARRANTIES AND LIABILITIES REGARDING THIRD - PARTY SOFTWARE , IF PRESENT IN THE ORIGINAL SOFTWARE , AND DISTRIBUTES IT " AS IS . "
Waiver and Indemnity : RECIPIENT AGREES TO WAIVE ANY AND ALL CLAIMS AGAINST THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT . IF RECIPIENT 'S USE OF THE SUBJECT SOFTWARE RESULTS IN ANY LIABILITIES , DEMANDS , DAMAGES , EXPENSES OR LOSSES ARISING FROM SUCH USE , INCLUDING ANY DAMAGES FROM PRODUCTS BASED ON , OR RESULTING FROM , RECIPIENT 'S USE OF THE SUBJECT SOFTWARE , RECIPIENT SHALL INDEMNIFY AND HOLD HARMLESS THE UNITED STATES GOVERNMENT , ITS CONTRACTORS AND SUBCONTRACTORS , AS WELL AS ANY PRIOR RECIPIENT , TO THE EXTENT PERMITTED BY LAW . RECIPIENT 'S SOLE REMEDY FOR ANY SUCH MATTER SHALL BE THE IMMEDIATE , UNILATERAL TERMINATION OF THIS AGREEMENT .
module MapRealPVSLangAST
where
import AbsRawRealPVSLang
import AbsPVSLang
import Common.TypesUtils
import Data.Maybe(fromMaybe)
import ErrM
import PVSTypes
import qualified Operators as Op
import Parser.ParRawRealPVSLang
import Parser.LexRawRealPVSLang
type VarTypeEnv = [(String, PVSType)]
type FunTypeEnv = [(String, PVSType)]
namePVSRealTheory :: AbsRawRealPVSLang.Program -> String
namePVSRealTheory (Prog (Id name) _ _ _ _) = name
namePVSRealTheory (ProgImp (Id name) _ _ _) = name
rawparserRealPVS :: String -> Err AbsRawRealPVSLang.Program
rawparserRealPVS = pProgram . tokens
raw2Id :: AbsRawRealPVSLang.Id -> VarName
raw2Id (AbsRawRealPVSLang.Id x) = x
raw2FPType :: AbsRawRealPVSLang.Type -> PVSType
raw2FPType TypeInt = TInt
raw2FPType TypeInteger = TInt
raw2FPType TypeReal = Real
raw2FPType TypePosNat = TInt
raw2FPType (TypeBelow _) = TInt
raw2FPType TypeBool = Boolean
raw2FPType (TypeArrayInteger t) = Array (raw2FPType t) Nothing
raw2FPType (TypeArrayInt t) = Array (raw2FPType t) Nothing
raw2FPType (TypeArrayBelow (AbsRawRealPVSLang.Int n) t) = Array (raw2FPType t) (Just (ArraySizeInt n))
raw2FPType (TypeArrayBelow (AbsRawRealPVSLang.Var (AbsRawRealPVSLang.Id x)) t) = Array (raw2FPType t) (Just (ArraySizeVar x))
raw2FPType t = error $ "raw2FPType: unexpected value " ++ show t ++ "."
raw2RealProg :: AbsRawRealPVSLang.Program -> AbsPVSLang.RProgram
raw2RealProg (AbsRawRealPVSLang.Prog _ _ _ listDecl _) = raw2Decsl (map retTypeFun listDecl) listDecl
raw2RealProg (AbsRawRealPVSLang.ProgImp _ _ listDecl _) = raw2Decsl (map retTypeFun listDecl) listDecl
retTypeFun :: AbsRawRealPVSLang.Decl -> (String, PVSType)
retTypeFun (Decl0 (AbsRawRealPVSLang.Id f) t _) = (f, raw2FPType t)
retTypeFun (DeclN (AbsRawRealPVSLang.Id f) _ t _) = (f, raw2FPType t)
retTypeFun (DeclRec (AbsRawRealPVSLang.Id f) _ t _) = (f, raw2FPType t)
raw2Decsl :: FunTypeEnv -> [AbsRawRealPVSLang.Decl] -> [AbsPVSLang.RDecl]
raw2Decsl fenv = map (raw2Decl fenv)
raw2Decl :: FunTypeEnv -> AbsRawRealPVSLang.Decl -> AbsPVSLang.RDecl
raw2Decl fenv (DeclN f rawArgs TypeBool expr) = RPred (raw2Id f) args (raw2BExprStm env fenv expr)
where
args = raw2Args rawArgs
env = map mapArg2Pair args
raw2Decl fenv (Decl0 f TypeBool expr) = RPred (raw2Id f) [] (raw2BExprStm [] fenv expr)
raw2Decl fenv (DeclN f rawArgs t stm) = RDecl (raw2FPType t) (raw2Id f) args (raw2AExpr env fenv stm)
where
args = raw2Args rawArgs
env = map mapArg2Pair args
raw2Decl fenv (DeclRec f rawArgs t stm) = RDecl (raw2FPType t) (raw2Id f) args (raw2AExpr env fenv stm)
where
args = raw2Args rawArgs
env = map mapArg2Pair args
raw2Decl fenv (Decl0 f t stm) = RDecl (raw2FPType t) (raw2Id f) [] (raw2AExpr [] fenv stm)
raw2Args :: AbsRawRealPVSLang.Args -> [AbsPVSLang.Arg]
raw2Args (FArgs args) = concatMap raw2Arg args
raw2Args (FArgsNoType _) = error "Arguments have no type."
raw2Arg :: AbsRawRealPVSLang.Arg -> [AbsPVSLang.Arg]
raw2Arg (FArg xs t) = map (raw2ArgWithType t) xs
raw2Arg (FArgSubrange xs _) = map (raw2ArgWithType TypeInteger) xs
raw2Arg (FArgGuard xs t _) = map (raw2ArgWithType t) xs
raw2ArgWithType :: Type -> AbsRawRealPVSLang.Id -> AbsPVSLang.Arg
raw2ArgWithType t x = AbsPVSLang.Arg (raw2Id x) (raw2FPType t)
raw2Elsif :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.ElsIf -> (AbsPVSLang.BExpr, AbsPVSLang.AExpr)
raw2Elsif env fenv (ElsIf fbexpr stm) = (raw2BExpr env fenv fbexpr, raw2AExpr env fenv stm)
raw2BElsif :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.ElsIf -> (AbsPVSLang.BExpr, AbsPVSLang.BExprStm)
raw2BElsif env fenv (ElsIf fbexpr stm) = (raw2BExpr env fenv fbexpr, raw2BExprStm env fenv stm)
raw2LetElem :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.LetElem -> AbsPVSLang.LetElem
raw2LetElem env fenv (AbsRawRealPVSLang.LetElem x rawExpr)
| (isIntAExpr expr) = AbsPVSLang.LetElem {letVar = raw2Id x, letType = TInt, letExpr = expr}
| otherwise = AbsPVSLang.LetElem {letVar = raw2Id x, letType = Real, letExpr = expr}
where
expr = raw2AExpr env fenv rawExpr
raw2LetElem env fenv (LetElemType x t rawExpr) = AbsPVSLang.LetElem {letVar = raw2Id x
,letType = raw2FPType t
,letExpr = raw2AExpr env fenv rawExpr}
raw2AExpr :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.Expr -> AbsPVSLang.AExpr
raw2AExpr env fenv (AbsRawRealPVSLang.Let letElems stm)
= RLet letList (raw2AExpr newenv fenv stm)
where
(newenv,letList) = foldl aux_fold (env,[]) letElems
aux_fold (accEnv,elems) letElem = (env',elems ++ [newLetElem])
where
newLetElem = raw2LetElem accEnv fenv letElem
env' = (letVar newLetElem, letType newLetElem):accEnv
raw2AExpr env fenv (AbsRawRealPVSLang.For retType startIdx endIdx initValueAcc idxVarId@(AbsRawRealPVSLang.Id idx) _ _ accVarId@(AbsRawRealPVSLang.Id acc) accType forBody)
= if retType == accType
then RForLoop fp
(raw2AExpr env fenv startIdx)
(raw2AExpr env fenv endIdx)
(raw2AExpr env fenv initValueAcc)
(raw2Id idxVarId)
(raw2Id accVarId)
(raw2AExpr ((idx,TInt):(acc,fp):env) fenv forBody)
else error "Type mismatch for for loop."
where
fp = raw2FPType retType
raw2AExpr env fenv (AbsRawRealPVSLang.If be thenSmt elseStm) = RIte (raw2BExpr env fenv be) (raw2AExpr env fenv thenSmt) (raw2AExpr env fenv elseStm)
raw2AExpr env fenv (AbsRawRealPVSLang.ListIf be stmThen listElsif elseStm) =
RListIte ((raw2BExpr env fenv be,raw2AExpr env fenv stmThen) : map (raw2Elsif env fenv) listElsif) (raw2AExpr env fenv elseStm)
raw2AExpr _ _ AbsRawRealPVSLang.UnstWarning = RUnstWarning
raw2AExpr env fenv (AbsRawRealPVSLang.Add ae1 ae2) = AbsPVSLang.BinaryOp Op.AddOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Sub ae1 ae2) = AbsPVSLang.BinaryOp Op.SubOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Mul ae1 ae2) = AbsPVSLang.BinaryOp Op.MulOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Div ae1 ae2) = AbsPVSLang.BinaryOp Op.DivOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Pow ae1 ae2) = AbsPVSLang.BinaryOp Op.PowOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Mod1 ae1 ae2) = AbsPVSLang.BinaryOp Op.ModOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr env fenv (AbsRawRealPVSLang.Mod2 ae1 ae2) = AbsPVSLang.BinaryOp Op.ModOp (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2AExpr _ _ (AbsRawRealPVSLang.Neg (AbsRawRealPVSLang.Int i)) = AbsPVSLang.Int (-i)
raw2AExpr _ _ (AbsRawRealPVSLang.Neg (AbsRawRealPVSLang.Rat r)) = AbsPVSLang.Rat (-(toRational r))
raw2AExpr env fenv (AbsRawRealPVSLang.Neg ae) = AbsPVSLang.UnaryOp Op.NegOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Floor ae) = AbsPVSLang.UnaryOp Op.FloorOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Sqrt ae) = AbsPVSLang.UnaryOp Op.SqrtOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Abs ae) = AbsPVSLang.UnaryOp Op.AbsOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Sin ae) = AbsPVSLang.UnaryOp Op.SinOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Cos ae) = AbsPVSLang.UnaryOp Op.CosOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Tan ae) = AbsPVSLang.UnaryOp Op.TanOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.ASin ae) = AbsPVSLang.UnaryOp Op.AsinOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.ACos ae) = AbsPVSLang.UnaryOp Op.AcosOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.ATan ae) = AbsPVSLang.UnaryOp Op.AtanOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Ln ae) = AbsPVSLang.UnaryOp Op.LnOp (raw2AExpr env fenv ae)
raw2AExpr env fenv (AbsRawRealPVSLang.Exp ae) = AbsPVSLang.UnaryOp Op.ExpoOp (raw2AExpr env fenv ae)
raw2AExpr _ _ (AbsRawRealPVSLang.Int i) = AbsPVSLang.Int i
raw2AExpr _ _ (AbsRawRealPVSLang.Rat d) = AbsPVSLang.Rat (toRational d)
raw2AExpr env _ (AbsRawRealPVSLang.Var (AbsRawRealPVSLang.Id x)) = AbsPVSLang.Var fp x
where
fp = fromMaybe (error $ "raw2FAExpr: variable " ++ show x ++ " not found in " ++ show env ++ ".")
(lookup x env)
raw2AExpr env fenv (FCallN (AbsRawRealPVSLang.Id f) actArgs) =
case lookup f fenv of
Just Boolean -> error "raw2AExpr: Numerical function expected."
Just fp -> AbsPVSLang.EFun f fp (map (raw2AExpr env fenv) actArgs)
Nothing -> case lookup f env of
Just (Array fp size) -> AbsPVSLang.ArrayElem fp size f idx
_ -> error $ "raw2FAExpr: something went wrong "++ show f ++ " is not an array or function."
where
idx = case actArgs of
[i] -> raw2AExpr env fenv i
_ -> error "raw2FAExpr: index should be unique."
raw2AExpr _ _ Pi1 = error "Constant Pi not supported, use, for instance, 3.14"
raw2AExpr _ _ Pi2 = error "Constant Pi not supported, use, for instance, 3.14"
raw2AExpr _ _ ae = error $ "Something went wrong: arithmetic expression expected but got " ++ show ae ++ "."
raw2BExprStm :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.Expr -> AbsPVSLang.BExprStm
raw2BExprStm env fenv (AbsRawRealPVSLang.Let letElems stm)
= RBLet letList (raw2BExprStm newenv fenv stm)
where
(newenv,letList) = foldr aux_fold (env,[]) letElems
aux_fold letElem (accEnv,elems) = (env',elems ++ [newLetElem])
where
newLetElem = raw2LetElem accEnv fenv letElem
env' = (letVar newLetElem, letType newLetElem):accEnv
raw2BExprStm env fenv (AbsRawRealPVSLang.If be thenSmt elseStm)
= RBIte (raw2BExpr env fenv be) (raw2BExprStm env fenv thenSmt)
(raw2BExprStm env fenv elseStm)
raw2BExprStm env fenv (AbsRawRealPVSLang.ListIf be stmThen listElsif elseStm) =
RBListIte ((raw2BExpr env fenv be,raw2BExprStm env fenv stmThen) : map (raw2BElsif env fenv) listElsif) (raw2BExprStm env fenv elseStm)
raw2BExprStm env fenv be = RBExpr $ raw2BExpr env fenv be
raw2BExpr :: VarTypeEnv -> FunTypeEnv -> AbsRawRealPVSLang.Expr -> AbsPVSLang.BExpr
raw2BExpr env fenv (AbsRawRealPVSLang.Or be1 be2) = AbsPVSLang.Or (raw2BExpr env fenv be1) (raw2BExpr env fenv be2)
raw2BExpr env fenv (AbsRawRealPVSLang.And be1 be2) = AbsPVSLang.And (raw2BExpr env fenv be1) (raw2BExpr env fenv be2)
raw2BExpr env fenv (AbsRawRealPVSLang.Not be) = AbsPVSLang.Not (raw2BExpr env fenv be)
raw2BExpr env fenv (AbsRawRealPVSLang.Eq ae1 ae2) = AbsPVSLang.Rel Op.Eq (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr env fenv (AbsRawRealPVSLang.Neq ae1 ae2) = AbsPVSLang.Rel Op.Neq (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr env fenv (AbsRawRealPVSLang.Lt ae1 ae2) = AbsPVSLang.Rel Op.Lt (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr env fenv (AbsRawRealPVSLang.LtE ae1 ae2) = AbsPVSLang.Rel Op.LtE (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr env fenv (AbsRawRealPVSLang.Gt ae1 ae2) = AbsPVSLang.Rel Op.Gt (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr env fenv (AbsRawRealPVSLang.GtE ae1 ae2) = AbsPVSLang.Rel Op.GtE (raw2AExpr env fenv ae1) (raw2AExpr env fenv ae2)
raw2BExpr _ _ AbsRawRealPVSLang.BTrue = AbsPVSLang.BTrue
raw2BExpr _ _ AbsRawRealPVSLang.BFalse = AbsPVSLang.BFalse
raw2BExpr env fenv (FCallN (Id f) args) =
case lookup f fenv of
Just Boolean -> AbsPVSLang.EPred f (map (raw2AExpr env fenv) args)
Just _ -> error "raw2BExpr: Boolean function expected."
Nothing -> error $ "raw2BExpr: something went wrong "++ show f ++ " is not a predicate."
raw2BExpr _ _ be = error $ "Something went wrong: boolean expression expected but got " ++ show be ++ "."
|
7f17158d4c2eb5a371684193efda075555c969b4f76b16f7adfb3eba6f3cf483 | niquola/reframe-template | core.cljs | (ns ui.core
(:require-macros [reagent.ratom :refer [reaction]])
(:require
[clojure.string :as str]
[cljsjs.react]
[reagent.core :as reagent]
[re-frame.core :as rf]
[frames.routing]
[frames.xhr]
[frames.debounce]
[frames.cookies :as cookies]
[frames.openid :as openid]
[frames.redirect :as redirect]
[ui.db]
;; [ui.pages.core :as pages]
[ui.pages :as pages]
[ui.patients.core]
[ui.coverage.core]
[ui.database.core]
[ui.dashboard.core]
[ui.user.core]
[ui.routes :as routes]
[ui.layout :as layout]
[ui.fhir :as fhir]))
(def open-id-keys
{:client-id "646067746089-6ujhvnv1bi8qvd7due8hdp3ob9qtcumv.apps.googleusercontent.com"
:uri ""})
;; (def base-url "")
(def base-url "")
;; (def open-id-keys
;; {:client-id "khI6JcdsQ3dgHMdWJnej0OZjr5DXGWRU"
;; :uri ""})
;; this is the root component wich switch pages
;; using current-route key from database
(defn current-page []
(let [{page :match params :params} @(rf/subscribe [:route-map/current-route])]
(if page
(if-let [cmp (get @pages/pages page)]
[:div [cmp params]]
[:div.not-found (str "Page not found [" (str page) "]" )])
[:div.not-found (str "Route not found ")])))
this is first event , which should initialize
;; application
;; handler use coefects cookies & openid to check for
;; user in cookies or in location string (after OpenId redirect)
(rf/reg-event-fx
::initialize
[(rf/inject-cofx ::cookies/get :auth)
(rf/inject-cofx ::openid/jwt :auth)]
(fn [{jwt :jwt {auth :auth} :cookie :as cofx} _]
(if (and (nil? jwt) (nil? auth))
;; if no user we redirect to openid endpoint
;; for SignIn
{::redirect/page-redirect
{:uri (:uri open-id-keys)
:params {:redirect_uri (first (str/split (.. js/window -location -href) #"#"))
:client_id (:client-id open-id-keys)
:scope "openid profile email"
:nonce "ups"
:response_type "id_token"}}}
{:dispatch [:route-map/init routes/routes]
::cookies/set {:key :auth :value (or jwt auth)}
:db (merge (:db cofx) {:auth (or jwt auth)})})))
(defn- mount-root []
(reagent/render
[layout/layout [current-page]]
(.getElementById js/document "app")))
(defn init! []
(rf/dispatch [::initialize])
(mount-root))
| null | https://raw.githubusercontent.com/niquola/reframe-template/6482afabc1967d2b6cb39ddc3fc0158075535700/srcs/ui/core.cljs | clojure | [ui.pages.core :as pages]
(def base-url "")
(def open-id-keys
{:client-id "khI6JcdsQ3dgHMdWJnej0OZjr5DXGWRU"
:uri ""})
this is the root component wich switch pages
using current-route key from database
application
handler use coefects cookies & openid to check for
user in cookies or in location string (after OpenId redirect)
if no user we redirect to openid endpoint
for SignIn | (ns ui.core
(:require-macros [reagent.ratom :refer [reaction]])
(:require
[clojure.string :as str]
[cljsjs.react]
[reagent.core :as reagent]
[re-frame.core :as rf]
[frames.routing]
[frames.xhr]
[frames.debounce]
[frames.cookies :as cookies]
[frames.openid :as openid]
[frames.redirect :as redirect]
[ui.db]
[ui.pages :as pages]
[ui.patients.core]
[ui.coverage.core]
[ui.database.core]
[ui.dashboard.core]
[ui.user.core]
[ui.routes :as routes]
[ui.layout :as layout]
[ui.fhir :as fhir]))
(def open-id-keys
{:client-id "646067746089-6ujhvnv1bi8qvd7due8hdp3ob9qtcumv.apps.googleusercontent.com"
:uri ""})
(def base-url "")
(defn current-page []
(let [{page :match params :params} @(rf/subscribe [:route-map/current-route])]
(if page
(if-let [cmp (get @pages/pages page)]
[:div [cmp params]]
[:div.not-found (str "Page not found [" (str page) "]" )])
[:div.not-found (str "Route not found ")])))
this is first event , which should initialize
(rf/reg-event-fx
::initialize
[(rf/inject-cofx ::cookies/get :auth)
(rf/inject-cofx ::openid/jwt :auth)]
(fn [{jwt :jwt {auth :auth} :cookie :as cofx} _]
(if (and (nil? jwt) (nil? auth))
{::redirect/page-redirect
{:uri (:uri open-id-keys)
:params {:redirect_uri (first (str/split (.. js/window -location -href) #"#"))
:client_id (:client-id open-id-keys)
:scope "openid profile email"
:nonce "ups"
:response_type "id_token"}}}
{:dispatch [:route-map/init routes/routes]
::cookies/set {:key :auth :value (or jwt auth)}
:db (merge (:db cofx) {:auth (or jwt auth)})})))
(defn- mount-root []
(reagent/render
[layout/layout [current-page]]
(.getElementById js/document "app")))
(defn init! []
(rf/dispatch [::initialize])
(mount-root))
|
7b82c920dbaf1af79705c9283e5491af86400681f015659ddbfd244417ca319a | madvas/catlantis | detail.cljs | (ns catlantis.ios.screens.detail
(:require [catlantis.shared.ui :as ui]
[re-frame.core :as rf]
[print.foo :as pf :include-macros true]
[reagent.core :as r]))
(declare styles)
(def close-icon (js/require "./images/close.png"))
(def star-icon (js/require "./images/star.png"))
(def star-icon-full (js/require "./images/star_selected.png"))
(defn btn-icon [icon on-press tint-color]
[ui/touchable-opacity
{:on-press on-press
:style (:close-btn styles)}
[ui/image
{:source icon
:style {:tint-color (ui/color tint-color)}}]])
(def detail
{:component
(r/create-class
{:reagent-render
(fn []
(let [detail (rf/subscribe [:detail])
{:keys [image-selected random-fact]} @detail
{:keys [url source-url id favorite?] :as image} image-selected]
[ui/scroll-view
{:style (:container styles)}
[ui/view
{:style (:buttons-wrap styles)}
[btn-icon close-icon #(rf/dispatch [:nav/pop]) :white]
[btn-icon (if favorite? star-icon-full star-icon)
#(rf/dispatch [:image-favorite image favorite?]) :yellow700]]
[ui/scroll-view
{:maximum-zoom-scale 2.5}
[ui/touchable-opacity
{:on-press #(rf/dispatch [:nav/pop])}
[ui/image-progress
{:source {:uri url}
:resize-mode :contain
:style (:image-detail styles)}]]]
[ui/view
{:style (:text-wrap styles)}
[ui/text
{:style (:image-text styles)}
random-fact]
[ui/text
{:on-press #(ui/open-url source-url)
:style (:source-link styles)}
"Image Source"]]
]))})
:config
{:screen :detail
:screen-type :light-box
:title ""
:navigator-buttons {:right-buttons []
:left-buttons [{:icon close-icon
:id :close}]}
:style {:background-blur "dark"}}})
(def styles
(ui/create-stylesheet
{:container {:flex 1
:background-color :transparent
:flex-direction :column}
:text {:color "white" :text-align "center" :font-weight "bold"}
:image-detail {:flex 1
:height "60%"
:width "100%"
:margin-top 20}
:buttons-wrap {:flex-direction "row"
:justify-content :space-between
:margin-top 0
:padding-left 20
:padding-right 20}
:text-wrap {:justify-content :center
:align-items :center
:margin-top 20}
:source-link {:text-align :right
:color (ui/color :grey400)
:width "90%"
:height 20
:font-size 12}
:image-text {:text-align :center
:color (ui/color :white)
:width "90%"
:height "15%"}})) | null | https://raw.githubusercontent.com/madvas/catlantis/b8880ec2cab27ecfcb3c0ab30e2bbc7767db0d1c/src/catlantis/ios/screens/detail.cljs | clojure | (ns catlantis.ios.screens.detail
(:require [catlantis.shared.ui :as ui]
[re-frame.core :as rf]
[print.foo :as pf :include-macros true]
[reagent.core :as r]))
(declare styles)
(def close-icon (js/require "./images/close.png"))
(def star-icon (js/require "./images/star.png"))
(def star-icon-full (js/require "./images/star_selected.png"))
(defn btn-icon [icon on-press tint-color]
[ui/touchable-opacity
{:on-press on-press
:style (:close-btn styles)}
[ui/image
{:source icon
:style {:tint-color (ui/color tint-color)}}]])
(def detail
{:component
(r/create-class
{:reagent-render
(fn []
(let [detail (rf/subscribe [:detail])
{:keys [image-selected random-fact]} @detail
{:keys [url source-url id favorite?] :as image} image-selected]
[ui/scroll-view
{:style (:container styles)}
[ui/view
{:style (:buttons-wrap styles)}
[btn-icon close-icon #(rf/dispatch [:nav/pop]) :white]
[btn-icon (if favorite? star-icon-full star-icon)
#(rf/dispatch [:image-favorite image favorite?]) :yellow700]]
[ui/scroll-view
{:maximum-zoom-scale 2.5}
[ui/touchable-opacity
{:on-press #(rf/dispatch [:nav/pop])}
[ui/image-progress
{:source {:uri url}
:resize-mode :contain
:style (:image-detail styles)}]]]
[ui/view
{:style (:text-wrap styles)}
[ui/text
{:style (:image-text styles)}
random-fact]
[ui/text
{:on-press #(ui/open-url source-url)
:style (:source-link styles)}
"Image Source"]]
]))})
:config
{:screen :detail
:screen-type :light-box
:title ""
:navigator-buttons {:right-buttons []
:left-buttons [{:icon close-icon
:id :close}]}
:style {:background-blur "dark"}}})
(def styles
(ui/create-stylesheet
{:container {:flex 1
:background-color :transparent
:flex-direction :column}
:text {:color "white" :text-align "center" :font-weight "bold"}
:image-detail {:flex 1
:height "60%"
:width "100%"
:margin-top 20}
:buttons-wrap {:flex-direction "row"
:justify-content :space-between
:margin-top 0
:padding-left 20
:padding-right 20}
:text-wrap {:justify-content :center
:align-items :center
:margin-top 20}
:source-link {:text-align :right
:color (ui/color :grey400)
:width "90%"
:height 20
:font-size 12}
:image-text {:text-align :center
:color (ui/color :white)
:width "90%"
:height "15%"}})) | |
5c00f66082ad6fd5a5e60f455c1d8406b3b23ff7fbdeb37d830ba8a9cdf2af21 | xsc/kithara | test.clj | (ns kithara.test
(:require [kithara.test
[fixtures :as fix]
[property :as property]
[stack :as stack]]
[potemkin :refer [import-vars]]))
(import-vars
[kithara.test.fixtures
connection-config
exchange-name
publish!
use-rabbitmq-fixtures]
[kithara.test.property
consumer-property]
[kithara.test.stack
optional-stack-elements
stack-gen
stack-elements])
| null | https://raw.githubusercontent.com/xsc/kithara/3394a9e9ef5e6e605637a74e070c7d24bfaf19cc/test/kithara/test.clj | clojure | (ns kithara.test
(:require [kithara.test
[fixtures :as fix]
[property :as property]
[stack :as stack]]
[potemkin :refer [import-vars]]))
(import-vars
[kithara.test.fixtures
connection-config
exchange-name
publish!
use-rabbitmq-fixtures]
[kithara.test.property
consumer-property]
[kithara.test.stack
optional-stack-elements
stack-gen
stack-elements])
| |
593d3df755632d1e706e256029eefebb3915f3f0d9de7bf553c48fab7fb09268 | tisnik/clojure-examples | core_test.clj | (ns ircbot2.core-test
(:require [clojure.test :refer :all]
[ircbot2.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/984af4a3e20d994b4f4989678ee1330e409fdae3/ircbot2/test/ircbot2/core_test.clj | clojure | (ns ircbot2.core-test
(:require [clojure.test :refer :all]
[ircbot2.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| |
4f28fc06f80cb3e9063c86d69d04db32fe97c6f001fb26e8eb5da5b90c9bfad8 | ravichugh/djs | test03.ml |
#use "tests/functional/arrays/__arrays.ml"
val tup4 :: {(and (v::Arr({(or (Int v) (Bool v) (Str v))}))
(packed v) (= (len v) 4)
(Int (sel v 0))
(Bool (sel v 1)))}
let _ :: Int =
([{(or (Int v) (Bool v) (Str v))}] geti) tup4 0 in
let _ :: Bool =
([{(or (Int v) (Bool v) (Str v))}] geti) tup4 1 in
let _ :: {(or (Int v) (Bool v) (Str v))} =
([{(or (Int v) (Bool v) (Str v))}] geti) tup4 2 in
let _ :: {(or (Int v) (Bool v) (Str v))} =
([{(or (Int v) (Bool v) (Str v))}] geti) tup4 3 in
let _ :: {(= v undefined)} =
([{(or (Int v) (Bool v) (Str v))}] geti) tup4 4 in
0
| null | https://raw.githubusercontent.com/ravichugh/djs/c4a13e06adb3e0945f39966523a4d944448c1941/tests/functional/arrays/test03.ml | ocaml |
#use "tests/functional/arrays/__arrays.ml"
val tup4 :: {(and (v::Arr({(or (Int v) (Bool v) (Str v))}))
(packed v) (= (len v) 4)
(Int (sel v 0))
(Bool (sel v 1)))}
let _ :: Int =
([{(or (Int v) (Bool v) (Str v))}] geti) tup4 0 in
let _ :: Bool =
([{(or (Int v) (Bool v) (Str v))}] geti) tup4 1 in
let _ :: {(or (Int v) (Bool v) (Str v))} =
([{(or (Int v) (Bool v) (Str v))}] geti) tup4 2 in
let _ :: {(or (Int v) (Bool v) (Str v))} =
([{(or (Int v) (Bool v) (Str v))}] geti) tup4 3 in
let _ :: {(= v undefined)} =
([{(or (Int v) (Bool v) (Str v))}] geti) tup4 4 in
0
| |
695f67ac8e8e877d8c21ca3217788ad142415fbe65595e5f95ed19a7eaebe41a | SKA-ScienceDataProcessor/RC | Types.hs | # LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE TemplateHaskell #
{-# LANGUAGE BangPatterns #-}
# LANGUAGE GeneralizedNewtypeDeriving #
{-# LANGUAGE DeriveDataTypeable, DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable, DeriveTraversable #-}
# LANGUAGE DeriveGeneric #
module DNA.Types where
import Control.Applicative
import Control.Monad.IO.Class
import Control.Monad.State (StateT)
import Control.Monad.Except
import Control.Monad.Reader
import Control.Distributed.Process
import Control.Distributed.Process.Serializable (Serializable)
import Data.Binary.Get
import Data.Binary.Put
import Data.Binary (Binary(..))
import Data.Typeable (Typeable)
import Data.Foldable (Foldable)
import Data.Traversable (Traversable)
import GHC.Generics (Generic)
----------------------------------------------------------------
MonadProcess
----------------------------------------------------------------
-- | Monad to which computations in the 'Process' could be lifted
class MonadIO m => MonadProcess m where
liftP :: Process a -> m a
instance MonadProcess Process where
liftP = id
instance MonadProcess m => MonadProcess (StateT s m) where
liftP = lift . liftP
instance MonadProcess m => MonadProcess (ExceptT e m) where
liftP = lift . liftP
instance MonadProcess m => MonadProcess (ReaderT r m) where
liftP = lift . liftP
----------------------------------------------------------------
-- Data types
----------------------------------------------------------------
-- | Rank of actor
newtype Rank = Rank Int
deriving (Show,Eq,Ord,Typeable,Binary)
-- | Size of group of proceesses
newtype GroupSize = GroupSize Int
deriving (Show,Eq,Ord,Typeable,Binary)
-- | ID of group of processes
newtype GroupID = GroupID Int
deriving (Show,Eq,Ord,Typeable,Binary)
-- | ID of actor
newtype ActorID = ActorID Int
deriving (Show,Eq,Ord,Typeable,Binary)
-- | ID of resourses
newtype Resources = Resources Int
deriving (Show,Eq,Ord,Typeable,Binary)
----------------------------------------------------------------
-- DNA data types
----------------------------------------------------------------
-- | Handle for node controlling process
newtype NCP = NCP { ncpPID :: ProcessId }
deriving (Show,Eq,Ord,Typeable,Generic,Binary)
-- | Handle for actor controlling process
newtype ACP = ACP { acpPID :: ProcessId }
deriving (Show,Eq,Ord,Typeable,Generic,Binary)
----------------------------------------------------------------
CAD & Node information
----------------------------------------------------------------
-- | Cluster architecture description. Nodes are arranged into rose
-- tree and it's polymorphic in
data CAD a = CAD a [CAD a]
deriving (Show,Typeable,Generic,Functor,Foldable,Traversable)
-- | Information about node. It's normally used in the CAD.
data NodeInfo = NodeInfo
{ nodeCP :: NCP -- ^ PID of controller process
, nodeParent :: Maybe NCP -- ^ PID of parent's controller process
, nodeID :: NodeId -- ^ Node ID
}
deriving (Show,Eq,Ord,Typeable,Generic)
data Location = Remote
| Local
deriving (Show,Eq,Ord,Typeable,Generic)
-- | Resources allocated to single process. It always have access to
-- node it owns and possibly list of other nodes.
data VirtualCAD = VirtualCAD Location NodeInfo [NodeInfo]
deriving (Show,Eq,Ord,Typeable,Generic)
| Parameters for ACP process
data ParamACP a = ParamACP
{ acpSelf :: Closure (Process ())
^ Closure for the DNA.DNA.runACP function . We have to pass it
-- explicitly since we cannot create it inside @runACP@.
, acpActorClosure :: a
-- ^ Closure for actor to run
, acpVCAD :: VirtualCAD
-- ^ Part of cluster allocated to the process
, acpActor :: ParamActor
-- ^ Parameters for actor
}
deriving (Show,Typeable,Generic)
-- | Parameter send to actor on startup
data ParamActor = ParamActor
{ actorParentACP :: ProcessId
-- ^ Destination to send channels to.
, actorRank :: Rank
-- ^ Rank of an actor
, actorGroupSize :: GroupSize
-- ^ Size of group of actors
}
deriving (Show,Typeable,Generic)
-- | Destination for actor computation
data Dest a
= SendLocally (SendPort a)
-- ^ Send result using using unsafe primitive
| SendRemote [SendPort a]
-- ^ Send result using standard primitives
deriving (Show,Typeable,Generic)
instance Binary a => Binary (CAD a)
instance Binary NodeInfo
instance Binary VirtualCAD
instance Binary Location
instance Binary a => Binary (ParamACP a)
instance Binary ParamActor
instance Serializable a => Binary (Dest a)
----------------------------------------------------------------
Shell actors
----------------------------------------------------------------
-- | Tag for single value.
--
-- * Receive: actor accept single value as parameter
-- * Send: actor produces single value as result
data Val a
deriving (Typeable)
-- | Tag for unordered group of values.
--
-- * Receive: ???
-- * Send: actor produces set of messages in arbitrary order.
data Grp a
deriving (Typeable)
-- | Tags for ordered set of values
data Scatter a
deriving (Typeable)
data MR a
deriving (Typeable)
-- | Way to encode
data ActorACP = SingleActor ACP
| ActorGroup GroupID
deriving (Show,Typeable,Generic)
instance Binary ActorACP
| Shell actor . It 's actor which has n't been connected anywhere .
data Shell a b = Shell
ActorACP
(RecvEnd a)
(SendEnd b)
deriving (Typeable,Generic)
-- Quadratic number of instances in number of type tags. Sigh
instance (Serializable a, Serializable b) => Binary (Shell (Val a) (Val b))
instance (Serializable a, Serializable b) => Binary (Shell (Val a) (Grp b))
instance (Serializable a, Serializable b) => Binary (Shell (Val a) (MR b))
--
instance (Serializable a, Serializable b) => Binary (Shell (Grp a) (Val b))
instance (Serializable a, Serializable b) => Binary (Shell (Grp a) (Grp b))
instance (Serializable a, Serializable b) => Binary (Shell (Grp a) (MR b))
--
instance (Serializable a, Serializable b) => Binary (Shell (Scatter a) (Val b))
instance (Serializable a, Serializable b) => Binary (Shell (Scatter a) (Grp b))
instance (Serializable a, Serializable b) => Binary (Shell (Scatter a) (MR b))
--
instance (Serializable a, Serializable b) => Binary (Shell (MR a) (Val b))
instance (Serializable a, Serializable b) => Binary (Shell (MR a) (Grp b))
instance (Serializable a, Serializable b) => Binary (Shell (MR a) (MR b))
-- | Describe how actor accepts
data RecvEnd a where
-- | Actor receives single value
RecvVal :: SendPort a
-> RecvEnd (Val a)
-- | Actor receives group of values
RecvGrp :: [SendPort a]
-> RecvEnd (Scatter a)
-- | Same value is broadcasted to all actors in group
RecvBroadcast :: RecvEnd (Scatter a)
-> RecvEnd (Val a)
-- | Actor(s) which reduces set of values
RecvReduce :: [(SendPort Int,SendPort a)]
-> RecvEnd (Grp a)
-- | Actors which reduce output of mappers
RecvMR :: [(SendPort Int, SendPort (Maybe a))]
-> RecvEnd (MR a)
deriving (Typeable)
-- | Description of send end of actor
data SendEnd a where
-- | Actor sends single value
SendVal :: SendPort (Dest a)
-> SendEnd (Val a)
-- | Actor sends group of values
SendGrp :: [SendPort (Dest a)]
-> SendEnd (Grp a)
-- | Actor sends group of streams
SendMR :: [SendPort [SendPort (Maybe a)]]
-> SendEnd (MR a)
deriving (Typeable)
instance (Typeable a, Binary a) => Binary (RecvEnd (Val a)) where
put (RecvVal p) = putWord8 1 >> put p
put (RecvBroadcast p) = putWord8 3 >> put p
get = do
t <- getWord8
case t of
1 -> RecvVal <$> get
3 -> RecvBroadcast <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (RecvEnd (Scatter a)) where
put (RecvGrp p ) = putWord8 2 >> put p
get = do
t <- getWord8
case t of
2 -> RecvGrp <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (RecvEnd (Grp a)) where
put (RecvReduce a) = putWord8 4 >> put a
get = do
t <- getWord8
case t of
4 -> RecvReduce <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (RecvEnd (MR a)) where
put (RecvMR a) = putWord8 5 >> put a
get = do
t <- getWord8
case t of
5 -> RecvMR <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (SendEnd (Val a)) where
put (SendVal ch) = putWord8 1 >> put ch
get = do
t <- getWord8
case t of
1 -> SendVal <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (SendEnd (Grp a)) where
put (SendGrp ch) = putWord8 2 >> put ch
get = do
t <- getWord8
case t of
2 -> SendGrp <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (SendEnd (MR a)) where
put (SendMR a) = putWord8 3 >> put a
get = do
t <- getWord8
case t of
3 -> SendMR <$> get
_ -> fail "Bad tag"
| null | https://raw.githubusercontent.com/SKA-ScienceDataProcessor/RC/1b5e25baf9204a9f7ef40ed8ee94a86cc6c674af/MS2/lib/DNA/Types.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE BangPatterns #
# LANGUAGE DeriveDataTypeable, DeriveFunctor #
# LANGUAGE DeriveFoldable, DeriveTraversable #
--------------------------------------------------------------
--------------------------------------------------------------
| Monad to which computations in the 'Process' could be lifted
--------------------------------------------------------------
Data types
--------------------------------------------------------------
| Rank of actor
| Size of group of proceesses
| ID of group of processes
| ID of actor
| ID of resourses
--------------------------------------------------------------
DNA data types
--------------------------------------------------------------
| Handle for node controlling process
| Handle for actor controlling process
--------------------------------------------------------------
--------------------------------------------------------------
| Cluster architecture description. Nodes are arranged into rose
tree and it's polymorphic in
| Information about node. It's normally used in the CAD.
^ PID of controller process
^ PID of parent's controller process
^ Node ID
| Resources allocated to single process. It always have access to
node it owns and possibly list of other nodes.
explicitly since we cannot create it inside @runACP@.
^ Closure for actor to run
^ Part of cluster allocated to the process
^ Parameters for actor
| Parameter send to actor on startup
^ Destination to send channels to.
^ Rank of an actor
^ Size of group of actors
| Destination for actor computation
^ Send result using using unsafe primitive
^ Send result using standard primitives
--------------------------------------------------------------
--------------------------------------------------------------
| Tag for single value.
* Receive: actor accept single value as parameter
* Send: actor produces single value as result
| Tag for unordered group of values.
* Receive: ???
* Send: actor produces set of messages in arbitrary order.
| Tags for ordered set of values
| Way to encode
Quadratic number of instances in number of type tags. Sigh
| Describe how actor accepts
| Actor receives single value
| Actor receives group of values
| Same value is broadcasted to all actors in group
| Actor(s) which reduces set of values
| Actors which reduce output of mappers
| Description of send end of actor
| Actor sends single value
| Actor sends group of values
| Actor sends group of streams | # LANGUAGE FlexibleInstances #
# LANGUAGE TemplateHaskell #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE DeriveGeneric #
module DNA.Types where
import Control.Applicative
import Control.Monad.IO.Class
import Control.Monad.State (StateT)
import Control.Monad.Except
import Control.Monad.Reader
import Control.Distributed.Process
import Control.Distributed.Process.Serializable (Serializable)
import Data.Binary.Get
import Data.Binary.Put
import Data.Binary (Binary(..))
import Data.Typeable (Typeable)
import Data.Foldable (Foldable)
import Data.Traversable (Traversable)
import GHC.Generics (Generic)
MonadProcess
class MonadIO m => MonadProcess m where
liftP :: Process a -> m a
instance MonadProcess Process where
liftP = id
instance MonadProcess m => MonadProcess (StateT s m) where
liftP = lift . liftP
instance MonadProcess m => MonadProcess (ExceptT e m) where
liftP = lift . liftP
instance MonadProcess m => MonadProcess (ReaderT r m) where
liftP = lift . liftP
newtype Rank = Rank Int
deriving (Show,Eq,Ord,Typeable,Binary)
newtype GroupSize = GroupSize Int
deriving (Show,Eq,Ord,Typeable,Binary)
newtype GroupID = GroupID Int
deriving (Show,Eq,Ord,Typeable,Binary)
newtype ActorID = ActorID Int
deriving (Show,Eq,Ord,Typeable,Binary)
newtype Resources = Resources Int
deriving (Show,Eq,Ord,Typeable,Binary)
newtype NCP = NCP { ncpPID :: ProcessId }
deriving (Show,Eq,Ord,Typeable,Generic,Binary)
newtype ACP = ACP { acpPID :: ProcessId }
deriving (Show,Eq,Ord,Typeable,Generic,Binary)
CAD & Node information
data CAD a = CAD a [CAD a]
deriving (Show,Typeable,Generic,Functor,Foldable,Traversable)
data NodeInfo = NodeInfo
}
deriving (Show,Eq,Ord,Typeable,Generic)
data Location = Remote
| Local
deriving (Show,Eq,Ord,Typeable,Generic)
data VirtualCAD = VirtualCAD Location NodeInfo [NodeInfo]
deriving (Show,Eq,Ord,Typeable,Generic)
| Parameters for ACP process
data ParamACP a = ParamACP
{ acpSelf :: Closure (Process ())
^ Closure for the DNA.DNA.runACP function . We have to pass it
, acpActorClosure :: a
, acpVCAD :: VirtualCAD
, acpActor :: ParamActor
}
deriving (Show,Typeable,Generic)
data ParamActor = ParamActor
{ actorParentACP :: ProcessId
, actorRank :: Rank
, actorGroupSize :: GroupSize
}
deriving (Show,Typeable,Generic)
data Dest a
= SendLocally (SendPort a)
| SendRemote [SendPort a]
deriving (Show,Typeable,Generic)
instance Binary a => Binary (CAD a)
instance Binary NodeInfo
instance Binary VirtualCAD
instance Binary Location
instance Binary a => Binary (ParamACP a)
instance Binary ParamActor
instance Serializable a => Binary (Dest a)
Shell actors
data Val a
deriving (Typeable)
data Grp a
deriving (Typeable)
data Scatter a
deriving (Typeable)
data MR a
deriving (Typeable)
data ActorACP = SingleActor ACP
| ActorGroup GroupID
deriving (Show,Typeable,Generic)
instance Binary ActorACP
| Shell actor . It 's actor which has n't been connected anywhere .
data Shell a b = Shell
ActorACP
(RecvEnd a)
(SendEnd b)
deriving (Typeable,Generic)
instance (Serializable a, Serializable b) => Binary (Shell (Val a) (Val b))
instance (Serializable a, Serializable b) => Binary (Shell (Val a) (Grp b))
instance (Serializable a, Serializable b) => Binary (Shell (Val a) (MR b))
instance (Serializable a, Serializable b) => Binary (Shell (Grp a) (Val b))
instance (Serializable a, Serializable b) => Binary (Shell (Grp a) (Grp b))
instance (Serializable a, Serializable b) => Binary (Shell (Grp a) (MR b))
instance (Serializable a, Serializable b) => Binary (Shell (Scatter a) (Val b))
instance (Serializable a, Serializable b) => Binary (Shell (Scatter a) (Grp b))
instance (Serializable a, Serializable b) => Binary (Shell (Scatter a) (MR b))
instance (Serializable a, Serializable b) => Binary (Shell (MR a) (Val b))
instance (Serializable a, Serializable b) => Binary (Shell (MR a) (Grp b))
instance (Serializable a, Serializable b) => Binary (Shell (MR a) (MR b))
data RecvEnd a where
RecvVal :: SendPort a
-> RecvEnd (Val a)
RecvGrp :: [SendPort a]
-> RecvEnd (Scatter a)
RecvBroadcast :: RecvEnd (Scatter a)
-> RecvEnd (Val a)
RecvReduce :: [(SendPort Int,SendPort a)]
-> RecvEnd (Grp a)
RecvMR :: [(SendPort Int, SendPort (Maybe a))]
-> RecvEnd (MR a)
deriving (Typeable)
data SendEnd a where
SendVal :: SendPort (Dest a)
-> SendEnd (Val a)
SendGrp :: [SendPort (Dest a)]
-> SendEnd (Grp a)
SendMR :: [SendPort [SendPort (Maybe a)]]
-> SendEnd (MR a)
deriving (Typeable)
instance (Typeable a, Binary a) => Binary (RecvEnd (Val a)) where
put (RecvVal p) = putWord8 1 >> put p
put (RecvBroadcast p) = putWord8 3 >> put p
get = do
t <- getWord8
case t of
1 -> RecvVal <$> get
3 -> RecvBroadcast <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (RecvEnd (Scatter a)) where
put (RecvGrp p ) = putWord8 2 >> put p
get = do
t <- getWord8
case t of
2 -> RecvGrp <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (RecvEnd (Grp a)) where
put (RecvReduce a) = putWord8 4 >> put a
get = do
t <- getWord8
case t of
4 -> RecvReduce <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (RecvEnd (MR a)) where
put (RecvMR a) = putWord8 5 >> put a
get = do
t <- getWord8
case t of
5 -> RecvMR <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (SendEnd (Val a)) where
put (SendVal ch) = putWord8 1 >> put ch
get = do
t <- getWord8
case t of
1 -> SendVal <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (SendEnd (Grp a)) where
put (SendGrp ch) = putWord8 2 >> put ch
get = do
t <- getWord8
case t of
2 -> SendGrp <$> get
_ -> fail "Bad tag"
instance (Typeable a, Binary a) => Binary (SendEnd (MR a)) where
put (SendMR a) = putWord8 3 >> put a
get = do
t <- getWord8
case t of
3 -> SendMR <$> get
_ -> fail "Bad tag"
|
b6bccfbbd7e1057a8c68c6c058c5b3c72d745bff8fa04406a9ff84cceca8e4e7 | jdormit/sicp-logic | core.clj | (ns sicp-logic.core
(:require [sicp-logic.binding :refer [instantiate var?]]
[sicp-logic.db :as db]
[sicp-logic.evaluator :refer [qeval]]))
(defn contract-question-mark [v]
(symbol
(str "?"
(second v))))
(defn map-over-symbols [proc exp]
(cond
(and (sequential? exp) (not (empty? exp)))
(cons (map-over-symbols proc (first exp))
(map-over-symbols proc (rest exp)))
(symbol? exp) (proc exp)
:else exp))
(defn expand-question-mark [sym]
(let [chars (str sym)]
(if (= "?" (subs chars 0 1))
['? (symbol (subs chars 1))]
sym)))
(defn query-syntax-process [q]
(map-over-symbols #'expand-question-mark q))
(defn sanitize-frame [q frame]
"Fully resolves all variables in q and returns a map
of the variable names to their bindings"
(letfn [(vars [acc node]
(cond
(var? node) (conj acc [(second node) node])
(and (sequential? node) (not (empty? node)))
(concat
(vars acc (first node))
(vars acc (rest node)))))]
(let [qvars (vars [] q)]
(into {} (map vec (instantiate qvars frame (fn [v f] v)))))))
(defn query-results [db q]
"Queries the database for assertions that match the query."
(let [processed-q (query-syntax-process q)]
(map (fn [frame]
(sanitize-frame processed-q frame))
(qeval db processed-q [{}]))))
(defn instantiate-query [q frames]
"Fills in the query with variables from frames"
(let [processed-q (query-syntax-process q)]
(map (fn [frame]
(instantiate processed-q
frame
(fn [v f] (contract-question-mark v))))
frames)))
(defn query* [db q]
(instantiate-query q
(query-results db q)))
(defmacro query [db q]
"Convenience macro to query the database for assertions
that match the query."
`(query* ~db (quote ~q)))
(defn assert! [db assertion]
"Adds a new assertion to the database."
(db/add-assertion db assertion))
(defn add-rule! [db rule]
"Adds a new rule to the database."
(db/add-rule db (query-syntax-process rule)))
(defmacro defrule!
"Convenience macro to add a new rule to the database.
Usage example:
(defrule [grandparent ?x ?y]
(and [parent ?x ?z]
[parent ?z ?y]))"
([db conclusion]
`(add-rule! ~db (quote [~conclusion])))
([db conclusion body]
`(add-rule! ~db (quote [~conclusion ~body]))))
| null | https://raw.githubusercontent.com/jdormit/sicp-logic/56ae0fc344d3fce943dcc740a64d3eebc82062d1/src/sicp_logic/core.clj | clojure | (ns sicp-logic.core
(:require [sicp-logic.binding :refer [instantiate var?]]
[sicp-logic.db :as db]
[sicp-logic.evaluator :refer [qeval]]))
(defn contract-question-mark [v]
(symbol
(str "?"
(second v))))
(defn map-over-symbols [proc exp]
(cond
(and (sequential? exp) (not (empty? exp)))
(cons (map-over-symbols proc (first exp))
(map-over-symbols proc (rest exp)))
(symbol? exp) (proc exp)
:else exp))
(defn expand-question-mark [sym]
(let [chars (str sym)]
(if (= "?" (subs chars 0 1))
['? (symbol (subs chars 1))]
sym)))
(defn query-syntax-process [q]
(map-over-symbols #'expand-question-mark q))
(defn sanitize-frame [q frame]
"Fully resolves all variables in q and returns a map
of the variable names to their bindings"
(letfn [(vars [acc node]
(cond
(var? node) (conj acc [(second node) node])
(and (sequential? node) (not (empty? node)))
(concat
(vars acc (first node))
(vars acc (rest node)))))]
(let [qvars (vars [] q)]
(into {} (map vec (instantiate qvars frame (fn [v f] v)))))))
(defn query-results [db q]
"Queries the database for assertions that match the query."
(let [processed-q (query-syntax-process q)]
(map (fn [frame]
(sanitize-frame processed-q frame))
(qeval db processed-q [{}]))))
(defn instantiate-query [q frames]
"Fills in the query with variables from frames"
(let [processed-q (query-syntax-process q)]
(map (fn [frame]
(instantiate processed-q
frame
(fn [v f] (contract-question-mark v))))
frames)))
(defn query* [db q]
(instantiate-query q
(query-results db q)))
(defmacro query [db q]
"Convenience macro to query the database for assertions
that match the query."
`(query* ~db (quote ~q)))
(defn assert! [db assertion]
"Adds a new assertion to the database."
(db/add-assertion db assertion))
(defn add-rule! [db rule]
"Adds a new rule to the database."
(db/add-rule db (query-syntax-process rule)))
(defmacro defrule!
"Convenience macro to add a new rule to the database.
Usage example:
(defrule [grandparent ?x ?y]
(and [parent ?x ?z]
[parent ?z ?y]))"
([db conclusion]
`(add-rule! ~db (quote [~conclusion])))
([db conclusion body]
`(add-rule! ~db (quote [~conclusion ~body]))))
| |
a139a4112c32ce447b29fb2aa83b36ca65bf2e5ab16a82b2e5f5a3906ac694b5 | matthias-margush/aka | main.clj | (ns aka.main
"Conveniences for managing tools.deps aliases."
(:refer-clojure :exclude [alias])
(:require [aka.cmd :as cmd]
[aka.taps :as t]))
(defn -main
""
[& [cmd & args]]
(let [taps (t/registry)
cmd (cmd/->aka cmd)]
(cmd/run cmd taps args)))
| null | https://raw.githubusercontent.com/matthias-margush/aka/7c71bbe48aa3429a69401ad0ca9ee6361907732b/src/aka/main.clj | clojure | (ns aka.main
"Conveniences for managing tools.deps aliases."
(:refer-clojure :exclude [alias])
(:require [aka.cmd :as cmd]
[aka.taps :as t]))
(defn -main
""
[& [cmd & args]]
(let [taps (t/registry)
cmd (cmd/->aka cmd)]
(cmd/run cmd taps args)))
| |
a7ff726b96ddfc91d3f59f3a6a7a3381588f61df8a7232fe2e9d853da9b1e513 | tilk/vocoder | Filter.hs | |
Module : Vocoder . Conduit . Filter
Description : Frequency - domain filters in Conduit
Copyright : ( c ) , 2021
License : BSD2
This module defines some useful frequency - domain filters as conduits .
It includes convenience wrappers for filters defined in the vocoder package .
Module : Vocoder.Conduit.Filter
Description : Frequency-domain filters in Conduit
Copyright : (c) Marek Materzok, 2021
License : BSD2
This module defines some useful frequency-domain filters as conduits.
It includes convenience wrappers for filters defined in the vocoder package.
-}
{-# LANGUAGE RankNTypes #-}
module Vocoder.Conduit.Filter(
Filter,
runFilter,
idFilter,
composeFilters,
realtimeFilter,
amplitudeFilter,
linearAmplitudeFilter,
amplify,
lowpassBrickwall,
highpassBrickwall,
bandpassBrickwall,
bandstopBrickwall,
lowpassButterworth,
highpassButterworth,
bandpassButterworth,
bandstopButterworth,
pitchShiftInterpolate,
convolutionFilter,
envelopeFilter,
randomPhaseFilter,
playSpeed
) where
import Vocoder
import qualified Vocoder.Filter as F
import Data.Conduit
import Control.Monad.IO.Class
import qualified Data.Vector.Storable as V
import qualified Data.Conduit.Combinators as DCC
-- | Conduit frequency-domain filter type. A conduit filter extends
-- basic frequency-domain filters by using a conduit instead of a
-- pure function. This enables time transformation filters.
newtype Filter m = Filter { runFilter :: forall f. Traversable f => F.FreqStep -> ConduitT (f STFTFrame) (f STFTFrame) m () }
-- | Identity filter
idFilter :: Monad m => Filter m
idFilter = Filter $ \_ -> awaitForever yield
-- | Sequential filter composition.
composeFilters :: Monad m => Filter m -> Filter m -> Filter m
composeFilters (Filter f1) (Filter f2) = Filter $ \step -> f1 step .| f2 step
-- | Use a basic frequency-domain filter as a conduit filter.
realtimeFilter :: Monad m => F.Filter m -> Filter m
realtimeFilter f = Filter (\step -> DCC.mapM $ mapM $ f step)
-- | Creates a conduit filter which transforms only amplitudes, leaving
-- phase increments unchanged.
amplitudeFilter :: Monad m => (F.FreqStep -> Moduli -> Moduli) -> Filter m
amplitudeFilter = realtimeFilter . F.amplitudeFilter
-- | Creates a filter which scales amplitudes depending on frequency.
linearAmplitudeFilter :: Monad m => (Double -> Double) -> Filter m
linearAmplitudeFilter = realtimeFilter . F.linearAmplitudeFilter
-- | Creates an "amplifier" which scales all frequencies.
amplify :: Monad m => Double -> Filter m
amplify = realtimeFilter . F.amplify
-- | Creates a brickwall lowpass filter.
lowpassBrickwall :: Monad m => Double -> Filter m
lowpassBrickwall t = realtimeFilter $ F.lowpassBrickwall t
-- | Creates a brickwall highpass filter.
highpassBrickwall :: Monad m => Double -> Filter m
highpassBrickwall t = realtimeFilter $ F.highpassBrickwall t
-- | Creates a brickwall bandpass filter.
bandpassBrickwall :: Monad m => Double -> Double -> Filter m
bandpassBrickwall t u = realtimeFilter $ F.bandpassBrickwall t u
-- | Creates a brickwall bandstop filter.
bandstopBrickwall :: Monad m => Double -> Double -> Filter m
bandstopBrickwall t u = realtimeFilter $ F.bandstopBrickwall t u
-- | Creates an n-th degree Butterworth-style lowpass filter.
lowpassButterworth :: Monad m => Double -> Double -> Filter m
lowpassButterworth n t = realtimeFilter $ F.lowpassButterworth n t
-- | Creates an n-th degree Butterworth-style highpass filter.
highpassButterworth :: Monad m => Double -> Double -> Filter m
highpassButterworth n t = realtimeFilter $ F.highpassButterworth n t
-- | Creates an n-th degree Butterworth-style bandpass filter.
bandpassButterworth :: Monad m => Double -> Double -> Double -> Filter m
bandpassButterworth n t u = realtimeFilter $ F.bandpassButterworth n t u
-- | Creates an n-th degree Butterworth-style bandstop filter.
bandstopButterworth :: Monad m => Double -> Double -> Double -> Filter m
bandstopButterworth n t u = realtimeFilter $ F.bandstopButterworth n t u
-- | Creates an interpolative pitch-shifting filter.
pitchShiftInterpolate :: Monad m => Double -> Filter m
pitchShiftInterpolate n = realtimeFilter $ F.pitchShiftInterpolate n
-- | Creates a filter which convolves the spectrum using a kernel.
convolutionFilter :: Monad m => V.Vector Double -> Filter m
convolutionFilter ker = realtimeFilter $ F.convolutionFilter ker
-- | Creates a filter which replaces the amplitudes with their envelope.
envelopeFilter :: Monad m => Length -> Filter m
envelopeFilter ksize = realtimeFilter $ F.envelopeFilter ksize
-- | Sets the phase increments so that the bins have horizontal consistency.
-- This erases the phase information, introducing "phasiness".
randomPhaseFilter :: MonadIO m => Filter m
randomPhaseFilter = realtimeFilter $ F.randomPhaseFilter
-- | Changes play speed by replicating or dropping frames.
playSpeed :: Monad m => Rational -> Filter m
playSpeed coeff = Filter $ \_ -> f [] 0
where
f l c
| c < 1 = do
next <- await
case next of
Nothing -> mapM_ leftover $ reverse l
Just i -> f (i:l) (c + coeff)
| otherwise = g l c
g l c
| c >= 1 = do
yield $ l !! 0
g l (c - 1)
| otherwise = f [] c
| null | https://raw.githubusercontent.com/tilk/vocoder/540d489d87fdb5d0cdc0ee4e0bd7df774f734d47/vocoder-conduit/src/Vocoder/Conduit/Filter.hs | haskell | # LANGUAGE RankNTypes #
| Conduit frequency-domain filter type. A conduit filter extends
basic frequency-domain filters by using a conduit instead of a
pure function. This enables time transformation filters.
| Identity filter
| Sequential filter composition.
| Use a basic frequency-domain filter as a conduit filter.
| Creates a conduit filter which transforms only amplitudes, leaving
phase increments unchanged.
| Creates a filter which scales amplitudes depending on frequency.
| Creates an "amplifier" which scales all frequencies.
| Creates a brickwall lowpass filter.
| Creates a brickwall highpass filter.
| Creates a brickwall bandpass filter.
| Creates a brickwall bandstop filter.
| Creates an n-th degree Butterworth-style lowpass filter.
| Creates an n-th degree Butterworth-style highpass filter.
| Creates an n-th degree Butterworth-style bandpass filter.
| Creates an n-th degree Butterworth-style bandstop filter.
| Creates an interpolative pitch-shifting filter.
| Creates a filter which convolves the spectrum using a kernel.
| Creates a filter which replaces the amplitudes with their envelope.
| Sets the phase increments so that the bins have horizontal consistency.
This erases the phase information, introducing "phasiness".
| Changes play speed by replicating or dropping frames. | |
Module : Vocoder . Conduit . Filter
Description : Frequency - domain filters in Conduit
Copyright : ( c ) , 2021
License : BSD2
This module defines some useful frequency - domain filters as conduits .
It includes convenience wrappers for filters defined in the vocoder package .
Module : Vocoder.Conduit.Filter
Description : Frequency-domain filters in Conduit
Copyright : (c) Marek Materzok, 2021
License : BSD2
This module defines some useful frequency-domain filters as conduits.
It includes convenience wrappers for filters defined in the vocoder package.
-}
module Vocoder.Conduit.Filter(
Filter,
runFilter,
idFilter,
composeFilters,
realtimeFilter,
amplitudeFilter,
linearAmplitudeFilter,
amplify,
lowpassBrickwall,
highpassBrickwall,
bandpassBrickwall,
bandstopBrickwall,
lowpassButterworth,
highpassButterworth,
bandpassButterworth,
bandstopButterworth,
pitchShiftInterpolate,
convolutionFilter,
envelopeFilter,
randomPhaseFilter,
playSpeed
) where
import Vocoder
import qualified Vocoder.Filter as F
import Data.Conduit
import Control.Monad.IO.Class
import qualified Data.Vector.Storable as V
import qualified Data.Conduit.Combinators as DCC
newtype Filter m = Filter { runFilter :: forall f. Traversable f => F.FreqStep -> ConduitT (f STFTFrame) (f STFTFrame) m () }
idFilter :: Monad m => Filter m
idFilter = Filter $ \_ -> awaitForever yield
composeFilters :: Monad m => Filter m -> Filter m -> Filter m
composeFilters (Filter f1) (Filter f2) = Filter $ \step -> f1 step .| f2 step
realtimeFilter :: Monad m => F.Filter m -> Filter m
realtimeFilter f = Filter (\step -> DCC.mapM $ mapM $ f step)
amplitudeFilter :: Monad m => (F.FreqStep -> Moduli -> Moduli) -> Filter m
amplitudeFilter = realtimeFilter . F.amplitudeFilter
linearAmplitudeFilter :: Monad m => (Double -> Double) -> Filter m
linearAmplitudeFilter = realtimeFilter . F.linearAmplitudeFilter
amplify :: Monad m => Double -> Filter m
amplify = realtimeFilter . F.amplify
lowpassBrickwall :: Monad m => Double -> Filter m
lowpassBrickwall t = realtimeFilter $ F.lowpassBrickwall t
highpassBrickwall :: Monad m => Double -> Filter m
highpassBrickwall t = realtimeFilter $ F.highpassBrickwall t
bandpassBrickwall :: Monad m => Double -> Double -> Filter m
bandpassBrickwall t u = realtimeFilter $ F.bandpassBrickwall t u
bandstopBrickwall :: Monad m => Double -> Double -> Filter m
bandstopBrickwall t u = realtimeFilter $ F.bandstopBrickwall t u
lowpassButterworth :: Monad m => Double -> Double -> Filter m
lowpassButterworth n t = realtimeFilter $ F.lowpassButterworth n t
highpassButterworth :: Monad m => Double -> Double -> Filter m
highpassButterworth n t = realtimeFilter $ F.highpassButterworth n t
bandpassButterworth :: Monad m => Double -> Double -> Double -> Filter m
bandpassButterworth n t u = realtimeFilter $ F.bandpassButterworth n t u
bandstopButterworth :: Monad m => Double -> Double -> Double -> Filter m
bandstopButterworth n t u = realtimeFilter $ F.bandstopButterworth n t u
pitchShiftInterpolate :: Monad m => Double -> Filter m
pitchShiftInterpolate n = realtimeFilter $ F.pitchShiftInterpolate n
convolutionFilter :: Monad m => V.Vector Double -> Filter m
convolutionFilter ker = realtimeFilter $ F.convolutionFilter ker
envelopeFilter :: Monad m => Length -> Filter m
envelopeFilter ksize = realtimeFilter $ F.envelopeFilter ksize
randomPhaseFilter :: MonadIO m => Filter m
randomPhaseFilter = realtimeFilter $ F.randomPhaseFilter
playSpeed :: Monad m => Rational -> Filter m
playSpeed coeff = Filter $ \_ -> f [] 0
where
f l c
| c < 1 = do
next <- await
case next of
Nothing -> mapM_ leftover $ reverse l
Just i -> f (i:l) (c + coeff)
| otherwise = g l c
g l c
| c >= 1 = do
yield $ l !! 0
g l (c - 1)
| otherwise = f [] c
|
360351b539b24a70a22f789c38a186968ad28030d25c2d3b622c803dd1ab6b2c | opencog/pln | back-predictive-implication-scope-conditional-conjunction-introduction.scm | ;; BackPredictiveImplicationScope Conditional Conjuntion Introduction Rule
;;
;; This rule is similar to a conjunction introduction rule with an
;; extra condition (more specifically the antecedent of a predictive
;; implication). Its compact notation is:
;;
;; P↝Q
;; P↝R
;; ⊢
;; P↝(Q∧R)
;;
Its Atomese notation is :
;;
BackPredictiveImplicationScope < >
;; V
;; T
;; P
;; Q
;; BackPredictiveImplicationScope <TV2>
;; V
;; T
;; P
;; R
;; |-
;; BackPredictiveImplicationScope <TV>
;; V
;; T
;; P
;; And
;; Q
;; R
;;
where TV is calculated using TV1 and TV2 ( their product assuming
;; P↝Q and P↝R are independent).
(use-modules (opencog))
(use-modules (opencog exec))
(use-modules (opencog spacetime))
(use-modules (opencog ure))
(use-modules (opencog pln))
(use-modules (opencog logger))
(define back-predictive-implication-scope-conditional-conjunction-introduction-rule
(let* ((V (Variable "$V"))
(T (Variable "$T"))
(P (Variable "$P"))
(Q (Variable "$Q"))
(R (Variable "$R"))
(NaturalT (TypeInh 'NaturalLink))
(VardeclT (TypeChoice
(TypeInh 'VariableNode)
(Type 'VariableSet)
(Type 'VariableList)
(Type 'TypedVariableLink)))
(P↝Q (Quote
(BackPredictiveImplicationScope
(Unquote V)
(Unquote T)
(Unquote P)
(Unquote Q))))
(P↝R (Quote
(BackPredictiveImplicationScope
(Unquote V)
(Unquote T)
(Unquote P)
(Unquote R))))
(Q∧R (And Q R))
(P↝Q∧R (Quote
(BackPredictiveImplicationScope
(Unquote V)
(Unquote T)
(Unquote P)
(Unquote Q∧R)))))
(Bind
(VariableSet
(TypedVariable V VardeclT)
(TypedVariable T NaturalT)
P
Q
R)
(And
(Present P↝Q P↝R)
(Not (Identical Q R))
(EvaluationLink
(GroundedPredicate "scm: check_preconditions")
(List
Q
R)
)
)
(ExecutionOutput
(GroundedSchema "scm: back-predictive-implication-scope-conditional-conjunction-introduction")
(List
;; Conclusion
P↝Q∧R
Premises
(Set
P↝Q
P↝R))))))
;; Make sure that Q is not in the outgoing of R and that R is not in
;; the outgoing of Q. This is to avoid redundant conjuncts after
;; introducing the conjunction.
(define (check_preconditions Q R)
(define (andlink? atom)
(equal? (cog-type atom) 'AndLink))
(if (or (and (andlink? Q) (member R (cog-outgoing-set Q)))
(and (andlink? R) (member Q (cog-outgoing-set R))))
(stv 0 1)
(stv 1 1)))
Formula
(define (back-predictive-implication-scope-conditional-conjunction-introduction conclusion . premises)
(cog-logger-fine "(back-predictive-implication-scope-conditional-conjunction-introduction conclusion=~a . premises=~a)" conclusion premises)
(if (= (length premises) 1)
(let* ((premises (car premises))
(P↝Q (gar premises))
(P↝R (gdr premises))
(sP↝Q (cog-mean P↝Q))
(cP↝Q (cog-confidence P↝Q))
(sP↝R (cog-mean P↝R))
(cP↝R (cog-confidence P↝R))
;; This code:
;;
( sP ↝ Q∧R ( * sP ↝ Q sP ↝ R ) )
( cP ↝ Q∧R ( ↝ ↝ R ) )
( tv ( stv sP ↝ Q∧R cP ↝ Q∧R ) ) )
( if ( < 0 cP ↝ Q∧R )
;; (cog-merge-hi-conf-tv! conclusion tv)))))
;;
;; leads to the following warning:
;;
WARNING : compilation of /home / nilg / Work / OpenCog / pln / opencog / pln / rules / temporal / back - predictive - implication - scope - conditional - conjunction - introduction.scm failed :
Throw to key ` decoding - error ' with args ` ( " scm_from_utf8_stringn " " input locale conversion error " 22 # vu8(157 81 226 136 167 82 ) ) ' .
;;
;; Just to be cautious it has been ASCII-fied for now
(sPQR (* sP↝Q sP↝R))
(cPQR (min cP↝Q cP↝R))
(tv (stv sPQR cPQR)))
(if (< 0 cPQR)
(cog-merge-hi-conf-tv! conclusion tv)))))
;; Declaration
(define back-predictive-implication-scope-conditional-conjunction-introduction-rule-name
(DefinedSchemaNode "back-predictive-implication-scope-conditional-conjunction-introduction-rule"))
(DefineLink back-predictive-implication-scope-conditional-conjunction-introduction-rule-name
back-predictive-implication-scope-conditional-conjunction-introduction-rule)
| null | https://raw.githubusercontent.com/opencog/pln/5c1b8401b32d54e221e783338596e85d53d3793b/opencog/pln/rules/temporal/back-predictive-implication-scope-conditional-conjunction-introduction.scm | scheme | BackPredictiveImplicationScope Conditional Conjuntion Introduction Rule
This rule is similar to a conjunction introduction rule with an
extra condition (more specifically the antecedent of a predictive
implication). Its compact notation is:
P↝Q
P↝R
⊢
P↝(Q∧R)
V
T
P
Q
BackPredictiveImplicationScope <TV2>
V
T
P
R
|-
BackPredictiveImplicationScope <TV>
V
T
P
And
Q
R
P↝Q and P↝R are independent).
Conclusion
Make sure that Q is not in the outgoing of R and that R is not in
the outgoing of Q. This is to avoid redundant conjuncts after
introducing the conjunction.
This code:
(cog-merge-hi-conf-tv! conclusion tv)))))
leads to the following warning:
Just to be cautious it has been ASCII-fied for now
Declaration | Its Atomese notation is :
BackPredictiveImplicationScope < >
where TV is calculated using TV1 and TV2 ( their product assuming
(use-modules (opencog))
(use-modules (opencog exec))
(use-modules (opencog spacetime))
(use-modules (opencog ure))
(use-modules (opencog pln))
(use-modules (opencog logger))
(define back-predictive-implication-scope-conditional-conjunction-introduction-rule
(let* ((V (Variable "$V"))
(T (Variable "$T"))
(P (Variable "$P"))
(Q (Variable "$Q"))
(R (Variable "$R"))
(NaturalT (TypeInh 'NaturalLink))
(VardeclT (TypeChoice
(TypeInh 'VariableNode)
(Type 'VariableSet)
(Type 'VariableList)
(Type 'TypedVariableLink)))
(P↝Q (Quote
(BackPredictiveImplicationScope
(Unquote V)
(Unquote T)
(Unquote P)
(Unquote Q))))
(P↝R (Quote
(BackPredictiveImplicationScope
(Unquote V)
(Unquote T)
(Unquote P)
(Unquote R))))
(Q∧R (And Q R))
(P↝Q∧R (Quote
(BackPredictiveImplicationScope
(Unquote V)
(Unquote T)
(Unquote P)
(Unquote Q∧R)))))
(Bind
(VariableSet
(TypedVariable V VardeclT)
(TypedVariable T NaturalT)
P
Q
R)
(And
(Present P↝Q P↝R)
(Not (Identical Q R))
(EvaluationLink
(GroundedPredicate "scm: check_preconditions")
(List
Q
R)
)
)
(ExecutionOutput
(GroundedSchema "scm: back-predictive-implication-scope-conditional-conjunction-introduction")
(List
P↝Q∧R
Premises
(Set
P↝Q
P↝R))))))
(define (check_preconditions Q R)
(define (andlink? atom)
(equal? (cog-type atom) 'AndLink))
(if (or (and (andlink? Q) (member R (cog-outgoing-set Q)))
(and (andlink? R) (member Q (cog-outgoing-set R))))
(stv 0 1)
(stv 1 1)))
Formula
(define (back-predictive-implication-scope-conditional-conjunction-introduction conclusion . premises)
(cog-logger-fine "(back-predictive-implication-scope-conditional-conjunction-introduction conclusion=~a . premises=~a)" conclusion premises)
(if (= (length premises) 1)
(let* ((premises (car premises))
(P↝Q (gar premises))
(P↝R (gdr premises))
(sP↝Q (cog-mean P↝Q))
(cP↝Q (cog-confidence P↝Q))
(sP↝R (cog-mean P↝R))
(cP↝R (cog-confidence P↝R))
( sP ↝ Q∧R ( * sP ↝ Q sP ↝ R ) )
( cP ↝ Q∧R ( ↝ ↝ R ) )
( tv ( stv sP ↝ Q∧R cP ↝ Q∧R ) ) )
( if ( < 0 cP ↝ Q∧R )
WARNING : compilation of /home / nilg / Work / OpenCog / pln / opencog / pln / rules / temporal / back - predictive - implication - scope - conditional - conjunction - introduction.scm failed :
Throw to key ` decoding - error ' with args ` ( " scm_from_utf8_stringn " " input locale conversion error " 22 # vu8(157 81 226 136 167 82 ) ) ' .
(sPQR (* sP↝Q sP↝R))
(cPQR (min cP↝Q cP↝R))
(tv (stv sPQR cPQR)))
(if (< 0 cPQR)
(cog-merge-hi-conf-tv! conclusion tv)))))
(define back-predictive-implication-scope-conditional-conjunction-introduction-rule-name
(DefinedSchemaNode "back-predictive-implication-scope-conditional-conjunction-introduction-rule"))
(DefineLink back-predictive-implication-scope-conditional-conjunction-introduction-rule-name
back-predictive-implication-scope-conditional-conjunction-introduction-rule)
|
aac1fde2346776f7f94d8958cc874dd78a1434ee9a0085de2999fb3a7a8cc417 | eugeneia/athens | helper.lisp | (in-package :cl-user)
(defpackage cl-annot.helper
(:nicknames :annot.helper)
(:use :cl
:annot.util
:annot.core
:annot.syntax)
(:export :defannotation
:annotation))
(in-package :annot.helper)
(defun set-annotation-options (name options)
(when (getf options :alias)
(setf (annotation-real (getf options :alias)) name))
(when (getf options :arity)
(setf (annotation-arity name) (getf options :arity)))
(when (getf options :inline)
(setf (annotation-inline-p name) t)))
(defmacro defannotation (name lambda-list options &body body)
`(progn
(set-annotation-options ',name ',options)
(defmacro ,name ,lambda-list ,@body)))
(defannotation annotation (options function-definition-form)
(:arity 2)
(let ((name (definition-form-symbol
(progn-form-last function-definition-form))))
`(progn
(set-annotation-options ',name ',options)
,function-definition-form)))
| null | https://raw.githubusercontent.com/eugeneia/athens/cc9d456edd3891b764b0fbf0202a3e2f58865cbf/quicklisp/dists/quicklisp/software/cl-annot-20150608-git/src/main/helper.lisp | lisp | (in-package :cl-user)
(defpackage cl-annot.helper
(:nicknames :annot.helper)
(:use :cl
:annot.util
:annot.core
:annot.syntax)
(:export :defannotation
:annotation))
(in-package :annot.helper)
(defun set-annotation-options (name options)
(when (getf options :alias)
(setf (annotation-real (getf options :alias)) name))
(when (getf options :arity)
(setf (annotation-arity name) (getf options :arity)))
(when (getf options :inline)
(setf (annotation-inline-p name) t)))
(defmacro defannotation (name lambda-list options &body body)
`(progn
(set-annotation-options ',name ',options)
(defmacro ,name ,lambda-list ,@body)))
(defannotation annotation (options function-definition-form)
(:arity 2)
(let ((name (definition-form-symbol
(progn-form-last function-definition-form))))
`(progn
(set-annotation-options ',name ',options)
,function-definition-form)))
| |
fb0146c94df39d64083bcc2e63ff42fbcc2098e28c33bb2b86b100365cb7efa6 | geophf/1HaskellADay | Exercise.hs | module Y2018.M02.D14.Exercise where
-- I'm thinking of a word that has the letter 'v' in it, for some strange reason
import Y2018.M02.D01.Exercise
-
So from the above import , we have sets of words associated with a hash . This
hash is the product of their letters - as - primes . AND we know which letter is
which prime :
> > > primes ! ' A '
2
Voila !
So , given our anagramSets , how many ' English ' words have a ' v ' in them ?
-
So from the above import, we have sets of words associated with a hash. This
hash is the product of their letters-as-primes. AND we know which letter is
which prime:
>>> primes ! 'A'
2
Voila!
So, given our anagramSets, how many 'English' words have a 'v' in them?
--}
import Data.Array
import Data.Map (Map)
import Data.Set (Set)
type AnagramSets = Map Integer [String]
wordsOf :: Char -> AnagramSets -> [String]
wordsOf letter sets = undefined
But how many v - words are of length ... 5 ?
len :: Int -> [[a]] -> [[a]]
len n words = undefined
- BONUS -----------------------------------------------------------------
So , I 'm playing scrabble and the 3rd letter has to be ' a ' and the 5th letter
has to be ' r ' ...
How many words are like that ? How would you go about finding those words ?
-
So, I'm playing scrabble and the 3rd letter has to be 'a' and the 5th letter
has to be 'r' ...
How many words are like that? How would you go about finding those words?
--}
letterAt :: Char -> Int -> AnagramSets -> Set String
letterAt letter position sets = undefined
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2018/M02/D14/Exercise.hs | haskell | I'm thinking of a word that has the letter 'v' in it, for some strange reason
}
---------------------------------------------------------------
} | module Y2018.M02.D14.Exercise where
import Y2018.M02.D01.Exercise
-
So from the above import , we have sets of words associated with a hash . This
hash is the product of their letters - as - primes . AND we know which letter is
which prime :
> > > primes ! ' A '
2
Voila !
So , given our anagramSets , how many ' English ' words have a ' v ' in them ?
-
So from the above import, we have sets of words associated with a hash. This
hash is the product of their letters-as-primes. AND we know which letter is
which prime:
>>> primes ! 'A'
2
Voila!
So, given our anagramSets, how many 'English' words have a 'v' in them?
import Data.Array
import Data.Map (Map)
import Data.Set (Set)
type AnagramSets = Map Integer [String]
wordsOf :: Char -> AnagramSets -> [String]
wordsOf letter sets = undefined
But how many v - words are of length ... 5 ?
len :: Int -> [[a]] -> [[a]]
len n words = undefined
So , I 'm playing scrabble and the 3rd letter has to be ' a ' and the 5th letter
has to be ' r ' ...
How many words are like that ? How would you go about finding those words ?
-
So, I'm playing scrabble and the 3rd letter has to be 'a' and the 5th letter
has to be 'r' ...
How many words are like that? How would you go about finding those words?
letterAt :: Char -> Int -> AnagramSets -> Set String
letterAt letter position sets = undefined
|
4cb1675c1b0ac607191f9039b7a95bcfd0c305f5e796a2dd979cb8a76c0e0246 | exoscale/clojure-kubernetes-client | v1_persistent_volume_claim_status.clj | (ns clojure-kubernetes-client.specs.v1-persistent-volume-claim-status
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-condition :refer :all]
)
(:import (java.io File)))
(declare v1-persistent-volume-claim-status-data v1-persistent-volume-claim-status)
(def v1-persistent-volume-claim-status-data
{
(ds/opt :accessModes) (s/coll-of string?)
(ds/opt :capacity) (s/map-of string? string?)
(ds/opt :conditions) (s/coll-of v1-persistent-volume-claim-condition)
(ds/opt :phase) string?
})
(def v1-persistent-volume-claim-status
(ds/spec
{:name ::v1-persistent-volume-claim-status
:spec v1-persistent-volume-claim-status-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v1_persistent_volume_claim_status.clj | clojure | (ns clojure-kubernetes-client.specs.v1-persistent-volume-claim-status
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-persistent-volume-claim-condition :refer :all]
)
(:import (java.io File)))
(declare v1-persistent-volume-claim-status-data v1-persistent-volume-claim-status)
(def v1-persistent-volume-claim-status-data
{
(ds/opt :accessModes) (s/coll-of string?)
(ds/opt :capacity) (s/map-of string? string?)
(ds/opt :conditions) (s/coll-of v1-persistent-volume-claim-condition)
(ds/opt :phase) string?
})
(def v1-persistent-volume-claim-status
(ds/spec
{:name ::v1-persistent-volume-claim-status
:spec v1-persistent-volume-claim-status-data}))
| |
6f30364253bde35fc88a3f7fee75c8da71e1d0bd2ed83c358d436fdec372030a | well-typed/large-records | R100.hs | #if PROFILE_CORESIZE
{-# OPTIONS_GHC -ddump-to-file -ddump-ds-preopt -ddump-ds -ddump-simpl #-}
#endif
#if PROFILE_TIMING
{-# OPTIONS_GHC -ddump-to-file -ddump-timings #-}
#endif
# OPTIONS_GHC -fplugin = TypeLet -fplugin = Data . Record . Anon . Plugin #
{-# OPTIONS_GHC -fplugin-opt=Data.Record.Anon.Plugin:typelet #-}
module Experiment.ConstructWithTypeLet.Sized.R100 where
import Data.Record.Anon.Simple (Record)
import Bench.Types
import Common.RowOfSize.Row100
record :: Word -> Record ExampleRow
record x = ANON {
-- 00 .. 09
t00 = MkT x
, t01 = MkT x
, t02 = MkT x
, t03 = MkT x
, t04 = MkT x
, t05 = MkT x
, t06 = MkT x
, t07 = MkT x
, t08 = MkT x
, t09 = MkT x
10 .. 19
, t10 = MkT x
, t11 = MkT x
, t12 = MkT x
, t13 = MkT x
, t14 = MkT x
, t15 = MkT x
, t16 = MkT x
, t17 = MkT x
, t18 = MkT x
, t19 = MkT x
20 .. 29
, t20 = MkT x
, t21 = MkT x
, t22 = MkT x
, t23 = MkT x
, t24 = MkT x
, t25 = MkT x
, t26 = MkT x
, t27 = MkT x
, t28 = MkT x
, t29 = MkT x
30 .. 39
, t30 = MkT x
, t31 = MkT x
, t32 = MkT x
, t33 = MkT x
, t34 = MkT x
, t35 = MkT x
, t36 = MkT x
, t37 = MkT x
, t38 = MkT x
, t39 = MkT x
40 .. 49
, t40 = MkT x
, t41 = MkT x
, t42 = MkT x
, t43 = MkT x
, t44 = MkT x
, t45 = MkT x
, t46 = MkT x
, t47 = MkT x
, t48 = MkT x
, t49 = MkT x
50 .. 59
, t50 = MkT x
, t51 = MkT x
, t52 = MkT x
, t53 = MkT x
, t54 = MkT x
, t55 = MkT x
, t56 = MkT x
, t57 = MkT x
, t58 = MkT x
, t59 = MkT x
60 .. 69
, t60 = MkT x
, t61 = MkT x
, t62 = MkT x
, t63 = MkT x
, t64 = MkT x
, t65 = MkT x
, t66 = MkT x
, t67 = MkT x
, t68 = MkT x
, t69 = MkT x
70 .. 79
, t70 = MkT x
, t71 = MkT x
, t72 = MkT x
, t73 = MkT x
, t74 = MkT x
, t75 = MkT x
, t76 = MkT x
, t77 = MkT x
, t78 = MkT x
, t79 = MkT x
80 .. 89
, t80 = MkT x
, t81 = MkT x
, t82 = MkT x
, t83 = MkT x
, t84 = MkT x
, t85 = MkT x
, t86 = MkT x
, t87 = MkT x
, t88 = MkT x
, t89 = MkT x
90 .. 99
, t90 = MkT x
, t91 = MkT x
, t92 = MkT x
, t93 = MkT x
, t94 = MkT x
, t95 = MkT x
, t96 = MkT x
, t97 = MkT x
, t98 = MkT x
, t99 = MkT x
} | null | https://raw.githubusercontent.com/well-typed/large-records/78d0966e4871847e2c17a0aa821bacf38bdf96bc/large-records-benchmarks/bench/large-anon/Experiment/ConstructWithTypeLet/Sized/R100.hs | haskell | # OPTIONS_GHC -ddump-to-file -ddump-ds-preopt -ddump-ds -ddump-simpl #
# OPTIONS_GHC -ddump-to-file -ddump-timings #
# OPTIONS_GHC -fplugin-opt=Data.Record.Anon.Plugin:typelet #
00 .. 09 | #if PROFILE_CORESIZE
#endif
#if PROFILE_TIMING
#endif
# OPTIONS_GHC -fplugin = TypeLet -fplugin = Data . Record . Anon . Plugin #
module Experiment.ConstructWithTypeLet.Sized.R100 where
import Data.Record.Anon.Simple (Record)
import Bench.Types
import Common.RowOfSize.Row100
record :: Word -> Record ExampleRow
record x = ANON {
t00 = MkT x
, t01 = MkT x
, t02 = MkT x
, t03 = MkT x
, t04 = MkT x
, t05 = MkT x
, t06 = MkT x
, t07 = MkT x
, t08 = MkT x
, t09 = MkT x
10 .. 19
, t10 = MkT x
, t11 = MkT x
, t12 = MkT x
, t13 = MkT x
, t14 = MkT x
, t15 = MkT x
, t16 = MkT x
, t17 = MkT x
, t18 = MkT x
, t19 = MkT x
20 .. 29
, t20 = MkT x
, t21 = MkT x
, t22 = MkT x
, t23 = MkT x
, t24 = MkT x
, t25 = MkT x
, t26 = MkT x
, t27 = MkT x
, t28 = MkT x
, t29 = MkT x
30 .. 39
, t30 = MkT x
, t31 = MkT x
, t32 = MkT x
, t33 = MkT x
, t34 = MkT x
, t35 = MkT x
, t36 = MkT x
, t37 = MkT x
, t38 = MkT x
, t39 = MkT x
40 .. 49
, t40 = MkT x
, t41 = MkT x
, t42 = MkT x
, t43 = MkT x
, t44 = MkT x
, t45 = MkT x
, t46 = MkT x
, t47 = MkT x
, t48 = MkT x
, t49 = MkT x
50 .. 59
, t50 = MkT x
, t51 = MkT x
, t52 = MkT x
, t53 = MkT x
, t54 = MkT x
, t55 = MkT x
, t56 = MkT x
, t57 = MkT x
, t58 = MkT x
, t59 = MkT x
60 .. 69
, t60 = MkT x
, t61 = MkT x
, t62 = MkT x
, t63 = MkT x
, t64 = MkT x
, t65 = MkT x
, t66 = MkT x
, t67 = MkT x
, t68 = MkT x
, t69 = MkT x
70 .. 79
, t70 = MkT x
, t71 = MkT x
, t72 = MkT x
, t73 = MkT x
, t74 = MkT x
, t75 = MkT x
, t76 = MkT x
, t77 = MkT x
, t78 = MkT x
, t79 = MkT x
80 .. 89
, t80 = MkT x
, t81 = MkT x
, t82 = MkT x
, t83 = MkT x
, t84 = MkT x
, t85 = MkT x
, t86 = MkT x
, t87 = MkT x
, t88 = MkT x
, t89 = MkT x
90 .. 99
, t90 = MkT x
, t91 = MkT x
, t92 = MkT x
, t93 = MkT x
, t94 = MkT x
, t95 = MkT x
, t96 = MkT x
, t97 = MkT x
, t98 = MkT x
, t99 = MkT x
} |
5debcad9e4abd1c0ec65bf63864ac3937261961996bf508296eb1d86236f154a | ocaml/oasis | c1.mli | (******************************************************************************)
OASIS : architecture for building OCaml libraries and applications
(* *)
Copyright ( C ) 2011 - 2016 ,
Copyright ( C ) 2008 - 2011 , OCamlCore SARL
(* *)
(* This library is free software; you can redistribute it and/or modify it *)
(* under the terms of the GNU Lesser General Public License as published by *)
the Free Software Foundation ; either version 2.1 of the License , or ( at
(* your option) any later version, with the OCaml static compilation *)
(* exception. *)
(* *)
(* This library is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *)
(* or FITNESS FOR A PARTICULAR PURPOSE. See the file COPYING for more *)
(* details. *)
(* *)
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
(******************************************************************************)
val f : int -> int -> float
| null | https://raw.githubusercontent.com/ocaml/oasis/3d1a9421db92a0882ebc58c5df219b18c1e5681d/test/data/TestPluginOMake/complex/src/libc_/c1.mli | ocaml | ****************************************************************************
This library is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
your option) any later version, with the OCaml static compilation
exception.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the file COPYING for more
details.
**************************************************************************** | OASIS : architecture for building OCaml libraries and applications
Copyright ( C ) 2011 - 2016 ,
Copyright ( C ) 2008 - 2011 , OCamlCore SARL
the Free Software Foundation ; either version 2.1 of the License , or ( at
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
val f : int -> int -> float
|
817e65b9fcf8981558cfb527cc66bfa1067fc568c721fc3f51b081cbc89460c3 | manavpatnaik/haskell | 2.hs | import Data.List
import System.IO
primeNumbers = [3,5,7,11]
morePrime = primeNumbers ++ [13, 17, 19, 23, 29]
-- Another way of constructing lists
favNums = 2 : 7 : 21 : 5 : []
favNumsUpdated = 12 : favNums
-- While using colons the list must be at the end
anotherList = 1 : 4 : 5 : [6,7,8]
multList = [[3,5,7], [1,2,3]]
lenPrime = length primeNumbers
revPrime = reverse primeNumbers
isListEmpty = null primeNumbers
myNums = [1,2,3,4,5,6]
firstNum = myNums !! 0
secondNum = myNums !! 1
a = head myNums
b = tail myNums
primeInit = init myNums
primeLast = last myNums
first3Nums = take 3 myNums
moreNums = 0 : myNums
is5InNums = 5 `elem` myNums
is7InNums = 7 `elem` myNums
maxNum = maximum myNums
minNum = minimum myNums
prodNums = product myNums
-- List comprehension
zeroToTen = [0..10]
evenList = [2,4..20]
letterList = ['A'..'Z']
evenLetterList = ['a', 'c'..'z']
infiList = [10,20..]
fifty = infiList !! 4
many2s = take 10 (repeat 2)
ten5s = replicate 10 5
cycleMyNums = take 34 (cycle [1,2,3,4,5]) | null | https://raw.githubusercontent.com/manavpatnaik/haskell/aceb618af2bbf58e8fa925555053083438f82eb5/learning-modules/2.hs | haskell | Another way of constructing lists
While using colons the list must be at the end
List comprehension | import Data.List
import System.IO
primeNumbers = [3,5,7,11]
morePrime = primeNumbers ++ [13, 17, 19, 23, 29]
favNums = 2 : 7 : 21 : 5 : []
favNumsUpdated = 12 : favNums
anotherList = 1 : 4 : 5 : [6,7,8]
multList = [[3,5,7], [1,2,3]]
lenPrime = length primeNumbers
revPrime = reverse primeNumbers
isListEmpty = null primeNumbers
myNums = [1,2,3,4,5,6]
firstNum = myNums !! 0
secondNum = myNums !! 1
a = head myNums
b = tail myNums
primeInit = init myNums
primeLast = last myNums
first3Nums = take 3 myNums
moreNums = 0 : myNums
is5InNums = 5 `elem` myNums
is7InNums = 7 `elem` myNums
maxNum = maximum myNums
minNum = minimum myNums
prodNums = product myNums
zeroToTen = [0..10]
evenList = [2,4..20]
letterList = ['A'..'Z']
evenLetterList = ['a', 'c'..'z']
infiList = [10,20..]
fifty = infiList !! 4
many2s = take 10 (repeat 2)
ten5s = replicate 10 5
cycleMyNums = take 34 (cycle [1,2,3,4,5]) |
3869ac5ae9e46da9b4a51c465fc90d9deaeb743efeeb1d5b8383aece15ca5ba3 | racket/plai | mutator.rkt | #lang scheme
(require (prefix-in scheme: scheme)
plai/private/command-line
(for-syntax plai/private/command-line)
plai/gc2/private/collector-exports
plai/gc2/private/gc-core
scheme/gui/dynamic
(only-in plai/test-harness
exn:plai? equal~?
plai-error generic-test test halt-on-errors print-only-errors)
(for-syntax scheme)
(for-syntax plai/gc2/private/gc-transformer)
scheme/stxparam
(for-syntax scheme/stxparam-exptime))
(provide else require provide #%top
values
test/location=?
test/value=?
(rename-out
[plai-error error]
[mutator-and and]
[mutator-or or]
[mutator-cond cond]
[mutator-case case]
[mutator-define define]
[mutator-define-values define-values]
(mutator-let let)
[mutator-let* let*]
[mutator-begin begin]
[mutator-if if]
[mutator-let-values let-values]
[mutator-set! set!]
[mutator-lambda lambda]
[mutator-lambda λ]
(mutator-app #%app)
(mutator-datum #%datum)
(mutator-cons cons)
(collector:first first)
(collector:rest rest)
(mutator-quote quote)
(mutator-top-interaction #%top-interaction)
(mutator-module-begin #%module-begin)))
(define-syntax-parameter mutator-name #f)
(define-syntax-parameter mutator-tail-call? #t)
(define-syntax-parameter mutator-env-roots empty)
(define-syntax-parameter mutator-assignment-allowed? #t)
(define-syntax-rule (no! e) (syntax-parameterize ([mutator-assignment-allowed? #f]) e))
(define-syntax-rule (yes! e) (syntax-parameterize ([mutator-assignment-allowed? #t]) e))
; Sugar Macros
(define-syntax mutator-and
(syntax-rules ()
[(_) (mutator-quote #t)]
[(_ fe) fe]
[(_ fe e ...) (mutator-if fe (mutator-and e ...) (mutator-quote #f))]))
(define-syntax mutator-or
(syntax-rules ()
[(_) (mutator-quote #f)]
[(_ fe) fe]
[(_ fe e ...) (mutator-let ([tmp fe]) (mutator-if tmp tmp (mutator-or e ...)))]))
(define-syntax mutator-cond
(syntax-rules (else)
[(_) (mutator-begin)]
[(_ [else e ...]) (mutator-begin e ...)]
[(_ [q ans] e ...) (mutator-if q ans (mutator-cond e ...))]))
(define-syntax mutator-case
(syntax-rules (else)
[(_ value
[(v ...) e ...]
...
[else ee ...])
(mutator-let ([tmp value])
(mutator-cond [(mutator-app mutator-member? tmp (mutator-quote (v ...)))
e ...]
...
[else ee ...]))]
[(_ value
[(v ...) e ...]
...)
(mutator-case value
[(v ...) e ...]
...
[else (mutator-begin)])]))
(define-syntax mutator-define
(syntax-rules ()
[(_ (f a ...) e ...)
(mutator-define-values (f)
(syntax-parameterize ([mutator-name #'f])
(mutator-lambda (a ...) e ...)))]
[(_ id e)
(mutator-define-values (id)
(syntax-parameterize ([mutator-name #'id])
e))]))
(define-syntax-rule (mutator-let ([id e] ...) be ...)
(mutator-let-values ([(id) (syntax-parameterize ([mutator-name #'id])
e)]
...)
be ...))
(define-syntax mutator-let*
(syntax-rules ()
[(_ () be ...)
(mutator-begin be ...)]
[(_ ([fid fe] [rid re] ...) be ...)
(mutator-let ([fid fe])
(mutator-let* ([rid re] ...)
be ...))]))
(define-syntax mutator-begin
(syntax-rules ()
[(_) (mutator-app void)]
[(_ e) e]
[(_ fe e ...)
(let ([tmp
(syntax-parameterize ([mutator-tail-call? #f])
(yes! fe))])
(mutator-begin e ...))]))
(define mutator-cons
(let ([cons
(λ (hd tl)
(define roots (compute-current-roots))
(define-values (hd-roots no-hd-roots)
(partition (λ (x) (= hd (read-root x))) roots))
(define-values (tl-roots no-hd-no-tl-roots)
(partition (λ (x) (= tl (read-root x))) no-hd-roots))
(parameterize ([active-roots no-hd-no-tl-roots])
(collector:cons (make-root 'hd
(λ () hd)
(λ (v)
(set! hd v)
(for ([r (in-list hd-roots)])
(set-root! r v))))
(make-root 'tl
(λ () tl)
(λ (v)
(set! tl v)
(for ([r (in-list tl-roots)])
(set-root! r v)))))))])
cons))
(define (do-alloc-flat flat)
(parameterize ([active-roots (compute-current-roots)])
(collector:alloc-flat flat)))
; Real Macros
(define-syntax-rule (mutator-define-values (id ...) e)
(begin (define-values (id ...)
(syntax-parameterize ([mutator-tail-call? #f])
e))
(add-global-root! (make-env-root id))
...))
(define-syntax-rule (mutator-if test true false)
(if (syntax-parameterize ([mutator-tail-call? #f])
(collector:deref (no! test)))
true
false))
(define-syntax (mutator-set! stx)
(syntax-case stx ()
[(_ id e)
(let ()
(if (syntax-parameter-value #'mutator-assignment-allowed?)
#'(begin
(set! id (no! e))
(mutator-app void))
(raise-syntax-error 'set! "allowed only inside begin expressions and at the top-level" stx)))]))
(define-syntax (mutator-let-values stx)
(syntax-case stx ()
[(_ ([(id ...) expr] ...) body-expr)
(with-syntax ([((tmp ...) ...)
(map generate-temporaries (syntax->list #'((id ...) ...)))])
(let ([binding-list (syntax->list #'((id ...) ...))])
(with-syntax ([((previous-id ...) ...)
(build-list (length binding-list)
(λ (n) (append-map syntax->list (take binding-list n))))])
(syntax/loc stx
(let*-values ([(tmp ...)
(syntax-parameterize ([mutator-env-roots
(append
(switch-over
(syntax->list #'(id ... ...))
(syntax->list #'(tmp ... ...))
(find-referenced-locals
(list #'previous-id ...)
#'body-expr))
(syntax-parameter-value #'mutator-env-roots))]
[mutator-tail-call? #f])
(no! expr))]
...)
(let-values ([(id ...) (values tmp ...)] ...)
(syntax-parameterize ([mutator-env-roots
(append (find-referenced-locals
(list #'id ... ...)
#'body-expr)
(syntax-parameter-value #'mutator-env-roots))])
body-expr)))))))]
[(_ ([(id ...) expr] ...) body-expr ...)
(syntax/loc stx
(mutator-let-values
([(id ...) expr] ...)
(mutator-begin body-expr ...)))]))
(define-syntax (mutator-lambda stx)
(syntax-case stx ()
[(_ (id ...) body)
(let ([env-roots (syntax-parameter-value #'mutator-env-roots)])
(with-syntax ([(free-id ...) (map syntax-local-introduce
(filter
(λ (x) (for/and ([id (in-list (syntax->list #'(id ...)))])
(not (free-identifier=? id x))))
(find-referenced-locals env-roots stx)))]
[(env-id ...) env-roots]
[closure (or (syntax-parameter-value #'mutator-name)
(syntax-local-name)
(let ([prop (syntax-property stx 'inferred-name)])
(if (or (identifier? prop)
(symbol? prop))
prop
#f))
(string->symbol "#<proc>"))])
(quasisyntax/loc stx
(let ([closure
(closure-code
#,(length (syntax->list #'(free-id ...)))
(let ([closure
(lambda (free-id ... id ...)
(syntax-parameterize ([mutator-env-roots
(append
(find-referenced-locals
(list #'id ...)
#'body)
(list #'free-id ...))]
[mutator-tail-call? #t])
(no! body)))])
closure))])
#,(if (syntax-parameter-value #'mutator-tail-call?)
(syntax/loc stx
(#%app do-collector:closure closure
(list (λ () free-id) ...)
(list (λ (v) (set! free-id v)) ...)))
(syntax/loc stx
(with-continuation-mark
gc-roots-key
(list (make-env-root env-id) ...)
(#%app do-collector:closure closure
(list (λ () free-id) ...)
(list (λ (v) (set! free-id v)) ...)))))))))]
[(_ (id ...) body ...)
(syntax/loc stx
(mutator-lambda (id ...) (mutator-begin body ...)))]))
(define (do-collector:closure closure getters setters)
(define-values (remaining-roots closure-roots)
(let loop ([getters getters]
[setters setters]
[remaining-roots (compute-current-roots)]
[closure-roots '()])
(cond
[(null? getters) (values remaining-roots closure-roots)]
[else
(define this-loc ((car getters)))
(define this-setter (car setters))
(define-values (this-other-roots leftovers)
(partition (λ (x) (= (read-root x) this-loc)) remaining-roots))
(loop (cdr getters) (cdr setters)
leftovers
(cons (make-root 'closure-root
(λ () this-loc)
(λ (v) (set! this-loc v)
(this-setter v)
(for ([root (in-list this-other-roots)])
(set-root! root v))))
closure-roots))])))
(parameterize ([active-roots remaining-roots])
(collector:closure closure (reverse closure-roots))))
(define-syntax (mutator-app stx)
(syntax-case stx ()
[(_ e ...)
(local [(define (do-not-expand? exp)
(and (identifier? exp)
(not (set!-transformer?
(syntax-local-value exp (lambda () #f))))))
(define exps (syntax->list #'(e ...)))
(define tmps
(generate-temporaries #'(e ...)))]
(with-syntax ([(ne ...)
(map (lambda (exp tmp) (if (do-not-expand? exp) exp tmp))
exps tmps)])
(for/fold ([acc (syntax/loc stx (mutator-anf-app ne ...))])
([exp (in-list (reverse exps))]
[tmp (in-list (reverse tmps))])
(if (do-not-expand? exp)
acc
(quasisyntax/loc stx
(mutator-let ([#,tmp #,exp])
#,acc))))))]))
(define-syntax (mutator-anf-app stx)
(syntax-case stx ()
[(_ fe ae ...)
(let ()
(define prim-app? (ormap (λ (x) (free-identifier=? x #'fe))
prim-ids))
(define is-set-fst? (free-identifier=? #'collector:set-first! #'fe))
(when (or is-set-fst? (free-identifier=? #'collector:set-rest! #'fe))
(unless (syntax-parameter-value #'mutator-assignment-allowed?)
(raise-syntax-error (if is-set-fst? 'set-first! 'set-rest!)
"can appear only at the top-level or in a begin"
stx)))
(with-syntax ([(env-id ...) (syntax-parameter-value #'mutator-env-roots)]
[app-exp (if prim-app?
(syntax/loc stx (do-alloc-flat (fe (collector:deref ae) ...)))
(syntax/loc stx ((deref-proc fe) ae ...)))])
(if (syntax-parameter-value #'mutator-tail-call?)
; If this call is in tail position, we will not need access
; to its environment when it returns.
#'app-exp
; If this call is not in tail position, we make the
; environment at the call site reachable.
#`(with-continuation-mark gc-roots-key
(list (make-env-root env-id) ...)
app-exp))))]))
(define-syntax mutator-quote
(syntax-rules ()
[(_ (a . d))
(mutator-app mutator-cons (mutator-quote a) (mutator-quote d))]
[(_ s)
(mutator-datum . s)]))
(define-syntax (mutator-datum stx)
(syntax-case stx ()
[(_ . e)
(quasisyntax/loc stx (mutator-anf-app do-alloc-flat (#%datum . e)))]))
(define-syntax (mutator-top-interaction stx)
(syntax-case stx (require provide mutator-define mutator-define-values test/value=? import-primitives)
[(_ . (require . e))
(syntax/loc stx
(require . e))]
[(_ . (provide . e))
(syntax/loc stx
(provide . e))]
[(_ . (mutator-define . e))
(syntax/loc stx
(mutator-define . e))]
[(_ . (mutator-define-values . e))
(syntax/loc stx
(mutator-define-values . e))]
[(_ . (test/value=? . e))
(syntax/loc stx
(test/value=? . e))]
[(_ . (import-primitives . e))
(syntax/loc stx
(import-primitives . e))]
[(_ . expr)
(syntax/loc stx
(call-with-values
(lambda ()
(syntax-parameterize ([mutator-tail-call? #f])
expr))
(case-lambda
[() (void)]
[(result-addr)
(show-one-result result-addr)]
[result-addrs
(show-multiple-results result-addrs)])))]))
(define (show-one-result result-addr)
(cond
[(procedure? result-addr)
(printf "Imported procedure:\n")
result-addr]
[(location? result-addr)
(printf "Value at location ~a:\n" result-addr)
(gc->scheme result-addr)]))
(define (show-multiple-results results)
(define addrs
(for/list ([result-addr (in-list results)]
#:when (location? result-addr))
result-addr))
(printf "Values at locations ")
(cond
[(= (length addrs) 2)
(printf "~a and ~a:\n" (car addrs) (cadr addrs))]
[else
(let loop ([addr (car addrs)]
[addrs (cdr addrs)])
(cond
[(null? addrs)
(printf "and ~a:\n" addr)]
[else
(printf "~a, " addr)
(loop (car addrs) (cdr addrs))]))])
(apply values
(for/list ([result (in-list results)])
(cond
[(procedure? result)
result]
[(location? result)
(gc->scheme result)]))))
; Module Begin
(define-for-syntax (allocator-setup-internal stx)
(syntax-case stx ()
[(collector-module heap-size)
(with-syntax ([(args ...)
(map (λ (s) (datum->syntax stx s))
'(init-allocator gc:deref gc:alloc-flat gc:cons
gc:closure gc:closure? gc:closure-code-ptr gc:closure-env-ref
gc:first gc:rest
gc:flat? gc:cons?
gc:set-first! gc:set-rest!))])
#`(begin
#,(if (alternate-collector)
#`(require #,(datum->syntax #'collector-module (alternate-collector)))
#`(require #,(syntax-case #'collector-module (mutator-quote)
[(mutator-quote . x)
(datum->syntax #'collector-module (cons #'quote #'x))]
[else #'collector-module])))
(allocator-setup/proc args ... (#%datum . heap-size))))]
[_ (raise-syntax-error 'mutator
"Mutator must start with an 'allocator-setup' expression, such as: (allocator-setup <module-path> <literal-number>)"
stx)]))
(define (allocator-setup/proc init-allocator gc:deref gc:alloc-flat gc:cons
gc:closure gc:closure? gc:closure-code-ptr gc:closure-env-ref
gc:first gc:rest
gc:flat? gc:cons?
gc:set-first! gc:set-rest!
heap-size)
(set-collector:deref! gc:deref)
(set-collector:alloc-flat! gc:alloc-flat)
(set-collector:cons! gc:cons)
(set-collector:first! gc:first)
(set-collector:rest! gc:rest)
(set-collector:flat?! gc:flat?)
(set-collector:cons?! gc:cons?)
(set-collector:set-first!! gc:set-first!)
(set-collector:set-rest!! gc:set-rest!)
(set-collector:closure! gc:closure)
(set-collector:closure?! gc:closure?)
(set-collector:closure-code-ptr! gc:closure-code-ptr)
(set-collector:closure-env-ref! gc:closure-env-ref)
(init-heap! heap-size)
(when (gui-available?)
(if (<= heap-size 500)
(set-ui! (dynamic-require `plai/gc2/private/gc-gui 'heap-viz%))
(printf "Large heap; the heap visualizer will not be displayed.\n")))
(init-allocator))
(define-for-syntax allocator-setup-error-msg
"Mutator must start with an 'allocator-setup' expression, such as: (allocator-setup <module-path> <literal-number>)")
(define-syntax (mutator-module-begin stx)
(syntax-case stx (allocator-setup)
[(_ (allocator-setup . setup) module-expr ...)
(begin
(syntax-case #'setup ()
[(collector heap-size)
(begin
(unless (module-path? (syntax->datum #'collector))
(raise-syntax-error 'allocator-setup "expected a module path" #'collector))
(unless (number? (syntax->datum #'heap-size))
(raise-syntax-error 'allocator-setup "expected a literal number" #'heap-size)))]
[_
(raise-syntax-error 'mutator allocator-setup-error-msg (syntax/loc #'setup (allocator-setup . setup)))])
(quasisyntax/loc stx
(#%module-begin
#,(allocator-setup-internal #'setup)
#,@(for/list ([me (in-list (syntax->list #'(module-expr ...)))])
(quasisyntax/loc me
(mutator-top-interaction . #,me))))))]
[(_ first-expr module-expr ...)
(raise-syntax-error 'mutator allocator-setup-error-msg #'first-expr)]
[(_)
(raise-syntax-error 'mutator allocator-setup-error-msg)]))
; User Macros
(provide import-primitives)
(define-syntax (import-primitives stx)
(syntax-case stx ()
[(_ id ...)
(andmap identifier? (syntax->list #'(id ...)))
(with-syntax ([(renamed-id ...) (generate-temporaries #'(id ...))]
[source (datum->syntax (and (pair? (syntax-e #'(id ...)))
(car (syntax-e #'(id ...))))
'scheme)])
#`(begin
(require (only-in source [id renamed-id] ...))
;; XXX make a macro to unify this and provide/lift
(define id
(lambda args
(unless (andmap (lambda (v) (and (location? v) (collector:flat? v))) args)
(error 'id (string-append "all arguments must be <heap-value?>s, "
"even if the imported procedure accepts structured "
"data")))
(let ([result (apply renamed-id (map collector:deref args))])
(cond
[(void? result) (void)]
[(heap-value? result) (do-alloc-flat result)]
[else
(error 'id (string-append "imported primitive must return <heap-value?>, "
"received ~a" result))]))))
...))]
[(_ maybe-id ...)
(ormap (λ (v) (and (not (identifier? v)) v)) (syntax->list #'(maybe-id ...)))
(let ([offending-stx (findf (λ (v) (not (identifier? v))) (syntax->list #'(maybe-id ...)))])
(raise-syntax-error
#f "expected identifier to import" offending-stx))]
[(_ . __)
(raise-syntax-error #f "expected list of identifiers to import" stx)]
[_ (raise-syntax-error #f "expected open parenthesis before import-primitive")]))
(define-for-syntax ((mk-id-macro p-id) stx)
(syntax-case stx ()
[id
(identifier? #'id)
(raise-syntax-error (syntax-e stx)
"primitive must appear in the function position of an application"
stx)]
[(id exp ...)
#`(mutator-app #,p-id exp ...)]))
(define-syntax (provide-flat-prims/lift stx)
(syntax-case stx ()
[(_ prim-ids id ...)
(andmap identifier? (syntax->list #'(id ...)))
(with-syntax ([(id2 ...) (generate-temporaries #'(id ...))]
[(p ...) (generate-temporaries #'(id ...))])
#'(begin
(define-for-syntax prim-ids (syntax->list #'(id ...)))
(provide (rename-out [id2 id] ...))
(define-syntax id2 (mk-id-macro #'id)) ...))]))
(provide-flat-prims/lift
prim-ids
symbol? boolean? number? symbol=?
add1 sub1 zero? + - * / even? odd? = < > <= >=)
(define (member? v l)
(and (member v l) #t))
(define (mutator-member? v l)
(do-alloc-flat
(member? (collector:deref v)
(gc->scheme l))))
(provide (rename-out (mutator-set-first! set-first!)))
(define-syntax (mutator-set-first! stx)
(syntax-case stx ()
[x
(identifier? #'x)
(raise-syntax-error 'set-first! "must appear immediately following an open paren" stx)]
[(_ args ...)
(begin
#'(mutator-app collector:set-first! args ...))]))
(provide (rename-out (mutator-set-rest! set-rest!)))
(define-syntax (mutator-set-rest! stx)
(syntax-case stx ()
[x
(identifier? #'x)
(raise-syntax-error 'set-rest! "must appear immediately following an open paren" stx)]
[(_ args ...)
(begin
#'(mutator-app collector:set-rest! args ...))]))
(provide (rename-out [mutator-empty empty]))
(define-syntax mutator-empty
(syntax-id-rules (mutator-empty)
[_ (mutator-quote ())]))
(provide (rename-out (mutator-empty? empty?)))
(define (mutator-empty? loc)
(cond
[(collector:flat? loc)
(do-alloc-flat (empty? (collector:deref loc)))]
[else
(do-alloc-flat false)]))
(provide (rename-out [mutator-cons? cons?]))
(define (mutator-cons? loc)
(do-alloc-flat (collector:cons? loc)))
(provide (rename-out [mutator-eq? eq?]))
(define (mutator-eq? l1 l2)
(do-alloc-flat (= l1 l2)))
(provide (rename-out [mutator-printf printf]))
(define-syntax (mutator-printf stx)
(syntax-case stx ()
[(_ fmt arg ...)
; We must invoke mutator-app to A-normalize the arguments.
(syntax/loc stx
(begin
(mutator-app printf (#%datum . fmt)
(mutator-app gc->scheme arg) ...)
(void)))]))
(provide (rename-out
(mutator-halt-on-errors halt-on-errors)
(mutator-print-only-errors print-only-errors)))
(define-syntax (mutator-halt-on-errors stx)
(syntax-case stx ()
[(_) #'(halt-on-errors)]
[(_ arg) #'(#%app halt-on-errors (#%datum . arg))]))
(define-syntax (mutator-print-only-errors stx)
(syntax-case stx ()
[(_) #'(print-only-errors)]
[(_ arg) #'(#%app print-only-errors (#%datum . arg))]))
; Implementation Functions
;; Turn an applicable thing into a Scheme procedure.  proc/loc is either
;; an imported primitive (already a procedure) or a heap location of a
;; closure, whose code pointer is fetched from the collector.  For a
;; closure-code record, the returned procedure prepends the closure's
;; captured environment locations to the caller's arguments.
(define (deref-proc proc/loc)
  (define v
    (cond
      [(procedure? proc/loc) proc/loc]
      [(location? proc/loc) (collector:closure-code-ptr proc/loc)]
      [else
       (error 'procedure-application "expected procedure, given something else")]))
  (cond
    [(procedure? v)
     v]
    [(closure-code? v)
     (lambda args
       (apply (closure-code-proc v)
              (append
               ;; Captured free variables come first, matching the
               ;; (free-id ... id ...) parameter order in mutator-lambda.
               (for/list ([i (in-range (closure-code-env-count v))])
                 (collector:closure-env-ref proc/loc i))
               args)))]
    [else
     (error 'procedure-application "expected procedure, given ~e" v)]))
;; Convert a heap location into an ordinary Scheme value.  Placeholders
;; plus make-reader-graph let cyclic heap structures unwrap into cyclic
;; Scheme data; the phs table memoizes each visited location so sharing
;; and cycles terminate.  Closures unwrap to their code procedure only.
(define (gc->scheme loc)
  (define-struct an-unset ())
  (define unset (make-an-unset))
  (define phs (make-hash))
  (define (unwrap loc)
    (if (hash-has-key? phs loc)
        (hash-ref phs loc)
        (begin
          (local [(define ph (make-placeholder unset))]
            ;; Register the placeholder before recurring so a cycle back
            ;; to loc finds it instead of looping forever.
            (hash-set! phs loc ph)
            (cond
              [(collector:flat? loc)
               (placeholder-set! ph (collector:deref loc))]
              [(collector:cons? loc)
               (local [(define car-ph (make-placeholder unset))
                       (define cdr-ph (make-placeholder unset))]
                 (placeholder-set! ph (cons car-ph cdr-ph))
                 (placeholder-set! car-ph (unwrap (collector:first loc)))
                 (placeholder-set! cdr-ph (unwrap (collector:rest loc))))]
              [(collector:closure? loc)
               ;; XXX get env?
               (placeholder-set! ph (closure-code-proc (collector:closure-code-ptr loc)))]
              [else
               (error (format "gc:flat?, gc:cons?, gc:closure? all returned false for ~a" loc))])
            (placeholder-get ph)))))
  (make-reader-graph (unwrap loc)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Testing support
;; Test form comparing two raw heap locations (not the values stored
;; there).  Delegates to PLAI's generic-test; an exception from e1 is
;; passed through as the result so the harness reports it.
(define-syntax (test/location=? stx)
  (syntax-case stx ()
    [(_ e1 e2)
     (quasisyntax/loc stx
       (generic-test
        (λ () e1)
        (λ (result-value)
          (define expected-val e2)
          (values
           (cond
             [(exn:plai? result-value) result-value]
             [(equal~? result-value expected-val) true]
             [else false])
           expected-val))
        (quote (heap-loc #,(syntax->datum #'e1)))
        (format "at line ~a" #,(syntax-line stx))))]))
;; Compile-time predicate: literals that may appear directly as the
;; expected value of a test/value=?.
(define-for-syntax (flat-heap-value? v)
  (or (number? v) (boolean? v)))
;; Translate the expected-value syntax of test/value=? into plain Scheme:
;; flat literals and mutator-datum become #%datum, mutator-quote becomes
;; quote; anything else is rejected.
(define-syntax (expand-scheme stx)
  (syntax-case stx (mutator-quote mutator-datum)
    [(_ val) (flat-heap-value? (syntax->datum #'val)) #'(#%datum . val)]
    [(_ (mutator-datum . val))
     #'(#%datum . val)]
    [(_ (mutator-quote e))
     #'(quote e)]
    [_
     (raise-syntax-error 'test/value=? "must be a number, boolean or a quoted value" stx)]))
;; Test form comparing the unwrapped (gc->scheme) value of a mutator
;; expression against a literal Scheme datum, via PLAI's generic-test.
(define-syntax (test/value=? stx)
  (syntax-case stx (mutator-quote)
    [(_ mutator-expr scheme-datum)
     (quasisyntax/loc stx
       (generic-test
        (λ ()
          ;; Bind the result so it is a root while gc->scheme runs.
          (mutator-let ([v1 mutator-expr])
                       (gc->scheme v1)))
        (λ (result-value)
          (define expected-val (expand-scheme scheme-datum))
          (values
           (cond
             [(exn:plai? result-value) result-value]
             [(equal~? result-value expected-val) true]
             [else false])
           expected-val))
        (quote #,(syntax->datum #'mutator-expr))
        (format "at line ~a" #,(syntax-line stx))))]))
| null | https://raw.githubusercontent.com/racket/plai/164f3b763116fcfa7bd827be511650e71fa04319/plai-lib/gc2/mutator.rkt | racket | Sugar Macros
Real Macros
If this call is in tail position, we will not need access
to its environment when it returns.
If this call is not in tail position, we make the
environment at the call site reachable.
Module Begin
User Macros
XXX make a macro to unify this and provide/lift
We must invoke mutator-app to A-normalize the arguments.
Implementation Functions
XXX get env?
Testing support | #lang scheme
(require (prefix-in scheme: scheme)
plai/private/command-line
(for-syntax plai/private/command-line)
plai/gc2/private/collector-exports
plai/gc2/private/gc-core
scheme/gui/dynamic
(only-in plai/test-harness
exn:plai? equal~?
plai-error generic-test test halt-on-errors print-only-errors)
(for-syntax scheme)
(for-syntax plai/gc2/private/gc-transformer)
scheme/stxparam
(for-syntax scheme/stxparam-exptime))
(provide else require provide #%top
values
test/location=?
test/value=?
(rename-out
[plai-error error]
[mutator-and and]
[mutator-or or]
[mutator-cond cond]
[mutator-case case]
[mutator-define define]
[mutator-define-values define-values]
(mutator-let let)
[mutator-let* let*]
[mutator-begin begin]
[mutator-if if]
[mutator-let-values let-values]
[mutator-set! set!]
[mutator-lambda lambda]
[mutator-lambda λ]
(mutator-app #%app)
(mutator-datum #%datum)
(mutator-cons cons)
(collector:first first)
(collector:rest rest)
(mutator-quote quote)
(mutator-top-interaction #%top-interaction)
(mutator-module-begin #%module-begin)))
;; Compile-time parameters threaded through every mutator macro:
;;  - mutator-name: name to attach to the value currently being bound
;;    (consulted by mutator-lambda when naming closures).
;;  - mutator-tail-call?: #t when the form being expanded is in tail
;;    position; controls whether a GC-roots continuation mark is installed.
;;  - mutator-env-roots: list of identifiers treated as live locals,
;;    from which GC root sets are computed.
;;  - mutator-assignment-allowed?: whether set!/set-first!/set-rest! may
;;    appear at this point in the program.
(define-syntax-parameter mutator-name #f)
(define-syntax-parameter mutator-tail-call? #t)
(define-syntax-parameter mutator-env-roots empty)
(define-syntax-parameter mutator-assignment-allowed? #t)
;; no!/yes! locally forbid / re-permit mutation forms inside e.
(define-syntax-rule (no! e) (syntax-parameterize ([mutator-assignment-allowed? #f]) e))
(define-syntax-rule (yes! e) (syntax-parameterize ([mutator-assignment-allowed? #t]) e))
;; Short-circuit `and`, desugared into nested mutator-if.
;; (and) is true; a single expression is returned as-is.
(define-syntax mutator-and
  (syntax-rules ()
    [(_) (mutator-quote #t)]
    [(_ fe) fe]
    [(_ fe e ...) (mutator-if fe (mutator-and e ...) (mutator-quote #f))]))
;; Short-circuit `or`; binds the first test with mutator-let so it is
;; evaluated exactly once.
(define-syntax mutator-or
  (syntax-rules ()
    [(_) (mutator-quote #f)]
    [(_ fe) fe]
    [(_ fe e ...) (mutator-let ([tmp fe]) (mutator-if tmp tmp (mutator-or e ...)))]))
;; `cond`, desugared into nested mutator-if; a cond with no clauses
;; expands to the empty (mutator-begin).
(define-syntax mutator-cond
  (syntax-rules (else)
    [(_) (mutator-begin)]
    [(_ [else e ...]) (mutator-begin e ...)]
    [(_ [q ans] e ...) (mutator-if q ans (mutator-cond e ...))]))
;; `case`, desugared to a mutator-cond over mutator-member? tests.
;; The scrutinee is bound once with mutator-let; a missing else clause
;; defaults to the empty (mutator-begin).
(define-syntax mutator-case
  (syntax-rules (else)
    [(_ value
        [(v ...) e ...]
        ...
        [else ee ...])
     (mutator-let ([tmp value])
       (mutator-cond [(mutator-app mutator-member? tmp (mutator-quote (v ...)))
                      e ...]
                     ...
                     [else ee ...]))]
    [(_ value
        [(v ...) e ...]
        ...)
     (mutator-case value
       [(v ...) e ...]
       ...
       [else (mutator-begin)])]))
;; `define`, in both function and value form; sets mutator-name so the
;; bound lambda/value picks up its definition name.
(define-syntax mutator-define
  (syntax-rules ()
    [(_ (f a ...) e ...)
     (mutator-define-values (f)
                            (syntax-parameterize ([mutator-name #'f])
                              (mutator-lambda (a ...) e ...)))]
    [(_ id e)
     (mutator-define-values (id)
                            (syntax-parameterize ([mutator-name #'id])
                              e))]))
;; `let`, desugared to mutator-let-values; each right-hand side is named
;; after the identifier it binds.
(define-syntax-rule (mutator-let ([id e] ...) be ...)
  (mutator-let-values ([(id) (syntax-parameterize ([mutator-name #'id])
                               e)]
                       ...)
                      be ...))
;; `let*`, desugared to nested mutator-let.
(define-syntax mutator-let*
  (syntax-rules ()
    [(_ () be ...)
     (mutator-begin be ...)]
    [(_ ([fid fe] [rid re] ...) be ...)
     (mutator-let ([fid fe])
                  (mutator-let* ([rid re] ...)
                                be ...))]))
;; `begin`: every non-final expression is evaluated in non-tail position
;; with assignments permitted (yes!), and its result discarded.
;; An empty begin yields void.
(define-syntax mutator-begin
  (syntax-rules ()
    [(_) (mutator-app void)]
    [(_ e) e]
    [(_ fe e ...)
     (let ([tmp
            (syntax-parameterize ([mutator-tail-call? #f])
              (yes! fe))])
       (mutator-begin e ...))]))
;; Allocate a cons cell.  Before calling the collector, the current root
;; set is partitioned into roots that alias the hd location, roots that
;; alias the tl location, and all others.  The hd/tl arguments are passed
;; to collector:cons as fresh roots whose setters update both the local
;; hd/tl bindings and every aliasing root (so a moving collector that
;; calls set-root! keeps all copies of the address consistent).  Only the
;; non-aliasing roots remain in active-roots during the allocation.
(define mutator-cons
  (let ([cons
         (λ (hd tl)
           (define roots (compute-current-roots))
           (define-values (hd-roots no-hd-roots)
             (partition (λ (x) (= hd (read-root x))) roots))
           (define-values (tl-roots no-hd-no-tl-roots)
             (partition (λ (x) (= tl (read-root x))) no-hd-roots))
           (parameterize ([active-roots no-hd-no-tl-roots])
             (collector:cons (make-root 'hd
                                        (λ () hd)
                                        (λ (v)
                                          (set! hd v)
                                          (for ([r (in-list hd-roots)])
                                            (set-root! r v))))
                             (make-root 'tl
                                        (λ () tl)
                                        (λ (v)
                                          (set! tl v)
                                          (for ([r (in-list tl-roots)])
                                            (set-root! r v)))))))])
    cons))
;; Allocate a flat (non-pair, non-closure) value, making the current
;; roots visible to the collector for the duration of the allocation.
(define (do-alloc-flat flat)
  (parameterize ([active-roots (compute-current-roots)])
    (collector:alloc-flat flat)))
;; Top-level definition: the right-hand side is evaluated in non-tail
;; position, and every bound identifier is registered as a global GC root.
(define-syntax-rule (mutator-define-values (id ...) e)
  (begin (define-values (id ...)
           (syntax-parameterize ([mutator-tail-call? #f])
             e))
         (add-global-root! (make-env-root id))
         ...))
;; `if`: the test is a heap location, so it is dereferenced before
;; branching; assignments are forbidden inside the test (no!).
(define-syntax-rule (mutator-if test true false)
  (if (syntax-parameterize ([mutator-tail-call? #f])
        (collector:deref (no! test)))
      true
      false))
;; `set!`: only legal where mutator-assignment-allowed? is true
;; (top level and begin bodies); expands to a plain set! followed by a
;; void result so the expression yields an allocated void.
(define-syntax (mutator-set! stx)
  (syntax-case stx ()
    [(_ id e)
     (let ()
       (if (syntax-parameter-value #'mutator-assignment-allowed?)
           #'(begin
               (set! id (no! e))
               (mutator-app void))
           (raise-syntax-error 'set! "allowed only inside begin expressions and at the top-level" stx)))]))
;; `let-values`.  Each right-hand side is evaluated with mutator-env-roots
;; extended so that earlier bindings (already evaluated, held in the tmp
;; variables) are visible as roots while later initializers run:
;; switch-over renames the user identifiers to their tmps, restricted via
;; find-referenced-locals to those actually referenced by the body.
;; Finally the body sees the real identifiers as roots.
(define-syntax (mutator-let-values stx)
  (syntax-case stx ()
    [(_ ([(id ...) expr] ...) body-expr)
     (with-syntax ([((tmp ...) ...)
                    (map generate-temporaries (syntax->list #'((id ...) ...)))])
       (let ([binding-list (syntax->list #'((id ...) ...))])
         ;; previous-id_i = all identifiers bound by clauses before clause i
         (with-syntax ([((previous-id ...) ...)
                        (build-list (length binding-list)
                                    (λ (n) (append-map syntax->list (take binding-list n))))])
           (syntax/loc stx
             (let*-values ([(tmp ...)
                            (syntax-parameterize ([mutator-env-roots
                                                   (append
                                                    (switch-over
                                                     (syntax->list #'(id ... ...))
                                                     (syntax->list #'(tmp ... ...))
                                                     (find-referenced-locals
                                                      (list #'previous-id ...)
                                                      #'body-expr))
                                                    (syntax-parameter-value #'mutator-env-roots))]
                                                  [mutator-tail-call? #f])
                              (no! expr))]
                           ...)
               (let-values ([(id ...) (values tmp ...)] ...)
                 (syntax-parameterize ([mutator-env-roots
                                        (append (find-referenced-locals
                                                 (list #'id ... ...)
                                                 #'body-expr)
                                                (syntax-parameter-value #'mutator-env-roots))])
                   body-expr)))))))]
    ;; Multi-expression body: wrap in mutator-begin and recur.
    [(_ ([(id ...) expr] ...) body-expr ...)
     (syntax/loc stx
       (mutator-let-values
        ([(id ...) expr] ...)
        (mutator-begin body-expr ...)))]))
;; `lambda`.  Free variables of the body (current env roots minus the
;; formal parameters) are captured explicitly: the compiled procedure
;; takes them as extra leading arguments, and do-collector:closure stores
;; their locations in the closure's environment.  The closure gets a
;; printable name from mutator-name, syntax-local-name, or the
;; 'inferred-name property, falling back to "#<proc>".
(define-syntax (mutator-lambda stx)
  (syntax-case stx ()
    [(_ (id ...) body)
     (let ([env-roots (syntax-parameter-value #'mutator-env-roots)])
       (with-syntax ([(free-id ...) (map syntax-local-introduce
                                         (filter
                                          (λ (x) (for/and ([id (in-list (syntax->list #'(id ...)))])
                                                   (not (free-identifier=? id x))))
                                          (find-referenced-locals env-roots stx)))]
                     [(env-id ...) env-roots]
                     [closure (or (syntax-parameter-value #'mutator-name)
                                  (syntax-local-name)
                                  (let ([prop (syntax-property stx 'inferred-name)])
                                    (if (or (identifier? prop)
                                            (symbol? prop))
                                        prop
                                        #f))
                                  (string->symbol "#<proc>"))])
         (quasisyntax/loc stx
           (let ([closure
                  (closure-code
                   #,(length (syntax->list #'(free-id ...)))
                   (let ([closure
                          (lambda (free-id ... id ...)
                            (syntax-parameterize ([mutator-env-roots
                                                   (append
                                                    (find-referenced-locals
                                                     (list #'id ...)
                                                     #'body)
                                                    (list #'free-id ...))]
                                                  [mutator-tail-call? #t])
                              (no! body)))])
                     closure))])
             ;; In tail position the caller's environment need not survive
             ;; the allocation; otherwise mark the env roots on the
             ;; continuation so the collector can see them.
             #,(if (syntax-parameter-value #'mutator-tail-call?)
                   (syntax/loc stx
                     (#%app do-collector:closure closure
                            (list (λ () free-id) ...)
                            (list (λ (v) (set! free-id v)) ...)))
                   (syntax/loc stx
                     (with-continuation-mark
                         gc-roots-key
                       (list (make-env-root env-id) ...)
                       (#%app do-collector:closure closure
                              (list (λ () free-id) ...)
                              (list (λ (v) (set! free-id v)) ...)))))))))]
    ;; Multi-expression body: wrap in mutator-begin and recur.
    [(_ (id ...) body ...)
     (syntax/loc stx
       (mutator-lambda (id ...) (mutator-begin body ...)))]))
;; Allocate a closure.  getters/setters read and write each captured free
;; variable.  For each captured location, the current roots that alias it
;; are split off, and a single fresh root is built whose setter updates
;; the captured variable (via this-setter) and every aliasing root — the
;; same aliasing discipline as mutator-cons.  Remaining (non-aliasing)
;; roots stay in active-roots during the allocation.  closure-roots is
;; accumulated in reverse, hence the (reverse ...) at the call.
(define (do-collector:closure closure getters setters)
  (define-values (remaining-roots closure-roots)
    (let loop ([getters getters]
               [setters setters]
               [remaining-roots (compute-current-roots)]
               [closure-roots '()])
      (cond
        [(null? getters) (values remaining-roots closure-roots)]
        [else
         (define this-loc ((car getters)))
         (define this-setter (car setters))
         (define-values (this-other-roots leftovers)
           (partition (λ (x) (= (read-root x) this-loc)) remaining-roots))
         (loop (cdr getters) (cdr setters)
               leftovers
               (cons (make-root 'closure-root
                                (λ () this-loc)
                                (λ (v) (set! this-loc v)
                                  (this-setter v)
                                  (for ([root (in-list this-other-roots)])
                                    (set-root! root v))))
                     closure-roots))])))
  (parameterize ([active-roots remaining-roots])
    (collector:closure closure (reverse closure-roots))))
;; Application, phase 1: A-normalization.  Every sub-expression that is
;; not a plain identifier (identifiers bound to set!-transformers still
;; count as expressions) is hoisted into its own mutator-let binding, so
;; that mutator-anf-app only ever sees identifiers.
(define-syntax (mutator-app stx)
  (syntax-case stx ()
    [(_ e ...)
     (local [(define (do-not-expand? exp)
               (and (identifier? exp)
                    (not (set!-transformer?
                          (syntax-local-value exp (lambda () #f))))))
             (define exps (syntax->list #'(e ...)))
             (define tmps
               (generate-temporaries #'(e ...)))]
       (with-syntax ([(ne ...)
                      (map (lambda (exp tmp) (if (do-not-expand? exp) exp tmp))
                           exps tmps)])
         ;; Wrap the innermost call in one mutator-let per hoisted
         ;; expression, innermost-last (hence the reverse).
         (for/fold ([acc (syntax/loc stx (mutator-anf-app ne ...))])
           ([exp (in-list (reverse exps))]
            [tmp (in-list (reverse tmps))])
           (if (do-not-expand? exp)
               acc
               (quasisyntax/loc stx
                 (mutator-let ([#,tmp #,exp])
                              #,acc))))))]))
;; Application, phase 2 (arguments already atomic).  Primitive operators
;; from prim-ids are applied to the dereferenced arguments and their flat
;; result re-allocated; anything else goes through deref-proc.
;; set-first!/set-rest! are rejected outside assignment positions.
(define-syntax (mutator-anf-app stx)
  (syntax-case stx ()
    [(_ fe ae ...)
     (let ()
       (define prim-app? (ormap (λ (x) (free-identifier=? x #'fe))
                                prim-ids))
       (define is-set-fst? (free-identifier=? #'collector:set-first! #'fe))
       (when (or is-set-fst? (free-identifier=? #'collector:set-rest! #'fe))
         (unless (syntax-parameter-value #'mutator-assignment-allowed?)
           (raise-syntax-error (if is-set-fst? 'set-first! 'set-rest!)
                               "can appear only at the top-level or in a begin"
                               stx)))
       (with-syntax ([(env-id ...) (syntax-parameter-value #'mutator-env-roots)]
                     [app-exp (if prim-app?
                                  (syntax/loc stx (do-alloc-flat (fe (collector:deref ae) ...)))
                                  (syntax/loc stx ((deref-proc fe) ae ...)))])
         (if (syntax-parameter-value #'mutator-tail-call?)
             ;; Tail call: the environment dies with this frame.
             #'app-exp
             ;; Non-tail call: keep the caller's env roots reachable via a
             ;; continuation mark for the duration of the call.
             #`(with-continuation-mark gc-roots-key
                 (list (make-env-root env-id) ...)
                 app-exp))))]))
;; `quote`: pairs allocate cons cells recursively; anything else becomes
;; a flat datum.
(define-syntax mutator-quote
  (syntax-rules ()
    [(_ (a . d))
     (mutator-app mutator-cons (mutator-quote a) (mutator-quote d))]
    [(_ s)
     (mutator-datum . s)]))
;; Self-evaluating literal: allocate it on the collector heap.
(define-syntax (mutator-datum stx)
  (syntax-case stx ()
    [(_ . e)
     (quasisyntax/loc stx (mutator-anf-app do-alloc-flat (#%datum . e)))]))
;; #%top-interaction: passes declaration-like forms through unchanged,
;; and evaluates any other expression in non-tail position, printing its
;; result(s) via show-one-result / show-multiple-results.
(define-syntax (mutator-top-interaction stx)
  (syntax-case stx (require provide mutator-define mutator-define-values test/value=? import-primitives)
    [(_ . (require . e))
     (syntax/loc stx
       (require . e))]
    [(_ . (provide . e))
     (syntax/loc stx
       (provide . e))]
    [(_ . (mutator-define . e))
     (syntax/loc stx
       (mutator-define . e))]
    [(_ . (mutator-define-values . e))
     (syntax/loc stx
       (mutator-define-values . e))]
    [(_ . (test/value=? . e))
     (syntax/loc stx
       (test/value=? . e))]
    [(_ . (import-primitives . e))
     (syntax/loc stx
       (import-primitives . e))]
    [(_ . expr)
     (syntax/loc stx
       (call-with-values
        (lambda ()
          (syntax-parameterize ([mutator-tail-call? #f])
            expr))
        (case-lambda
          [() (void)]
          [(result-addr)
           (show-one-result result-addr)]
          [result-addrs
           (show-multiple-results result-addrs)])))]))
;; Print a header for a single REPL result and produce the value to show:
;; imported procedures pass through unchanged, heap locations are
;; unwrapped to ordinary Scheme values with gc->scheme; anything else
;; yields void.
(define (show-one-result result-addr)
  (if (procedure? result-addr)
      (begin
        (printf "Imported procedure:\n")
        result-addr)
      (if (location? result-addr)
          (begin
            (printf "Value at location ~a:\n" result-addr)
            (gc->scheme result-addr))
          (void))))
;; Print a header listing the heap locations of several REPL results,
;; then return the corresponding values (procedures pass through,
;; locations are unwrapped with gc->scheme) as multiple values.
(define (show-multiple-results results)
  ;; Only location results appear in the header; imported procedures are
  ;; skipped from the address list but still returned below.
  (define addrs
    (for/list ([result-addr (in-list results)]
               #:when (location? result-addr))
      result-addr))
  (printf "Values at locations ")
  (cond
    [(= (length addrs) 2)
     (printf "~a and ~a:\n" (car addrs) (cadr addrs))]
    [else
     ;; Oxford-style listing: "a, b, and c:".  NOTE(review): with a
     ;; single address this prints "and ~a:" directly — presumably this
     ;; case cannot arise here; confirm against callers.
     (let loop ([addr (car addrs)]
                [addrs (cdr addrs)])
       (cond
         [(null? addrs)
          (printf "and ~a:\n" addr)]
         [else
          (printf "~a, " addr)
          (loop (car addrs) (cdr addrs))]))])
  (apply values
         (for/list ([result (in-list results)])
           (cond
             [(procedure? result)
              result]
             [(location? result)
              (gc->scheme result)]))))
;; Expand an (allocator-setup <collector-module> <heap-size>) clause into
;; a require of the collector module (or of the alternate-collector
;; override, when one is set) followed by a runtime call to
;; allocator-setup/proc with the collector's exported operations.
(define-for-syntax (allocator-setup-internal stx)
  (syntax-case stx ()
    [(collector-module heap-size)
     (with-syntax ([(args ...)
                    ;; The collector's required exports, given lexical
                    ;; context from the setup form so require binds them.
                    (map (λ (s) (datum->syntax stx s))
                         '(init-allocator gc:deref gc:alloc-flat gc:cons
                                          gc:closure gc:closure? gc:closure-code-ptr gc:closure-env-ref
                                          gc:first gc:rest
                                          gc:flat? gc:cons?
                                          gc:set-first! gc:set-rest!))])
       #`(begin
           #,(if (alternate-collector)
                 #`(require #,(datum->syntax #'collector-module (alternate-collector)))
                 #`(require #,(syntax-case #'collector-module (mutator-quote)
                                [(mutator-quote . x)
                                 (datum->syntax #'collector-module (cons #'quote #'x))]
                                [else #'collector-module])))
           (allocator-setup/proc args ... (#%datum . heap-size))))]
    [_ (raise-syntax-error 'mutator
                           "Mutator must start with an 'allocator-setup' expression, such as: (allocator-setup <module-path> <literal-number>)"
                           stx)]))
;; Install the collector's operations into the mutable collector-exports
;; cells, initialize the heap, optionally start the heap visualizer
;; (GUI available and heap <= 500 cells), and run the collector's
;; init-allocator hook.
(define (allocator-setup/proc init-allocator gc:deref gc:alloc-flat gc:cons
                              gc:closure gc:closure? gc:closure-code-ptr gc:closure-env-ref
                              gc:first gc:rest
                              gc:flat? gc:cons?
                              gc:set-first! gc:set-rest!
                              heap-size)
  (set-collector:deref! gc:deref)
  (set-collector:alloc-flat! gc:alloc-flat)
  (set-collector:cons! gc:cons)
  (set-collector:first! gc:first)
  (set-collector:rest! gc:rest)
  (set-collector:flat?! gc:flat?)
  (set-collector:cons?! gc:cons?)
  (set-collector:set-first!! gc:set-first!)
  (set-collector:set-rest!! gc:set-rest!)
  (set-collector:closure! gc:closure)
  (set-collector:closure?! gc:closure?)
  (set-collector:closure-code-ptr! gc:closure-code-ptr)
  (set-collector:closure-env-ref! gc:closure-env-ref)
  (init-heap! heap-size)
  (when (gui-available?)
    (if (<= heap-size 500)
        (set-ui! (dynamic-require `plai/gc2/private/gc-gui 'heap-viz%))
        (printf "Large heap; the heap visualizer will not be displayed.\n")))
  (init-allocator))
;; Error text shared by every malformed-module case below.
(define-for-syntax allocator-setup-error-msg
  "Mutator must start with an 'allocator-setup' expression, such as: (allocator-setup <module-path> <literal-number>)")
;; #%module-begin: requires the first form to be a well-formed
;; allocator-setup (module path + literal heap size), expands it via
;; allocator-setup-internal, and wraps every remaining form in
;; mutator-top-interaction.
(define-syntax (mutator-module-begin stx)
  (syntax-case stx (allocator-setup)
    [(_ (allocator-setup . setup) module-expr ...)
     (begin
       ;; Validate the setup clause eagerly for better error locations.
       (syntax-case #'setup ()
         [(collector heap-size)
          (begin
            (unless (module-path? (syntax->datum #'collector))
              (raise-syntax-error 'allocator-setup "expected a module path" #'collector))
            (unless (number? (syntax->datum #'heap-size))
              (raise-syntax-error 'allocator-setup "expected a literal number" #'heap-size)))]
         [_
          (raise-syntax-error 'mutator allocator-setup-error-msg (syntax/loc #'setup (allocator-setup . setup)))])
       (quasisyntax/loc stx
         (#%module-begin
          #,(allocator-setup-internal #'setup)
          #,@(for/list ([me (in-list (syntax->list #'(module-expr ...)))])
               (quasisyntax/loc me
                 (mutator-top-interaction . #,me))))))]
    [(_ first-expr module-expr ...)
     (raise-syntax-error 'mutator allocator-setup-error-msg #'first-expr)]
    [(_)
     (raise-syntax-error 'mutator allocator-setup-error-msg)]))
(provide import-primitives)
;; (import-primitives id ...) requires each id from `scheme` under a
;; fresh name and wraps it so that: every argument must be a heap
;; location holding a flat value (dereferenced before the call), and the
;; result must be void or a heap-value?, which is re-allocated on the
;; collector heap.
(define-syntax (import-primitives stx)
  (syntax-case stx ()
    [(_ id ...)
     (andmap identifier? (syntax->list #'(id ...)))
     (with-syntax ([(renamed-id ...) (generate-temporaries #'(id ...))]
                   ;; Give 'scheme the lexical context of the first id so
                   ;; the require resolves in the importing module.
                   [source (datum->syntax (and (pair? (syntax-e #'(id ...)))
                                               (car (syntax-e #'(id ...))))
                                          'scheme)])
       #`(begin
           (require (only-in source [id renamed-id] ...))
           ;; XXX make a macro to unify this and provide/lift
           (define id
             (lambda args
               (unless (andmap (lambda (v) (and (location? v) (collector:flat? v))) args)
                 (error 'id (string-append "all arguments must be <heap-value?>s, "
                                           "even if the imported procedure accepts structured "
                                           "data")))
               (let ([result (apply renamed-id (map collector:deref args))])
                 (cond
                   [(void? result) (void)]
                   [(heap-value? result) (do-alloc-flat result)]
                   [else
                    ;; BUG FIX: `result` was previously passed to
                    ;; string-append (crashing on non-string results)
                    ;; instead of to error as the ~a format argument.
                    (error 'id (string-append "imported primitive must return <heap-value?>, "
                                              "received ~a")
                           result)]))))
           ...))]
    [(_ maybe-id ...)
     (ormap (λ (v) (and (not (identifier? v)) v)) (syntax->list #'(maybe-id ...)))
     (let ([offending-stx (findf (λ (v) (not (identifier? v))) (syntax->list #'(maybe-id ...)))])
       (raise-syntax-error
        #f "expected identifier to import" offending-stx))]
    [(_ . __)
     (raise-syntax-error #f "expected list of identifiers to import" stx)]
    [_ (raise-syntax-error #f "expected open parenthesis before import-primitive")]))
(define-for-syntax ((mk-id-macro p-id) stx)
(syntax-case stx ()
[id
(identifier? #'id)
(raise-syntax-error (syntax-e stx)
"primitive must appear in the function position of an application"
stx)]
[(id exp ...)
#`(mutator-app #,p-id exp ...)]))
(define-syntax (provide-flat-prims/lift stx)
(syntax-case stx ()
[(_ prim-ids id ...)
(andmap identifier? (syntax->list #'(id ...)))
(with-syntax ([(id2 ...) (generate-temporaries #'(id ...))]
[(p ...) (generate-temporaries #'(id ...))])
#'(begin
(define-for-syntax prim-ids (syntax->list #'(id ...)))
(provide (rename-out [id2 id] ...))
(define-syntax id2 (mk-id-macro #'id)) ...))]))
(provide-flat-prims/lift
prim-ids
symbol? boolean? number? symbol=?
add1 sub1 zero? + - * / even? odd? = < > <= >=)
(define (member? v l)
(and (member v l) #t))
(define (mutator-member? v l)
(do-alloc-flat
(member? (collector:deref v)
(gc->scheme l))))
(provide (rename-out (mutator-set-first! set-first!)))
(define-syntax (mutator-set-first! stx)
(syntax-case stx ()
[x
(identifier? #'x)
(raise-syntax-error 'set-first! "must appear immediately following an open paren" stx)]
[(_ args ...)
(begin
#'(mutator-app collector:set-first! args ...))]))
(provide (rename-out (mutator-set-rest! set-rest!)))
(define-syntax (mutator-set-rest! stx)
(syntax-case stx ()
[x
(identifier? #'x)
(raise-syntax-error 'set-rest! "must appear immediately following an open paren" stx)]
[(_ args ...)
(begin
#'(mutator-app collector:set-rest! args ...))]))
(provide (rename-out [mutator-empty empty]))
(define-syntax mutator-empty
(syntax-id-rules (mutator-empty)
[_ (mutator-quote ())]))
(provide (rename-out (mutator-empty? empty?)))
(define (mutator-empty? loc)
(cond
[(collector:flat? loc)
(do-alloc-flat (empty? (collector:deref loc)))]
[else
(do-alloc-flat false)]))
(provide (rename-out [mutator-cons? cons?]))
(define (mutator-cons? loc)
(do-alloc-flat (collector:cons? loc)))
(provide (rename-out [mutator-eq? eq?]))
(define (mutator-eq? l1 l2)
(do-alloc-flat (= l1 l2)))
(provide (rename-out [mutator-printf printf]))
(define-syntax (mutator-printf stx)
(syntax-case stx ()
[(_ fmt arg ...)
(syntax/loc stx
(begin
(mutator-app printf (#%datum . fmt)
(mutator-app gc->scheme arg) ...)
(void)))]))
(provide (rename-out
(mutator-halt-on-errors halt-on-errors)
(mutator-print-only-errors print-only-errors)))
(define-syntax (mutator-halt-on-errors stx)
(syntax-case stx ()
[(_) #'(halt-on-errors)]
[(_ arg) #'(#%app halt-on-errors (#%datum . arg))]))
(define-syntax (mutator-print-only-errors stx)
(syntax-case stx ()
[(_) #'(print-only-errors)]
[(_ arg) #'(#%app print-only-errors (#%datum . arg))]))
(define (deref-proc proc/loc)
(define v
(cond
[(procedure? proc/loc) proc/loc]
[(location? proc/loc) (collector:closure-code-ptr proc/loc)]
[else
(error 'procedure-application "expected procedure, given something else")]))
(cond
[(procedure? v)
v]
[(closure-code? v)
(lambda args
(apply (closure-code-proc v)
(append
(for/list ([i (in-range (closure-code-env-count v))])
(collector:closure-env-ref proc/loc i))
args)))]
[else
(error 'procedure-application "expected procedure, given ~e" v)]))
(define (gc->scheme loc)
(define-struct an-unset ())
(define unset (make-an-unset))
(define phs (make-hash))
(define (unwrap loc)
(if (hash-has-key? phs loc)
(hash-ref phs loc)
(begin
(local [(define ph (make-placeholder unset))]
(hash-set! phs loc ph)
(cond
[(collector:flat? loc)
(placeholder-set! ph (collector:deref loc))]
[(collector:cons? loc)
(local [(define car-ph (make-placeholder unset))
(define cdr-ph (make-placeholder unset))]
(placeholder-set! ph (cons car-ph cdr-ph))
(placeholder-set! car-ph (unwrap (collector:first loc)))
(placeholder-set! cdr-ph (unwrap (collector:rest loc))))]
[(collector:closure? loc)
(placeholder-set! ph (closure-code-proc (collector:closure-code-ptr loc)))]
[else
(error (format "gc:flat?, gc:cons?, gc:closure? all returned false for ~a" loc))])
(placeholder-get ph)))))
(make-reader-graph (unwrap loc)))
(define-syntax (test/location=? stx)
(syntax-case stx ()
[(_ e1 e2)
(quasisyntax/loc stx
(generic-test
(λ () e1)
(λ (result-value)
(define expected-val e2)
(values
(cond
[(exn:plai? result-value) result-value]
[(equal~? result-value expected-val) true]
[else false])
expected-val))
(quote (heap-loc #,(syntax->datum #'e1)))
(format "at line ~a" #,(syntax-line stx))))]))
(define-for-syntax (flat-heap-value? v)
(or (number? v) (boolean? v)))
(define-syntax (expand-scheme stx)
(syntax-case stx (mutator-quote mutator-datum)
[(_ val) (flat-heap-value? (syntax->datum #'val)) #'(#%datum . val)]
[(_ (mutator-datum . val))
#'(#%datum . val)]
[(_ (mutator-quote e))
#'(quote e)]
[_
(raise-syntax-error 'test/value=? "must be a number, boolean or a quoted value" stx)]))
(define-syntax (test/value=? stx)
(syntax-case stx (mutator-quote)
[(_ mutator-expr scheme-datum)
(quasisyntax/loc stx
(generic-test
(λ ()
(mutator-let ([v1 mutator-expr])
(gc->scheme v1)))
(λ (result-value)
(define expected-val (expand-scheme scheme-datum))
(values
(cond
[(exn:plai? result-value) result-value]
[(equal~? result-value expected-val) true]
[else false])
expected-val))
(quote #,(syntax->datum #'mutator-expr))
(format "at line ~a" #,(syntax-line stx))))]))
|
5011a51ed92e95f06de17dca98c4c28d14a86552069061ef8ed39889d54a1a81 | marigold-dev/mankavar | scre.ml | [@@@warning "-34"]
(* Take care of gas and bytes *)
type do_operation_result = {
state : unit ;
gas : int64 ;
bytes : int64 ;
}
[@@deriving ez]
open Das_helpers
open Structs
module type STATE = Patricia_state.STATE
module Do_transfer = struct
type continuation = {
input : Transfer.payload ;
storage : Eval.memory ;
state : unit Das_vm.state ;
cache : (module Eval.CACHE) ;
dst_index : Contract_index.t ;
}
type continue =
| Continuation of continuation
| Finish
type do_transfer =
| Continue of continue
| Finished of int
let flush_contract
(module State : STATE) (module Cache : Eval.CACHE) storage dst_index =
let c = State.get_contract_exn dst_index in
let c = c |> Contract.set_storage storage in
(!Cache.content) |> Das_vm.VMap.to_list |> List.iter (fun (k , (v , to_write)) ->
if to_write then State.write_slow dst_index k v
) ;
Format.printf "Contract Storage preflush:%a@;%!" Eval.memory_pp storage ;
State.set_contract_exn dst_index c ;
()
let mk_state (module State : STATE) op =
let src , dst , amount , input , _max_gas = Transfer.destruct op in
let transfer () =
match State.debit src amount with
| Ok () -> (
State.credit dst amount ;
)
| Error () -> ()
in
match dst with
| Contract_index dst_index -> (
let c = State.get_contract_exn dst_index in
let module Cache = Eval.Cache(struct
let read_slow k = State.read_slow dst_index k
end)() in
let module Run = Eval.Make(Cache.Rw_slow) in
let input = op.payload in
let storage = Contract.storage c in
let state = Run.empty_state @@ Contract.program c in
Continuation (
{ input ; storage ; state ; cache = (module Cache) ; dst_index }
)
)
| Key_index _ -> (
assert (Array.length input = 0) ;
transfer () ;
Finish
)
let step_n (module State : STATE) ~n s =
match s with
| Continuation { input ; storage ; state ; cache ; dst_index } -> (
let module Cache = (val cache) in
let module Run = Eval.Make(Cache.Rw_slow) in
match Run.step_n ~n state ~input ~storage with
| Finished (storage' , n') -> (
flush_contract (module State) (module Cache) storage' dst_index ;
Finished n'
)
| Pending (storage' , s') -> (
Continue ( Continuation {
input ;
storage = storage' ;
state = s' ;
cache = (module Cache) ;
dst_index ;
} )
)
)
| Finish -> Finished 1
let eval (module State : STATE) op =
let s = mk_state (module State) op in
match s with
| Continuation { input ; storage ; state ; cache ; dst_index } -> (
let module Cache = (val cache) in
let module Run = Eval.Make(Cache.Rw_slow) in
let storage' = Run.step_until_stop state ~input ~storage in
flush_contract (module State) (module Cache) storage' dst_index ;
()
)
| Finish -> ()
end
module Do_origination = struct
let main (module State : STATE) op : unit =
PseudoEffect.returner @@ fun { return } ->
let noop () = return @@ () in
let src , amount , contract , slow_memory , _max_gas =
Origination.destruct op
in
XResult.value' noop @@
State.debit src amount ;
let dst_index = State.init_contract contract amount in
slow_memory |> Das_vm.VMap.iter (fun k v ->
State.write_slow dst_index k v
) ;
()
let do_origination_n op (module State : STATE) ~n:_ =
main (module State) op
end
module Do_operation = struct
type continue =
| Do_transfer of Do_transfer.continue
| Do_origination of unit
[@@deriving ez]
type do_operation =
| Continue of continue
| Finished of int
let eval (module State : STATE) op =
(* Format.printf "DO OPERATION@;\n" ; *)
Operation.destruct op
~transfer:(Do_transfer.eval (module State))
~origination:(Do_origination.main (module State))
let mk_state (module State : STATE) op =
match op with
| Operation.Transfer tx -> Do_transfer (
Do_transfer.mk_state (module State) tx
)
| Origination o -> Do_origination (
Do_origination.main (module State) o
)
let step_n (module State : STATE) ~n s =
match s with
| Do_origination () -> Finished 1
| Do_transfer tr -> (
match Do_transfer.step_n (module State) ~n tr with
| Do_transfer.Continue c -> Continue (Do_transfer c)
| Finished n -> Finished n
)
end
let do_operation op (module State : STATE) : do_operation_result =
Do_operation.eval (module State) op ;
do_operation_result_make_tpl () 0L 0L
let start_operation_n (module State : STATE) op ~n =
let s = Do_operation.mk_state (module State) op in
Do_operation.step_n (module State) s ~n
let resume_operation_n (module State : STATE) s ~n =
Do_operation.step_n (module State) s ~n
| null | https://raw.githubusercontent.com/marigold-dev/mankavar/13592b5eb888f2ec73816e3d200a6e89228941da/src/consensus/tx-oru/proof-scre/scre.ml | ocaml | Take care of gas and bytes
Format.printf "DO OPERATION@;\n" ; | [@@@warning "-34"]
type do_operation_result = {
state : unit ;
gas : int64 ;
bytes : int64 ;
}
[@@deriving ez]
open Das_helpers
open Structs
module type STATE = Patricia_state.STATE
module Do_transfer = struct
type continuation = {
input : Transfer.payload ;
storage : Eval.memory ;
state : unit Das_vm.state ;
cache : (module Eval.CACHE) ;
dst_index : Contract_index.t ;
}
type continue =
| Continuation of continuation
| Finish
type do_transfer =
| Continue of continue
| Finished of int
let flush_contract
(module State : STATE) (module Cache : Eval.CACHE) storage dst_index =
let c = State.get_contract_exn dst_index in
let c = c |> Contract.set_storage storage in
(!Cache.content) |> Das_vm.VMap.to_list |> List.iter (fun (k , (v , to_write)) ->
if to_write then State.write_slow dst_index k v
) ;
Format.printf "Contract Storage preflush:%a@;%!" Eval.memory_pp storage ;
State.set_contract_exn dst_index c ;
()
let mk_state (module State : STATE) op =
let src , dst , amount , input , _max_gas = Transfer.destruct op in
let transfer () =
match State.debit src amount with
| Ok () -> (
State.credit dst amount ;
)
| Error () -> ()
in
match dst with
| Contract_index dst_index -> (
let c = State.get_contract_exn dst_index in
let module Cache = Eval.Cache(struct
let read_slow k = State.read_slow dst_index k
end)() in
let module Run = Eval.Make(Cache.Rw_slow) in
let input = op.payload in
let storage = Contract.storage c in
let state = Run.empty_state @@ Contract.program c in
Continuation (
{ input ; storage ; state ; cache = (module Cache) ; dst_index }
)
)
| Key_index _ -> (
assert (Array.length input = 0) ;
transfer () ;
Finish
)
let step_n (module State : STATE) ~n s =
match s with
| Continuation { input ; storage ; state ; cache ; dst_index } -> (
let module Cache = (val cache) in
let module Run = Eval.Make(Cache.Rw_slow) in
match Run.step_n ~n state ~input ~storage with
| Finished (storage' , n') -> (
flush_contract (module State) (module Cache) storage' dst_index ;
Finished n'
)
| Pending (storage' , s') -> (
Continue ( Continuation {
input ;
storage = storage' ;
state = s' ;
cache = (module Cache) ;
dst_index ;
} )
)
)
| Finish -> Finished 1
let eval (module State : STATE) op =
let s = mk_state (module State) op in
match s with
| Continuation { input ; storage ; state ; cache ; dst_index } -> (
let module Cache = (val cache) in
let module Run = Eval.Make(Cache.Rw_slow) in
let storage' = Run.step_until_stop state ~input ~storage in
flush_contract (module State) (module Cache) storage' dst_index ;
()
)
| Finish -> ()
end
module Do_origination = struct
let main (module State : STATE) op : unit =
PseudoEffect.returner @@ fun { return } ->
let noop () = return @@ () in
let src , amount , contract , slow_memory , _max_gas =
Origination.destruct op
in
XResult.value' noop @@
State.debit src amount ;
let dst_index = State.init_contract contract amount in
slow_memory |> Das_vm.VMap.iter (fun k v ->
State.write_slow dst_index k v
) ;
()
let do_origination_n op (module State : STATE) ~n:_ =
main (module State) op
end
module Do_operation = struct
type continue =
| Do_transfer of Do_transfer.continue
| Do_origination of unit
[@@deriving ez]
type do_operation =
| Continue of continue
| Finished of int
let eval (module State : STATE) op =
Operation.destruct op
~transfer:(Do_transfer.eval (module State))
~origination:(Do_origination.main (module State))
let mk_state (module State : STATE) op =
match op with
| Operation.Transfer tx -> Do_transfer (
Do_transfer.mk_state (module State) tx
)
| Origination o -> Do_origination (
Do_origination.main (module State) o
)
let step_n (module State : STATE) ~n s =
match s with
| Do_origination () -> Finished 1
| Do_transfer tr -> (
match Do_transfer.step_n (module State) ~n tr with
| Do_transfer.Continue c -> Continue (Do_transfer c)
| Finished n -> Finished n
)
end
let do_operation op (module State : STATE) : do_operation_result =
Do_operation.eval (module State) op ;
do_operation_result_make_tpl () 0L 0L
let start_operation_n (module State : STATE) op ~n =
let s = Do_operation.mk_state (module State) op in
Do_operation.step_n (module State) s ~n
let resume_operation_n (module State : STATE) s ~n =
Do_operation.step_n (module State) s ~n
|
b1b02c0525bc85e76c5c1130393f4ad9f54d6423df538a275ebb0b0b2002bc28 | rsnikhil/Forvis_RISCV-ISA-Spec | Mem_Ops.hs | Copyright ( c ) 2018 - 2019
-- See LICENSE for license details
module Mem_Ops where
-- ================================================================
-- This module defines instruction field values that specify the type
-- and size of memory operations.
Note : these are duplicates of defs in Forvis_Spec.hs where they are
used in the specs of LOAD , STORE and AMO instructions . They are
-- repeated here because this information is also needed by memory and
-- I/O servers, and by top-level execution and debug wrappers.
Forvis_Spec.hs could have just imported this module , but these defs
-- are repeated there for local, self-contained readability.
-- ================================================================
Standard Haskell imports
import Data.Word
import Data.Bits
-- Project imports
import ALU
import Arch_Defs
-- ================================================================
NOTE : the following are defined in module :
opcode_LOAD , funct3_LB / LH / LW / LD / LBU / LHU / LWU
-- opcode_STORE, funct3_SB/SH/SW/SD
opcode_AMO , funct3_AMO_W / D , msbs5_AMO_LR / SC / ADD / SWAP / XOR / AND / OR / MIN / MAX / MINU / MAXU
-- ================================================================
-- Definitions within opcode_LOAD
is_LOAD_aligned :: InstrField -> Integer -> Bool
is_LOAD_aligned funct3 addr = (( (funct3 == funct3_LB) || (funct3 == funct3_LBU))
|| (((funct3 == funct3_LH) || (funct3 == funct3_LHU)) && ((addr .&. 0x1) == 0))
|| (((funct3 == funct3_LW) || (funct3 == funct3_LWU)) && ((addr .&. 0x3) == 0))
|| ( (funct3 == funct3_LD) && ((addr .&. 0x7) == 0)))
-- ================================================================
-- Definitions within opcode_STORE
is_STORE_aligned :: InstrField -> Integer -> Bool
is_STORE_aligned funct3 addr = (( funct3 == funct3_SB)
|| ((funct3 == funct3_SH) && ((addr .&. 0x1) == 0))
|| ((funct3 == funct3_SW) && ((addr .&. 0x3) == 0))
|| ((funct3 == funct3_SD) && ((addr .&. 0x7) == 0)))
-- ================================================================
-- Definitions within opcode_AMO
is_AMO_aligned :: InstrField -> Integer -> Bool
is_AMO_aligned funct3 addr = (( (funct3 == funct3_AMO_W) && ((addr .&. 0x3) == 0))
|| ((funct3 == funct3_AMO_D) && ((addr .&. 0x7) == 0)))
-- ================================================================
ALU for AMO ops
-- Computes new_mem_value from op, store_value and old_mem_value
alu_amo_op :: InstrField -> -- funct3: AMO_W or AMO_D
msbs5 : SC / SWAP / ADD / AND / OR / XOR / MAX / MIN / MAXU / MINU
Integer -> -- store-value
Integer -> -- old mem-value
Integer -- new-mem-value
alu_amo_op funct3 msbs5 store_val old_mem_val =
let
xlen = if (funct3 == funct3_AMO_W) then 32 else 64
-- New memory value (to be stored back)
new_mem_val = (if (msbs5 == msbs5_AMO_SC) then store_val
else if (msbs5 == msbs5_AMO_SWAP) then store_val
else if (msbs5 == msbs5_AMO_ADD) then alu_add xlen old_mem_val store_val
else if (msbs5 == msbs5_AMO_AND) then old_mem_val .&. store_val
else if (msbs5 == msbs5_AMO_OR) then old_mem_val .|. store_val
else if (msbs5 == msbs5_AMO_XOR) then xor old_mem_val store_val
else if (msbs5 == msbs5_AMO_MAX) then (if alu_ge xlen old_mem_val store_val then
old_mem_val
else
store_val)
else if (msbs5 == msbs5_AMO_MIN) then (if alu_lt xlen old_mem_val store_val then
old_mem_val
else
store_val)
else if (msbs5 == msbs5_AMO_MAXU) then (if alu_geu xlen old_mem_val store_val then
old_mem_val
else
store_val)
else if (msbs5 == msbs5_AMO_MINU) then (if alu_ltu xlen old_mem_val store_val then
old_mem_val
else
store_val)
else error ("alu_amo_op: unknown msbs5: " ++ show msbs5))
in
new_mem_val
-- ================================================================
| null | https://raw.githubusercontent.com/rsnikhil/Forvis_RISCV-ISA-Spec/0c5590a12f4b39644d0497fa6285ad5e33003dfc/src/Mem_Ops.hs | haskell | See LICENSE for license details
================================================================
This module defines instruction field values that specify the type
and size of memory operations.
repeated here because this information is also needed by memory and
I/O servers, and by top-level execution and debug wrappers.
are repeated there for local, self-contained readability.
================================================================
Project imports
================================================================
opcode_STORE, funct3_SB/SH/SW/SD
================================================================
Definitions within opcode_LOAD
================================================================
Definitions within opcode_STORE
================================================================
Definitions within opcode_AMO
================================================================
Computes new_mem_value from op, store_value and old_mem_value
funct3: AMO_W or AMO_D
store-value
old mem-value
new-mem-value
New memory value (to be stored back)
================================================================ | Copyright ( c ) 2018 - 2019
module Mem_Ops where
Note : these are duplicates of defs in Forvis_Spec.hs where they are
used in the specs of LOAD , STORE and AMO instructions . They are
Forvis_Spec.hs could have just imported this module , but these defs
Standard Haskell imports
import Data.Word
import Data.Bits
import ALU
import Arch_Defs
NOTE : the following are defined in module :
opcode_LOAD , funct3_LB / LH / LW / LD / LBU / LHU / LWU
opcode_AMO , funct3_AMO_W / D , msbs5_AMO_LR / SC / ADD / SWAP / XOR / AND / OR / MIN / MAX / MINU / MAXU
is_LOAD_aligned :: InstrField -> Integer -> Bool
is_LOAD_aligned funct3 addr = (( (funct3 == funct3_LB) || (funct3 == funct3_LBU))
|| (((funct3 == funct3_LH) || (funct3 == funct3_LHU)) && ((addr .&. 0x1) == 0))
|| (((funct3 == funct3_LW) || (funct3 == funct3_LWU)) && ((addr .&. 0x3) == 0))
|| ( (funct3 == funct3_LD) && ((addr .&. 0x7) == 0)))
is_STORE_aligned :: InstrField -> Integer -> Bool
is_STORE_aligned funct3 addr = (( funct3 == funct3_SB)
|| ((funct3 == funct3_SH) && ((addr .&. 0x1) == 0))
|| ((funct3 == funct3_SW) && ((addr .&. 0x3) == 0))
|| ((funct3 == funct3_SD) && ((addr .&. 0x7) == 0)))
is_AMO_aligned :: InstrField -> Integer -> Bool
is_AMO_aligned funct3 addr = (( (funct3 == funct3_AMO_W) && ((addr .&. 0x3) == 0))
|| ((funct3 == funct3_AMO_D) && ((addr .&. 0x7) == 0)))
ALU for AMO ops
msbs5 : SC / SWAP / ADD / AND / OR / XOR / MAX / MIN / MAXU / MINU
alu_amo_op funct3 msbs5 store_val old_mem_val =
let
xlen = if (funct3 == funct3_AMO_W) then 32 else 64
new_mem_val = (if (msbs5 == msbs5_AMO_SC) then store_val
else if (msbs5 == msbs5_AMO_SWAP) then store_val
else if (msbs5 == msbs5_AMO_ADD) then alu_add xlen old_mem_val store_val
else if (msbs5 == msbs5_AMO_AND) then old_mem_val .&. store_val
else if (msbs5 == msbs5_AMO_OR) then old_mem_val .|. store_val
else if (msbs5 == msbs5_AMO_XOR) then xor old_mem_val store_val
else if (msbs5 == msbs5_AMO_MAX) then (if alu_ge xlen old_mem_val store_val then
old_mem_val
else
store_val)
else if (msbs5 == msbs5_AMO_MIN) then (if alu_lt xlen old_mem_val store_val then
old_mem_val
else
store_val)
else if (msbs5 == msbs5_AMO_MAXU) then (if alu_geu xlen old_mem_val store_val then
old_mem_val
else
store_val)
else if (msbs5 == msbs5_AMO_MINU) then (if alu_ltu xlen old_mem_val store_val then
old_mem_val
else
store_val)
else error ("alu_amo_op: unknown msbs5: " ++ show msbs5))
in
new_mem_val
|
faedb4f3341a3e4d9c307b455f870185c734d3d2a83fc94c4abe8dc73a3e1f2d | inaka/sumo_db | sumo_changeset_SUITE.erl | -module(sumo_changeset_SUITE).
-compile({parse_transform, fancyflow_trans}).
-include_lib("common_test/include/ct.hrl").
-import(sumo_test_utils, [assert_error/2]).
%% Common Test
-export([
all/0,
init_per_suite/1,
end_per_suite/1
]).
%% Test Cases
-export([
t_add_error/1,
t_cast/1,
t_change/1,
t_put_change/1,
t_get_change/1,
t_delete_change/1,
t_apply_changes/1,
t_validate_change/1,
t_validate_required/1,
t_validate_inclusion/1,
t_validate_number/1,
t_validate_length/1,
t_validate_format/1,
t_nested_changeset_validations/1
]).
-define(EXCLUDED_FUNS, [
module_info,
all,
init_per_suite,
end_per_suite
]).
-define(ALLOWED, [
id,
name,
last_name,
age,
address,
height,
description,
status,
birthdate
]).
-define(REQUIRED, [id, name, last_name, age]).
-define(PERSON, #{
id => 1,
last_name => <<"other">>,
age => 33,
height => 1.85,
birthdate => <<"1980-09-22">>,
created_at => {{2012, 2, 16}, {1, 6, 48}},
is_blocked => false,
status => "active"
}).
-type config() :: [{atom(), term()}].
%%%=============================================================================
%%% CT
%%%=============================================================================
-spec all() -> [atom()].
all() ->
Exports = ?MODULE:module_info(exports),
[F || {F, _} <- Exports, not lists:member(F, ?EXCLUDED_FUNS)].
-spec init_per_suite(config()) -> config().
init_per_suite(Config) ->
{ok, _} = application:ensure_all_started(sumo_db),
Config.
-spec end_per_suite(config()) -> config().
end_per_suite(Config) ->
_ = application:stop(sumo_db),
Config.
%%%=============================================================================
%%% Test Cases
%%%=============================================================================
-spec t_add_error(config()) -> ok.
t_add_error(_Config) ->
%% run changeset pipeline adding an error
CS = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), ?PERSON, ?ALLOWED),
sumo_changeset:add_error(_, status, <<"Invalid">>)),
%% validate errors
false = sumo_changeset:is_valid(CS),
1 = length(sumo_changeset:errors(CS)),
_ = validate_cs_errors(CS, [status]),
ok.
-spec t_cast(config()) -> ok.
t_cast(_Config) ->
%% create a person doc
Person = default_person_doc(),
PersonModel = sumo_internal:doc_fields(sumo_internal:from_user_doc(people, Person)),
%% create params to be cast adding some intentional errors
Params = ?PERSON#{age => '33', missing => 1},
Allowed = [missing | ?ALLOWED],
%% run changeset pipeline
ExpectedChanges = #{
birthdate => {{1980, 9, 22}, {0, 0, 0}},
height => 1.85,
id => 1,
last_name => <<"other">>,
status => <<"active">>
},
CS = sumo_changeset:cast(people, Person, Params, Allowed),
_ = validate_cs(CS, #{
schema => people,
store => sumo_test_mnesia,
data => PersonModel,
params => maps:with(Allowed, Params),
changes => ExpectedChanges,
types => {true, fun(M) -> maps:size(M) > 0 end},
required => {[], fun(L) -> L end}
}),
%% validate errors
false = sumo_changeset:is_valid(CS),
1 = length(sumo_changeset:errors(CS)),
_ = validate_cs_errors(CS, [age]),
CS1 = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:cast(_, #{last_name => <<"other">>}, Allowed)),
%% validate errors
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
ok.
-spec t_change(config()) -> ok.
t_change(_Config) ->
CS1 = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:change(_, #{last_name => <<"other">>})),
1 = maps:size(sumo_changeset:changes(CS1)),
CS2 = [pipe](people,
sumo_changeset:change(_, default_person_doc(), #{last_name => <<"Darwin">>}),
sumo_changeset:change(_, #{last_name => <<"other">>})),
1 = maps:size(sumo_changeset:changes(CS2)),
CS3 = [pipe](people,
sumo_changeset:change(_, default_person_doc(), #{last_name => <<"Darwin">>}),
sumo_changeset:cast(_, #{}, ?ALLOWED)),
1 = maps:size(sumo_changeset:changes(CS3)),
ok.
-spec t_put_change(config()) -> ok.
t_put_change(_Config) ->
#{last_name := <<"other">>} = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"other">>),
sumo_changeset:changes(_)),
0 = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"Doe">>),
sumo_changeset:changes(_),
maps:size(_)),
0 = [pipe](people,
sumo_changeset:cast(_, sumo_test_people:new(<<"other">>, <<"other">>), #{}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"other">>),
sumo_changeset:cast(_, #{last_name => <<"other">>}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"other">>),
sumo_changeset:changes(_),
maps:size(_)),
ok.
-spec t_get_change(config()) -> ok.
t_get_change(_Config) ->
CS1 = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"other">>)),
1 = maps:size(sumo_changeset:changes(CS1)),
<<"other">> = sumo_changeset:get_change(CS1, last_name),
undefined = sumo_changeset:get_change(CS1, name),
<<"default">> = sumo_changeset:get_change(CS1, name, <<"default">>),
ok.
-spec t_delete_change(config()) -> ok.
t_delete_change(_Config) ->
CS1 = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"other">>)),
1 = maps:size(sumo_changeset:changes(CS1)),
CS2 = sumo_changeset:delete_change(CS1, last_name),
0 = maps:size(sumo_changeset:changes(CS2)),
ok.
-spec t_apply_changes(config()) -> ok.
t_apply_changes(_Config) ->
%% create a person doc
Person = default_person_doc(),
PersonModel = sumo_internal:doc_fields(sumo_internal:from_user_doc(people, Person)),
%% run changeset pipeline
CS1 = sumo_changeset:cast(people, Person, ?PERSON#{missing => 1}, ?ALLOWED),
Data = sumo_changeset:data(CS1),
true = Data == PersonModel,
undefined = sumo_test_people:id(Person),
<<"Doe">> = sumo_test_people:last_name(Person),
undefined = sumo_test_people:age(Person),
%% apply changes
NewData = sumo_changeset:apply_changes(CS1),
false = NewData == PersonModel,
NewPerson = sumo_test_people:sumo_wakeup(NewData),
1 = sumo_test_people:id(NewPerson),
<<"other">> = sumo_test_people:last_name(NewPerson),
33 = sumo_test_people:age(NewPerson),
%% run changeset pipeline
CS2 = sumo_changeset:cast(people, Person, #{}, ?ALLOWED),
0 = maps:size(sumo_changeset:changes(CS2)),
PersonModel = sumo_changeset:apply_changes(CS2),
%% run changeset pipeline
CS3 = [pipe](people,
sumo_changeset:cast(_, Person, #{}, ?ALLOWED),
sumo_changeset:put_change(_, missing, 2)
),
1 = maps:size(sumo_changeset:changes(CS3)),
PersonModel = sumo_changeset:apply_changes(CS3),
ok.
-spec t_validate_change(config()) -> ok.
t_validate_change(_Config) ->
%% create a person doc
Person = default_person_doc(),
%% run changeset pipeline
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_change(_, age, fun(age, Age) ->
case Age > 30 of
true -> [{age, <<"cannot be greater than 30">>}];
false -> []
end
end)),
%% validate errors
false = sumo_changeset:is_valid(CS1),
[{age, {<<"cannot be greater than 30">>, []}}] = sumo_changeset:errors(CS1),
ok.
-spec t_validate_required(config()) -> ok.
t_validate_required(_Config) ->
%% create a person doc
Person = default_person_doc(),
%% run changeset pipeline
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED)),
%% validate errors
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
%% run changeset pipeline
CS2 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON#{age => nil}, ?ALLOWED),
sumo_changeset:validate_required(_, [address | ?REQUIRED])),
%% validate errors
false = sumo_changeset:is_valid(CS2),
2 = length(sumo_changeset:errors(CS2)),
_ = validate_cs_errors(CS2, [address, age]),
%% should fails
_ = assert_error({badarg, invalid}, fun() ->
[pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_required(_, [invalid | ?REQUIRED]))
end),
ok.
-spec t_validate_inclusion(config()) -> ok.
t_validate_inclusion(_Config) ->
%% create a person doc
Person = default_person_doc(),
%% valid statuses
Statuses = [<<"active">>, <<"blocked">>],
%% run changeset pipeline
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED),
sumo_changeset:validate_inclusion(_, status, Statuses)),
%% validate errors
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
%% run changeset pipeline
CS2 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON#{status => <<"invalid">>}, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED),
sumo_changeset:validate_inclusion(_, status, Statuses)),
%% validate errors
false = sumo_changeset:is_valid(CS2),
1 = length(sumo_changeset:errors(CS2)),
_ = validate_cs_errors(CS2, [status]),
ok.
-spec t_validate_number(config()) -> ok.
t_validate_number(_Config) ->
%% create a person doc
Person = default_person_doc(),
%% run changeset pipeline
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_number(_, age, [
{less_than, 34},
{less_than_or_equal_to, 33},
{greater_than, 32},
{greater_than_or_equal_to, 33},
{equal_to, 33}
])),
%% validate errors
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
ValidationSet = [
[{less_than, 30}],
[{less_than_or_equal_to, 30}],
[{greater_than, 40}],
[{greater_than_or_equal_to, 40}],
[{equal_to, 30}],
[{less_than, 30}, {equal_to, 30}]
],
_ = lists:foreach(fun(Validations) ->
%% run changeset pipeline
CS = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_number(_, age, Validations)),
%% validate errors
false = sumo_changeset:is_valid(CS),
1 = length(sumo_changeset:errors(CS)),
_ = validate_cs_errors(CS, [age])
end, ValidationSet),
%% should fails
_ = assert_error({badarg, invalid_validation}, fun() ->
[pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_number(_, age, [{invalid_validation, 33}]))
end),
ok.
-spec t_validate_length(config()) -> ok.
t_validate_length(_Config) ->
%% create a person doc
Person = default_person_doc(),
%% run changeset pipeline
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_length(_, last_name, [{is, 5}, {min, 2}, {max, 10}])),
%% validate errors
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
ValidationSet = [
[{is, 3}],
[{min, 10}],
[{max, 3}],
[{is, 5}, {min, 2}, {max, 3}]
],
_ = lists:foreach(fun(Validations) ->
%% run changeset pipeline
CS = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_length(_, last_name, Validations)),
%% validate errors
false = sumo_changeset:is_valid(CS),
[{last_name, {_, [{validation, length}]}}] = sumo_changeset:errors(CS)
end, ValidationSet),
ok.
-spec t_validate_format(config()) -> ok.
t_validate_format(_Config) ->
%% create a person doc
Person = default_person_doc(),
%% run changeset pipeline
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED),
sumo_changeset:validate_format(_, last_name, <<"^oth">>)),
%% validate errors
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
%% run changeset pipeline
CS2 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED),
sumo_changeset:validate_format(_, last_name, <<"^Doe">>)),
%% validate errors
false = sumo_changeset:is_valid(CS2),
[{last_name, {<<"has invalid format">>, [{validation, format}]}}] = sumo_changeset:errors(CS2),
ok.
-spec t_nested_changeset_validations(config()) -> ok.
t_nested_changeset_validations(_Config) ->
Person = sumo_test_people:new(<<"John">>, <<"Doe">>),
Params = #{age => 33, id => 1, <<"last_name">> => <<"other">>},
_ = [pipe](people,
sumo_changeset:cast(_, Person, Params, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED),
sumo_changeset:validate_inclusion(_, status, [<<"active">>, <<"blocked">>]),
sumo_changeset:validate_number(_, age, [{less_than_or_equal_to, 33}]),
sumo_changeset:validate_length(_, last_name, [{min, 3}]),
sumo_changeset:validate_format(_, last_name, <<"^oth">>)),
ok.
%%%=============================================================================
Internal functions
%%%=============================================================================
%% @private
%% Builds the base person fixture shared by the test cases in this suite.
default_person_doc() ->
  FirstName = <<"John">>,
  LastName = <<"Doe">>,
  sumo_test_people:new(FirstName, LastName).
%% @private
%% @doc Folds over ParamsToCheck asserting, for each key K, that the value
%% returned by the changeset accessor sumo_changeset:K/1 (a dynamic call:
%% K is a variable holding the function name) matches the expectation.
%% An expectation is either {Expected, Fun} — Fun is applied to the field
%% value and its result must equal Expected — or a literal value V that
%% must match exactly. Any mismatch crashes with badmatch, failing the test.
validate_cs(CS, ParamsToCheck) ->
  maps:fold(fun
    (K, {Expected, Fun}, _Acc) when is_function(Fun) ->
      %% predicate-style check on the accessor's result
      Expected = Fun(sumo_changeset:K(CS));
    (K, V, _Acc) ->
      %% literal equality check
      V = sumo_changeset:K(CS)
  end, ok, ParamsToCheck).
%% @private
%% Asserts, via pattern match, that every key in ErrorKeys is present in the
%% changeset's error list; a missing key crashes with badmatch.
validate_cs_errors(CS, ErrorKeys) ->
  ErrorList = sumo_changeset:errors(CS),
  [true = sumo_utils:is_key(ErrKey, ErrorList) || ErrKey <- ErrorKeys].
| null | https://raw.githubusercontent.com/inaka/sumo_db/331ea718c13a01748a7739ad4078b0032f4d32e5/test/sumo_changeset_SUITE.erl | erlang | Common Test
Test Cases
=============================================================================
CT
=============================================================================
=============================================================================
Test Cases
=============================================================================
run changeset pipeline adding an error
validate errors
create a person doc
create params to be cast adding some intentional errors
run changeset pipeline
validate errors
validate errors
create a person doc
run changeset pipeline
apply changes
run changeset pipeline
run changeset pipeline
create a person doc
run changeset pipeline
validate errors
create a person doc
run changeset pipeline
validate errors
run changeset pipeline
validate errors
should fails
create a person doc
valid statuses
run changeset pipeline
validate errors
run changeset pipeline
validate errors
create a person doc
run changeset pipeline
validate errors
run changeset pipeline
validate errors
should fails
create a person doc
run changeset pipeline
validate errors
run changeset pipeline
validate errors
create a person doc
run changeset pipeline
validate errors
run changeset pipeline
validate errors
=============================================================================
============================================================================= | -module(sumo_changeset_SUITE).
-compile({parse_transform, fancyflow_trans}).
-include_lib("common_test/include/ct.hrl").
-import(sumo_test_utils, [assert_error/2]).
-export([
all/0,
init_per_suite/1,
end_per_suite/1
]).
-export([
t_add_error/1,
t_cast/1,
t_change/1,
t_put_change/1,
t_get_change/1,
t_delete_change/1,
t_apply_changes/1,
t_validate_change/1,
t_validate_required/1,
t_validate_inclusion/1,
t_validate_number/1,
t_validate_length/1,
t_validate_format/1,
t_nested_changeset_validations/1
]).
-define(EXCLUDED_FUNS, [
module_info,
all,
init_per_suite,
end_per_suite
]).
-define(ALLOWED, [
id,
name,
last_name,
age,
address,
height,
description,
status,
birthdate
]).
-define(REQUIRED, [id, name, last_name, age]).
-define(PERSON, #{
id => 1,
last_name => <<"other">>,
age => 33,
height => 1.85,
birthdate => <<"1980-09-22">>,
created_at => {{2012, 2, 16}, {1, 6, 48}},
is_blocked => false,
status => "active"
}).
-type config() :: [{atom(), term()}].
-spec all() -> [atom()].
all() ->
Exports = ?MODULE:module_info(exports),
[F || {F, _} <- Exports, not lists:member(F, ?EXCLUDED_FUNS)].
-spec init_per_suite(config()) -> config().
init_per_suite(Config) ->
{ok, _} = application:ensure_all_started(sumo_db),
Config.
-spec end_per_suite(config()) -> config().
end_per_suite(Config) ->
_ = application:stop(sumo_db),
Config.
-spec t_add_error(config()) -> ok.
t_add_error(_Config) ->
CS = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), ?PERSON, ?ALLOWED),
sumo_changeset:add_error(_, status, <<"Invalid">>)),
false = sumo_changeset:is_valid(CS),
1 = length(sumo_changeset:errors(CS)),
_ = validate_cs_errors(CS, [status]),
ok.
-spec t_cast(config()) -> ok.
t_cast(_Config) ->
Person = default_person_doc(),
PersonModel = sumo_internal:doc_fields(sumo_internal:from_user_doc(people, Person)),
Params = ?PERSON#{age => '33', missing => 1},
Allowed = [missing | ?ALLOWED],
ExpectedChanges = #{
birthdate => {{1980, 9, 22}, {0, 0, 0}},
height => 1.85,
id => 1,
last_name => <<"other">>,
status => <<"active">>
},
CS = sumo_changeset:cast(people, Person, Params, Allowed),
_ = validate_cs(CS, #{
schema => people,
store => sumo_test_mnesia,
data => PersonModel,
params => maps:with(Allowed, Params),
changes => ExpectedChanges,
types => {true, fun(M) -> maps:size(M) > 0 end},
required => {[], fun(L) -> L end}
}),
false = sumo_changeset:is_valid(CS),
1 = length(sumo_changeset:errors(CS)),
_ = validate_cs_errors(CS, [age]),
CS1 = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:cast(_, #{last_name => <<"other">>}, Allowed)),
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
ok.
-spec t_change(config()) -> ok.
t_change(_Config) ->
CS1 = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:change(_, #{last_name => <<"other">>})),
1 = maps:size(sumo_changeset:changes(CS1)),
CS2 = [pipe](people,
sumo_changeset:change(_, default_person_doc(), #{last_name => <<"Darwin">>}),
sumo_changeset:change(_, #{last_name => <<"other">>})),
1 = maps:size(sumo_changeset:changes(CS2)),
CS3 = [pipe](people,
sumo_changeset:change(_, default_person_doc(), #{last_name => <<"Darwin">>}),
sumo_changeset:cast(_, #{}, ?ALLOWED)),
1 = maps:size(sumo_changeset:changes(CS3)),
ok.
-spec t_put_change(config()) -> ok.
t_put_change(_Config) ->
#{last_name := <<"other">>} = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"other">>),
sumo_changeset:changes(_)),
0 = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"Doe">>),
sumo_changeset:changes(_),
maps:size(_)),
0 = [pipe](people,
sumo_changeset:cast(_, sumo_test_people:new(<<"other">>, <<"other">>), #{}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"other">>),
sumo_changeset:cast(_, #{last_name => <<"other">>}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"other">>),
sumo_changeset:changes(_),
maps:size(_)),
ok.
-spec t_get_change(config()) -> ok.
t_get_change(_Config) ->
CS1 = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"other">>)),
1 = maps:size(sumo_changeset:changes(CS1)),
<<"other">> = sumo_changeset:get_change(CS1, last_name),
undefined = sumo_changeset:get_change(CS1, name),
<<"default">> = sumo_changeset:get_change(CS1, name, <<"default">>),
ok.
-spec t_delete_change(config()) -> ok.
t_delete_change(_Config) ->
CS1 = [pipe](people,
sumo_changeset:cast(_, default_person_doc(), #{}, ?ALLOWED),
sumo_changeset:put_change(_, last_name, <<"other">>)),
1 = maps:size(sumo_changeset:changes(CS1)),
CS2 = sumo_changeset:delete_change(CS1, last_name),
0 = maps:size(sumo_changeset:changes(CS2)),
ok.
-spec t_apply_changes(config()) -> ok.
t_apply_changes(_Config) ->
Person = default_person_doc(),
PersonModel = sumo_internal:doc_fields(sumo_internal:from_user_doc(people, Person)),
CS1 = sumo_changeset:cast(people, Person, ?PERSON#{missing => 1}, ?ALLOWED),
Data = sumo_changeset:data(CS1),
true = Data == PersonModel,
undefined = sumo_test_people:id(Person),
<<"Doe">> = sumo_test_people:last_name(Person),
undefined = sumo_test_people:age(Person),
NewData = sumo_changeset:apply_changes(CS1),
false = NewData == PersonModel,
NewPerson = sumo_test_people:sumo_wakeup(NewData),
1 = sumo_test_people:id(NewPerson),
<<"other">> = sumo_test_people:last_name(NewPerson),
33 = sumo_test_people:age(NewPerson),
CS2 = sumo_changeset:cast(people, Person, #{}, ?ALLOWED),
0 = maps:size(sumo_changeset:changes(CS2)),
PersonModel = sumo_changeset:apply_changes(CS2),
CS3 = [pipe](people,
sumo_changeset:cast(_, Person, #{}, ?ALLOWED),
sumo_changeset:put_change(_, missing, 2)
),
1 = maps:size(sumo_changeset:changes(CS3)),
PersonModel = sumo_changeset:apply_changes(CS3),
ok.
-spec t_validate_change(config()) -> ok.
t_validate_change(_Config) ->
Person = default_person_doc(),
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_change(_, age, fun(age, Age) ->
case Age > 30 of
true -> [{age, <<"cannot be greater than 30">>}];
false -> []
end
end)),
false = sumo_changeset:is_valid(CS1),
[{age, {<<"cannot be greater than 30">>, []}}] = sumo_changeset:errors(CS1),
ok.
-spec t_validate_required(config()) -> ok.
t_validate_required(_Config) ->
Person = default_person_doc(),
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED)),
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
CS2 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON#{age => nil}, ?ALLOWED),
sumo_changeset:validate_required(_, [address | ?REQUIRED])),
false = sumo_changeset:is_valid(CS2),
2 = length(sumo_changeset:errors(CS2)),
_ = validate_cs_errors(CS2, [address, age]),
_ = assert_error({badarg, invalid}, fun() ->
[pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_required(_, [invalid | ?REQUIRED]))
end),
ok.
-spec t_validate_inclusion(config()) -> ok.
t_validate_inclusion(_Config) ->
Person = default_person_doc(),
Statuses = [<<"active">>, <<"blocked">>],
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED),
sumo_changeset:validate_inclusion(_, status, Statuses)),
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
CS2 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON#{status => <<"invalid">>}, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED),
sumo_changeset:validate_inclusion(_, status, Statuses)),
false = sumo_changeset:is_valid(CS2),
1 = length(sumo_changeset:errors(CS2)),
_ = validate_cs_errors(CS2, [status]),
ok.
-spec t_validate_number(config()) -> ok.
t_validate_number(_Config) ->
Person = default_person_doc(),
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_number(_, age, [
{less_than, 34},
{less_than_or_equal_to, 33},
{greater_than, 32},
{greater_than_or_equal_to, 33},
{equal_to, 33}
])),
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
ValidationSet = [
[{less_than, 30}],
[{less_than_or_equal_to, 30}],
[{greater_than, 40}],
[{greater_than_or_equal_to, 40}],
[{equal_to, 30}],
[{less_than, 30}, {equal_to, 30}]
],
_ = lists:foreach(fun(Validations) ->
CS = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_number(_, age, Validations)),
false = sumo_changeset:is_valid(CS),
1 = length(sumo_changeset:errors(CS)),
_ = validate_cs_errors(CS, [age])
end, ValidationSet),
_ = assert_error({badarg, invalid_validation}, fun() ->
[pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_number(_, age, [{invalid_validation, 33}]))
end),
ok.
-spec t_validate_length(config()) -> ok.
t_validate_length(_Config) ->
Person = default_person_doc(),
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_length(_, last_name, [{is, 5}, {min, 2}, {max, 10}])),
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
ValidationSet = [
[{is, 3}],
[{min, 10}],
[{max, 3}],
[{is, 5}, {min, 2}, {max, 3}]
],
_ = lists:foreach(fun(Validations) ->
CS = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_length(_, last_name, Validations)),
false = sumo_changeset:is_valid(CS),
[{last_name, {_, [{validation, length}]}}] = sumo_changeset:errors(CS)
end, ValidationSet),
ok.
-spec t_validate_format(config()) -> ok.
t_validate_format(_Config) ->
Person = default_person_doc(),
CS1 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED),
sumo_changeset:validate_format(_, last_name, <<"^oth">>)),
true = sumo_changeset:is_valid(CS1),
0 = length(sumo_changeset:errors(CS1)),
CS2 = [pipe](people,
sumo_changeset:cast(_, Person, ?PERSON, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED),
sumo_changeset:validate_format(_, last_name, <<"^Doe">>)),
false = sumo_changeset:is_valid(CS2),
[{last_name, {<<"has invalid format">>, [{validation, format}]}}] = sumo_changeset:errors(CS2),
ok.
-spec t_nested_changeset_validations(config()) -> ok.
t_nested_changeset_validations(_Config) ->
Person = sumo_test_people:new(<<"John">>, <<"Doe">>),
Params = #{age => 33, id => 1, <<"last_name">> => <<"other">>},
_ = [pipe](people,
sumo_changeset:cast(_, Person, Params, ?ALLOWED),
sumo_changeset:validate_required(_, ?REQUIRED),
sumo_changeset:validate_inclusion(_, status, [<<"active">>, <<"blocked">>]),
sumo_changeset:validate_number(_, age, [{less_than_or_equal_to, 33}]),
sumo_changeset:validate_length(_, last_name, [{min, 3}]),
sumo_changeset:validate_format(_, last_name, <<"^oth">>)),
ok.
Internal functions
@private
default_person_doc() ->
sumo_test_people:new(<<"John">>, <<"Doe">>).
@private
validate_cs(CS, ParamsToCheck) ->
maps:fold(fun
(K, {Expected, Fun}, _Acc) when is_function(Fun) ->
Expected = Fun(sumo_changeset:K(CS));
(K, V, _Acc) ->
V = sumo_changeset:K(CS)
end, ok, ParamsToCheck).
@private
validate_cs_errors(CS, ErrorKeys) ->
Errors = sumo_changeset:errors(CS),
[true = sumo_utils:is_key(K, Errors) || K <- ErrorKeys].
|
67f624ade0032f62f92657c7fa29f7288fe1fb9fd594d2be0a51e68ca880c9fe | buntine/Haskell--Craft-of-FP | Chapter3.hs | ------------------------------------------------------------------------------
--
Haskell : The Craft of Functional Programming
( c ) Addison - Wesley , 1999 .
--
Chapter 3
--
------------------------------------------------------------------------------
module Chapter3 where
The import statement which follows hides certain of the Prelude functions
-- so that they can be given the definitions they have in their book.
import Prelude hiding (max,toUpper,isDigit)
The Booleans .
-- ^^^^^^^^^^^^^
-- Exclusive or: this gives the result True if one of its arguments is True and
-- the other False, and gives the result False in other cases.
-- | Exclusive or: True exactly when one argument is True and the other
-- False, expressed as the disjunction of the two asymmetric cases.
exOr :: Bool -> Bool -> Bool
exOr x y = (x && not y) || (not x && y)
-- Using literals instead of variables in a definition; a simple example of
-- pattern matching to give another definition of `not', ...
-- | Boolean negation, defined by pattern matching on the two literals
-- (here written with an explicit case expression).
myNot :: Bool -> Bool
myNot b = case b of
  True  -> False
  False -> True
-- ... and of `exclusive or'.
-- | Exclusive or, defined by pattern matching literals in the first
-- argument. Adds the top-level type signature the original omitted
-- (every other definition in this file carries one).
exOr1 :: Bool -> Bool -> Bool
exOr1 True x = not x
exOr1 False x = x
-- Integers and guards.
-- ^^^^^^^^^^^^^^^^^^^^
-- A function to test whether three Ints are equal.
-- | True when all three Ints are equal: both of the others must match
-- the first.
threeEqual :: Int -> Int -> Int -> Bool
threeEqual m n p = all (== m) [n, p]
The maximum of two integers ; this is already defined in the Prelude ,
-- so its definition is hidden by the import statement at the top of this file.
-- | The larger of two Ints; the second argument wins only when it is
-- strictly greater, so ties return the first.
max :: Int -> Int -> Int
max a b
  | b > a     = b
  | otherwise = a
The maximum of three integers .
-- | The largest of three Ints, computed as two pairwise maxima.
maxThree :: Int -> Int -> Int -> Int
maxThree x y z = pairMax x (pairMax y z)
  where
    -- larger of a pair, preferring the left argument on ties
    pairMax a b
      | a >= b    = a
      | otherwise = b
-- An alternative definition of max which uses if ... then ... else ...
-- | The larger of two Ints, written with a conditional expression
-- rather than guards.
max' :: Int -> Int -> Int
max' p q = if q > p then q else p
-- Characters.
^^^^^^^^^^^
-- Converting lower-case letters to upper-case; does something odd if you apply
-- it to anything else: how would you modify it to return anything else
-- unchanged?
-- | Convert a lower-case letter to the corresponding upper-case letter.
-- Any character outside 'a'..'z' is returned unchanged — the original
-- shifted every character, producing garbage for non-letters (as its own
-- comment admitted).
toUpper :: Char -> Char
toUpper ch
  | 'a' <= ch && ch <= 'z' = chr (ord ch + offset)
  | otherwise              = ch
  where
    -- distance from the lower-case block to the upper-case block
    offset = ord 'A' - ord 'a'
A check whether a character is a digit ( already defined in the Prelude )
-- | True when the character is one of the decimal digits '0'..'9'.
isDigit :: Char -> Bool
isDigit c = c `elem` ['0' .. '9']
-- Some syntax.
^^^^^^^^^^^^
Layout : two definitions on one line , separated by a ` ; ' .
answer = 42 ; facSix = 720
Adding two integers : you can use longer names for variables than x and y !
-- | Sum of two Ints, with descriptive (longer-than-x-and-y) parameter
-- names.
addTwo :: Int -> Int -> Int
addTwo augend addend = augend + addend
-- Defining an operator for yourself: another version of min!
-- | A user-defined operator computing the smaller of two Ints; the left
-- argument is returned on ties.
(&&&) :: Int -> Int -> Int
a &&& b
  | a <= b    = a
  | otherwise = b
| null | https://raw.githubusercontent.com/buntine/Haskell--Craft-of-FP/a1a8cd70eb9d1609fd384e608b7fe26b2a878803/Chapter3.hs | haskell | ----------------------------------------------------------------------------
----------------------------------------------------------------------------
so that they can be given the definitions they have in their book.
^^^^^^^^^^^^^
Exclusive or: this gives the result True if one of its arguments is True and
the other False, and gives the result False in other cases.
Using literals instead of variables in a definition; a simple example of
pattern matching to give another definition of `not', ...
... and of `exclusive or'.
Integers and guards.
^^^^^^^^^^^^^^^^^^^^
so its definition is hidden by the import statement at the top of this file.
Characters.
Converting lower-case letters to upper-case; does something odd if you apply
it to anythig else: how would you modify it to return anything else
unchanged?
Some syntax. | Haskell : The Craft of Functional Programming
( c ) Addison - Wesley , 1999 .
Chapter 3
module Chapter3 where
The import statement which follows hides certain of the Prelude functions
import Prelude hiding (max,toUpper,isDigit)
The Booleans .
exOr :: Bool -> Bool -> Bool
exOr x y = (x || y) && not (x && y)
myNot :: Bool -> Bool
myNot True = False
myNot False = True
exOr1 True x = not x
exOr1 False x = x
A to test whether three Ints are equal .
threeEqual :: Int -> Int -> Int -> Bool
threeEqual m n p = (m==n) && (n==p)
The maximum of two integers ; this is already defined in the Prelude ,
max :: Int -> Int -> Int
max x y
| x >= y = x
| otherwise = y
The maximum of three integers .
maxThree :: Int -> Int -> Int -> Int
maxThree x y z
| x >= y && x >= z = x
| y >= z = y
| otherwise = z
An alternative definition of which uses if ... then ... else ...
max' :: Int -> Int -> Int
max' x y
= if x >= y then x else y
^^^^^^^^^^^
toUpper :: Char -> Char
toUpper ch = chr (ord ch + offset)
offset = ord 'A' - ord 'a'
A check whether a character is a digit ( already defined in the Prelude )
isDigit :: Char -> Bool
isDigit ch = ('0' <= ch) && (ch <= '9')
^^^^^^^^^^^^
Layout : two definitions on one line , separated by a ` ; ' .
answer = 42 ; facSix = 720
Adding two integers : you can use longer names for variables than x and y !
addTwo :: Int -> Int -> Int
addTwo first second = first+second
Defining an operators for yourself : another version of !
(&&&) :: Int -> Int -> Int
x &&& y
| x > y = y
| otherwise = x
|
e2abe6966ec03561dd0a80d0e7c91bd902d599377874478ccd33d4019148203b | jrh13/hol-light | struct_equal.ml | (******************************************************************************)
(* FILE : struct_equal.ml *)
DESCRIPTION : Proof procedure for simplifying an equation between two
(* data-structures of the same type. *)
(* *)
(* READS FILES : <none> *)
(* WRITES FILES : <none> *)
(* *)
AUTHOR : R.J.Boulton & T.F.Melham
DATE : 4th June 1992
(* *)
LAST MODIFIED : R.J.Boulton
DATE : 14th October 1992
(* *)
LAST MODIFIED : ( University of Edinburgh )
DATE : 2008
(******************************************************************************)
(* subst_occs : int list list -> (term * term) list -> term -> term          *)
(* Substitution at selected occurrences only.  Each pair (t,x) in slist is   *)
(* matched with the corresponding occurrence-number list in ilist            *)
(* (occurrences of x are counted 1,2,... in a left-to-right traversal); x is *)
(* replaced by t at exactly those occurrences.                               *)
let subst_occs =
  (* Worker: slist pairs each remaining occurrence list with its (t,x) pair.
     Returns the rewritten term plus the still-pending substitution state
     for occurrences further to the right. *)
  let rec subst_occs slist tm =
    (* substitutions whose target x matches tm up to alpha-conversion,
       versus those that cannot fire at this node *)
    let applic,noway = partition (fun (i,(t,x)) -> aconv tm x) slist in
    (* split each applicable occurrence list into the 1s (fire here)
       and the rest *)
    let sposs = map (fun (l,z) -> let l1,l2 = partition ((=) 1) l in
                                  (l1,z),(l2,z)) applic in
    let racts,rrest = unzip sposs in
    (* acts: substitutions that fire at this very occurrence *)
    let acts = filter (fun t -> not (fst t = [])) racts in
    (* one occurrence of each applicable x has now been passed, so
       decrement the remaining occurrence counts *)
    let trest = map (fun (n,t) -> (map (C (-) 1) n,t)) rrest in
    let urest = filter (fun t -> not (fst t = [])) trest in
    let tlist = urest @ noway in
    if acts = [] then
      (* nothing fires here: recurse left-to-right, threading the pending
         substitution state through the subterms *)
      if is_comb tm then
        let l,r = dest_comb tm in
        let l',s' = subst_occs tlist l in
        let r',s'' = subst_occs s' r in
        mk_comb(l',r'),s''
      else if is_abs tm then
        (* rename the bound variable to a genvar so the traversal cannot
           capture it, then restore the original name with alpha *)
        let bv,bod = dest_abs tm in
        let gv = genvar(type_of bv) in
        let nbod = vsubst[gv,bv] bod in
        let tm',s' = subst_occs tlist nbod in
        alpha bv (mk_abs(gv,tm')),s'
      else
        tm,tlist
    else
      (* an occurrence count reached 1 here: perform the substitution *)
      let tm' = (fun (n,(t,x)) -> subst[t,x] tm) (hd acts) in
      tm',tlist in
  fun ilist slist tm -> fst(subst_occs (zip ilist slist) tm);;
(* Generalized substitution: rewrite theorem th using the equational
   theorems ths, with substfn deciding which occurrences of each left-hand
   side get replaced.  Fresh genvars mark the chosen positions in a
   template built from th's conclusion, which SUBST then discharges. *)
let GSUBS substfn ths th =
  let lhss = map (lhs o concl) ths in
  let markers = map (genvar o type_of) lhss in
  let template = substfn (List.combine lhss markers) (concl th) in
  SUBST (List.combine ths markers) template th;;
(* Substitute at selected occurrences: each element of nlths pairs a list
   of occurrence numbers with the equational theorem to apply there. *)
let SUBS_OCCS nlths th =
  try let occlists,eqths = unzip nlths in
      GSUBS (subst_occs occlists) eqths th
  with Failure _ -> failwith "SUBS_OCCS";;
(*----------------------------------------------------------------------------*)
(* VAR_NOT_EQ_STRUCT_OF_VAR_CONV : (thm # thm list # thm list) -> conv *)
(* *)
(* Proof method developed through discussion between *)
, and .
(* *)
(* This conversion can be used to prove that a variable is not equal to a *)
(* structure containing that variable as a proper subterm. The structures are *)
(* restricted to applications of constructors from a single recursive type. *)
The leaf nodes must be either variables or 0 - ary constructors of the type .
(* *)
(* The theorems taken as arguments are the induction, distinctness and *)
(* injectivity theorems for the recursive type, as proved by the functions: *)
(* *)
(* prove_induction_thm *)
(* prove_constructors_distinct *)
(* prove_constructors_one_one *)
(* *)
Since the latter two functions may fail , the distinctness and injectivity
(* theorems are passed around as lists of conjuncts, so that a failure *)
(* results in an empty list. *)
(* *)
(* Examples of input terms: *)
(* *)
(* ~(l = CONS h l) *)
(* ~(CONS h1 (CONS h2 l) = l) *)
(* ~(n = SUC(SUC(SUC n))) *)
~(t = TWO ( ONE u ) ( THREE v ( ONE t ) ( TWO u ( ONE t ) ) ) )
(* *)
(* where the last example is for the type defined by: *)
(* *)
test = ZERO | ONE test | TWO test test | THREE test test test
(* *)
The procedure works by first generalising the structure to eliminate any
(* irrelevant substructures. If the variable occurs more than once in the *)
(* structure the more deeply nested occurrences are replaced by new variables *)
(* because multiple occurrences of the variable prevent the induction from *)
(* working. The generalised term for the last example is: *)
(* *)
TWO a ( THREE v ( ONE t ) b )
(* *)
(* The procedure then forms a conjunction of the inequalities for this term *)
(* and all of its `rotations': *)
(* *)
( ! a v b. ~(t = TWO a ( THREE v ( ONE t ) b ) ) ) /\
( ! a v b. ~(t = THREE v ( ONE ( TWO a t ) ) b ) ) /\
( ! a v b. ~(t = ONE ( TWO a ( THREE v t b ) ) ) )
(* *)
(* This can be proved by a straightforward structural induction. The reason *)
(* for including the rotations is that the induction hypothesis required for *)
(* the proof of the original generalised term is the rotation of it. *)
(* *)
(* The procedure could be optimised by detecting duplicated rotations. For *)
(* example it is not necessary to prove: *)
(* *)
(* !n. ~(n = SUC(SUC(SUC n))) /\ *)
(* ~(n = SUC(SUC(SUC n))) /\ *)
(* ~(n = SUC(SUC(SUC n))) *)
(* *)
(* in order to prove "~(n = SUC(SUC(SUC n)))" because the structure is its *)
(* own rotations. It is sufficient to prove: *)
(* *)
(* !n. ~(n = SUC(SUC(SUC n))) *)
(* *)
(* The procedure currently uses backwards proof. It would probably be more *)
(* efficient to use forwards proof. *)
(*----------------------------------------------------------------------------*)
(* Proves a term of the form ~(v = st[v]) (or its mirror image), where v is *)
(* a variable and st[v] is a proper constructor structure containing v, by  *)
(* generalising the structure, forming all of its `rotations', and proving  *)
(* the resulting conjunction by structural induction (see the comment block *)
(* above for the method).  Returns |- tm = T.                               *)
let VAR_NOT_EQ_STRUCT_OF_VAR_CONV =
  try( (* number_list [x1;...;xn] = [(x1,1);...;(xn,n)] *)
       let number_list l =
         let rec number_list' n l =
           if (l = [])
           then []
           else (hd l,n)::(number_list' (n + 1) (tl l))
         in number_list' 1 l
   (* name of a constant term *)
   in let name = fst o dest_const
   (* occurrences constrs v st: every path (list of 1-based argument
      positions) from the root of st to an occurrence of the variable v,
      descending only through applications of the type's constructors *)
   in let occurrences constrs v st =
        let rec occurrences' v st path =
          if (not (type_of st = type_of v)) then []
          else if (st = v) then [rev path]
          else if (is_var st) then []
          else let (f,args) =
                 (check ( ((can (C assoc constrs)) o name o fst) )) (strip_comb st)
                 (* Boulton was using hashI here... but I don't know why *)
               in flat (map (fun (arg,n) -> occurrences' v arg (n::path))
                            (number_list args))
        in occurrences' v st []
   (* shortest list in a non-empty list of lists (fails on []) *)
   in let min_length l =
        let rec min_length' (x,n) l =
          if (l = [])
          then x
          else if (length (hd l) < n)
          then min_length' (hd l,length (hd l)) (tl l)
          else min_length' (x,n) (tl l)
        in if (l = [])
           then failwith "min_length"
           else min_length' (hd l,length (hd l)) (tl l)
   (* generalise (st,occ): replace every non-variable subterm off the path
      occ by a fresh genvar; returns the generalised structure and the
      (variable, replaced-subterm) binding list *)
   in let rec generalise (st,occ) =
        let rec replace_side_structs (n,argn',binding) m args =
          if (args = [])
          then ([],[])
          else let m' = m + 1
               and arg = hd args
               in let (rest,bind) =
                    replace_side_structs (n,argn',binding) m' (tl args)
               in if (m' = n) then ((argn'::rest),(binding @ bind))
                  else if (is_var arg) then ((arg::rest),((arg,arg)::bind))
                  else let var = genvar (type_of arg)
                       in ((var::rest),((var,arg)::bind))
        in if (occ = [])
           then (st,[])
           else let (f,args) = strip_comb st
                and (n::occ') = occ
                in let (argn',binding) = generalise (el (n-1) args,occ')
                in let (args',bind) =
                     replace_side_structs (n,argn',binding) 0 args
                in (list_mk_comb (f,args'),bind)
   (* constr_apps v (st,occ): the chain of one-level constructor
      applications along the path occ, each with v plugged into the
      path position — the building blocks for the rotations *)
   in let rec constr_apps v (st,occ) =
        let rec replace_argn (n,argn') m args =
          if (args = [])
          then []
          else let m' = m + 1
               in if (m' = n)
                  then argn'::(tl args)
                  else (hd args)::(replace_argn (n,argn') m' (tl args))
        in if (occ = [])
           then []
           else let (f,args) = strip_comb st
                and (n::occ') = occ
                in let args' = replace_argn (n,v) 0 args
                in (list_mk_comb (f,args'))::(constr_apps v (el (n-1) args,occ'))
   (* all cyclic rotations of a list *)
   in let rotations l =
        let rec rotations' l n =
          if (n < 1)
          then []
          else l::(rotations' ((tl l) @ [hd l]) (n - 1))
        in rotations' l (length l)
   (* head constructors on both sides of an (possibly negated) equation *)
   in let two_constrs = (hash (fst o strip_comb) (fst o strip_comb)) o
                        dest_eq o dest_neg o snd o strip_forall o concl
   in let flip (x,y) = (y,x)
   in let DEPTH_SYM = GEN_ALL o NOT_EQ_SYM o SPEC_ALL
   (* argument types of a (curried) function type *)
   in let rec arg_types ty =
        try (match (dest_type ty) with
             | ("fun",[argty;rest]) -> argty::(arg_types rest)
             | _ -> [])
        with Failure _ -> []
   in let name_and_args = ((hash I) arg_types) o dest_const
   in
   (* stage 1: precompute, from the type's theorems, the distinctness table
      (closed under symmetry) and the constructor name/arity table *)
   fun (induction,distincts,oneOnes) ->
     let half_distincts = map (fun th -> ((hash name) name) (two_constrs th), th) distincts
     in let distincts = half_distincts @ (map ((hash flip) DEPTH_SYM) half_distincts)
     in let ind_goals =
          (conjuncts o fst o dest_imp o snd o dest_forall o concl) induction
     in let constrs =
          map (name_and_args o fst o strip_comb o rand o snd o strip_forall o
               snd o (splitlist dest_imp) o snd o strip_forall) ind_goals
     in
     (* stage 2: the conversion proper *)
     fun tm ->
       (let (l,r) = dest_eq (dest_neg tm)
        (* identify the lone variable side; remember if it was flipped *)
        in let (flipped,v,st) =
             if (is_var l)
             then if (is_var r) then failwith "" else (false,l,r)
             else if (is_var r)
                  then (true,r,l)
                  else failwith ""
        (* shallowest occurrence of v, generalised structure and rotations *)
        in let occ = min_length (occurrences constrs v st)
        in let (st',bind) = generalise (st,occ)
        in let (vars,subterms) = List.split bind
        in let apps = constr_apps v (st',occ)
        in let rotats =
             map (end_itlist (fun t1 t2 -> subst [(t2,v)] t1)) (rotations apps)
        in let uneqs = map (mk_neg o (curry mk_eq v)) rotats
        (* conjunction of all rotated inequalities, proved by induction *)
        in let conj =
             mk_forall (v,list_mk_conj (map (curry list_mk_forall vars) uneqs))
        in let th1 =
             prove (conj,INDUCT_TAC_ induction THEN
                         ASM_REWRITE_TAC (oneOnes @ (map snd distincts)))
        (* specialise back to the original subterms and orientation *)
        in let th2 = (hd o CONJUNCTS o (SPEC v)) th1
        in let th3 = SPECL subterms th2
        in let th4 = if flipped then (NOT_EQ_SYM th3) else th3
        in EQT_INTRO (CONV_RULE (C ALPHA tm) th4)
       )) with Failure _ -> failwith "VAR_NOT_EQ_STRUCT_OF_VAR_CONV";;
(*----------------------------------------------------------------------------*)
(* CONJS_CONV : conv -> conv *)
(* *)
(* Written by T.F.Melham. *)
Modified by .
(* *)
(* Apply a given conversion to a sequence of conjuncts. *)
(* *)
(* * need to check T case *)
* need to flatten conjuncts on RHS
(*----------------------------------------------------------------------------*)
(* CONJS_CONV conv: apply conv to each conjunct of a conjunction,          *)
(* short-circuiting on F (remaining conjuncts are not converted) and       *)
(* dropping conjuncts that rewrite to T.                                   *)
(*                                                                         *)
(* Fix: the HOL88 original guarded the whole body of cconv with `? conv tm`*)
(* so that a non-conjunction (in particular the last conjunct, the         *)
(* recursion's base case) falls through to a plain application of conv.    *)
(* This port had the handler only around MK_COMB, so the dest_conj failure *)
(* on the final conjunct escaped and the conversion failed on every finite *)
(* conjunction.  The try now covers the whole body, restoring the HOL88    *)
(* semantics.                                                              *)
let CONJS_CONV =
  try(
    (* is st th: does th conclude `... = c` for the constant named st? *)
    let is st th = try(fst(dest_const(rand(concl th))) = st) with Failure _ -> false
    in let v1 = genvar `:bool` and v2 = genvar `:bool`
    (* fthm1: |- (v1 = F) ==> ((v1 /\ v2) = F) *)
    in let fthm1 =
         let th1 = ASSUME (mk_eq(v1,`F`))
         in let cnj = mk_conj(v1,v2)
         in let th1 = DISCH cnj (EQ_MP th1 (CONJUNCT1 (ASSUME cnj)))
         in let th2 = DISCH `F` (CONTR cnj (ASSUME `F`))
         in DISCH (mk_eq(v1,`F`)) (IMP_ANTISYM_RULE th1 th2)
    (* fthm2: same with the conjuncts swapped *)
    in let fthm2 = CONV_RULE(ONCE_DEPTH_CONV(REWR_CONV CONJ_SYM)) fthm1
    in let fandr th tm = MP (INST [(lhs(concl th),v1);(tm,v2)] fthm1) th
    in let fandl th tm = MP (INST [(lhs(concl th),v1);(tm,v2)] fthm2) th
    (* tthm1: |- (v1 = T) ==> ((v1 /\ v2) = v2), built via SUBS_OCCS above *)
    in let tthm1 =
         let th1 = ASSUME (mk_eq(v1,`T`))
         in let th2 = SUBS_OCCS [[2],th1] (REFL (mk_conj(v1,v2)))
         in DISCH (mk_eq(v1,`T`)) (ONCE_REWRITE_RULE [] th2)
    in let tthm2 = CONV_RULE(ONCE_DEPTH_CONV(REWR_CONV CONJ_SYM)) tthm1
    in let tandr th tm = MP (INST [(lhs(concl th),v1);(tm,v2)] tthm1) th
    in let tandl th tm = MP (INST [(lhs(concl th),v1);(tm,v2)] tthm2) th
    (* Walk the conjunction right-associatively; any failure inside
       (notably dest_conj on a non-conjunction) means tm is treated as a
       single conjunct and conv is applied to it directly. *)
    in let rec cconv conv tm =
         try (let (c,cs) = dest_conj tm
              in let cth = conv c
              in if (is "F" cth) then fandr cth cs else
                 let csth = cconv conv cs
                 in if (is "F" csth) then fandl csth c
                    else if (is "T" cth) then TRANS (tandr cth cs) csth
                    else if (is "T" csth) then TRANS (tandl csth c) cth
                    else MK_COMB((AP_TERM `(/\)` cth),csth))
         with Failure _ -> conv tm
    in fun conv tm -> cconv conv tm) with Failure _ -> failwith "CONJS_CONV";;
(*----------------------------------------------------------------------------*)
(* ONE_STEP_RECTY_EQ_CONV : (thm # thm list # thm list) -> conv -> conv *)
(* *)
(* Single step conversion for equality between structures of a single *)
(* recursive type. *)
(* *)
(* Based on code written by T.F.Melham. *)
(* *)
(* The theorems taken as arguments are the induction, distinctness and *)
(* injectivity theorems for the recursive type, as proved by the functions: *)
(* *)
(* prove_induction_thm *)
(* prove_constructors_distinct *)
(* prove_constructors_one_one *)
(* *)
Since the latter two functions may fail , the distinctness and injectivity
(* theorems are passed around as lists of conjuncts. *)
(* *)
If one side of the equation is a variable and that variable appears in the
(* other side (nested in a structure) the equation is proved false. *)
(* *)
If the top - level constructors on the two sides of the equation are
(* distinct the equation is proved false. *)
(* *)
If the top - level constructors on the two sides of the equation are the
same a conjunction of equations is generated , one equation for each
(* argument of the constructor. The conversion given as argument is then *)
(* applied to each conjunct. If any of the applications of this conversion *)
(* fail, so will the entire call. *)
(* *)
(* In other conditions the function fails. *)
(*----------------------------------------------------------------------------*)
(* Taken from HOL90 *)
(* Single simplification step for an equation between two structures of
   one recursive type: first try the occurs-check disproof (variable vs.
   structure containing it), then head-constructor distinctness, then
   injectivity followed by the supplied conversion on each resulting
   argument equation; otherwise fail.
   NOTE: HOL88's empty_rewrites argument was dropped here because
   GEN_REWRITE_CONV differs in HOL Light. *)
let ONE_STEP_RECTY_EQ_CONV (induction,distincts,oneOnes) =
  let occurs_conv =
    EQF_INTRO o EQT_ELIM o
    (VAR_NOT_EQ_STRUCT_OF_VAR_CONV (induction,distincts,oneOnes)) o mk_neg in
  let inj_conv = GEN_REWRITE_CONV I oneOnes in
  let spec_dists = map SPEC_ALL distincts in
  let sym_forms = map (GEN_ALL o EQF_INTRO o NOT_EQ_SYM) spec_dists in
  let plain_forms = map (GEN_ALL o EQF_INTRO) spec_dists in
  let dist_conv = GEN_REWRITE_CONV I (sym_forms @ plain_forms) in
  fun conv ->
    occurs_conv ORELSEC
    dist_conv ORELSEC
    (inj_conv THENC (CONJS_CONV conv)) ORELSEC
    (fun tm -> failwith "ONE_STEP_RECTY_EQ_CONV")
(*----------------------------------------------------------------------------*)
(* RECTY_EQ_CONV : (thm # thm list # thm list) -> conv *)
(* *)
Function to simplify as far as possible an equation between two structures
(* of some type, the type being specified by the triple of theorems. The *)
(* structures may involve variables. The result may be a conjunction of *)
(* equations simpler than the original. *)
(*----------------------------------------------------------------------------*)
(* Simplify an equation between two structures of the given recursive type
   as far as possible; the result may be `T`, `F`, or a conjunction of
   simpler equations.  The try-block guards only the construction phase:
   the returned function itself is applied outside it. *)
let RECTY_EQ_CONV (induction,distincts,oneOnes) =
  try
    let step = ONE_STEP_RECTY_EQ_CONV (induction,distincts,oneOnes) in
    let REFL_CONV tm =
      let eql,eqr = dest_eq tm in
      if eql = eqr then EQT_INTRO (REFL eql) else failwith "REFL_CONV" in
    let rec simp tm = (step simp ORELSEC REFL_CONV ORELSEC ALL_CONV) tm in
    fun tm -> simp tm
  with Failure _ -> failwith "RECTY_EQ_CONV";;
| null | https://raw.githubusercontent.com/jrh13/hol-light/ea44a4cacd238d7fa5a397f043f3e3321eb66543/Boyer_Moore/struct_equal.ml | ocaml | ****************************************************************************
FILE : struct_equal.ml
data-structures of the same type.
READS FILES : <none>
WRITES FILES : <none>
****************************************************************************
----------------------------------------------------------------------------
VAR_NOT_EQ_STRUCT_OF_VAR_CONV : (thm # thm list # thm list) -> conv
Proof method developed through discussion between
This conversion can be used to prove that a variable is not equal to a
structure containing that variable as a proper subterm. The structures are
restricted to applications of constructors from a single recursive type.
The theorems taken as arguments are the induction, distinctness and
injectivity theorems for the recursive type, as proved by the functions:
prove_induction_thm
prove_constructors_distinct
prove_constructors_one_one
theorems are passed around as lists of conjuncts, so that a failure
results in an empty list.
Examples of input terms:
~(l = CONS h l)
~(CONS h1 (CONS h2 l) = l)
~(n = SUC(SUC(SUC n)))
where the last example is for the type defined by:
irrelevant substructures. If the variable occurs more than once in the
structure the more deeply nested occurrences are replaced by new variables
because multiple occurrences of the variable prevent the induction from
working. The generalised term for the last example is:
The procedure then forms a conjunction of the inequalities for this term
and all of its `rotations':
This can be proved by a straightforward structural induction. The reason
for including the rotations is that the induction hypothesis required for
the proof of the original generalised term is the rotation of it.
The procedure could be optimised by detecting duplicated rotations. For
example it is not necessary to prove:
!n. ~(n = SUC(SUC(SUC n))) /\
~(n = SUC(SUC(SUC n))) /\
~(n = SUC(SUC(SUC n)))
in order to prove "~(n = SUC(SUC(SUC n)))" because the structure is its
own rotations. It is sufficient to prove:
!n. ~(n = SUC(SUC(SUC n)))
The procedure currently uses backwards proof. It would probably be more
efficient to use forwards proof.
----------------------------------------------------------------------------
Boulton was using hashI here... but I don't know why
----------------------------------------------------------------------------
CONJS_CONV : conv -> conv
Written by T.F.Melham.
Apply a given conversion to a sequence of conjuncts.
* need to check T case
----------------------------------------------------------------------------
----------------------------------------------------------------------------
ONE_STEP_RECTY_EQ_CONV : (thm # thm list # thm list) -> conv -> conv
Single step conversion for equality between structures of a single
recursive type.
Based on code written by T.F.Melham.
The theorems taken as arguments are the induction, distinctness and
injectivity theorems for the recursive type, as proved by the functions:
prove_induction_thm
prove_constructors_distinct
prove_constructors_one_one
theorems are passed around as lists of conjuncts.
other side (nested in a structure) the equation is proved false.
distinct the equation is proved false.
argument of the constructor. The conversion given as argument is then
applied to each conjunct. If any of the applications of this conversion
fail, so will the entire call.
In other conditions the function fails.
----------------------------------------------------------------------------
Taken from HOL90
----------------------------------------------------------------------------
RECTY_EQ_CONV : (thm # thm list # thm list) -> conv
of some type, the type being specified by the triple of theorems. The
structures may involve variables. The result may be a conjunction of
equations simpler than the original.
---------------------------------------------------------------------------- | DESCRIPTION : Proof procedure for simplifying an equation between two
AUTHOR : R.J.Boulton & T.F.Melham
DATE : 4th June 1992
LAST MODIFIED : R.J.Boulton
DATE : 14th October 1992
LAST MODIFIED : ( University of Edinburgh )
DATE : 2008
(* subst_occs : substitute only at selected occurrences.                      *)
(* [subst_occs ilist slist tm]: for each (t,x) pair in [slist], replace the   *)
(* occurrences of [x] in [tm] whose left-to-right occurrence numbers appear   *)
(* in the corresponding number list of [ilist].  Occurrence counting is       *)
(* threaded through the traversal by decrementing the pending indices.        *)
let subst_occs =
let rec subst_occs slist tm =
(* Substitutions whose pattern matches this very subterm vs. the rest. *)
let applic,noway = partition (fun (i,(t,x)) -> aconv tm x) slist in
(* Split each matching entry into "fire now" (index 1) and "fire later". *)
let sposs = map (fun (l,z) -> let l1,l2 = partition ((=) 1) l in
(l1,z),(l2,z)) applic in
let racts,rrest = unzip sposs in
let acts = filter (fun t -> not (fst t = [])) racts in
(* This occurrence has been passed: decrement all remaining indices. *)
let trest = map (fun (n,t) -> (map (C (-) 1) n,t)) rrest in
let urest = filter (fun t -> not (fst t = [])) trest in
let tlist = urest @ noway in
if acts = [] then
(* No substitution fires here: recurse structurally, threading state. *)
if is_comb tm then
let l,r = dest_comb tm in
let l',s' = subst_occs tlist l in
let r',s'' = subst_occs s' r in
mk_comb(l',r'),s''
else if is_abs tm then
(* Rename the bound variable to a genvar to avoid capture, then restore. *)
let bv,bod = dest_abs tm in
let gv = genvar(type_of bv) in
let nbod = vsubst[gv,bv] bod in
let tm',s' = subst_occs tlist nbod in
alpha bv (mk_abs(gv,tm')),s'
else
tm,tlist
else
(* Exactly this occurrence is selected: perform the substitution. *)
let tm' = (fun (n,(t,x)) -> subst[t,x] tm) (hd acts) in
tm',tlist in
fun ilist slist tm -> fst(subst_occs (zip ilist slist) tm);;
(* GSUBS : generalised substitution of equational theorems into a theorem.    *)
(* Replaces each LHS of [ths] in the conclusion of [th] by a fresh genvar     *)
(* template and then discharges the substitution with SUBST; [substfn]        *)
(* decides which occurrences the template replaces.                           *)
let GSUBS substfn ths th =
let ls = map (lhs o concl) ths
in let vars = map (genvar o type_of) ls
in let w = substfn (List.combine ls vars) (concl th)
in SUBST (List.combine ths vars) w th ;;
(* SUBS_OCCS : substitute theorems at selected occurrences only.              *)
(* [SUBS_OCCS [(occs1,th1);...] th] rewrites [th] with each [thi], but only   *)
(* at the occurrence numbers listed in [occsi].                               *)
let SUBS_OCCS nlths th =
try (let (nll, ths) = unzip nlths
in GSUBS (subst_occs nll) ths th
) with Failure _ -> failwith "SUBS_OCCS";;
(* ..., and ... (author names removed during extraction). *)
(* The leaf nodes must be either variables or 0-ary constructors of the type. *)
(* Since the latter two functions may fail, the distinctness and injectivity *)
(* ~(t = TWO (ONE u) (THREE v (ONE t) (TWO u (ONE t)))) *)
(* test = ZERO | ONE test | TWO test test | THREE test test test *)
(* The procedure works by first generalising the structure to eliminate any *)
(* TWO a (THREE v (ONE t) b) *)
(* (!a v b. ~(t = TWO a (THREE v (ONE t) b))) /\ *)
(* (!a v b. ~(t = THREE v (ONE (TWO a t)) b)) /\ *)
(* (!a v b. ~(t = ONE (TWO a (THREE v t b)))) *)
(* VAR_NOT_EQ_STRUCT_OF_VAR_CONV : prove that a variable differs from a       *)
(* constructor structure that properly contains it.  Given the induction,     *)
(* distinctness and injectivity theorems of the type, it generalises the      *)
(* structure, forms all "rotations" of the inequality, and proves the         *)
(* conjunction by structural induction.                                       *)
let VAR_NOT_EQ_STRUCT_OF_VAR_CONV =
(* Pair every list element with its 1-based position. *)
try( let number_list l =
let rec number_list' n l =
if (l = [])
then []
else (hd l,n)::(number_list' (n + 1) (tl l))
in number_list' 1 l
in let name = fst o dest_const
(* All paths (as argument-position lists) at which [v] occurs in [st]. *)
in let occurrences constrs v st =
let rec occurrences' v st path =
if (not (type_of st = type_of v)) then []
else if (st = v) then [rev path]
else if (is_var st) then []
else let (f,args) =
(check ( ((can (C assoc constrs)) o name o fst) )) (strip_comb st)
in flat (map (fun (arg,n) -> occurrences' v arg (n::path))
(number_list args))
in occurrences' v st []
(* Shortest list in a non-empty list of lists (fails on []). *)
in let min_length l =
let rec min_length' (x,n) l =
if (l = [])
then x
else if (length (hd l) < n)
then min_length' (hd l,length (hd l)) (tl l)
else min_length' (x,n) (tl l)
in if (l = [])
then failwith "min_length"
else min_length' (hd l,length (hd l)) (tl l)
(* Replace all substructures off the chosen occurrence path by genvars,      *)
(* returning the generalised structure and the (var,original) bindings.      *)
in let rec generalise (st,occ) =
let rec replace_side_structs (n,argn',binding) m args =
if (args = [])
then ([],[])
else let m' = m + 1
and arg = hd args
in let (rest,bind) =
replace_side_structs (n,argn',binding) m' (tl args)
in if (m' = n) then ((argn'::rest),(binding @ bind))
else if (is_var arg) then ((arg::rest),((arg,arg)::bind))
else let var = genvar (type_of arg)
in ((var::rest),((var,arg)::bind))
in if (occ = [])
then (st,[])
else let (f,args) = strip_comb st
and (n::occ') = occ
in let (argn',binding) = generalise (el (n-1) args,occ')
in let (args',bind) =
replace_side_structs (n,argn',binding) 0 args
in (list_mk_comb (f,args'),bind)
(* Constructor applications along the occurrence path, with [v] plugged in. *)
in let rec constr_apps v (st,occ) =
let rec replace_argn (n,argn') m args =
if (args = [])
then []
else let m' = m + 1
in if (m' = n)
then argn'::(tl args)
else (hd args)::(replace_argn (n,argn') m' (tl args))
in if (occ = [])
then []
else let (f,args) = strip_comb st
and (n::occ') = occ
in let args' = replace_argn (n,v) 0 args
in (list_mk_comb (f,args'))::(constr_apps v (el (n-1) args,occ'))
(* All cyclic rotations of a list (needed as induction hypotheses). *)
in let rotations l =
let rec rotations' l n =
if (n < 1)
then []
else l::(rotations' ((tl l) @ [hd l]) (n - 1))
in rotations' l (length l)
in let two_constrs = (hash (fst o strip_comb) (fst o strip_comb)) o
dest_eq o dest_neg o snd o strip_forall o concl
in let flip (x,y) = (y,x)
in let DEPTH_SYM = GEN_ALL o NOT_EQ_SYM o SPEC_ALL
in let rec arg_types ty =
try (match (dest_type ty) with
| ("fun",[argty;rest]) -> argty::(arg_types rest)
| _ -> [])
with Failure _ -> []
in let name_and_args = ((hash I) arg_types) o dest_const
in
(* Stage 1: precompute type data from the three theorem collections. *)
fun (induction,distincts,oneOnes) ->
let half_distincts = map (fun th -> ((hash name) name) (two_constrs th), th) distincts
in let distincts = half_distincts @ (map ((hash flip) DEPTH_SYM) half_distincts)
in let ind_goals =
(conjuncts o fst o dest_imp o snd o dest_forall o concl) induction
in let constrs =
map (name_and_args o fst o strip_comb o rand o snd o strip_forall o
snd o (splitlist dest_imp) o snd o strip_forall) ind_goals
in
(* Stage 2: prove a particular term [~(v = st)] (either orientation). *)
fun tm ->
(let (l,r) = dest_eq (dest_neg tm)
in let (flipped,v,st) =
if (is_var l)
then if (is_var r) then failwith "" else (false,l,r)
else if (is_var r)
then (true,r,l)
else failwith ""
in let occ = min_length (occurrences constrs v st)
in let (st',bind) = generalise (st,occ)
in let (vars,subterms) = List.split bind
in let apps = constr_apps v (st',occ)
(* Build the rotated inequalities, conjoin, and prove by induction. *)
in let rotats =
map (end_itlist (fun t1 t2 -> subst [(t2,v)] t1)) (rotations apps)
in let uneqs = map (mk_neg o (curry mk_eq v)) rotats
in let conj =
mk_forall (v,list_mk_conj (map (curry list_mk_forall vars) uneqs))
in let th1 =
prove (conj,INDUCT_TAC_ induction THEN
ASM_REWRITE_TAC (oneOnes @ (map snd distincts)))
in let th2 = (hd o CONJUNCTS o (SPEC v)) th1
in let th3 = SPECL subterms th2
in let th4 = if flipped then (NOT_EQ_SYM th3) else th3
in EQT_INTRO (CONV_RULE (C ALPHA tm) th4)
)) with Failure _ -> failwith "VAR_NOT_EQ_STRUCT_OF_VAR_CONV";;
(* Modified by ... (name removed during extraction). *)
(* * need to flatten conjuncts on RHS *)
(* CONJS_CONV : conv -> conv.  Apply a conversion to each conjunct of a       *)
(* (right-associated) conjunction, short-circuiting on F and pruning T:      *)
(* if any conjunct becomes F the whole term is proved F; conjuncts proved T  *)
(* are dropped via the prebuilt [fthm]/[tthm] lemma schemas below.           *)
let CONJS_CONV =
try(
(* Does the theorem's RHS reduce to the named constant ("T" or "F")? *)
let is st th = try(fst(dest_const(rand(concl th))) = st) with Failure _ -> false
in let v1 = genvar `:bool` and v2 = genvar `:bool`
(* fthm1: |- (v1 = F) ==> (v1 /\ v2 = F); fthm2 is its commuted form. *)
in let fthm1 =
let th1 = ASSUME (mk_eq(v1,`F`))
in let cnj = mk_conj(v1,v2)
in let th1 = DISCH cnj (EQ_MP th1 (CONJUNCT1 (ASSUME cnj)))
in let th2 = DISCH `F` (CONTR cnj (ASSUME `F`))
in DISCH (mk_eq(v1,`F`)) (IMP_ANTISYM_RULE th1 th2)
in let fthm2 = CONV_RULE(ONCE_DEPTH_CONV(REWR_CONV CONJ_SYM)) fthm1
in let fandr th tm = MP (INST [(lhs(concl th),v1);(tm,v2)] fthm1) th
in let fandl th tm = MP (INST [(lhs(concl th),v1);(tm,v2)] fthm2) th
(* tthm1: |- (v1 = T) ==> (v1 /\ v2 = v2); tthm2 is its commuted form. *)
in let tthm1 =
let th1 = ASSUME (mk_eq(v1,`T`))
in let th2 = SUBS_OCCS [[2],th1] (REFL (mk_conj(v1,v2)))
in DISCH (mk_eq(v1,`T`)) (ONCE_REWRITE_RULE [] th2)
in let tthm2 = CONV_RULE(ONCE_DEPTH_CONV(REWR_CONV CONJ_SYM)) tthm1
in let tandr th tm = MP (INST [(lhs(concl th),v1);(tm,v2)] tthm1) th
in let tandl th tm = MP (INST [(lhs(concl th),v1);(tm,v2)] tthm2) th
(* Walk the conjunction: non-conjunctions fall through to [conv] itself. *)
in let rec cconv conv tm =
(let (c,cs) = dest_conj tm
in let cth = conv c
in if (is "F" cth) then fandr cth cs else
let csth = cconv conv cs
in if (is "F" csth) then fandl csth c
else if (is "T" cth) then TRANS (tandr cth cs) csth
else if (is "T" csth) then TRANS (tandl csth c) cth
else try (MK_COMB((AP_TERM `(/\)` cth),csth)) with Failure _ -> conv tm )
in fun conv tm -> cconv conv tm) with Failure _ -> failwith "CONJS_CONV";;
(* Since the latter two functions may fail, the distinctness and injectivity *)
(* If one side of the equation is a variable and that variable appears in the *)
(* If the top-level constructors on the two sides of the equation are *)
(* If the top-level constructors on the two sides of the equation are the *)
(* same, a conjunction of equations is generated, one equation for each *)
(*----------------------------------------------------------------------------*)
(* ONE_STEP_RECTY_EQ_CONV : single-step conversion for an equation between    *)
(* two structures of one recursive type.  The argument triple holds the       *)
(* induction, distinctness and injectivity theorems for that type; [conv] is  *)
(* applied to each conjunct produced by an injectivity rewrite.               *)
(* Fix: the note below had lost its comment delimiters during extraction,     *)
(* which made the definition syntactically invalid; they are restored here.   *)
(*----------------------------------------------------------------------------*)
let ONE_STEP_RECTY_EQ_CONV (induction,distincts,oneOnes) =
  (* Prove [~(v = structure containing v)] and flip it into an |- tm = F form. *)
  let NOT_EQ_CONV =
    EQF_INTRO o EQT_ELIM o
    (VAR_NOT_EQ_STRUCT_OF_VAR_CONV (induction,distincts,oneOnes)) o
    mk_neg
  in let INJ_REW = GEN_REWRITE_CONV I oneOnes
  (* Deleted empty_rewrites - GEN_REWRITE_CONV different in light - hope it works *)
  in let ths1 = map SPEC_ALL distincts
  (* Distinctness theorems, both orientations, as |- eq = F rewrites. *)
  in let ths2 = map (GEN_ALL o EQF_INTRO o NOT_EQ_SYM) ths1
  in let dths = ths2 @ (map (GEN_ALL o EQF_INTRO) ths1)
  in let DIST_REW = GEN_REWRITE_CONV I dths
  in fun conv -> NOT_EQ_CONV ORELSEC
                 DIST_REW ORELSEC
                 (INJ_REW THENC (CONJS_CONV conv)) ORELSEC
                 (fun tm -> failwith "ONE_STEP_RECTY_EQ_CONV")
(* Function to simplify as far as possible an equation between two structures *)
(* RECTY_EQ_CONV: simplify an equation between two structures of one          *)
(* recursive type as far as possible; the result may be a conjunction of      *)
(* simpler equations.  Failures during set-up are reported uniformly.         *)
let RECTY_EQ_CONV (induction,distincts,oneOnes) =
  try
    (* Conversion proving a reflexive equation [t = t] to be [T]. *)
    let refl_conv eq =
      let (lhs_tm,rhs_tm) = dest_eq eq in
      if lhs_tm = rhs_tm then EQT_INTRO (REFL lhs_tm)
      else failwith "REFL_CONV" in
    let step = ONE_STEP_RECTY_EQ_CONV (induction,distincts,oneOnes) in
    (* Keep taking single simplification steps on every remaining conjunct. *)
    let rec simplify tm =
      (step simplify ORELSEC refl_conv ORELSEC ALL_CONV) tm in
    fun tm -> simplify tm
  with Failure _ -> failwith "RECTY_EQ_CONV";;
|
c535c029be204fe3e7b6231e053eff5e1db021e64432d01007abf6e578d532fd | Introduction-to-Functional-Programming/simple_exercises | rna_transcription.erl | -module(rna_transcription).
-export([to_rna/1]).
%% @doc Transcribe a DNA strand into its RNA complement:
%% G -> C, C -> G, T -> A, A -> U.  Crashes with badkey on any
%% character outside ACGT, exactly like the map lookup it replaces.
to_rna(Strand) ->
    Complement = #{$A => $U, $C => $G, $G => $C, $T => $A},
    [maps:get(Nucleotide, Complement) || Nucleotide <- Strand].
| null | https://raw.githubusercontent.com/Introduction-to-Functional-Programming/simple_exercises/674cc97ac01df41179fdd9a7af0743f6dfa0f780/adolfont/exercism/erlang/rna-transcription/src/rna_transcription.erl | erlang | -module(rna_transcription).
-export([to_rna/1]).
%% @doc Transcribe a DNA strand into its RNA complement:
%% G -> C, C -> G, T -> A, A -> U.
to_rna(Strand) ->
    Mapping = #{$A => $U, $C => $G, $G => $C, $T => $A},
    lists:map(fun(Nucleotide) -> maps:get(Nucleotide, Mapping) end, Strand).
| |
1bc9a33dcda22e7c9d2f591c84c604d2bb232e45c43bc82adcbacad4eb859577 | bdeket/rktsicm | mathutil.rkt | #lang racket/base
(provide (all-defined-out)
(all-from-out "cstm/mathutil.rkt")
g:identity)
(require (only-in "../rkt/glue.rkt" cons*)
(only-in "../rkt/define.rkt" define default-object?)
(only-in racket/syntax format-id)
"../general/list-utils.rkt"
"cstm/make-plain-procedure.rkt"
"numeric.rkt"
"utils.rkt"
"generic.rkt"
"cstm/mathutil.rkt"
"cstm/matrices.rkt"
"structs.rkt"
"strutl.rkt"
"types.rkt"
)
;;bdk;; start original file
;;; Derived Generic Operators
(define ratnum? rational?);not sure ... should this be exact?
#;
(define ratnum?
(access ratnum?
(->environment '(runtime number))))
;; Derived generic operators, each defined in terms of the primitive
;; generic operations (g:*, g:/, g:log, ...).  NOTE: these build symbolic
;; expressions as well as numbers, so the exact call structure matters.
(define (g:cube x)
(g:* x x x))
;; log base 10 and base 2 via the change-of-base identity.
(define (g:log10 x)
(g:/ (g:log x) (g:log 10)))
(define (g:log2 x)
(g:/ (g:log x) (g:log 2)))
(define (g:exp10 x)
(g:expt 10 x))
(define (g:exp2 x)
(g:expt 2 x))
;;; See numbers.scm
;; Circular functions from sin/cos.
(define (g:tan x)
(g:/ (g:sin x) (g:cos x)))
(define (g:cot x)
(g:/ (g:cos x) (g:sin x)))
(define (g:sec x)
(g:/ :one (g:cos x)))
(define (g:csc x)
(g:/ :one (g:sin x)))
;; Hyperbolic functions from sinh/cosh.
(define (g:tanh x)
(g:/ (g:sinh x) (g:cosh x)))
(define (g:sech x)
(g:/ :one (g:cosh x)))
(define (g:csch x)
(g:/ :one (g:sinh x)))
;; Inverse hyperbolics via their logarithmic identities.
(define (g:asinh z)
(g:log (g:+ z (g:sqrt (g:+ :one (g:square z))))))
;; acosh z = 2 log(sqrt((z+1)/2) + sqrt((z-1)/2))
(define (g:acosh z)
(g:* :two
(g:log (g:+ (g:sqrt (g:/ (g:+ z :one) :two))
(g:sqrt (g:/ (g:- z :one) :two))))))
;; atanh z = (log(1+z) - log(1-z)) / 2
(define (g:atanh z)
(g:/ (g:- (g:log (g:+ :one z))
(g:log (g:- :one z)))
:two))
;; Return a function like f but with each argument shifted by the
;; corresponding element of shifts: (g x ...) = (f (+ x s) ...).
(define (g:arg-shift f . shifts)
(define (g . xs)
(g:apply f (map g:+ xs shifts)))
g)
;; Return a function like f but with each argument scaled by the
;; corresponding element of scales: (g x ...) = (f (* x c) ...).
(define (g:arg-scale f . scales)
(define (g . xs)
(g:apply f (map g:* xs scales)))
g)
;;bdk;; moved to cstm/generic 11
;;; The generalized selector:
;;bdk;; moved to cstm/mathutil 1
;; The generalized selector: ((component i j ...) x) extracts the element
;; of the compound structure x addressed by the chain of selectors.
(define ((component . selectors) x)
(ref-internal x selectors))
;;bdk;; moved to cstm/mathutil 2
;; Number of components of a compound object; #f for (potentially
;; infinite) series and streams.  NOTE(review): dispatch order matters —
;; vector? is tested before matrix?/structure?; confirm those types are
;; never represented as raw vectors.
(define (g:size x)
(cond ((vector? x) (vector-length x))
((matrix? x) (matrix-size x))
((structure? x) (s:length x))
((series? x) #f)
((stream-pair? x) #f)
((list? x) (length x))
((string? x) (string-length x))
(else
(error "Unknown compound -- G:size" x))))
;;; Generic composition duplicates composition in utils
;; Generic composition (duplicates compose in utils).  The last function
;; keeps its full arity; all earlier ones are chained pairwise with
;; g:compose-2.  (lp assumes a non-empty list, guaranteed by the conds.)
(define (g:compose . fs)
(define (lp fs)
(cond ((null? (cdr fs)) (car fs))
(else (g:compose-2 (car fs) (lp (cdr fs))))))
(cond ((null? fs) g:identity)
((null? (cdr fs)) (car fs))
(else
(g:compose-bin (lp (butlast fs))
(car (last-pair fs))))))
;;bdk;; moved to cstm/generic 10
;; Binary composition.  When g is a list of functions, each is applied to
;; the arguments and f receives the list of results; otherwise plain
;; function composition.
(define (g:compose-2 f g)
(cond ((pair? g)
(lambda x
(g:apply f
(map (lambda (gi)
(g:apply gi x))
g))))
(else
(lambda x
(f (g:apply g x))))))
;; Arity-preserving binary composition: builds the composed procedure via
;; make-plain-procedure-slct syntax templates so the result advertises the
;; joint arity of g (or of the list of g's).  The four templates cover
;; fixed/rest argument lists for both runtime and quoted construction.
;; NOTE(review): the two dispatch tags differ ('g:compose-bin+n vs
;; 'compose-bin+1) — confirm the missing "g:" prefix is intentional.
(define (g:compose-bin f g)
(cond
[(and (pair? g) (not (structure? g)))
(define a (a-reduce joint-arity (map g:arity g)))
(make-plain-procedure-slct 'g:compose-bin+n
a
(λ (xs) #`(g:apply f (map (λ (gi) (g:apply gi (list #,@xs))) g)))
(λ (xs rst) #`(g:apply f (map (λ (gi) (g:apply gi (cons* #,@xs #,rst))) g)))
(λ (xs) #`(g:apply #,f (map (λ (gi) (g:apply gi (list #,@xs))) '#,g)))
(λ (xs rst) #`(g:apply #,f (map (λ (gi) (g:apply gi (cons* #,@xs #,rst))) '#,g))))]
[else
(define a (g:arity g))
(make-plain-procedure-slct 'compose-bin+1
a
(λ (xs) #`(g:apply f (list (g:apply g (list #,@xs)))))
(λ (xs rst) #`(g:apply f (list (g:apply g (cons* #,@xs #,rst)))))
(λ (xs) #`(g:apply #,f (list (g:apply #,g (list #,@xs)))))
(λ (xs rst) #`(g:apply #,f (list (g:apply #,g (cons* #,@xs #,rst))))))]))
#;
(define (g:compose-bin f g)
(cond ((and (pair? g) (not (structure? g)))
(let ((a
(a-reduce joint-arity
(map g:arity g))))
(cond ((equal? a *at-least-zero*)
(lambda x
(g:apply f
(map
(lambda (gi)
(g:apply gi x))
g))))
((equal? a *exactly-zero*)
(lambda ()
(g:apply f
(map (lambda (gi)
(gi))
g))))
((equal? a *at-least-one*)
(lambda (x . y)
(g:apply f
(map (lambda (gi)
(g:apply gi x y))
g))))
((equal? a *exactly-one*)
(lambda (x)
(g:apply f
(map (lambda (gi)
(gi x))
g))))
((equal? a *at-least-two*)
(lambda (x y . z)
(g:apply f
(map (lambda (gi)
(g:apply gi x y z))
g))))
((equal? a *exactly-two*)
(lambda (x y)
(g:apply f
(map (lambda (gi)
(gi x y))
g))))
((equal? a *at-least-three*)
(lambda (u x y . z)
(g:apply f
(map (lambda (gi)
(g:apply gi u x y z))
g))))
((equal? a *exactly-three*)
(lambda (x y z)
(g:apply f
(map (lambda (gi)
(gi x y z))
g))))
((equal? a *one-or-two*)
(lambda (x #:optional y)
(if (default-object? y)
(g:apply f
(map (lambda (gi)
(gi x))
g))
(g:apply f
(map (lambda (gi)
(gi x y))
g)))))
(else
(lambda x
(g:apply f
(map
(lambda (gi)
(g:apply gi x))
g)))))))
(else
(let ((a (g:arity g)))
(cond ((equal? a *at-least-zero*)
(lambda x
(g:apply f
(list (g:apply g x)))))
((equal? a *exactly-zero*)
(lambda ()
(g:apply f
(list (g:apply g '())))))
((equal? a *at-least-one*)
(lambda (x . y)
(g:apply f
(list (g:apply g x y)))))
((equal? a *exactly-one*)
(lambda (x)
(g:apply f
(list (g:apply g (list x))))))
((equal? a *at-least-two*)
(lambda (x y . z)
(g:apply f
(list (g:apply g x y z)))))
((equal? a *exactly-two*)
(lambda (x y)
(g:apply f
(list (g:apply g (list x y))))))
((equal? a *at-least-three*)
(lambda (u x y . z)
(g:apply f
(list (g:apply g u x y z)))))
((equal? a *exactly-three*)
(lambda (x y z)
(g:apply f
(list (g:apply g (list x y z))))))
((equal? a *one-or-two*)
(lambda (x #:optional y)
(if (default-object? y)
(g:apply f
(list (g:apply g (list x))))
(g:apply f
(list (g:apply g (list x y)))))))
(else
(lambda x
(g:apply f
(list (g:apply g x))))))))))
| null | https://raw.githubusercontent.com/bdeket/rktsicm/4ac66a098189ec80091422050bc48d443b68a41d/rktsicm/sicm/kernel/mathutil.rkt | racket | ; start original file
not sure ... should this be exact?
See numbers.scm
; moved to cstm / generic 11
The generalized selector:
; moved to cstm / mathutil 1
; moved to cstm / mathutil 2
; moved to cstm / generic 10
| #lang racket/base
(provide (all-defined-out)
(all-from-out "cstm/mathutil.rkt")
g:identity)
(require (only-in "../rkt/glue.rkt" cons*)
(only-in "../rkt/define.rkt" define default-object?)
(only-in racket/syntax format-id)
"../general/list-utils.rkt"
"cstm/make-plain-procedure.rkt"
"numeric.rkt"
"utils.rkt"
"generic.rkt"
"cstm/mathutil.rkt"
"cstm/matrices.rkt"
"structs.rkt"
"strutl.rkt"
"types.rkt"
)
Derived Generic Operators
(define ratnum?
(access ratnum?
(->environment '(runtime number))))
;; Derived generic operators, each defined in terms of the primitive
;; generic operations (g:*, g:/, g:log, ...).  NOTE: these build symbolic
;; expressions as well as numbers, so the exact call structure matters.
(define (g:cube x)
(g:* x x x))
;; log base 10 and base 2 via the change-of-base identity.
(define (g:log10 x)
(g:/ (g:log x) (g:log 10)))
(define (g:log2 x)
(g:/ (g:log x) (g:log 2)))
(define (g:exp10 x)
(g:expt 10 x))
(define (g:exp2 x)
(g:expt 2 x))
;; Circular functions from sin/cos.
(define (g:tan x)
(g:/ (g:sin x) (g:cos x)))
(define (g:cot x)
(g:/ (g:cos x) (g:sin x)))
(define (g:sec x)
(g:/ :one (g:cos x)))
(define (g:csc x)
(g:/ :one (g:sin x)))
;; Hyperbolic functions from sinh/cosh.
(define (g:tanh x)
(g:/ (g:sinh x) (g:cosh x)))
(define (g:sech x)
(g:/ :one (g:cosh x)))
(define (g:csch x)
(g:/ :one (g:sinh x)))
;; Inverse hyperbolics via their logarithmic identities.
(define (g:asinh z)
(g:log (g:+ z (g:sqrt (g:+ :one (g:square z))))))
;; acosh z = 2 log(sqrt((z+1)/2) + sqrt((z-1)/2))
(define (g:acosh z)
(g:* :two
(g:log (g:+ (g:sqrt (g:/ (g:+ z :one) :two))
(g:sqrt (g:/ (g:- z :one) :two))))))
;; atanh z = (log(1+z) - log(1-z)) / 2
(define (g:atanh z)
(g:/ (g:- (g:log (g:+ :one z))
(g:log (g:- :one z)))
:two))
(define (g:arg-shift f . shifts)
(define (g . xs)
(g:apply f (map g:+ xs shifts)))
g)
(define (g:arg-scale f . scales)
(define (g . xs)
(g:apply f (map g:* xs scales)))
g)
(define ((component . selectors) x)
(ref-internal x selectors))
(define (g:size x)
(cond ((vector? x) (vector-length x))
((matrix? x) (matrix-size x))
((structure? x) (s:length x))
((series? x) #f)
((stream-pair? x) #f)
((list? x) (length x))
((string? x) (string-length x))
(else
(error "Unknown compound -- G:size" x))))
Generic composition duplicates composition in utils
(define (g:compose . fs)
(define (lp fs)
(cond ((null? (cdr fs)) (car fs))
(else (g:compose-2 (car fs) (lp (cdr fs))))))
(cond ((null? fs) g:identity)
((null? (cdr fs)) (car fs))
(else
(g:compose-bin (lp (butlast fs))
(car (last-pair fs))))))
(define (g:compose-2 f g)
(cond ((pair? g)
(lambda x
(g:apply f
(map (lambda (gi)
(g:apply gi x))
g))))
(else
(lambda x
(f (g:apply g x))))))
;; Arity-preserving binary composition: builds the composed procedure via
;; make-plain-procedure-slct syntax templates so the result advertises the
;; joint arity of g (or of the list of g's).  The four templates cover
;; fixed/rest argument lists for both runtime and quoted construction.
;; NOTE(review): the two dispatch tags differ ('g:compose-bin+n vs
;; 'compose-bin+1) — confirm the missing "g:" prefix is intentional.
(define (g:compose-bin f g)
(cond
[(and (pair? g) (not (structure? g)))
(define a (a-reduce joint-arity (map g:arity g)))
(make-plain-procedure-slct 'g:compose-bin+n
a
(λ (xs) #`(g:apply f (map (λ (gi) (g:apply gi (list #,@xs))) g)))
(λ (xs rst) #`(g:apply f (map (λ (gi) (g:apply gi (cons* #,@xs #,rst))) g)))
(λ (xs) #`(g:apply #,f (map (λ (gi) (g:apply gi (list #,@xs))) '#,g)))
(λ (xs rst) #`(g:apply #,f (map (λ (gi) (g:apply gi (cons* #,@xs #,rst))) '#,g))))]
[else
(define a (g:arity g))
(make-plain-procedure-slct 'compose-bin+1
a
(λ (xs) #`(g:apply f (list (g:apply g (list #,@xs)))))
(λ (xs rst) #`(g:apply f (list (g:apply g (cons* #,@xs #,rst)))))
(λ (xs) #`(g:apply #,f (list (g:apply #,g (list #,@xs)))))
(λ (xs rst) #`(g:apply #,f (list (g:apply #,g (cons* #,@xs #,rst))))))]))
(define (g:compose-bin f g)
(cond ((and (pair? g) (not (structure? g)))
(let ((a
(a-reduce joint-arity
(map g:arity g))))
(cond ((equal? a *at-least-zero*)
(lambda x
(g:apply f
(map
(lambda (gi)
(g:apply gi x))
g))))
((equal? a *exactly-zero*)
(lambda ()
(g:apply f
(map (lambda (gi)
(gi))
g))))
((equal? a *at-least-one*)
(lambda (x . y)
(g:apply f
(map (lambda (gi)
(g:apply gi x y))
g))))
((equal? a *exactly-one*)
(lambda (x)
(g:apply f
(map (lambda (gi)
(gi x))
g))))
((equal? a *at-least-two*)
(lambda (x y . z)
(g:apply f
(map (lambda (gi)
(g:apply gi x y z))
g))))
((equal? a *exactly-two*)
(lambda (x y)
(g:apply f
(map (lambda (gi)
(gi x y))
g))))
((equal? a *at-least-three*)
(lambda (u x y . z)
(g:apply f
(map (lambda (gi)
(g:apply gi u x y z))
g))))
((equal? a *exactly-three*)
(lambda (x y z)
(g:apply f
(map (lambda (gi)
(gi x y z))
g))))
((equal? a *one-or-two*)
(lambda (x #:optional y)
(if (default-object? y)
(g:apply f
(map (lambda (gi)
(gi x))
g))
(g:apply f
(map (lambda (gi)
(gi x y))
g)))))
(else
(lambda x
(g:apply f
(map
(lambda (gi)
(g:apply gi x))
g)))))))
(else
(let ((a (g:arity g)))
(cond ((equal? a *at-least-zero*)
(lambda x
(g:apply f
(list (g:apply g x)))))
((equal? a *exactly-zero*)
(lambda ()
(g:apply f
(list (g:apply g '())))))
((equal? a *at-least-one*)
(lambda (x . y)
(g:apply f
(list (g:apply g x y)))))
((equal? a *exactly-one*)
(lambda (x)
(g:apply f
(list (g:apply g (list x))))))
((equal? a *at-least-two*)
(lambda (x y . z)
(g:apply f
(list (g:apply g x y z)))))
((equal? a *exactly-two*)
(lambda (x y)
(g:apply f
(list (g:apply g (list x y))))))
((equal? a *at-least-three*)
(lambda (u x y . z)
(g:apply f
(list (g:apply g u x y z)))))
((equal? a *exactly-three*)
(lambda (x y z)
(g:apply f
(list (g:apply g (list x y z))))))
((equal? a *one-or-two*)
(lambda (x #:optional y)
(if (default-object? y)
(g:apply f
(list (g:apply g (list x))))
(g:apply f
(list (g:apply g (list x y)))))))
(else
(lambda x
(g:apply f
(list (g:apply g x))))))))))
|
f5eec9dfd3e3ce0d600a4e36f99a2f8b1beb49f08eb7ef235ea5cbc536427956 | arcusfelis/xapian-erlang-bindings | xapian.erl | @headerfile " xapian.hrl "
-module(xapian).
-export([start/0]).
-include_lib("xapian/include/xapian.hrl").
%% @doc Start the xapian OTP application.
start() ->
application:start(xapian).
| null | https://raw.githubusercontent.com/arcusfelis/xapian-erlang-bindings/29871b3e64d658e74701c6ba68bf59e1a9b168f1/src/xapian.erl | erlang | @headerfile " xapian.hrl "
-module(xapian).
-export([start/0]).
-include_lib("xapian/include/xapian.hrl").
start() ->
application:start(xapian).
| |
aed34e6905ea3fc6b1aae7e2c43864ad8a18f01b39cd74ac28ae22196c023372 | phmarek/yason | parse.lisp | This file is part of yason , a Common Lisp JSON parser / encoder
;;
;; Copyright (c) 2008-2014 (author name removed during extraction) and contributors
;; All rights reserved.
;;
;; Please see the file LICENSE in the distribution.
(in-package :yason)
(defconstant +default-string-length+ 20
"Default length of strings that are created while reading json input.")
(declaim (type symbol true))
(defvar true 'true
"Symbol representing the JSON value true.")
(declaim (type symbol false))
(defvar false 'false
"Symbol representing the JSON value false.")
(defvar *parse-object-key-fn* #'identity
"Function to call to convert a key string in a JSON array to a key
in the CL hash produced.")
(defvar *parse-json-arrays-as-vectors* nil
"If set to a true value, JSON arrays will be parsed as vectors, not
as lists.")
(defvar *parse-json-booleans-as-symbols* nil
"If set to a true value, JSON booleans will be read as the symbols
TRUE and FALSE, not as T and NIL, respectively. The actual symbols
can be customized via the TRUE and FALSE special variables.")
(defvar *parse-json-null-as-keyword* nil
"If set to a true value, JSON nulls will be read as the keyword :NULL, not as NIL.")
(defvar *parse-object-as* :hash-table
"Set to either :hash-table, :plist or :alist to determine the data
structure that objects are parsed to.")
(defvar *parse-object-as-alist* nil
"DEPRECATED, provided for backward compatibility")
(defun make-adjustable-string ()
  "Create an empty, growable string used as a scratch buffer while
reading JSON strings and numbers."
  (make-array +default-string-length+
              :element-type 'character
              :adjustable t
              :fill-pointer 0))
(defun parse-number (input)
;; Read a JSON number from INPUT and return it as a Lisp number.
;; Equivalent to matching the regex
;; (cl-ppcre:scan-to-strings "^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]+|)(?:[eE][-+]?[0-9]+|)" buffer)
;; but implemented by hand because we want to operate on streams.
;; NOTE(review): this accepts any run of number-ish characters and hands
;; it to READ-FROM-STRING — malformed runs like "1.2.3" are not rejected
;; here; confirm callers rely on the reader to signal an error.
(let ((buffer (make-adjustable-string))
(*read-default-float-format* 'double-float))
(loop while (position (peek-char nil input nil) ".0123456789+-Ee")
do (vector-push-extend (read-char input) buffer))
(values (read-from-string buffer))))
(defun parse-unicode-escape (input)
  "Read the four hex digits of a \\uXXXX escape from INPUT and return the
decoded character.  When the code is a UTF-16 lead surrogate, the
following \\uXXXX tail surrogate is consumed as well and the combined
code point is returned (on CMUCL, whose strings are UTF-16, the two
surrogate characters are returned as two values instead).
Fix: the CMUCL note below had lost its comment markers during
extraction, which made the definition syntactically invalid."
  (let ((char-code (let ((buffer (make-string 4)))
                     (read-sequence buffer input)
                     (parse-integer buffer :radix 16))))
    (if (and (>= char-code #xd800)
             (<= char-code #xdbff))
        ;; Lead surrogate: the next six characters must be the "\uXXXX"
        ;; tail surrogate.
        (let ((buffer (make-string 6)))
          (read-sequence buffer input)
          (when (not (string= buffer "\\u" :end1 2))
            (error "Lead Surrogate without Tail Surrogate"))
          (let ((tail-code (parse-integer buffer :radix 16 :start 2)))
            (when (not (and (>= tail-code #xdc00)
                            (<= tail-code #xdfff)))
              (error "Lead Surrogate without Tail Surrogate"))
            #-cmucl
            ;; Combine the surrogate pair into one code point.
            (code-char (+ #x010000
                          (ash (- char-code #xd800) 10)
                          (- tail-code #xdc00)))
            ;; Cmucl strings use utf-16 encoding.  Just return the two
            ;; surrogate chars as is.
            #+cmucl
            (values (code-char char-code) (code-char tail-code))))
        (code-char char-code))))
(defun parse-string (input)
  "Read a JSON string (or a bare, unquoted token, terminated by
whitespace or a colon) from INPUT and return it as a Lisp string.
Backslash escapes, including \\uXXXX unicode escapes, are decoded.
Fix: the CMUCL note below had lost its comment markers during
extraction, which made the definition syntactically invalid."
  (let ((output (make-adjustable-string)))
    (labels ((outc (c)
               (vector-push-extend c output))
             (next ()
               (read-char input))
             (peek ()
               (peek-char nil input)))
      ;; A leading double quote starts a quoted string; anything else is
      ;; the first character of an unquoted token.
      (let* ((starting-symbol (next))
             (string-quoted (equal starting-symbol #\")))
        (unless string-quoted
          (outc starting-symbol))
        (loop
          (cond
            ((eql (peek) #\")
             (next)
             (return-from parse-string output))
            ((eql (peek) #\\)
             (next)
             ;; Decode the escape character following the backslash.
             (ecase (next)
               (#\" (outc #\"))
               (#\\ (outc #\\))
               (#\/ (outc #\/))
               (#\b (outc #\Backspace))
               (#\f (outc #\Page))
               (#\n (outc #\Newline))
               (#\r (outc #\Return))
               (#\t (outc #\Tab))
               (#\u
                #-cmucl
                (outc (parse-unicode-escape input))
                #+cmucl
                (multiple-value-bind (char tail)
                    (parse-unicode-escape input)
                  (outc char)
                  ;; Output the surrogate as is for cmucl.
                  (when tail
                    (outc tail))))))
            ;; Unquoted tokens end at whitespace or a colon.
            ((and (or (whitespace-p (peek))
                      (eql (peek) #\:))
                  (not string-quoted))
             (return-from parse-string output))
            (t
             (outc (next)))))))))
(defun whitespace-p (char)
  "Generalized-boolean test: true when CHAR is treated as whitespace by
the JSON parser.  (Returns the tail of the literal list, so callers must
only rely on truthiness.)"
(member char '(#\Space #\Newline #\Tab #\Linefeed #\Return)))
(defun skip-whitespace (input)
  "Consume and discard any run of whitespace characters at the front of
INPUT.  Stops (without error) at end of file."
  (loop
    (let ((candidate (peek-char nil input nil nil)))
      (unless (and candidate (whitespace-p candidate))
        (return))
      (read-char input))))
(defun peek-char-skipping-whitespace (input &optional (eof-error-p t))
;; Skip leading whitespace, then peek at (without consuming) the next
;; character.  With EOF-ERROR-P true (the default) an end-of-file error
;; is signalled at EOF; otherwise NIL is returned.
(skip-whitespace input)
(peek-char nil input eof-error-p))
(defun parse-constant (input)
;; Read one of the JSON constants "true", "false" or "null" from INPUT,
;; dispatching on the first character, and return its Lisp value (which
;; honours *PARSE-JSON-BOOLEANS-AS-SYMBOLS* and
;; *PARSE-JSON-NULL-AS-KEYWORD*).
;; NOTE(review): assumes the caller has already verified the next
;; character is #\t, #\f or #\n — otherwise FIND returns NIL and the
;; DESTRUCTURING-BIND fails; confirm this invariant.
(destructuring-bind (expected-string return-value)
(find (peek-char nil input nil)
`(("true" ,(if *parse-json-booleans-as-symbols* true t))
("false" ,(if *parse-json-booleans-as-symbols* false nil))
("null" ,(if *parse-json-null-as-keyword* :null nil)))
:key (lambda (entry) (aref (car entry) 0))
:test #'eql)
;; Consume the remaining characters, verifying each one.
(loop for char across expected-string
unless (eql (read-char input nil) char)
do (error "invalid constant"))
return-value))
;; Signalled when *PARSE-OBJECT-KEY-FN* returns NIL for a key string.
(define-condition cannot-convert-key (error)
((key-string :initarg :key-string
:reader key-string))
(:report (lambda (c stream)
(format stream "cannot convert key ~S used in JSON object to hash table key"
(key-string c)))))
;; Signalled when the same key occurs twice in one JSON object.
(define-condition duplicate-key (error)
((key-string :initarg :key-string
:reader key-string))
(:report (lambda (c stream)
(format stream "Duplicate dict key ~S"
(key-string c)))))
;; Return the empty container for a JSON object according to
;; *PARSE-OBJECT-AS*: NIL for :plist/:alist, the supplied hash table for
;; :hash-table.
(defun create-container (ht)
(ecase *parse-object-as*
((:plist :alist)
nil)
(:hash-table
;; Uses hash-table
ht)))
;; Add KEY/VALUE to container TO and return the (possibly new) container.
;; For :plist/:alist the container is rebuilt functionally; for
;; :hash-table it is mutated in place.
(defun add-attribute (to key value)
(ecase *parse-object-as*
(:plist
(append to (list key value)))
(:alist
(acons key value to))
(:hash-table
(setf (gethash key to) value)
to)))
;; Signalled when a key inside a JSON object is not followed by a colon.
(define-condition expected-colon (error)
((key-string :initarg :key-string
:reader key-string))
(:report (lambda (c stream)
(format stream "expected colon to follow key ~S used in JSON object"
(key-string c)))))
(defun parse-object (input)
;; Read a JSON object (the opening brace not yet consumed) from INPUT.
;; Returns two values: the parsed container (per *PARSE-OBJECT-AS*) and
;; the internal hash table used for duplicate-key detection.
;; NOTE(review): duplicate keys are only detected in :hash-table mode —
;; in :alist/:plist mode nothing is ever stored in HT, so the GETHASH
;; check below never fires; confirm this is intended.
(let* ((ht (make-hash-table :test #'equal))
(return-value (create-container ht)))
(read-char input)
(loop
(when (eql (peek-char-skipping-whitespace input)
#\})
(return))
(skip-whitespace input)
;; Read the key, convert it via *PARSE-OBJECT-KEY-FN*, reject NIL.
(let* ((key-string (parse-string input))
(key (or (funcall *parse-object-key-fn* key-string)
(error 'cannot-convert-key :key-string key-string))))
(when (nth-value 1 (gethash key ht))
(error 'duplicate-key :key-string key-string))
(skip-whitespace input)
(unless (eql #\: (read-char input))
(error 'expected-colon :key-string key-string))
(skip-whitespace input)
;; Recursively parse the value and accumulate the pair.
(let ((value (parse input)))
(setf return-value
(add-attribute return-value key value))))
;; After each pair: either a comma (more pairs) or the closing brace.
(ecase (peek-char-skipping-whitespace input)
(#\, (read-char input))
(#\} nil)))
(read-char input)
;; Alists are accumulated in reverse insertion order; restore it here.
(values (if (eq *parse-object-as* :alist)
(nreverse return-value)
return-value)
ht)))
(defconstant +initial-array-size+ 20
"Initial size of JSON arrays read, they will grow as needed.")
(defun %parse-array (input add-element-function)
"Parse JSON array from input, calling ADD-ELEMENT-FUNCTION for each array element parsed."
;; Consume the opening bracket, then alternate element / separator until
;; the closing bracket is seen (also handles the empty array).
(read-char input)
(loop
(when (eql (peek-char-skipping-whitespace input)
#\])
(return))
(funcall add-element-function (parse input))
;; After each element: either a comma (more elements) or the closer.
(ecase (peek-char-skipping-whitespace input)
(#\, (read-char input))
(#\] nil)))
(read-char input))
(defun parse-array (input)
  "Read a JSON array from INPUT.  Returns an adjustable vector when
*PARSE-JSON-ARRAYS-AS-VECTORS* is true, otherwise a fresh list."
  (cond
    (*parse-json-arrays-as-vectors*
     (let ((result (make-array +initial-array-size+ :adjustable t :fill-pointer 0)))
       (%parse-array input
                     (lambda (item)
                       (vector-push-extend item result)))
       result))
    (t
     ;; Accumulate in reverse, then restore the original element order.
     (let ((accumulated '()))
       (%parse-array input
                     (lambda (item)
                       (push item accumulated)))
       (nreverse accumulated)))))
;; Internal entry point: dispatches on the input designator (stream,
;; pathname or string) and on the first non-whitespace character of the
;; JSON value.
(defgeneric parse% (input)
  (:method ((input stream))
    ;; backward compatibility code
    ;; The deprecated *parse-object-as-alist* may only be combined with
    ;; the default *parse-object-as* value.
    (assert (or (not *parse-object-as-alist*)
                (eq *parse-object-as* :hash-table))
            () "unexpected combination of *parse-object-as* and *parse-object-as-alist*, please use *parse-object-as* exclusively")
    (let ((*parse-object-as* (if *parse-object-as-alist*
                                 :alist
                                 *parse-object-as*)))
      ;; end of backward compatibility code
      (check-type *parse-object-as* (member :hash-table :alist :plist))
      ;; The first character decides the value's type.
      (ecase (peek-char-skipping-whitespace input)
        (#\"
         (parse-string input))
        ((#\- #\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9)
         (parse-number input))
        (#\{
         (parse-object input))
        (#\[
         (parse-array input))
        ;; true / false / null
        ((#\t #\f #\n)
         (parse-constant input)))))
  (:method ((input pathname))
    (with-open-file (stream input)
      (parse stream)))
  (:method ((input string))
    (parse (make-string-input-stream input))))
(defun parse (input
              &key
                (object-key-fn *parse-object-key-fn*)
                (object-as *parse-object-as*)
                (json-arrays-as-vectors *parse-json-arrays-as-vectors*)
                (json-booleans-as-symbols *parse-json-booleans-as-symbols*)
                (json-nulls-as-keyword *parse-json-null-as-keyword*))
  "Parse INPUT, which needs to be a string or a stream, as JSON.
Returns the lisp representation of the JSON structure parsed. The
keyword arguments can be used to override the parser settings as
defined by the respective special variables."
  ;; Rebind the configuration specials around the actual work so that
  ;; per-call keyword overrides do not leak into other callers.
  (let ((*parse-object-key-fn* object-key-fn)
        (*parse-object-as* object-as)
        (*parse-json-arrays-as-vectors* json-arrays-as-vectors)
        (*parse-json-booleans-as-symbols* json-booleans-as-symbols)
        (*parse-json-null-as-keyword* json-nulls-as-keyword))
    (parse% input)))
| null | https://raw.githubusercontent.com/phmarek/yason/f4ad893e9dc4142396a86bc518ff6bc814fd43da/parse.lisp | lisp |
All rights reserved.
Please see the file LICENSE in the distribution.
would be
(cl-ppcre:scan-to-strings "^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]+|)(?:[eE][-+]?[0-9]+|)" buffer)
but we want to operate on streams
surrogate chars as is.
Uses hash-table
backward compatibility code
end of backward compatibility code | This file is part of yason , a Common Lisp JSON parser / encoder
Copyright ( c ) 2008 - 2014 and contributors
(in-package :yason)
;;; Parser configuration.
;;; These specials control how JSON values are mapped to Lisp data; the
;;; PARSE entry point rebinds them from its keyword arguments.
(defconstant +default-string-length+ 20
  "Default length of strings that are created while reading json input.")

(declaim (type symbol true))
(defvar true 'true
  "Symbol representing the JSON value true.")

(declaim (type symbol false))
(defvar false 'false
  "Symbol representing the JSON value false.")

(defvar *parse-object-key-fn* #'identity
  "Function to call to convert a key string in a JSON array to a key
in the CL hash produced.")

(defvar *parse-json-arrays-as-vectors* nil
  "If set to a true value, JSON arrays will be parsed as vectors, not
as lists.")

(defvar *parse-json-booleans-as-symbols* nil
  "If set to a true value, JSON booleans will be read as the symbols
TRUE and FALSE, not as T and NIL, respectively. The actual symbols
can be customized via the TRUE and FALSE special variables.")

(defvar *parse-json-null-as-keyword* nil
  "If set to a true value, JSON nulls will be read as the keyword :NULL, not as NIL.")

(defvar *parse-object-as* :hash-table
  "Set to either :hash-table, :plist or :alist to determine the data
structure that objects are parsed to.")

(defvar *parse-object-as-alist* nil
  "DEPRECATED, provided for backward compatibility")
(defun make-adjustable-string ()
  "Return a fresh, empty, adjustable string with a fill pointer,
usable as a buffer for parsing strings and numbers."
  (make-array +default-string-length+
              :element-type 'character
              :adjustable t
              :fill-pointer 0))
(defun parse-number (input)
  ;; Accumulate every character that may occur in a JSON number, then
  ;; let the Lisp reader produce the numeric value.  Floats default to
  ;; DOUBLE-FLOAT.
  ;; NOTE(review): this is more permissive than the JSON grammar (any
  ;; mix of the characters below is collected); READ-FROM-STRING then
  ;; decides what to make of the buffer -- confirm that is acceptable.
  (let ((buffer (make-adjustable-string))
        (*read-default-float-format* 'double-float))
    (loop while (position (peek-char nil input nil) ".0123456789+-Ee")
          do (vector-push-extend (read-char input) buffer))
    ;; VALUES truncates to the primary value, dropping the second value
    ;; (the index) that READ-FROM-STRING also returns.
    (values (read-from-string buffer))))
(defun parse-unicode-escape (input)
  ;; Called after "\u" has been consumed.  Reads the four hex digits;
  ;; for a UTF-16 lead surrogate (#xD800-#xDBFF) it also reads the
  ;; mandatory following "\uXXXX" tail surrogate and combines the pair
  ;; into a single code point (except on cmucl, see below).
  (let ((char-code (let ((buffer (make-string 4)))
                     (read-sequence buffer input)
                     (parse-integer buffer :radix 16))))
    (if (and (>= char-code #xd800)
             (<= char-code #xdbff))
        (let ((buffer (make-string 6)))
          (read-sequence buffer input)
          ;; The next escape must literally start with "\u".
          (when (not (string= buffer "\\u" :end1 2))
            (error "Lead Surrogate without Tail Surrogate"))
          (let ((tail-code (parse-integer buffer :radix 16 :start 2)))
            (when (not (and (>= tail-code #xdc00)
                            (<= tail-code #xdfff)))
              (error "Lead Surrogate without Tail Surrogate"))
            #-cmucl
            (code-char (+ #x010000
                          (ash (- char-code #xd800) 10)
                          (- tail-code #xdc00)))
            ;; Cmucl strings use utf-16 encoding.  Just return the two
            ;; surrogate chars as is (as two values).
            #+cmucl
            (values (code-char char-code) (code-char tail-code))))
        (code-char char-code))))
(defun parse-string (input)
  ;; Parse a string from INPUT into a fresh adjustable string.
  ;; Normally the next character is the opening #\" of a JSON string;
  ;; anything else starts an *unquoted* string, which ends at whitespace
  ;; or #\: (the terminator is left in the stream).
  ;; An unterminated quoted string makes PEEK signal an end-of-file
  ;; error, since PEEK-CHAR is called with the default eof-error-p.
  (let ((output (make-adjustable-string)))
    (labels ((outc (c)
               (vector-push-extend c output))
             (next ()
               (read-char input))
             (peek ()
               (peek-char nil input)))
      (let* ((starting-symbol (next))
             (string-quoted (equal starting-symbol #\")))
        (unless string-quoted
          ;; First character is already part of an unquoted string.
          (outc starting-symbol))
        (loop
          (cond
            ((eql (peek) #\")
             ;; Closing quote: consume it and return the collected text.
             (next)
             (return-from parse-string output))
            ((eql (peek) #\\)
             ;; Backslash escape sequence.
             (next)
             (ecase (next)
               (#\" (outc #\"))
               (#\\ (outc #\\))
               (#\/ (outc #\/))
               (#\b (outc #\Backspace))
               (#\f (outc #\Page))
               (#\n (outc #\Newline))
               (#\r (outc #\Return))
               (#\t (outc #\Tab))
               (#\u
                #-cmucl
                (outc (parse-unicode-escape input))
                #+cmucl
                (multiple-value-bind (char tail)
                    (parse-unicode-escape input)
                  (outc char)
                  ;; Output the surrogate as is for cmucl.
                  (when tail
                    (outc tail))))))
            ((and (or (whitespace-p (peek))
                      (eql (peek) #\:))
                  (not string-quoted))
             ;; End of an unquoted string; terminator stays in stream.
             (return-from parse-string output))
            (t
             (outc (next)))))))))
(defun whitespace-p (c)
  "Return a true value iff C is one of the whitespace characters
recognized by the JSON parser."
  (member c '(#\Space #\Newline #\Tab #\Linefeed #\Return)))
(defun skip-whitespace (input)
  ;; Consume characters from INPUT for as long as the next one is
  ;; whitespace; stop (without reading) at EOF or at the first
  ;; non-whitespace character.
  (loop
    (let ((next (peek-char nil input nil nil)))
      (unless (and next (whitespace-p next))
        (return))
      (read-char input))))
(defun peek-char-skipping-whitespace (input &optional (eof-error-p t))
  ;; Return the next non-whitespace character of INPUT without
  ;; consuming it; preceding whitespace *is* consumed.  When
  ;; EOF-ERROR-P is true (the default) end of file signals an error,
  ;; otherwise NIL is returned.
  (skip-whitespace input)
  (peek-char nil input eof-error-p))
(defun parse-constant (input)
  ;; Read one of the literals "true", "false" or "null", selected by
  ;; its first character, and return the configured Lisp value for it.
  ;; NOTE(review): FIND returns NIL at EOF or for any other character,
  ;; which makes DESTRUCTURING-BIND fail; PARSE% only calls this for
  ;; #\t, #\f and #\n, so that normally cannot happen -- confirm.
  (destructuring-bind (expected-string return-value)
      (find (peek-char nil input nil)
            `(("true" ,(if *parse-json-booleans-as-symbols* true t))
              ("false" ,(if *parse-json-booleans-as-symbols* false nil))
              ("null" ,(if *parse-json-null-as-keyword* :null nil)))
            :key (lambda (entry) (aref (car entry) 0))
            :test #'eql)
    ;; Consume and verify the remaining characters of the literal.
    (loop for char across expected-string
          unless (eql (read-char input nil) char)
          do (error "invalid constant"))
    return-value))
;; Signaled when *parse-object-key-fn* returns NIL for an object key.
(define-condition cannot-convert-key (error)
  ((key-string :initarg :key-string
               :reader key-string))
  (:report (lambda (c stream)
             (format stream "cannot convert key ~S used in JSON object to hash table key"
                     (key-string c)))))
;; Signaled when the same key occurs twice within one JSON object.
(define-condition duplicate-key (error)
  ((key-string :initarg :key-string
               :reader key-string))
  (:report (lambda (c stream)
             (format stream "Duplicate dict key ~S"
                     (key-string c)))))
(defun create-container (ht)
  ;; Return the initial (empty) container for a JSON object.  HT is
  ;; reused directly for :hash-table output; alists and plists start
  ;; out as NIL.
  (ecase *parse-object-as*
    (:hash-table ht)
    ((:plist :alist) nil)))
(defun add-attribute (container key value)
  ;; Extend CONTAINER with the pair KEY -> VALUE, using whichever
  ;; representation *parse-object-as* selects.  The (possibly new)
  ;; container is returned; callers must use the return value, since
  ;; alists and plists are extended non-destructively.
  (ecase *parse-object-as*
    (:hash-table
     (setf (gethash key container) value)
     container)
    (:alist
     (acons key value container))
    (:plist
     (append container (list key value)))))
;; Signaled by PARSE-OBJECT when an object key is not followed by #\:.
(define-condition expected-colon (error)
  ((key-string :initarg :key-string
               :reader key-string))
  (:report (lambda (c stream)
             (format stream "expected colon to follow key ~S used in JSON object"
                     (key-string c)))))
(defun parse-object (input)
  ;; Parse one JSON object ("{ ... }") from INPUT; the opening brace is
  ;; still in the stream when we are called.  Returns the parsed
  ;; container and, as a second value, the auxiliary hash table HT.
  (let* ((ht (make-hash-table :test #'equal))
         (return-value (create-container ht)))
    (read-char input)                   ; consume the opening #\{
    (loop
      ;; A following #\} ends the object (covers the empty object too).
      (when (eql (peek-char-skipping-whitespace input)
                 #\})
        (return))
      (skip-whitespace input)
      (let* ((key-string (parse-string input))
             ;; NIL from *parse-object-key-fn* means the key cannot be
             ;; converted.
             (key (or (funcall *parse-object-key-fn* key-string)
                      (error 'cannot-convert-key :key-string key-string))))
        ;; NOTE(review): HT is only populated by ADD-ATTRIBUTE in the
        ;; :hash-table case, so this duplicate check cannot fire for
        ;; :alist/:plist output -- confirm whether that is intended.
        (when (nth-value 1 (gethash key ht))
          (error 'duplicate-key :key-string key-string))
        (skip-whitespace input)
        (unless (eql #\: (read-char input))
          (error 'expected-colon :key-string key-string))
        (skip-whitespace input)
        (let ((value (parse input)))
          (setf return-value
                (add-attribute return-value key value))))
      ;; After a pair: a comma means more pairs, #\} terminates.
      (ecase (peek-char-skipping-whitespace input)
        (#\, (read-char input))
        (#\} nil)))
    (read-char input)                   ; consume the closing #\}
    ;; Alists are built by consing, so restore source order here.
    (values (if (eq *parse-object-as* :alist)
                (nreverse return-value)
                return-value)
            ht)))
;; Arrays grow via VECTOR-PUSH-EXTEND, so this is only a starting size.
(defconstant +initial-array-size+ 20
  "Initial size of JSON arrays read, they will grow as needed.")
(defun %parse-array (input add-element-function)
  "Parse JSON array from input, calling ADD-ELEMENT-FUNCTION for each array element parsed."
  (read-char input)                     ; consume the opening #\[
  (loop
    ;; A following #\] ends the array (covers the empty array too).
    (when (eql (peek-char-skipping-whitespace input)
               #\])
      (return))
    (funcall add-element-function (parse input))
    ;; After an element: a comma means more elements, #\] terminates.
    (ecase (peek-char-skipping-whitespace input)
      (#\, (read-char input))
      (#\] nil)))
  (read-char input))                    ; consume the closing #\]
(defun parse-array (input)
  ;; Read one JSON array from INPUT, materializing it either as an
  ;; adjustable vector or as a list, per *parse-json-arrays-as-vectors*.
  (if *parse-json-arrays-as-vectors*
      (let ((result (make-array +initial-array-size+ :adjustable t :fill-pointer 0)))
        (%parse-array input
                      (lambda (element)
                        (vector-push-extend element result)))
        result)
      (let ((acc '()))
        (%parse-array input
                      (lambda (element)
                        (push element acc)))
        ;; Elements were pushed in reverse; restore source order.
        (nreverse acc))))
;; Internal entry point: dispatches on the input designator (stream,
;; pathname or string) and on the first non-whitespace character of the
;; JSON value.
(defgeneric parse% (input)
  (:method ((input stream))
    ;; Backward compatibility: the deprecated *parse-object-as-alist*
    ;; may only be combined with the default *parse-object-as* value.
    (assert (or (not *parse-object-as-alist*)
                (eq *parse-object-as* :hash-table))
            () "unexpected combination of *parse-object-as* and *parse-object-as-alist*, please use *parse-object-as* exclusively")
    (let ((*parse-object-as* (if *parse-object-as-alist*
                                 :alist
                                 *parse-object-as*)))
      ;; End of backward compatibility code.
      (check-type *parse-object-as* (member :hash-table :alist :plist))
      ;; The first character decides the value's type.
      (ecase (peek-char-skipping-whitespace input)
        (#\"
         (parse-string input))
        ((#\- #\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9)
         (parse-number input))
        (#\{
         (parse-object input))
        (#\[
         (parse-array input))
        ;; true / false / null
        ((#\t #\f #\n)
         (parse-constant input)))))
  (:method ((input pathname))
    (with-open-file (stream input)
      (parse stream)))
  (:method ((input string))
    (parse (make-string-input-stream input))))
(defun parse (input
              &key
                (object-key-fn *parse-object-key-fn*)
                (object-as *parse-object-as*)
                (json-arrays-as-vectors *parse-json-arrays-as-vectors*)
                (json-booleans-as-symbols *parse-json-booleans-as-symbols*)
                (json-nulls-as-keyword *parse-json-null-as-keyword*))
  "Parse INPUT, which needs to be a string or a stream, as JSON.
Returns the lisp representation of the JSON structure parsed. The
keyword arguments can be used to override the parser settings as
defined by the respective special variables."
  ;; Rebind the configuration specials around the actual work so that
  ;; per-call keyword overrides do not leak into other callers.
  (let ((*parse-object-key-fn* object-key-fn)
        (*parse-object-as* object-as)
        (*parse-json-arrays-as-vectors* json-arrays-as-vectors)
        (*parse-json-booleans-as-symbols* json-booleans-as-symbols)
        (*parse-json-null-as-keyword* json-nulls-as-keyword))
    (parse% input)))
|
ccc9addfac39ea720c12093da9aed1917fba1ee8994aa6fce4a5e3f70d68a7cf | VisionsGlobalEmpowerment/webchange | handler.clj | (ns webchange.test.accounts.handler
(:require [clojure.test :refer :all]
[ring.mock.request :as mock]
[webchange.test.fixtures.core :as f]
[webchange.handler :as handler]
[mount.core :as mount]
[clojure.data.json :as json]
[clojure.tools.logging :as log]
[webchange.emails.core :as emails]))
;; Initialize the test environment once per namespace; reset the DB and
;; create a default school before each individual test.
(use-fixtures :once f/init)
(use-fixtures :each f/clear-db-fixture f/with-default-school)
(deftest registration
  ;; POSTs sign-up data to the registration endpoint and expects a
  ;; redirect to the success page.  Email confirmation is stubbed out so
  ;; the test does not depend on an email backend.
  (let [data {:firstname "first name"
              :lastname "last name"
              :email ""
              :password "test"}]
    ;; Fixed typo in the description string ("regisrtation").
    (testing "successful registration redirects to success page"
      (with-redefs [emails/request-email-confirmation! (fn [_] nil)]
        ;; NOTE(review): mock/body given a map sends form-urlencoded
        ;; params even though the content-type header claims JSON --
        ;; confirm the handler accepts this shape.
        (let [request (-> (mock/request :post "/accounts/registration")
                          (mock/header :content-type "application/json")
                          (mock/body data))
              response (handler/dev-handler request)]
          (is (= 302 (:status response)))
          (is (= "/accounts/sign-up-success" (-> response :headers (get "Location")))))))))
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/9ecee6f21e264cb41fc128754f2256ea98e79f9d/test/clj/webchange/test/accounts/handler.clj | clojure | (ns webchange.test.accounts.handler
(:require [clojure.test :refer :all]
[ring.mock.request :as mock]
[webchange.test.fixtures.core :as f]
[webchange.handler :as handler]
[mount.core :as mount]
[clojure.data.json :as json]
[clojure.tools.logging :as log]
[webchange.emails.core :as emails]))
;; Initialize the test environment once per namespace; reset the DB and
;; create a default school before each individual test.
(use-fixtures :once f/init)
(use-fixtures :each f/clear-db-fixture f/with-default-school)
(deftest registration
  ;; POSTs sign-up data to the registration endpoint and expects a
  ;; redirect to the success page.  Email confirmation is stubbed out so
  ;; the test does not depend on an email backend.
  (let [data {:firstname "first name"
              :lastname "last name"
              :email ""
              :password "test"}]
    ;; Fixed typo in the description string ("regisrtation").
    (testing "successful registration redirects to success page"
      (with-redefs [emails/request-email-confirmation! (fn [_] nil)]
        ;; NOTE(review): mock/body given a map sends form-urlencoded
        ;; params even though the content-type header claims JSON --
        ;; confirm the handler accepts this shape.
        (let [request (-> (mock/request :post "/accounts/registration")
                          (mock/header :content-type "application/json")
                          (mock/body data))
              response (handler/dev-handler request)]
          (is (= 302 (:status response)))
          (is (= "/accounts/sign-up-success" (-> response :headers (get "Location")))))))))
| |
8333f72fe34c97629787adb225aa071fe528e10540562ba11c7050a361162c8f | ocaml-batteries-team/batteries-included | batEnum.mli |
* - enumeration over abstract collection of elements .
* Copyright ( C ) 2003
* 2009 , LIFO , Universite d'Orleans
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* BatEnum - enumeration over abstract collection of elements.
* Copyright (C) 2003 Nicolas Cannasse
* 2009 David Rajchenbach-Teller, LIFO, Universite d'Orleans
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
*
Enumeration over abstract collection of elements .
Enumerations are a representation of finite or infinite sequences
of elements . In Batteries Included , enumerations are used
pervasively , both as a uniform manner of reading and manipulating
the contents of a data structure , or as a simple manner of reading
or writing sequences of characters , numbers , strings , etc . from / to
files , network connections or other inputs / outputs .
Enumerations are typically computed as needed , which allows the
definition and manipulation of huge ( possibly infinite ) sequences .
Manipulating an enumeration is a uniform and often comfortable way
of extracting subsequences ( function { ! filter } or operator [ // ] et
al ) , converting sequences into other sequences ( function { ! map } or
operators [ /@ ] and [ @/ ] et al ) , gathering information ( function
{ ! } et al ) or performing loops ( functions { ! iter } and
{ ! map } ) .
For instance , function { ! BatRandom.enum_int } creates an
infinite enumeration of random numbers . Combined with [ // ]
and { ! map } , we may turn this into an infinite enumeration of
squares of random even numbers :
[ map ( fun x - > x * x ) ( ( Random.enum_int 100 ) // even ) ]
Similarly , to obtain an enumeration of 50 random integers ,
we may use { ! take } , as follows :
[ take 50 ( Random.enum_int 100 ) ]
As most data structures in Batteries can be enumerated and built
from enumerations , these operations may be used also on lists ,
arrays , hashtables , etc . When designing a new data structure , it
is usually a good idea to allow enumeration and construction
from an enumeration .
{ b Note } Enumerations are not thread - safe . You should not attempt
to access one enumeration from different threads .
@author @author
Enumeration over abstract collection of elements.
Enumerations are a representation of finite or infinite sequences
of elements. In Batteries Included, enumerations are used
pervasively, both as a uniform manner of reading and manipulating
the contents of a data structure, or as a simple manner of reading
or writing sequences of characters, numbers, strings, etc. from/to
files, network connections or other inputs/outputs.
Enumerations are typically computed as needed, which allows the
definition and manipulation of huge (possibly infinite) sequences.
Manipulating an enumeration is a uniform and often comfortable way
of extracting subsequences (function {!filter} or operator [//] et
al), converting sequences into other sequences (function {!map} or
operators [/@] and [@/] et al), gathering information (function
{!scanl} et al) or performing loops (functions {!iter} and
{!map}).
For instance, function {!BatRandom.enum_int} creates an
infinite enumeration of random numbers. Combined with [//]
and {!map}, we may turn this into an infinite enumeration of
squares of random even numbers:
[map (fun x -> x * x) ( (Random.enum_int 100) // even )]
Similarly, to obtain an enumeration of 50 random integers,
we may use {!take}, as follows:
[take 50 (Random.enum_int 100)]
As most data structures in Batteries can be enumerated and built
from enumerations, these operations may be used also on lists,
arrays, hashtables, etc. When designing a new data structure, it
is usually a good idea to allow enumeration and construction
from an enumeration.
{b Note} Enumerations are not thread-safe. You should not attempt
to access one enumeration from different threads.
@author Nicolas Cannasse
@author David Rajchenbach-Teller
*)
type 'a t
(** A signature for data structures which may be converted to and from [enum].
If you create a new data structure, you should make it compatible
with [Enumerable].
*)
module type Enumerable = sig
type 'a enumerable (** The data structure, e.g. ['a List.t] *)
val enum : 'a enumerable -> 'a t
(** Return an enumeration of the elements of the data structure *)
val of_enum : 'a t -> 'a enumerable
(** Build a data structure from an enumeration *)
end
include Enumerable with type 'a enumerable = 'a t
include BatInterfaces.Mappable with type 'a mappable = 'a t
* { 6 Final functions }
These functions consume the enumeration until
it ends or an exception is raised by the first
argument function .
These functions consume the enumeration until
it ends or an exception is raised by the first
argument function.
*)
val iter : ('a -> unit) -> 'a t -> unit
(** [iter f e] calls the function [f] with each elements of [e] in turn. *)
val iter2 : ('a -> 'b -> unit) -> 'a t -> 'b t -> unit
(** [iter2 f e1 e2] calls the function [f] with the next elements of [e1] and
    [e2] repeatedly until one of the two enumerations ends. *)
val exists: ('a -> bool) -> 'a t -> bool
(** [exists f e] returns [true] if there is some [x] in [e] such
that [f x]*)
val for_all: ('a -> bool) -> 'a t -> bool
(** [for_all f e] returns [true] if for every [x] in [e], [f x] is true*)
val fold : ('b -> 'a -> 'b) -> 'b -> 'a t -> 'b
(** A general loop on an enumeration.
    If [e] is empty, [fold f v e] returns [v]. Otherwise, [fold f v e]
returns [f (... (f (f v a0) a1) ...) aN] where [a0,a1..aN] are the
elements of [e]. This function may be used, for instance, to
compute the sum of all elements of an enumeration [e] as follows:
[fold ( + ) 0 e]. Eager.
*)
val reduce : ('a -> 'a -> 'a) -> 'a t -> 'a
(** A simplified version of [fold], which uses the first element
    of the enumeration as a default value.
    [reduce f e] throws [Not_found] if [e] is empty, returns its only
    element if e is a singleton, otherwise [f (... (f (f a0 a1)
    a2)...) aN] where [a0,a1..aN] are the elements of [e]. *)
val sum : int t -> int
(** [sum] returns the sum of the given int enum. If the argument is
empty, returns 0. Eager *)
val fsum : float t -> float
(** Return the sum of the enumeration's elements. Uses Kahan summing to
    get a more accurate answer than [reduce (+.)] would return, but runs slower.
    @since 2.0
*)
val kahan_sum : float t -> float
(** [kahan_sum l] returns a numerically-accurate sum of the floats of
    [l]. See {!BatArray.fsum} for more details.
    @since 2.2.0
*)
val fold2 : ('a -> 'b -> 'c -> 'c) -> 'c -> 'a t -> 'b t -> 'c
* [ fold2 ] is similar to [ fold ] but will fold over two enumerations at the
same time until one of the two enumerations ends .
same time until one of the two enumerations ends. *)
val scanl : ('b -> 'a -> 'b) -> 'b -> 'a t -> 'b t
(** A variant of [fold] producing an enumeration of its intermediate values.
    If [e] contains [x0], [x1], ..., [scanl f init e] is the enumeration
    containing [init], [f init x0], [f (f init x0) x1]... Lazy. *)
val scan : ('a -> 'a -> 'a) -> 'a t -> 'a t
* [ scan ] is similar to [ ] but without the [ init ] value : if [ e ]
contains [ x0 ] , [ x1 ] , [ x2 ] ... , [ scan f e ] is the enumeration containing
[ x0 ] , [ f x0 x1 ] , [ f ( f x0 x1 ) x2 ] ...
For instance , [ scan ( * ) ( 1 -- 10 ) ] will produce an enumeration
containing the successive values of the factorial function .
contains [x0], [x1], [x2] ..., [scan f e] is the enumeration containing
[x0], [f x0 x1], [f (f x0 x1) x2]...
For instance, [scan ( * ) (1 -- 10)] will produce an enumeration
containing the successive values of the factorial function.*)
* Indexed functions : these functions are similar to previous ones
except that they call the function with one additional argument which
is an index starting at 0 and incremented after each call to the function .
except that they call the function with one additional argument which
is an index starting at 0 and incremented after each call to the function. *)
val iteri : (int -> 'a -> unit) -> 'a t -> unit
val iter2i : ( int -> 'a -> 'b -> unit) -> 'a t -> 'b t -> unit
val foldi : (int -> 'a -> 'b -> 'b) -> 'b -> 'a t -> 'b
val fold2i : (int -> 'a -> 'b -> 'c -> 'c) -> 'c -> 'a t -> 'b t -> 'c
* { 6 Useful functions }
val find : ('a -> bool) -> 'a t -> 'a
* [ find f e ] returns the first element [ x ] of [ e ] such that [ f x ] returns
[ true ] , consuming the enumeration up to and including the
found element .
@raise Not_found if no such element exists
in the enumeration , consuming the whole enumeration in the search .
Since [ find ] ( eagerly ) consumes a prefix of the enumeration , it
can be used several times on the same enumeration to find the
next element .
[true], consuming the enumeration up to and including the
found element.
@raise Not_found if no such element exists
in the enumeration, consuming the whole enumeration in the search.
Since [find] (eagerly) consumes a prefix of the enumeration, it
can be used several times on the same enumeration to find the
next element. *)
val find_map : ('a -> 'b option) -> 'a t -> 'b
* [ find_map f e ] finds the first element [ x ] of [ e ] such that [ f x ] returns
[ Some r ] , then returns r. It consumes the enumeration up to and including
the found element .
@raise Not_found if no such element exists in the
enumeration , consuming the whole enumeration in the search .
Since [ find_map ] ( eagerly ) consumes a prefix of the enumeration , it can be
used several times on the same enumeration to find the next element .
@since 2.0
[Some r], then returns r. It consumes the enumeration up to and including
the found element.
@raise Not_found if no such element exists in the
enumeration, consuming the whole enumeration in the search.
Since [find_map] (eagerly) consumes a prefix of the enumeration, it can be
used several times on the same enumeration to find the next element.
@since 2.0
*)
val is_empty : 'a t -> bool
(** [is_empty e] returns true if [e] does not contains any element.
Forces at most one element. *)
val peek : 'a t -> 'a option
(** [peek e] returns [None] if [e] is empty or [Some x] where [x] is
the next element of [e]. The element is not removed from the
enumeration. *)
val get : 'a t -> 'a option
(** [get e] returns [None] if [e] is empty or [Some x] where [x] is
the next element of [e], in which case the element is removed
from the enumeration. *)
val get_exn : 'a t -> 'a
* [ get_exn e ] returns the first element of [ e ] .
@raise No_more_elements if [ e ] is empty .
@since 2.0
@raise No_more_elements if [e] is empty.
@since 2.0 *)
val push : 'a t -> 'a -> unit
(** [push e x] will add [x] at the beginning of [e]. *)
val junk : 'a t -> unit
* [ junk e ] removes the first element from the enumeration , if any .
val clone : 'a t -> 'a t
(** [clone e] creates a new enumeration that is copy of [e]. If [e]
is consumed by later operations, the clone will not get affected. *)
val force : 'a t -> unit
(** [force e] forces the application of all lazy functions and the
enumeration of all elements, exhausting the enumeration.
An efficient intermediate data structure
of enumerated elements is constructed and [e] will now enumerate over
that data structure. *)
val take : int -> 'a t -> 'a t
(** [take n e] returns the prefix of [e] of length [n], or [e]
itself if [n] is greater than the length of [e] *)
val drop : int -> 'a t -> unit
* [ drop n e ] removes the first [ n ] element from the enumeration , if any .
val skip: int -> 'a t -> 'a t
* [ skip n e ] removes the first [ n ] element from the enumeration , if any ,
then returns [ e ] .
This function has the same behavior as [ drop ] but is often easier to
compose with , e.g. , [ skip 5 % > take 3 ] is a new function which skips
5 elements and then returns the next 3 elements .
then returns [e].
This function has the same behavior as [drop] but is often easier to
compose with, e.g., [skip 5 %> take 3] is a new function which skips
5 elements and then returns the next 3 elements.*)
val take_while : ('a -> bool) -> 'a t -> 'a t
* [ take_while f e ] produces a new enumeration in which only remain
the first few elements [ x ] of [ e ] such that [ f x ]
the first few elements [x] of [e] such that [f x] *)
val drop_while : ('a -> bool) -> 'a t -> 'a t
* [ drop_while p e ] produces a new enumeration in which only
all the first elements such that [ f e ] have been junked .
all the first elements such that [f e] have been junked.*)
val span : ('a -> bool) -> 'a t -> 'a t * 'a t
* [ span test e ] produces two enumerations [ ( hd , tl ) ] , such that
[ hd ] is the same as [ take_while test e ] and [ tl ] is the same
as [ drop_while test e ] .
[hd] is the same as [take_while test e] and [tl] is the same
as [drop_while test e]. *)
val break : ('a -> bool) -> 'a t -> 'a t * 'a t
(** Negated span.
[break test e] is equivalent to [span (fun x -> not (test x)) e] *)
val group : ('a -> 'b) -> 'a t -> 'a t t
* [ group test e ] divides [ e ] into an enumeration of enumerations ,
where each sub - enumeration is the longest continuous enumeration
of elements whose [ test ] results are the same .
[ Enum.group ( x - > x mod 2 ) [ 1;2;4;1 ] = [ [ 1];[2;4];[1 ] ] ]
[ Enum.group ( fun x - > x mod 3 ) [ 1;2;4;1 ] = [ [ 1];[2];[4;1 ] ] ]
[ Enum.group ( fun s - > s.[0 ] ) [ " cat " ; " canary " ; " dog " ; " dodo " ; " ant " ; " cow " ] = [ [ " cat " ; " canary"];["dog";"dodo"];["ant"];["cow " ] ] ]
Warning : The result of this operation can not be directly cloned
safely ; instead , reify to a non - lazy structure and read from that
structure multiple times .
where each sub-enumeration is the longest continuous enumeration
of elements whose [test] results are the same.
[Enum.group (x -> x mod 2) [1;2;4;1] = [[1];[2;4];[1]]]
[Enum.group (fun x -> x mod 3) [1;2;4;1] = [[1];[2];[4;1]]]
[Enum.group (fun s -> s.[0]) ["cat"; "canary"; "dog"; "dodo"; "ant"; "cow"] = [["cat"; "canary"];["dog";"dodo"];["ant"];["cow"]]]
Warning: The result of this operation cannot be directly cloned
safely; instead, reify to a non-lazy structure and read from that
structure multiple times.
*)
val group_by : ('a -> 'a -> bool) -> 'a t -> 'a t t
(** [group_by eq e] divides [e] into an enumeration of enumerations,
where each sub-enumeration is the longest continuous enumeration
of elements that are equal, as judged by [eq].
Warning: The result of this operation cannot be directly cloned
safely; instead, reify to a non-lazy structure and read from that
structure multiple times.
*)
val clump : int -> ('a -> unit) -> (unit -> 'b) -> 'a t -> 'b t
(** [clump size add get e] runs [add] on [size] (or less at the end)
elements of [e] and then runs [get] to produce value for the
result enumeration. Useful to convert a char enum into string
enum. *)
val cartesian_product : 'a t -> 'b t -> ('a * 'b) t
(** [cartesian_product e1 e2] computes the cartesian product of [e1] and [e2].
    Pairs are enumerated in a non-specified order, but in fair enough an order
    so that it works on infinite enums (i.e. even then, any pair is eventually
    returned)
    @since 2.2.0 *)
(** {6 Lazy constructors}

    These functions are lazy which means that they will create a new modified
    enumeration without actually enumerating any element until they are asked
    to do so by the programmer (using one of the functions above).

    When the resulting enumerations of these functions are consumed, the
    underlying enumerations they were created from are also consumed. *)
val map : ('a -> 'b) -> 'a t -> 'b t
(** [map f e] returns an enumeration over [(f a0, f a1, ...)] where
[a0,a1...] are the elements of [e]. Lazy.
*)
val mapi : (int -> 'a -> 'b) -> 'a t -> 'b t
(** [mapi f e] is similar to [map] except that [f] is passed one extra argument
    which is the index of the element in the enumeration, starting from 0:
    [mapi f e] returns an enumeration over [(f 0 a0, f 1 a1, ...)] where
    [a0,a1...] are the elements of [e]. *)
val filter : ('a -> bool) -> 'a t -> 'a t
(** [filter f e] returns an enumeration over all elements [x] of [e] such
as [f x] returns [true]. Lazy.
{b Note} filter is lazy in that it returns a lazy enumeration, but
each element in the result is eagerly searched in the input
enumeration. Therefore, the access to a given element in the result
will diverge if it is preceded, in the input enumeration, by
infinitely many false elements (elements on which the predicate
[p] returns [false]).
Other functions that may drop an unbound number of elements
([filter_map], [take_while], etc.) have the same behavior.
*)
val filter_map : ('a -> 'b option) -> 'a t -> 'b t
(** [filter_map f e] returns an enumeration over all elements [x] such as
[f y] returns [Some x] , where [y] is an element of [e].
[filter_map] works on infinite enumerations; see [filter].
*)
val append : 'a t -> 'a t -> 'a t
(** [append e1 e2] returns an enumeration that will enumerate over all
    elements of [e1] followed by all elements of [e2]. Lazy.

    {b Note} The behavior of appending [e] to itself or to something
    derived from [e] is not specified. In particular, cloning [append e e]
    may destroy any sharing between the first and the second argument.
*)
val prefix_action : (unit -> unit) -> 'a t -> 'a t
(** [prefix_action f e] will behave as [e] but guarantees that [f ()]
    will be invoked exactly once before the current first element of [e]
    is read.

    If [prefix_action f e] is cloned, [f] is invoked only once, during
    the cloning. If [prefix_action f e] is counted, [f] is invoked
    only once, during the counting.

    May be used for signalling that reading starts or for performing
    delayed evaluations. *)
val suffix_action : (unit -> unit) -> 'a t -> 'a t
(** [suffix_action f e] will behave as [e] but guarantees that [f ()]
    will be invoked after the contents of [e] are exhausted.

    If [suffix_action f e] is cloned, [f] is invoked only once, when
    the original enumeration is exhausted. If [suffix_action f e]
    is counted, [f] is only invoked if the act of counting
    requires a call to [force].

    May be used for signalling that reading stopped or for performing
    delayed evaluations. *)
val concat : 'a t t -> 'a t
(** [concat e] returns an enumeration over all elements of all enumerations
of [e]. *)
val flatten : 'a t t -> 'a t
(** Synonym of {!concat}*)
val concat_map : ('a -> 'b t) -> 'a t -> 'b t
(** Synonym of {!Monad.bind}, with flipped arguments.
    [concat_map f e] is the same as [concat (map f e)].
    @since 2.2.0 *)
(** {6 Constructors}

    In this section the word {i shall} denotes a semantic
    requirement. The correct operation of the functions in this
    interface are conditional on the client meeting these
    requirements.
*)
exception No_more_elements
(** This exception {i shall} be raised by the [next] function of [make]
or [from] when no more elements can be enumerated, it {i shall not}
be raised by any function which is an argument to any
other function specified in the interface.
*)
exception Infinite_enum
(** As a convenience for debugging, this exception {i may} be raised by
the [count] function of [make] when attempting to count an infinite enum.*)
val empty : unit -> 'a t
(** The empty enumeration : contains no element *)
val make : next:(unit -> 'a) -> count:(unit -> int) -> clone:(unit -> 'a t) -> 'a t
(** This function creates a fully defined enumeration.
{ul {li the [next] function {i shall} return the next element of the
enumeration or raise [No_more_elements] if the underlying data structure
does not have any more elements to enumerate.}
{li the [count] function {i shall} return the actual number of remaining
elements in the enumeration or {i may} raise [Infinite_enum] if it is known
that the enumeration is infinite.}
{li the [clone] function {i shall} create a clone of the enumeration
such as operations on the original enumeration will not affect the
clone. }}
For some samples on how to correctly use [make], you can have a look
at implementation of [BatList.enum].
*)
val from : (unit -> 'a) -> 'a t
(** [from next] creates an enumeration from the [next] function.
[next] {i shall} return the next element of the enumeration or raise
[No_more_elements] when no more elements can be enumerated. Since the
enumeration definition is incomplete, a call to [count] will result in
a call to [force] that will enumerate all elements in order to
return a correct value. *)
val from_while : (unit -> 'a option) -> 'a t
(** [from_while next] creates an enumeration from the [next] function.
[next] {i shall} return [Some x] where [x] is the next element of the
enumeration or [None] when no more elements can be enumerated. Since the
enumeration definition is incomplete, a call to [clone] or [count] will
result in a call to [force] that will enumerate all elements in order to
return a correct value. *)
val from_loop: 'b -> ('b -> ('a * 'b)) -> 'a t
(** [from_loop data next] creates a (possibly infinite) enumeration from
    the successive results of applying [next] to [data], then to the
    result, etc. The list ends whenever the function raises
    {!BatEnum.No_more_elements}. *)
val seq : 'a -> ('a -> 'a) -> ('a -> bool) -> 'a t
(** [seq init step cond] creates a sequence of data, which starts
    from [init], extends by [step], until the condition [cond]
    fails. E.g. [seq 1 ((+) 1) ((>) 100)] returns [1, 2, ... 99]. If [cond
    init] is false, the result is empty. *)
val unfold: 'b -> ('b -> ('a * 'b) option) -> 'a t
(** As [from_loop], except uses option type to signal the end of the enumeration.

    [unfold data next] creates a (possibly infinite) enumeration from
    the successive results of applying [next] to [data], then to the
    result, etc. The enumeration ends whenever the function returns [None].

    Example: [Enum.unfold n (fun x -> if x = 1 then None else Some
    (x, if x land 1 = 1 then 3 * x + 1 else x / 2))] returns the
    hailstone sequence starting at [n].
*)
val init : int -> (int -> 'a) -> 'a t
(** [init n f] creates a new enumeration over elements
[f 0, f 1, ..., f (n-1)] *)
val singleton : 'a -> 'a t
(** Create an enumeration consisting of exactly one element. *)
val repeat : ?times:int -> 'a -> 'a t
(** [repeat ~times:n x] creates a enum sequence filled with [n] times of
[x]. It return infinite enum when [~times] is absent. It returns empty
enum when [times <= 0] *)
val cycle : ?times:int -> 'a t -> 'a t
(** [cycle] is similar to [repeat], except that the content to fill is a
subenum rather than a single element. Note that [times] represents the
times of repeating not the length of enum. When [~times] is absent the
result is an infinite enum. *)
val delay : (unit -> 'a t) -> 'a t
(** [delay (fun () -> e)] produces an enumeration which behaves as [e].
The enumeration itself will only be computed when consumed.
A typical use of this function is to explore lazily non-trivial
data structures, as follows:
[type 'a tree = Leaf
| Node of 'a * 'a tree * 'a tree
let enum_tree =
let rec aux = function
| Leaf -> BatEnum.empty ()
| Node (n, l, r) -> BatEnum.append (BatEnum.singleton n)
(BatEnum.append (delay (fun () -> aux l))
(delay (fun () -> aux r)))
]
*)
val to_object: 'a t -> (<next:'a; count:int; clone:'b> as 'b)
(**[to_object e] returns a representation of [e] as an object.*)
val of_object: (<next:'a; count:int; clone:'b> as 'b) -> 'a t
(**[of_object e] returns a representation of an object as an enumeration*)
val enum : 'a t -> 'a t
(** identity : added for consistency with the other data structures *)
val of_enum : 'a t -> 'a t
(** identity : added for consistency with the other data structures *)
val combination : ?repeat:bool -> int -> int -> int list t
(** [combination n k] returns an enumeration over combination of [k] elements
between [n] distincts elements.
If [repeat] is true, the combination may contain the same elements many
times.*)
(** {6 Counting} *)
val count : 'a t -> int
(** [count e] returns the number of remaining elements in [e] without
consuming the enumeration.
Depending of the underlying data structure that is implementing the
enumeration functions, the count operation can be costly, and even sometimes
can cause a call to [force]. *)
val fast_count : 'a t -> bool
(** For users worried about the speed of [count] you can call the [fast_count]
function that will give an hint about [count] implementation. Basically, if
the enumeration has been created with [make] or [init] or if [force] has
been called on it, then [fast_count] will return true. *)
val hard_count : 'a t -> int
(** [hard_count] returns the number of remaining in elements in [e],
consuming the whole enumeration somewhere along the way. This
function is always at least as fast as the fastest of either
[count] or a [fold] on the elements of [t].
This function is useful when you have opened an enumeration for
the sole purpose of counting its elements (e.g. the number of
lines in a file).*)
(** {6 Utilities} *)
val range : ?until:int -> int -> int t
(** [range p until:q] creates an enumeration of integers [[p, p+1, ..., q]].
If [until] is omitted, the enumeration is not bounded. Behaviour is
not-specified once [max_int] has been reached.*)
val dup : 'a t -> 'a t * 'a t
(** [dup stream] returns a pair of streams which are identical to [stream]. Note
    that stream is a destructive data structure, the point of [dup] is to
    return two streams can be used independently. *)
val combine : 'a t -> 'b t -> ('a * 'b) t
(** [combine] transform two streams into a stream of pairs of corresponding
    elements. If one stream is shorter, excess elements of the longer stream are
    ignored.

    Curried @since 3.0
*)
val uncombine : ('a * 'b) t -> 'a t * 'b t
(** [uncombine] is the opposite of [combine] *)
val merge : ('a -> 'a -> bool) -> 'a t -> 'a t -> 'a t
(** [merge test a b] merge the elements from [a] and [b] into a single
    enumeration. At each step, [test] is applied to the first element [xa] of
    [a] and the first element [xb] of [b] to determine which should get first
    into resulting enumeration. If [test xa xb] returns [true], [xa] (the
    first element of [a]) is used, otherwise [xb] is used. If [a] or [b] runs
    out of elements, the process will append all elements of the other
    enumeration to the result.

    For example, if [a] and [b] are enumerations of integers sorted
    in increasing order, then [merge (<) a b] will also be sorted.
*)
val interleave : 'a t array -> 'a t
(** [interleave enums] creates a new enumeration from an array of enumerations.
    The new enumeration first yields the first elements of the enumerations in
    the supplied order, then second elements, etc. Thus, a sequence
    [ [| [x11 ; x12 ; ...] ; [x21 ; x22, ...] , ... [xN1 ; xN2 ; ...] |] ] becomes
    [[ x11 ; x12 ; ... ; xN1 ; x21 ; x22 ; ... ; xN2 ; x31 ; ... ]].
*)
val uniq : 'a t -> 'a t
(** [uniq e] returns a duplicate of [e] with repeated values
omitted (similar to unix's [uniq] command).
It uses structural equality to compare consecutive elements. *)
val uniqq : 'a t -> 'a t
(** [uniqq e] behaves as [uniq e] except it uses physical equality
    to compare consecutive elements.
    @since 2.4.0 *)
val uniq_by : ('a -> 'a -> bool) -> 'a t -> 'a t
(** [uniq_by cmp e] behaves as [uniq e] except it allows to specify a
    comparison function.
    @since 2.4.0 *)
val switch : ('a -> bool) -> 'a t -> 'a t * 'a t
(** [switch test enum] splits [enum] into two enums, where the first enum have
    all the elements satisfying [test], the second enum is opposite. The
    order of elements in the source enum is preserved. *)
val partition : ('a -> bool) -> 'a t -> 'a t * 'a t
(** as [switch]
@added v1.4.0
*)
val switchn : int -> ('a -> int) -> 'a t -> 'a t array
(** [switchn] is the array version of [switch]. [switchn n f fl] splits [fl] into an array of [n] enums, [f] is
    applied to each element of [fl] to decide the id of its destination
    enum. *)
val arg_min : ('a -> 'b) -> 'a t -> 'a
val arg_max : ('a -> 'b) -> 'a t -> 'a
(** [arg_min f xs] returns the [x] in [xs] for which [f x] is minimum.
    Similarly for [arg_max], except it returns the maximum. If
    multiple values reach the maximum, one of them is
    returned. (currently the first, but this is not guaranteed)

    Example: [-5 -- 5 |> arg_min (fun x -> x * x + 6 * x - 5) = -3]
    Example: [List.enum ["cat"; "canary"; "dog"; "dodo"; "ant"; "cow"] |> arg_max String.length = "canary"]

    @added v1.4.0
    @raise Invalid_argument if the input enum is empty
*)
(** {6 Trampolining} *)
val while_do : ('a -> bool) -> ('a t -> 'a t) -> 'a t -> 'a t
(** [while_do cont f e] is a loop on [e] using [f] as body and [cont] as
condition for continuing.
If [e] contains elements [x0], [x1], [x2]..., then if [cont x0] is [false],
[x0] is returned as such and treatment stops. On the other hand, if [cont x0]
is [true], [f x0] is returned and the loop proceeds with [x1]...
Note that f is used as halting condition {i after} the
corresponding element has been added to the result stream.
*)
(** {6 Infix operators} *)
(** Infix versions of some functions
This module groups together all infix operators so that
you can open it without opening the whole batEnum module.
*)
module Infix : sig
val ( -- ) : int -> int -> int t
(** As [range], without the label.
    [5 -- 10] is the enumeration 5,6,7,8,9,10.
    [10 -- 5] is the empty enumeration *)
val ( --^ ) : int -> int -> int t
(** As [( -- )] but without the right endpoint.
    [5 --^ 10] is the enumeration 5,6,7,8,9.
*)
val ( --. ) : (float * float) -> float -> float t
(** [(a, step) --. b] creates a float enumeration from [a] to [b] with an
    increment of [step] between elements.

    [(5.0, 1.0) --. 10.0] is the enumeration 5.0,6.0,7.0,8.0,9.0,10.0.
    [(10.0, -1.0) --. 5.0] is the enumeration 10.0,9.0,8.0,7.0,6.0,5.0.
    [(10.0, 1.0) --. 1.0] is the empty enumeration. *)
val ( --- ) : int -> int -> int t
(** As [--], but accepts enumerations in reverse order.
    [5 --- 10] is the enumeration 5,6,7,8,9,10.
    [10 --- 5] is the enumeration 10,9,8,7,6,5. *)
val ( --~ ) : char -> char -> char t
(** As ( -- ), but for characters.*)
val ( // ) : 'a t -> ('a -> bool) -> 'a t
(** Filtering (pronounce this operator name "such that").
    For instance, [(1 -- 37) // odd] is the enumeration of all odd
    numbers between 1 and 37. *)
val ( /@ ) : 'a t -> ('a -> 'b) -> 'b t
val ( @/ ) : ('a -> 'b) -> 'a t -> 'b t
(**
Mapping operators.
These operators have the same meaning as function {!map} but are
sometimes more readable than this function, when chaining
several transformations in a row.
*)
val ( //@ ) : 'a t -> ('a -> 'b option) -> 'b t
val ( @// ) : ('a -> 'b option) -> 'a t -> 'b t
(**
Map combined with filter. Same as {!filter_map}.
*)
end
val ( -- ) : int -> int -> int t
val ( --^ ) : int -> int -> int t
val ( --. ) : (float * float) -> float -> float t
val ( --- ) : int -> int -> int t
val ( --~ ) : char -> char -> char t
val ( // ) : 'a t -> ('a -> bool) -> 'a t
val ( /@ ) : 'a t -> ('a -> 'b) -> 'b t
val ( @/ ) : ('a -> 'b) -> 'a t -> 'b t
val ( //@ ) : 'a t -> ('a -> 'b option) -> 'b t
val ( @// ) : ('a -> 'b option) -> 'a t -> 'b t
(** {6 Monad related modules} *)
(** Monadic operations on Enumerations containing monadic elements.

    This module will let you use sequence and fold_monad functions over enumerations.
*)
module WithMonad : functor (Mon : BatInterfaces.Monad) -> sig
type 'a m = 'a Mon.m
(** Type of the monadic elements. *)
val sequence : 'a m t -> 'a t m
(** [sequence e] evaluates each monadic elements (of type ['a m]) contained in the enumeration [e] to get a monadic enumeration of ['a] elements,
    of type ['a BatEnum.t m]. *)
val fold_monad : ('a -> 'b -> 'a m) -> 'a -> 'b t -> 'a m
(** [fold_monad f init e] does a folding of the enumeration [e] applying step by step the function [f] that gives back results in the [Mon] monad,
with the [init] initial element. The result is a value in the [Mon] monad. *)
end
(** The Monad.

    This module provides everything needed for writing and executing
    computations in the BatEnum Monad.
*)
module Monad : sig
type 'a m = 'a t
(** The type of the monad's elements, thus [BatEnum.t]. *)
val return : 'a -> 'a m
(** This function puts a single value in the monad, that is to say it creates an enumeration containing a single element. *)
val bind : 'a m -> ('a -> 'b m) -> 'b m
(** [bind m f] takes the result of the monadic computation m, puts the f function in the monadic context passing it the result of m and then
returning a monadic result. *)
end
(** {6 Boilerplate code}*)
val print : ?first:string -> ?last:string -> ?sep:string -> ('a BatInnerIO.output -> 'b -> unit) -> 'a BatInnerIO.output -> 'b t -> unit
(** Print and consume the contents of an enumeration.*)
val print_at_most : ?first:string -> ?last:string -> ?sep:string ->
limit:int -> ('a BatInnerIO.output -> 'b -> unit) ->
'a BatInnerIO.output -> 'b t -> unit
(** [print_at_most pp limit out enum] consumes [enum] to print its elements
    into [out] (using [pp] to print individual elements).

    At most [limit] arguments are printed, if more elements are
    available an ellipsis "..." is added.

    @raise Invalid_argument if the limit is <= 0.
    @since 2.2.0 *)
val compare : ('a -> 'a -> int) -> 'a t -> 'a t -> int
(** [compare cmp a b] compares enumerations [a] and [b]
    by lexicographical order using comparison [cmp].

    @return 0 if [a] and [b] are equal wrt [cmp]
    @return -1 if [a] is empty and [b] is not
    @return 1 if [b] is empty and [a] is not
    @return [cmp x y], where [x] is the first element of [a]
    and [y] is the first element of [b], if [cmp x y <> 0]
    @return [compare cmp a' b'], where [a'] and [b'] are
    respectively equal to [a] and [b] without their first
    element, if both [a] and [b] are non-empty and [cmp x y = 0],
    where [x] is the first element of [a] and [y] is the first
    element of [b]
*)
val ord : ('a -> 'a -> BatOrd.order) -> 'a t -> 'a t -> BatOrd.order
(** Same as [compare] but returning a {!BatOrd.order} instead of an integer. *)
val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
(** [equal eq a b] returns [true] when [a] and [b] contain
the same sequence of elements.
*)
(** {6 Override modules} *)

(**
   The following modules replace functions defined in {!BatEnum} with functions
   behaving slightly differently but having the same name. This is by design:
   the functions meant to override the corresponding functions of {!BatEnum}.
*)
(** Operations on {!BatEnum} without exceptions. *)
module Exceptionless : sig
val find : ('a -> bool) -> 'a t -> 'a option
(** [find f e] returns [Some x] where [x] is the first element [x] of [e]
    such that [f x] returns [true], consuming the enumeration up to and
    including the found element, or [None] if no such element exists
    in the enumeration, consuming the whole enumeration in the search.

    Since [find] consumes a prefix of the enumeration, it can be used several
    times on the same enumeration to find the next element. *)
end
(** Operations on {!BatEnum} with labels.

    This module overrides a number of functions of {!BatEnum} by
    functions in which some arguments require labels. These labels are
    there to improve readability and safety and to let you change the
    order of arguments to functions. In every case, the behavior of the
    function is identical to that of the corresponding function of {!BatEnum}.
*)
module Labels : sig
val iter: f:('a -> unit) -> 'a t -> unit
val iter2: f:('a -> 'b -> unit) -> 'a t -> 'b t -> unit
val exists: f:('a -> bool) -> 'a t -> bool
val for_all: f:('a -> bool) -> 'a t -> bool
val fold: f:('b -> 'a -> 'b) -> init:'b -> 'a t -> 'b
val fold2: f:('a -> 'b -> 'c -> 'c) -> init:'c -> 'a t -> 'b t -> 'c
val iteri: f:(int -> 'a -> unit) -> 'a t -> unit
val iter2i: f:( int -> 'a -> 'b -> unit) -> 'a t -> 'b t -> unit
val foldi: f:(int -> 'a -> 'b -> 'b) -> init:'b -> 'a t -> 'b
val fold2i: f:(int -> 'a -> 'b -> 'c -> 'c) -> init:'c -> 'a t -> 'b t -> 'c
val find: f:('a -> bool) -> 'a t -> 'a
val take_while: f:('a -> bool) -> 'a t -> 'a t
val drop_while: f:('a -> bool) -> 'a t -> 'a t
val map: f:('a -> 'b) -> 'a t -> 'b t
val mapi: f:(int -> 'a -> 'b) -> 'a t -> 'b t
val filter: f:('a -> bool) -> 'a t -> 'a t
val filter_map: f:('a -> 'b option) -> 'a t -> 'b t
val from: f:(unit -> 'a) -> 'a t
val from_while: f:(unit -> 'a option) -> 'a t
val from_loop: init:'b -> f:('b -> ('a * 'b)) -> 'a t
val seq: init:'a -> f:('a -> 'a) -> cnd:('a -> bool) -> 'a t
val unfold: init:'b -> f:('b -> ('a * 'b) option) -> 'a t
val init: int -> f:(int -> 'a) -> 'a t
val switch: f:('a -> bool) -> 'a t -> 'a t * 'a t
val compare: ?cmp:('a -> 'a -> int) -> 'a t -> 'a t -> int
val uniq: ?cmp:('a -> 'a -> bool) -> 'a t -> 'a t
module LExceptionless : sig
val find : f:('a -> bool) -> 'a t -> 'a option
end
end
(**/**)
(** {6 For system use only, not for the casual user}

    For compatibility with {!Stream}
*)
val iapp : 'a t -> 'a t -> 'a t
val icons : 'a -> 'a t -> 'a t
val ising : 'a -> 'a t
val lapp : (unit -> 'a t) -> 'a t -> 'a t
val lcons : (unit -> 'a) -> 'a t -> 'a t
val lsing : (unit -> 'a) -> 'a t
val slazy : (unit -> 'a t) -> 'a t
(**/**)
| null | https://raw.githubusercontent.com/ocaml-batteries-team/batteries-included/f143ef5ec583d87d538b8f06f06d046d64555e90/src/batEnum.mli | ocaml | * A signature for data structures which may be converted to and from [enum].
If you create a new data structure, you should make it compatible
with [Enumerable].
* The data structure, e.g. ['a List.t]
* Return an enumeration of the elements of the data structure
* Build a data structure from an enumeration
* [iter f e] calls the function [f] with each elements of [e] in turn.
* [exists f e] returns [true] if there is some [x] in [e] such
that [f x]
* [for_all f e] returns [true] if for every [x] in [e], [f x] is true
* A general loop on an enumeration.
If [e] is empty, [fold f v e] returns [v]. Otherwise, [fold v e]
returns [f (... (f (f v a0) a1) ...) aN] where [a0,a1..aN] are the
elements of [e]. This function may be used, for instance, to
compute the sum of all elements of an enumeration [e] as follows:
[fold ( + ) 0 e]. Eager.
* [sum] returns the sum of the given int enum. If the argument is
empty, returns 0. Eager
* [is_empty e] returns true if [e] does not contains any element.
Forces at most one element.
* [peek e] returns [None] if [e] is empty or [Some x] where [x] is
the next element of [e]. The element is not removed from the
enumeration.
* [get e] returns [None] if [e] is empty or [Some x] where [x] is
the next element of [e], in which case the element is removed
from the enumeration.
* [push e x] will add [x] at the beginning of [e].
* [clone e] creates a new enumeration that is copy of [e]. If [e]
is consumed by later operations, the clone will not get affected.
* [force e] forces the application of all lazy functions and the
enumeration of all elements, exhausting the enumeration.
An efficient intermediate data structure
of enumerated elements is constructed and [e] will now enumerate over
that data structure.
* [take n e] returns the prefix of [e] of length [n], or [e]
itself if [n] is greater than the length of [e]
* Negated span.
[break test e] is equivalent to [span (fun x -> not (test x)) e]
* [group_by eq e] divides [e] into an enumeration of enumerations,
where each sub-enumeration is the longest continuous enumeration
of elements that are equal, as judged by [eq].
Warning: The result of this operation cannot be directly cloned
safely; instead, reify to a non-lazy structure and read from that
structure multiple times.
* [clump size add get e] runs [add] on [size] (or less at the end)
elements of [e] and then runs [get] to produce value for the
result enumeration. Useful to convert a char enum into string
enum.
* [map f e] returns an enumeration over [(f a0, f a1, ...)] where
[a0,a1...] are the elements of [e]. Lazy.
* [filter f e] returns an enumeration over all elements [x] of [e] such
as [f x] returns [true]. Lazy.
{b Note} filter is lazy in that it returns a lazy enumeration, but
each element in the result is eagerly searched in the input
enumeration. Therefore, the access to a given element in the result
will diverge if it is preceded, in the input enumeration, by
infinitely many false elements (elements on which the predicate
[p] returns [false]).
Other functions that may drop an unbound number of elements
([filter_map], [take_while], etc.) have the same behavior.
* [filter_map f e] returns an enumeration over all elements [x] such as
[f y] returns [Some x] , where [y] is an element of [e].
[filter_map] works on infinite enumerations; see [filter].
* [concat e] returns an enumeration over all elements of all enumerations
of [e].
* Synonym of {!concat}
* This exception {i shall} be raised by the [next] function of [make]
or [from] when no more elements can be enumerated, it {i shall not}
be raised by any function which is an argument to any
other function specified in the interface.
* As a convenience for debugging, this exception {i may} be raised by
the [count] function of [make] when attempting to count an infinite enum.
* The empty enumeration : contains no element
* This function creates a fully defined enumeration.
{ul {li the [next] function {i shall} return the next element of the
enumeration or raise [No_more_elements] if the underlying data structure
does not have any more elements to enumerate.}
{li the [count] function {i shall} return the actual number of remaining
elements in the enumeration or {i may} raise [Infinite_enum] if it is known
that the enumeration is infinite.}
{li the [clone] function {i shall} create a clone of the enumeration
such as operations on the original enumeration will not affect the
clone. }}
For some samples on how to correctly use [make], you can have a look
at implementation of [BatList.enum].
* [from next] creates an enumeration from the [next] function.
[next] {i shall} return the next element of the enumeration or raise
[No_more_elements] when no more elements can be enumerated. Since the
enumeration definition is incomplete, a call to [count] will result in
a call to [force] that will enumerate all elements in order to
return a correct value.
* [from_while next] creates an enumeration from the [next] function.
[next] {i shall} return [Some x] where [x] is the next element of the
enumeration or [None] when no more elements can be enumerated. Since the
enumeration definition is incomplete, a call to [clone] or [count] will
result in a call to [force] that will enumerate all elements in order to
return a correct value.
* [init n f] creates a new enumeration over elements
[f 0, f 1, ..., f (n-1)]
* [repeat ~times:n x] creates a enum sequence filled with [n] times of
[x]. It return infinite enum when [~times] is absent. It returns empty
enum when [times <= 0]
* [cycle] is similar to [repeat], except that the content to fill is a
subenum rather than a single element. Note that [times] represents the
times of repeating not the length of enum. When [~times] is absent the
result is an infinite enum.
* [delay (fun () -> e)] produces an enumeration which behaves as [e].
The enumeration itself will only be computed when consumed.
A typical use of this function is to explore lazily non-trivial
data structures, as follows:
[type 'a tree = Leaf
| Node of 'a * 'a tree * 'a tree
let enum_tree =
let rec aux = function
| Leaf -> BatEnum.empty ()
| Node (n, l, r) -> BatEnum.append (BatEnum.singleton n)
(BatEnum.append (delay (fun () -> aux l))
(delay (fun () -> aux r)))
]
*[to_object e] returns a representation of [e] as an object.
*[of_object e] returns a representation of an object as an enumeration
* identity : added for consistency with the other data structures
* identity : added for consistency with the other data structures
* [combination n k] returns an enumeration over combination of [k] elements
between [n] distincts elements.
If [repeat] is true, the combination may contain the same elements many
times.
* [count e] returns the number of remaining elements in [e] without
consuming the enumeration.
Depending of the underlying data structure that is implementing the
enumeration functions, the count operation can be costly, and even sometimes
can cause a call to [force].
* For users worried about the speed of [count] you can call the [fast_count]
function that will give an hint about [count] implementation. Basically, if
the enumeration has been created with [make] or [init] or if [force] has
been called on it, then [fast_count] will return true.
* [hard_count] returns the number of remaining in elements in [e],
consuming the whole enumeration somewhere along the way. This
function is always at least as fast as the fastest of either
[count] or a [fold] on the elements of [t].
This function is useful when you have opened an enumeration for
the sole purpose of counting its elements (e.g. the number of
lines in a file).
* [range p until:q] creates an enumeration of integers [[p, p+1, ..., q]].
If [until] is omitted, the enumeration is not bounded. Behaviour is
not-specified once [max_int] has been reached.
* [uncombine] is the opposite of [combine]
* [uniq e] returns a duplicate of [e] with repeated values
omitted (similar to unix's [uniq] command).
It uses structural equality to compare consecutive elements.
* as [switch]
@added v1.4.0
* [switchn] is the array version of [switch]. [switch n f fl] split [fl] to an array of [n] enums, [f] is
applied to each element of [fl] to decide the id of its destination
enum.
* [while_do cont f e] is a loop on [e] using [f] as body and [cont] as
condition for continuing.
If [e] contains elements [x0], [x1], [x2]..., then if [cont x0] is [false],
[x0] is returned as such and treatment stops. On the other hand, if [cont x0]
is [true], [f x0] is returned and the loop proceeds with [x1]...
Note that f is used as halting condition {i after} the
corresponding element has been added to the result stream.
* Infix versions of some functions
This module groups together all infix operators so that
you can open it without opening the whole batEnum module.
* As ( -- ), but for characters.
*
Mapping operators.
These operators have the same meaning as function {!map} but are
sometimes more readable than this function, when chaining
several transformations in a row.
*
Map combined with filter. Same as {!filter_map}.
* Type of the monadic elements.
* [fold_monad f init e] does a folding of the enumeration [e] applying step by step the function [f] that gives back results in the [Mon] monad,
with the [init] initial element. The result is a value in the [Mon] monad.
* [bind m f] takes the result of the monadic computation m, puts the f function in the monadic context passing it the result of m and then
returning a monadic result.
* {6 Boilerplate code}
* Print and consume the contents of an enumeration.
* Same as [compare] but returning a {!BatOrd.order} instead of an integer.
* [equal eq a b] returns [true] when [a] and [b] contain
the same sequence of elements.
*/*
*/* |
(*
 * BatEnum - enumeration over abstract collection of elements.
 * Copyright (C) 2003 Nicolas Cannasse
 *               2009 David Rajchenbach-Teller, LIFO, Universite d'Orleans
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version,
 * with the special exception on linking described in file LICENSE.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307  USA
 *)
(**
   Enumeration over abstract collection of elements.

   Enumerations are a representation of finite or infinite sequences
   of elements. In Batteries Included, enumerations are used
   pervasively, both as a uniform manner of reading and manipulating
   the contents of a data structure, or as a simple manner of reading
   or writing sequences of characters, numbers, strings, etc. from/to
   files, network connections or other inputs/outputs.

   Enumerations are typically computed as needed, which allows the
   definition and manipulation of huge (possibly infinite) sequences.

   Manipulating an enumeration is a uniform and often comfortable way
   of extracting subsequences (function {!filter} or operator [//] et
   al), converting sequences into other sequences (function {!map} or
   operators [/@] and [@/] et al), gathering information (function
   {!scanl} et al) or performing loops (functions {!iter} and
   {!map}).

   For instance, function {!BatRandom.enum_int} creates an
   infinite enumeration of random numbers. Combined with [//]
   and {!map}, we may turn this into an infinite enumeration of
   squares of random even numbers:
   [map (fun x -> x * x) ( (Random.enum_int 100) // even )]

   Similarly, to obtain an enumeration of 50 random integers,
   we may use {!take}, as follows:
   [take 50 (Random.enum_int 100)]

   As most data structures in Batteries can be enumerated and built
   from enumerations, these operations may be used also on lists,
   arrays, hashtables, etc. When designing a new data structure, it
   is usually a good idea to allow enumeration and construction
   from an enumeration.

   {b Note} Enumerations are not thread-safe. You should not attempt
   to access one enumeration from different threads.

   @author Nicolas Cannasse
   @author David Rajchenbach-Teller
*)
(** The type of enumerations over elements of type ['a].  Per the
    module documentation, an enumeration is consumed as it is read. *)
type 'a t
(** A signature for data structures which may be converted to and
    from enumerations.  A module satisfying this signature supports
    round-tripping its contents through ['a t]. *)
module type Enumerable = sig
(** Return an enumeration of the elements of the data structure. *)
val enum : 'a enumerable -> 'a t
(** Build a data structure from an enumeration. *)
val of_enum : 'a t -> 'a enumerable
end
include Enumerable with type 'a enumerable = 'a t
include BatInterfaces.Mappable with type 'a mappable = 'a t
(** {6 Final functions}

    These functions consume the enumeration until
    it ends or an exception is raised by the first
    argument function.
*)
val iter : ('a -> unit) -> 'a t -> unit
val iter2 : ('a -> 'b -> unit) -> 'a t -> 'b t -> unit
(** [iter2 f e1 e2] calls the function [f] with the next elements of [e1] and
    [e2] repeatedly until one of the two enumerations ends. *)
val exists: ('a -> bool) -> 'a t -> bool
val for_all: ('a -> bool) -> 'a t -> bool
val fold : ('b -> 'a -> 'b) -> 'b -> 'a t -> 'b
val reduce : ('a -> 'a -> 'a) -> 'a t -> 'a
(** A simplified version of [fold], which uses the first element
    of the enumeration as a default value.

    [reduce f e] throws [Not_found] if [e] is empty, returns its only
    element if e is a singleton, otherwise [f (... (f (f a0 a1)
    a2)...) aN] where [a0,a1..aN] are the elements of [e]. *)
val sum : int t -> int
val fsum : float t -> float
(** @return the sum of the enum's elements.  Uses Kahan summing to
    get a more accurate answer than [reduce (+.)] would return, but runs
    slower.
    @since 2.0
*)
val kahan_sum : float t -> float
(** [kahan_sum l] returns a numerically-accurate sum of the floats of
    [l].  See {!BatArray.fsum} for more details.

    @since 2.2.0
*)
val fold2 : ('a -> 'b -> 'c -> 'c) -> 'c -> 'a t -> 'b t -> 'c
* [ fold2 ] is similar to [ fold ] but will fold over two enumerations at the
same time until one of the two enumerations ends .
same time until one of the two enumerations ends. *)
val scanl : ('b -> 'a -> 'b) -> 'b -> 'a t -> 'b t
* A variant of [ fold ] producing an enumeration of its intermediate values .
If [ e ] contains [ x0 ] , [ x1 ] , ... , [ f init e ] is the enumeration
containing [ init ] , [ f init ] , [ f ( f init x0 ) x1 ] ... Lazy .
If [e] contains [x0], [x1], ..., [scanl f init e] is the enumeration
containing [init], [f init x0], [f (f init x0) x1]... Lazy. *)
val scan : ('a -> 'a -> 'a) -> 'a t -> 'a t
* [ scan ] is similar to [ ] but without the [ init ] value : if [ e ]
contains [ x0 ] , [ x1 ] , [ x2 ] ... , [ scan f e ] is the enumeration containing
[ x0 ] , [ f x0 x1 ] , [ f ( f x0 x1 ) x2 ] ...
For instance , [ scan ( * ) ( 1 -- 10 ) ] will produce an enumeration
containing the successive values of the factorial function .
contains [x0], [x1], [x2] ..., [scan f e] is the enumeration containing
[x0], [f x0 x1], [f (f x0 x1) x2]...
For instance, [scan ( * ) (1 -- 10)] will produce an enumeration
containing the successive values of the factorial function.*)
* Indexed functions : these functions are similar to previous ones
except that they call the function with one additional argument which
is an index starting at 0 and incremented after each call to the function .
except that they call the function with one additional argument which
is an index starting at 0 and incremented after each call to the function. *)
val iteri : (int -> 'a -> unit) -> 'a t -> unit
val iter2i : ( int -> 'a -> 'b -> unit) -> 'a t -> 'b t -> unit
val foldi : (int -> 'a -> 'b -> 'b) -> 'b -> 'a t -> 'b
val fold2i : (int -> 'a -> 'b -> 'c -> 'c) -> 'c -> 'a t -> 'b t -> 'c
* { 6 Useful functions }
val find : ('a -> bool) -> 'a t -> 'a
* [ find f e ] returns the first element [ x ] of [ e ] such that [ f x ] returns
[ true ] , consuming the enumeration up to and including the
found element .
@raise Not_found if no such element exists
in the enumeration , consuming the whole enumeration in the search .
Since [ find ] ( eagerly ) consumes a prefix of the enumeration , it
can be used several times on the same enumeration to find the
next element .
[true], consuming the enumeration up to and including the
found element.
@raise Not_found if no such element exists
in the enumeration, consuming the whole enumeration in the search.
Since [find] (eagerly) consumes a prefix of the enumeration, it
can be used several times on the same enumeration to find the
next element. *)
val find_map : ('a -> 'b option) -> 'a t -> 'b
* [ find_map f e ] finds the first element [ x ] of [ e ] such that [ f x ] returns
[ Some r ] , then returns r. It consumes the enumeration up to and including
the found element .
@raise Not_found if no such element exists in the
enumeration , consuming the whole enumeration in the search .
Since [ find_map ] ( eagerly ) consumes a prefix of the enumeration , it can be
used several times on the same enumeration to find the next element .
@since 2.0
[Some r], then returns r. It consumes the enumeration up to and including
the found element.
@raise Not_found if no such element exists in the
enumeration, consuming the whole enumeration in the search.
Since [find_map] (eagerly) consumes a prefix of the enumeration, it can be
used several times on the same enumeration to find the next element.
@since 2.0
*)
val is_empty : 'a t -> bool
val peek : 'a t -> 'a option
val get : 'a t -> 'a option
val get_exn : 'a t -> 'a
* [ get_exn e ] returns the first element of [ e ] .
@raise No_more_elements if [ e ] is empty .
@since 2.0
@raise No_more_elements if [e] is empty.
@since 2.0 *)
val push : 'a t -> 'a -> unit
val junk : 'a t -> unit
* [ junk e ] removes the first element from the enumeration , if any .
val clone : 'a t -> 'a t
val force : 'a t -> unit
val take : int -> 'a t -> 'a t
val drop : int -> 'a t -> unit
* [ drop n e ] removes the first [ n ] element from the enumeration , if any .
val skip: int -> 'a t -> 'a t
* [ skip n e ] removes the first [ n ] element from the enumeration , if any ,
then returns [ e ] .
This function has the same behavior as [ drop ] but is often easier to
compose with , e.g. , [ skip 5 % > take 3 ] is a new function which skips
5 elements and then returns the next 3 elements .
then returns [e].
This function has the same behavior as [drop] but is often easier to
compose with, e.g., [skip 5 %> take 3] is a new function which skips
5 elements and then returns the next 3 elements.*)
val take_while : ('a -> bool) -> 'a t -> 'a t
* [ take_while f e ] produces a new enumeration in which only remain
the first few elements [ x ] of [ e ] such that [ f x ]
the first few elements [x] of [e] such that [f x] *)
val drop_while : ('a -> bool) -> 'a t -> 'a t
* [ drop_while p e ] produces a new enumeration in which only
all the first elements such that [ f e ] have been junked .
all the first elements such that [f e] have been junked.*)
val span : ('a -> bool) -> 'a t -> 'a t * 'a t
* [ span test e ] produces two enumerations [ ( hd , tl ) ] , such that
[ hd ] is the same as [ take_while test e ] and [ tl ] is the same
as [ drop_while test e ] .
[hd] is the same as [take_while test e] and [tl] is the same
as [drop_while test e]. *)
val break : ('a -> bool) -> 'a t -> 'a t * 'a t
val group : ('a -> 'b) -> 'a t -> 'a t t
* [ group test e ] divides [ e ] into an enumeration of enumerations ,
where each sub - enumeration is the longest continuous enumeration
of elements whose [ test ] results are the same .
[ Enum.group ( x - > x mod 2 ) [ 1;2;4;1 ] = [ [ 1];[2;4];[1 ] ] ]
[ Enum.group ( fun x - > x mod 3 ) [ 1;2;4;1 ] = [ [ 1];[2];[4;1 ] ] ]
[ Enum.group ( fun s - > s.[0 ] ) [ " cat " ; " canary " ; " dog " ; " dodo " ; " ant " ; " cow " ] = [ [ " cat " ; " canary"];["dog";"dodo"];["ant"];["cow " ] ] ]
Warning : The result of this operation can not be directly cloned
safely ; instead , reify to a non - lazy structure and read from that
structure multiple times .
where each sub-enumeration is the longest continuous enumeration
of elements whose [test] results are the same.
[Enum.group (x -> x mod 2) [1;2;4;1] = [[1];[2;4];[1]]]
[Enum.group (fun x -> x mod 3) [1;2;4;1] = [[1];[2];[4;1]]]
[Enum.group (fun s -> s.[0]) ["cat"; "canary"; "dog"; "dodo"; "ant"; "cow"] = [["cat"; "canary"];["dog";"dodo"];["ant"];["cow"]]]
Warning: The result of this operation cannot be directly cloned
safely; instead, reify to a non-lazy structure and read from that
structure multiple times.
*)
val group_by : ('a -> 'a -> bool) -> 'a t -> 'a t t
val clump : int -> ('a -> unit) -> (unit -> 'b) -> 'a t -> 'b t
val cartesian_product : 'a t -> 'b t -> ('a * 'b) t
* [ e1 e2 ] computes the cartesian product of [ e1 ] and [ e2 ] .
Pairs are enumerated in a non - specified order , but in fair enough an order
so that it works on infinite enums ( i.e. even then , any pair is eventually
returned )
@since 2.2.0
Pairs are enumerated in a non-specified order, but in fair enough an order
so that it works on infinite enums (i.e. even then, any pair is eventually
returned)
@since 2.2.0 *)
(** {6 Lazy constructors}

    These functions are lazy which means that they will create a new modified
    enumeration without actually enumerating any element until they are asked
    to do so by the programmer (using one of the functions above).

    When the resulting enumerations of these functions are consumed, the
    underlying enumerations they were created from are also consumed. *)
val map : ('a -> 'b) -> 'a t -> 'b t
val mapi : (int -> 'a -> 'b) -> 'a t -> 'b t
* [ mapi ] is similar to [ map ] except that [ f ] is passed one extra argument
which is the index of the element in the enumeration , starting from 0 :
mapi f e returns an enumeration over [ ( f 0 a0 , f 1 a1 , ... ) ] where
[ a0,a1 ... ] are the elements of [ e ] .
which is the index of the element in the enumeration, starting from 0 :
mapi f e returns an enumeration over [(f 0 a0, f 1 a1, ...)] where
[a0,a1...] are the elements of [e]. *)
val filter : ('a -> bool) -> 'a t -> 'a t
val filter_map : ('a -> 'b option) -> 'a t -> 'b t
val append : 'a t -> 'a t -> 'a t
* [ append e1 e2 ] returns an enumeration that will enumerate over all
elements of [ e1 ] followed by all elements of [ e2 ] . Lazy .
{ b Note } The behavior of appending [ e ] to itself or to something
derived from [ e ] is not specified . In particular , cloning [ append e e ]
may destroy any sharing between the first and the second argument .
elements of [e1] followed by all elements of [e2]. Lazy.
{b Note} The behavior of appending [e] to itself or to something
derived from [e] is not specified. In particular, cloning [append e e]
may destroy any sharing between the first and the second argument.
*)
val prefix_action : (unit -> unit) -> 'a t -> 'a t
* [ prefix_action f e ] will behave as [ e ] but guarantees that [ f ( ) ]
will be invoked exactly once before the current first element of [ e ]
is read .
If [ prefix_action f e ] is cloned , [ f ] is invoked only once , during
the cloning . If [ prefix_action f e ] is counted , [ f ] is invoked
only once , during the counting .
May be used for signalling that reading starts or for performing
delayed evaluations .
will be invoked exactly once before the current first element of [e]
is read.
If [prefix_action f e] is cloned, [f] is invoked only once, during
the cloning. If [prefix_action f e] is counted, [f] is invoked
only once, during the counting.
May be used for signalling that reading starts or for performing
delayed evaluations.*)
val suffix_action : (unit -> unit) -> 'a t -> 'a t
* [ suffix_action f e ] will behave as [ e ] but guarantees that [ f ( ) ]
will be invoked after the contents of [ e ] are exhausted .
If [ suffix_action f e ] is cloned , [ f ] is invoked only once , when
the original enumeration is exhausted . If [ suffix_action f e ]
is counted , [ f ] is only invoked if the act of counting
requires a call to [ force ] .
May be used for signalling that reading stopped or for performing
delayed evaluations .
will be invoked after the contents of [e] are exhausted.
If [suffix_action f e] is cloned, [f] is invoked only once, when
the original enumeration is exhausted. If [suffix_action f e]
is counted, [f] is only invoked if the act of counting
requires a call to [force].
May be used for signalling that reading stopped or for performing
delayed evaluations.*)
val concat : 'a t t -> 'a t
val flatten : 'a t t -> 'a t
val concat_map : ('a -> 'b t) -> 'a t -> 'b t
* Synonym of { ! Monad.bind } , with flipped arguments .
[ concat_map f e ] is the same as [ concat ( map f e ) ] .
@since 2.2.0
[concat_map f e] is the same as [concat (map f e)].
@since 2.2.0 *)
(** {6 Constructors}

    In this section the word {i shall} denotes a semantic
    requirement. The correct operation of the functions in this
    interface are conditional on the client meeting these
    requirements.
*)
exception No_more_elements
exception Infinite_enum
val empty : unit -> 'a t
val make : next:(unit -> 'a) -> count:(unit -> int) -> clone:(unit -> 'a t) -> 'a t
val from : (unit -> 'a) -> 'a t
val from_while : (unit -> 'a option) -> 'a t
val from_loop: 'b -> ('b -> ('a * 'b)) -> 'a t
* [ from_loop data next ] creates a ( possibly infinite ) enumeration from
the successive results of applying [ next ] to [ data ] , then to the
result , etc . The list ends whenever the function raises
{ ! . No_more_elements } .
the successive results of applying [next] to [data], then to the
result, etc. The list ends whenever the function raises
{!BatEnum.No_more_elements}.*)
val seq : 'a -> ('a -> 'a) -> ('a -> bool) -> 'a t
* [ seq init step cond ] creates a sequence of data , which starts
from [ init ] , extends by [ step ] , until the condition [ cond ]
fails . E.g. [ seq 1 ( ( + ) 1 ) ( ( > ) 100 ) ] returns [ 1 , 2 , ... 99 ] . If [ cond
init ] is false , the result is empty .
from [init], extends by [step], until the condition [cond]
fails. E.g. [seq 1 ((+) 1) ((>) 100)] returns [1, 2, ... 99]. If [cond
init] is false, the result is empty. *)
val unfold: 'b -> ('b -> ('a * 'b) option) -> 'a t
* As [ from_loop ] , except uses option type to signal the end of the enumeration .
[ unfold data next ] creates a ( possibly infinite ) enumeration from
the successive results of applying [ next ] to [ data ] , then to the
result , etc . The enumeration ends whenever the function returns [ None ]
Example : [ Enum.unfold n ( fun x - > if x = 1 then None else Some
( x , if x land 1 = 1 then 3 * x + 1 else x / 2 ) ) ] returns the
hailstone sequence starting at [ n ] .
[unfold data next] creates a (possibly infinite) enumeration from
the successive results of applying [next] to [data], then to the
result, etc. The enumeration ends whenever the function returns [None]
Example: [Enum.unfold n (fun x -> if x = 1 then None else Some
(x, if x land 1 = 1 then 3 * x + 1 else x / 2))] returns the
hailstone sequence starting at [n].
*)
val init : int -> (int -> 'a) -> 'a t
val singleton : 'a -> 'a t
* Create an enumeration consisting of exactly one element .
val repeat : ?times:int -> 'a -> 'a t
val cycle : ?times:int -> 'a t -> 'a t
val delay : (unit -> 'a t) -> 'a t
val to_object: 'a t -> (<next:'a; count:int; clone:'b> as 'b)
val of_object: (<next:'a; count:int; clone:'b> as 'b) -> 'a t
val enum : 'a t -> 'a t
val of_enum : 'a t -> 'a t
val combination : ?repeat:bool -> int -> int -> int list t
* { 6 Counting }
val count : 'a t -> int
val fast_count : 'a t -> bool
val hard_count : 'a t -> int
*
{ 6 Utilities }
{6 Utilities }
*)
val range : ?until:int -> int -> int t
val dup : 'a t -> 'a t * 'a t
* [ dup stream ] returns a pair of streams which are identical to [ stream ] . Note
that stream is a destructive data structure , the point of [ dup ] is to
return two streams can be used independently .
that stream is a destructive data structure, the point of [dup] is to
return two streams can be used independently. *)
val combine : 'a t -> 'b t -> ('a * 'b) t
* [ combine ] transform two streams into a stream of pairs of corresponding
elements . If one stream is shorter , excess elements of the longer stream are
ignored .
Curried @since 3.0
elements. If one stream is shorter, excess elements of the longer stream are
ignored.
Curried @since 3.0
*)
val uncombine : ('a * 'b) t -> 'a t * 'b t
val merge : ('a -> 'a -> bool) -> 'a t -> 'a t -> 'a t
* [ merge test a b ] merge the elements from [ a ] and [ b ] into a single
enumeration . At each step , [ test ] is applied to the first element [ xa ] of
[ a ] and the first element [ xb ] of [ b ] to determine which should get first
into resulting enumeration . If [ test xa xb ] returns [ true ] , [ xa ] ( the
first element of [ a ] ) is used , otherwise [ xb ] is used . If [ a ] or [ b ] runs
out of elements , the process will append all elements of the other
enumeration to the result .
For example , if [ a ] and [ b ] are enumerations of integers sorted
in increasing order , then [ merge ( < ) a b ] will also be sorted .
enumeration. At each step, [test] is applied to the first element [xa] of
[a] and the first element [xb] of [b] to determine which should get first
into resulting enumeration. If [test xa xb] returns [true], [xa] (the
first element of [a]) is used, otherwise [xb] is used. If [a] or [b] runs
out of elements, the process will append all elements of the other
enumeration to the result.
For example, if [a] and [b] are enumerations of integers sorted
in increasing order, then [merge (<) a b] will also be sorted.
*)
val interleave : 'a t array -> 'a t
* [ interleave enums ] creates a new enumeration from an array of enumerations .
The new enumeration first yields the first elements of the enumerations in
the supplied order , then second elements , etc . Thus , a sequence
[ [ | [ x11 ; x12 ; ... ] ; [ x21 ; , ... ] , ... [ xN1 ; ; ... ] | ] ] becomes
[ [ x11 ; x12 ; ... ; xN1 ; x21 ; ; ... ; ; x31 ; ... ] ] .
The new enumeration first yields the first elements of the enumerations in
the supplied order, then second elements, etc. Thus, a sequence
[ [| [x11 ; x12 ; ...] ; [x21 ; x22, ...] , ... [xN1 ; xN2 ; ...] |] ] becomes
[[ x11 ; x12 ; ... ; xN1 ; x21 ; x22 ; ... ; xN2 ; x31 ; ... ]].
*)
val uniq : 'a t -> 'a t
val uniqq : 'a t -> 'a t
* [ uniqq e ] behaves as [ uniq e ] except it uses physical equality
to compare consecutive elements .
@since 2.4.0
to compare consecutive elements.
@since 2.4.0 *)
val uniq_by : ('a -> 'a -> bool) -> 'a t -> 'a t
* [ uniq_by cmp e ] behaves as [ uniq e ] except it allows to specify a
comparison function .
@since 2.4.0
comparison function.
@since 2.4.0 *)
val switch : ('a -> bool) -> 'a t -> 'a t * 'a t
* [ switch test enum ] splits [ enum ] into two enums , where the first enum have
all the elements satisfying [ test ] , the second enum is opposite . The
order of elements in the source enum is preserved .
all the elements satisfying [test], the second enum is opposite. The
order of elements in the source enum is preserved. *)
val partition : ('a -> bool) -> 'a t -> 'a t * 'a t
switchn : int - > ( ' a - > int ) - > ' a t - > ' a t array
( * * [ switchn ] is the array version of [ switch ] . [ switch n f fl ] split [ fl ] to an array of [ n ] enums , [ f ] is
applied to each element of [ fl ] to decide the i d of its destination
enum .
val arg_min : ('a -> 'b) -> 'a t -> 'a
val arg_max : ('a -> 'b) -> 'a t -> 'a
* [ arg_min f xs ] returns the [ x ] in [ xs ] for which [ f x ] is minimum .
Similarly for [ arg_max ] , except it returns the maximum . If
multiple values reach the maximum , one of them is
returned . ( currently the first , but this is not guaranteed )
Example : [ -5 -- 5 | > arg_min ( fun x - > x * x + 6 * x - 5 ) = -3 ]
Example : [ List.enum [ " cat " ; " canary " ; " dog " ; " dodo " ; " ant " ; " cow " ] | > arg_max String.length = " canary " ]
@added v1.4.0
@raise Invalid_argument if the input enum is empty
Similarly for [arg_max], except it returns the maximum. If
multiple values reach the maximum, one of them is
returned. (currently the first, but this is not guaranteed)
Example: [-5 -- 5 |> arg_min (fun x -> x * x + 6 * x - 5) = -3]
Example: [List.enum ["cat"; "canary"; "dog"; "dodo"; "ant"; "cow"] |> arg_max String.length = "canary"]
@added v1.4.0
@raise Invalid_argument if the input enum is empty
*)
* { 6 Trampolining }
val while_do : ('a -> bool) -> ('a t -> 'a t) -> 'a t -> 'a t
* { 6 Infix operators }
module Infix : sig
val ( -- ) : int -> int -> int t
* As [ range ] , without the label .
[ 5 -- 10 ] is the enumeration 5,6,7,8,9,10 .
[ 10 -- 5 ] is the empty enumeration
[5 -- 10] is the enumeration 5,6,7,8,9,10.
[10 -- 5] is the empty enumeration*)
val ( --^ ) : int -> int -> int t
* As [ ( -- ) ] but without the right endpoint
[ 5 --^ 10 ] is the enumeration 5,6,7,8,9 .
[5 --^ 10] is the enumeration 5,6,7,8,9.
*)
val ( --. ) : (float * float) -> float -> float t
* [ ( a , step ) -- . b ) ] creates a float enumeration from [ a ] to [ b ] with an
increment of [ step ] between elements .
[ ( 5.0 , 1.0 ) -- . 10.0 ] is the enumeration 5.0,6.0,7.0,8.0,9.0,10.0 .
[ ( 10.0 , -1.0 ) -- . 5.0 ] is the enumeration 10.0,9.0,8.0,7.0,6.0,5.0 .
[ ( 10.0 , 1.0 ) -- . 1.0 ] is the empty enumeration .
increment of [step] between elements.
[(5.0, 1.0) --. 10.0] is the enumeration 5.0,6.0,7.0,8.0,9.0,10.0.
[(10.0, -1.0) --. 5.0] is the enumeration 10.0,9.0,8.0,7.0,6.0,5.0.
[(10.0, 1.0) --. 1.0] is the empty enumeration. *)
val ( --- ) : int -> int -> int t
* As [ -- ] , but accepts enumerations in reverse order .
[ 5 --- 10 ] is the enumeration 5,6,7,8,9,10 .
[ 10 --- 5 ] is the enumeration 10,9,8,7,6,5 .
[5 --- 10] is the enumeration 5,6,7,8,9,10.
[10 --- 5] is the enumeration 10,9,8,7,6,5.*)
val ( --~ ) : char -> char -> char t
val ( // ) : 'a t -> ('a -> bool) -> 'a t
* Filtering ( pronounce this operator name " such that " ) .
For instance , [ ( 1 -- 37 ) // odd ] is the enumeration of all odd
numbers between 1 and 37 .
For instance, [(1 -- 37) // odd] is the enumeration of all odd
numbers between 1 and 37.*)
val ( /@ ) : 'a t -> ('a -> 'b) -> 'b t
val ( @/ ) : ('a -> 'b) -> 'a t -> 'b t
val ( //@ ) : 'a t -> ('a -> 'b option) -> 'b t
val ( @// ) : ('a -> 'b option) -> 'a t -> 'b t
end
val ( -- ) : int -> int -> int t
val ( --^ ) : int -> int -> int t
val ( --. ) : (float * float) -> float -> float t
val ( --- ) : int -> int -> int t
val ( --~ ) : char -> char -> char t
val ( // ) : 'a t -> ('a -> bool) -> 'a t
val ( /@ ) : 'a t -> ('a -> 'b) -> 'b t
val ( @/ ) : ('a -> 'b) -> 'a t -> 'b t
val ( //@ ) : 'a t -> ('a -> 'b option) -> 'b t
val ( @// ) : ('a -> 'b option) -> 'a t -> 'b t
* { 6 Monad related modules }
* Monadic operations on Enumerations containing monadic elements
This module will let you use sequence and fold_monad functions over enumerations .
This module will let you use sequence and fold_monad functions over enumerations.
*)
module WithMonad : functor (Mon : BatInterfaces.Monad) -> sig
type 'a m = 'a Mon.m
val sequence : 'a m t -> 'a t m
* [ sequence e ] evaluates each monadic elements ( of type [ ' a m ] ) contained in the enumeration [ e ] to get a monadic enumeration of [ ' a ] elements ,
of type [ ' a BatEnum.t m ] .
of type ['a BatEnum.t m]. *)
val fold_monad : ('a -> 'b -> 'a m) -> 'a -> 'b t -> 'a m
end
* The Monad
This module provides everything needed for writing and executing
computations in the BatEnum Monad .
This module provides everything needed for writing and executing
computations in the BatEnum Monad.
*)
module Monad : sig
type 'a m = 'a t
* The type of the monad 's elements , thus [ BatEnum.t ] .
val return : 'a -> 'a m
* This function puts a single value in the monad , that is to say it creates an enumeration containing a single element .
val bind : 'a m -> ('a -> 'b m) -> 'b m
end
val print : ?first:string -> ?last:string -> ?sep:string -> ('a BatInnerIO.output -> 'b -> unit) -> 'a BatInnerIO.output -> 'b t -> unit
val print_at_most : ?first:string -> ?last:string -> ?sep:string ->
limit:int -> ('a BatInnerIO.output -> 'b -> unit) ->
'a BatInnerIO.output -> 'b t -> unit
* [ print_at_most pp limit out enum ] consumes [ enum ] to print its elements
into [ out ] ( using [ pp ] to print individual elements ) .
At most [ limit ] arguments are printed , if more elements are
available an ellipsis " ... " is added .
@raise Invalid_argument if the limit is < = 0 .
@since 2.2.0
into [out] (using [pp] to print individual elements).
At most [limit] arguments are printed, if more elements are
available an ellipsis "..." is added.
@raise Invalid_argument if the limit is <= 0.
@since 2.2.0 *)
val compare : ('a -> 'a -> int) -> 'a t -> 'a t -> int
* [ compare cmp a b ] compares enumerations [ a ] and [ b ]
by lexicographical order using comparison [ cmp ] .
@return 0 if [ a ] and [ b ] are equal wrt [ cmp ]
@return -1 if [ a ] is empty and [ b ] is not
@return 1 if [ b ] is empty and [ a ] is not
@return [ cmp x y ] , where [ x ] is the first element of [ a ]
and [ y ] is the first element of [ b ] , if [ cmp x y < > 0 ]
@return [ compare cmp a ' b ' ] , where [ a ' ] and [ b ' ] are
respectively equal to [ a ] and [ b ] without their first
element , if both [ a ] and [ b ] are non - empty and [ cmp x y = 0 ] ,
where [ x ] is the first element of [ a ] and [ y ] is the first
element of [ b ]
by lexicographical order using comparison [cmp].
@return 0 if [a] and [b] are equal wrt [cmp]
@return -1 if [a] is empty and [b] is not
@return 1 if [b] is empty and [a] is not
@return [cmp x y], where [x] is the first element of [a]
and [y] is the first element of [b], if [cmp x y <> 0]
@return [compare cmp a' b'], where [a'] and [b'] are
respectively equal to [a] and [b] without their first
element, if both [a] and [b] are non-empty and [cmp x y = 0],
where [x] is the first element of [a] and [y] is the first
element of [b]
*)
val ord : ('a -> 'a -> BatOrd.order) -> 'a t -> 'a t -> BatOrd.order
val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
* { 6 Override modules }
*
The following modules replace functions defined in { ! } with functions
behaving slightly differently but having the same name . This is by design :
the functions meant to override the corresponding functions of { ! } .
The following modules replace functions defined in {!BatEnum} with functions
behaving slightly differently but having the same name. This is by design:
the functions meant to override the corresponding functions of {!BatEnum}.
*)
* Operations on { ! } without exceptions .
module Exceptionless : sig
val find : ('a -> bool) -> 'a t -> 'a option
* [ find f e ] returns [ Some x ] where [ x ] is the first element [ x ] of [ e ]
such that [ f x ] returns [ true ] , consuming the enumeration up to and
including the found element , or [ None ] if no such element exists
in the enumeration , consuming the whole enumeration in the search .
Since [ find ] consumes a prefix of the enumeration , it can be used several
times on the same enumeration to find the next element .
such that [f x] returns [true], consuming the enumeration up to and
including the found element, or [None] if no such element exists
in the enumeration, consuming the whole enumeration in the search.
Since [find] consumes a prefix of the enumeration, it can be used several
times on the same enumeration to find the next element. *)
end
* Operations on { ! } with labels .
This module overrides a number of functions of { ! } by
functions in which some arguments require labels . These labels are
there to improve readability and safety and to let you change the
order of arguments to functions . In every case , the behavior of the
function is identical to that of the corresponding function of { ! } .
This module overrides a number of functions of {!BatEnum} by
functions in which some arguments require labels. These labels are
there to improve readability and safety and to let you change the
order of arguments to functions. In every case, the behavior of the
function is identical to that of the corresponding function of {!BatEnum}.
*)
module Labels : sig
val iter: f:('a -> unit) -> 'a t -> unit
val iter2: f:('a -> 'b -> unit) -> 'a t -> 'b t -> unit
val exists: f:('a -> bool) -> 'a t -> bool
val for_all: f:('a -> bool) -> 'a t -> bool
val fold: f:('b -> 'a -> 'b) -> init:'b -> 'a t -> 'b
val fold2: f:('a -> 'b -> 'c -> 'c) -> init:'c -> 'a t -> 'b t -> 'c
val iteri: f:(int -> 'a -> unit) -> 'a t -> unit
val iter2i: f:( int -> 'a -> 'b -> unit) -> 'a t -> 'b t -> unit
val foldi: f:(int -> 'a -> 'b -> 'b) -> init:'b -> 'a t -> 'b
val fold2i: f:(int -> 'a -> 'b -> 'c -> 'c) -> init:'c -> 'a t -> 'b t -> 'c
val find: f:('a -> bool) -> 'a t -> 'a
val take_while: f:('a -> bool) -> 'a t -> 'a t
val drop_while: f:('a -> bool) -> 'a t -> 'a t
val map: f:('a -> 'b) -> 'a t -> 'b t
val mapi: f:(int -> 'a -> 'b) -> 'a t -> 'b t
val filter: f:('a -> bool) -> 'a t -> 'a t
val filter_map: f:('a -> 'b option) -> 'a t -> 'b t
val from: f:(unit -> 'a) -> 'a t
val from_while: f:(unit -> 'a option) -> 'a t
val from_loop: init:'b -> f:('b -> ('a * 'b)) -> 'a t
val seq: init:'a -> f:('a -> 'a) -> cnd:('a -> bool) -> 'a t
val unfold: init:'b -> f:('b -> ('a * 'b) option) -> 'a t
val init: int -> f:(int -> 'a) -> 'a t
val switch: f:('a -> bool) -> 'a t -> 'a t * 'a t
val compare: ?cmp:('a -> 'a -> int) -> 'a t -> 'a t -> int
val uniq: ?cmp:('a -> 'a -> bool) -> 'a t -> 'a t
module LExceptionless : sig
val find : f:('a -> bool) -> 'a t -> 'a option
end
end
* { 6 For system use only , not for the casual user }
For compatibility with { ! Stream }
For compatibility with {!Stream}
*)
val iapp : 'a t -> 'a t -> 'a t
val icons : 'a -> 'a t -> 'a t
val ising : 'a -> 'a t
val lapp : (unit -> 'a t) -> 'a t -> 'a t
val lcons : (unit -> 'a) -> 'a t -> 'a t
val lsing : (unit -> 'a) -> 'a t
val slazy : (unit -> 'a t) -> 'a t
|
3def62fc3373de040d548ea38c2b3928a8c4fa369d6a7ad27dc0b172290bec58 | flodihn/NextGen | shared_cache.erl | -module(shared_cache).
-export([
init/0,
store/2,
retr/1
]).
init() ->
ets:new(?MODULE, [named_table, public]).
store(Key, Val) ->
ets:insert(?MODULE, {Key, Val}).
retr(Key) ->
case ets:lookup(?MODULE, Key) of
[{Key, Val}] ->
Val;
[] ->
undefined
end.
| null | https://raw.githubusercontent.com/flodihn/NextGen/3da1c3ee0d8f658383bdf5fccbdd49ace3cdb323/AreaServer/src/shared_cache.erl | erlang | -module(shared_cache).
-export([
init/0,
store/2,
retr/1
]).
init() ->
ets:new(?MODULE, [named_table, public]).
store(Key, Val) ->
ets:insert(?MODULE, {Key, Val}).
retr(Key) ->
case ets:lookup(?MODULE, Key) of
[{Key, Val}] ->
Val;
[] ->
undefined
end.
| |
ef6b6d8e8c7f21227fe9819fa9910387846c39f93faef9ba177d1bbe73af465b | disco-framework/disco | test_helpers.erl | %% @hidden to edoc
-module(test_helpers).
-export([
unconsult/2,
calls_of/2,
times_called/2
]).
-spec unconsult(file:name(), [term()]) -> ok.
unconsult(File,Terms) ->
{ok, Handle} = file:open(File, [write]),
lists:foreach( fun(X) -> io:format(Handle, "~p.~n",[X]) end, Terms),
file:close(Handle).
-spec calls_of(atom(), atom()) -> list(term()).
calls_of(Mod, FunName) ->
FilterFunc = fun (CallSpec) ->
case CallSpec of
{_, {Mod, FunName, _}, _} -> true;
_ -> false
end
end,
lists:filter(FilterFunc, meck:history(Mod)).
-spec times_called(atom(), atom()) -> non_neg_integer().
times_called(Mod, FunName) ->
length(calls_of(Mod, FunName)).
| null | https://raw.githubusercontent.com/disco-framework/disco/f55f35d46d43ef5f4fa1466bdf8d662f5f01f30f/src/test/test_helpers.erl | erlang | @hidden to edoc |
-module(test_helpers).
-export([
unconsult/2,
calls_of/2,
times_called/2
]).
-spec unconsult(file:name(), [term()]) -> ok.
unconsult(File,Terms) ->
{ok, Handle} = file:open(File, [write]),
lists:foreach( fun(X) -> io:format(Handle, "~p.~n",[X]) end, Terms),
file:close(Handle).
-spec calls_of(atom(), atom()) -> list(term()).
calls_of(Mod, FunName) ->
FilterFunc = fun (CallSpec) ->
case CallSpec of
{_, {Mod, FunName, _}, _} -> true;
_ -> false
end
end,
lists:filter(FilterFunc, meck:history(Mod)).
-spec times_called(atom(), atom()) -> non_neg_integer().
times_called(Mod, FunName) ->
length(calls_of(Mod, FunName)).
|
291f66f781337e38d8e067eef05095fac28e24b89ba71a5841eb8cdf0617e8ef | dsheets/codoc | codocCliExtract.ml |
* Copyright ( c ) 2015 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (c) 2015 David Sheets <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
module Error = CodocCli.Error
module Dir = CodocSysUtil.Dir
let (/) = Filename.concat
let hypot output root path = CodocUtil.(rel_of_path (depth output) root, path)
let rel_path fpath to_ = CodocExtraction.(uapply (hypot fpath) to_)
let exists_package dir package rel_file =
let path = dir / package / rel_file in
if Sys.file_exists path then Some path else None
let extract ~force ~index input out_dir rel_xml_out =
let xml_out = out_dir / rel_xml_out in
if not force && Sys.file_exists xml_out
then Error.use_force rel_xml_out
else
let dirs = (Dir.name xml_out)::(
if index then [out_dir / CodocConfig.rel_index_xml] else []
) in
match Dir.make_dirs_exist ~perm:0o755 dirs with
| Some err -> err
| None ->
let unit_path = rel_path xml_out input in
let root_fn unit_name unit_digest =
let open CodocDoc in
let cm = { unit_path; unit_name; unit_digest } in
let xml_file = Filename.basename xml_out in
Xml (xml_file, Cm cm)
in
let open DocOck in
let open CodocExtraction in
match read root_fn input with
| Not_an_interface -> Error.not_an_interface (path input)
| Wrong_version -> Error.wrong_version_interface (path input)
| Corrupted -> Error.corrupted_interface (path input)
| Not_a_typedtree -> Error.not_a_typedtree (path input)
| Not_an_implementation ->
TODO : fixme
failwith "unimplemented: Not_an_implementation"
| Ok unit ->
let _root, name = CodocUtil.root_of_unit unit in
let oc = open_out xml_out in
let xml_out = Xmlm.make_output (`Channel oc) in
DocOckXmlFold.((file { f = CodocXml.doc_printer }).f)
(fun () signal -> Xmlm.output xml_out signal) () unit;
close_out oc;
let open CodocIndex in
let empty_sub = { CodocIndex.html_file = None; issues = [] } in
match CodocUnit.Substruct.(map_of_unit {
map_class = (fun _ _ -> empty_sub);
map_classtype = (fun _ _ -> empty_sub);
map_module = (fun _ _ -> empty_sub);
map_moduletype = (fun _ _ -> empty_sub);
} unit) with
| None -> failwith "packs not yet supported" (* TODO: support packs *)
| Some substructs ->
let substructs = CodocUnit.Substruct.to_name substructs in
let xml_file = rel_xml_out in
let unit_issues =
if CodocExtraction.is_cmti input
then []
else [ Non_cmti_source input ]
in
let hide = CodocExtraction.is_hidden input in
let unit = { name; xml_file; unit_issues; substructs; hide; } in
if not index then `Ok unit
else
(* TODO: Use index caching? *)
(* Creating *or* updating index so no need to check for force *)
TODO : FIXME this can raise
let index = read out_dir CodocConfig.rel_index_xml in
let units = StringMap.add name unit index.units in
let index = { index with units } in
write index;
`Ok unit
let run_dir ~force ~index in_dir out_dir package =
let extr = CodocCliListExtractions.collect in_dir in
Printf.printf "%s\n" (CodocExtraction.summarize extr);
let files = CodocExtraction.file_list extr in
match if force then [] else List.fold_left (fun errs file ->
match exists_package out_dir package (CodocExtraction.rel_xml file) with
| None -> errs
| Some path -> (Error.use_force path)::errs
) [] files with
| (_::_) as errs -> CodocCli.combine_errors errs
| [] -> match List.fold_left (fun (units,errs) file ->
let index = false in
let rel_xml = CodocExtraction.rel_xml file in
match extract ~force ~index file (out_dir / package) rel_xml with
| `Ok unit -> (unit::units, errs)
| `Error err -> (units, (`Error err)::errs)
) ([],[]) files with
| _, ((_::_) as errs) -> CodocCli.combine_errors errs
| [], [] -> `Ok (`Dir out_dir)
| units, [] -> if not index then `Ok (`Dir out_dir)
else
(* TODO: use index caching? *)
let open CodocIndex in
(* Creating *or* updating index so no need to check for force *)
TODO : FIXME this can raise
let rel_index = CodocConfig.rel_index_xml in
let (pkg_path, pkg_index), pkg_parents =
traverse ~rel_index out_dir package
in
let units = List.fold_left (fun map unit ->
StringMap.add unit.name unit map
) pkg_index.units units in
let pkg_index = { pkg_index with units } in
write pkg_index;
List.iter (fun (_name, index) -> write index) pkg_parents;
`Ok (`Dir out_dir)
let extract_file ~force ~index file package out_dir rel_out =
if package = ""
then if index
then Error.no_file_index
else
CodocCli.map_ret (fun _ -> ())
(extract ~force ~index file out_dir rel_out)
else Error.no_file_package
let file in_file f =
let src = Filename.dirname in_file in
let rel = Filename.basename in_file in
match CodocExtraction.file ~src rel with
| None -> `Error (false, "source "^in_file^" is not a cmti, cmt, or cmi")
| Some file -> f file
let file_to_file ~force ~index in_file package out_file =
(* simple doc gen *)
file in_file (fun file ->
let out_dir = Dir.name out_file in
CodocCli.map_ret
(fun () -> `File out_file)
(extract_file ~force ~index file package out_dir out_file)
)
let file_to_dir ~force ~index in_file package out_dir =
file in_file (fun file ->
let out_dir = out_dir / package in
let out = CodocExtraction.relocate out_dir file in
let rel_xml_out = CodocExtraction.rel_xml out in
CodocCli.map_ret (fun _ -> `Dir out_dir)
(extract ~force ~index file out_dir rel_xml_out)
)
let run ({ CodocCli.Common.force; index }) output path package =
match path, output with
| `Missing path, _ -> Error.source_missing path
| `File in_file, None -> file in_file (fun file ->
let out_dir = Dir.name in_file in
let xml_file = CodocExtraction.rel_xml file in
CodocCli.map_ret
(fun () -> `File xml_file)
(extract_file ~force ~index file package out_dir xml_file)
)
| `File in_file, Some (`File out_file) ->
file_to_file ~force ~index in_file package out_file
| `File in_file, Some (`Missing out_path) ->
if out_path.[String.length out_path - 1] = '/'
then file_to_dir ~force ~index in_file package out_path
else file_to_file ~force ~index in_file package out_path
| `File in_file, Some (`Dir out_dir) ->
file_to_dir ~force ~index in_file package out_dir
| `Dir in_dir, None -> run_dir ~force ~index in_dir in_dir package
| `Dir in_dir, Some (`Missing out_dir | `Dir out_dir) ->
run_dir ~force ~index in_dir out_dir package
| `Dir in_dir, Some (`File out_file) -> Error.dir_to_file in_dir out_file
| null | https://raw.githubusercontent.com/dsheets/codoc/382077cf3e7e20e478bd97cc0b348e0b2ec926db/cli/codocCliExtract.ml | ocaml | TODO: support packs
TODO: Use index caching?
Creating *or* updating index so no need to check for force
TODO: use index caching?
Creating *or* updating index so no need to check for force
simple doc gen |
* Copyright ( c ) 2015 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (c) 2015 David Sheets <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
module Error = CodocCli.Error
module Dir = CodocSysUtil.Dir
let (/) = Filename.concat
let hypot output root path = CodocUtil.(rel_of_path (depth output) root, path)
let rel_path fpath to_ = CodocExtraction.(uapply (hypot fpath) to_)
let exists_package dir package rel_file =
let path = dir / package / rel_file in
if Sys.file_exists path then Some path else None
let extract ~force ~index input out_dir rel_xml_out =
let xml_out = out_dir / rel_xml_out in
if not force && Sys.file_exists xml_out
then Error.use_force rel_xml_out
else
let dirs = (Dir.name xml_out)::(
if index then [out_dir / CodocConfig.rel_index_xml] else []
) in
match Dir.make_dirs_exist ~perm:0o755 dirs with
| Some err -> err
| None ->
let unit_path = rel_path xml_out input in
let root_fn unit_name unit_digest =
let open CodocDoc in
let cm = { unit_path; unit_name; unit_digest } in
let xml_file = Filename.basename xml_out in
Xml (xml_file, Cm cm)
in
let open DocOck in
let open CodocExtraction in
match read root_fn input with
| Not_an_interface -> Error.not_an_interface (path input)
| Wrong_version -> Error.wrong_version_interface (path input)
| Corrupted -> Error.corrupted_interface (path input)
| Not_a_typedtree -> Error.not_a_typedtree (path input)
| Not_an_implementation ->
TODO : fixme
failwith "unimplemented: Not_an_implementation"
| Ok unit ->
let _root, name = CodocUtil.root_of_unit unit in
let oc = open_out xml_out in
let xml_out = Xmlm.make_output (`Channel oc) in
DocOckXmlFold.((file { f = CodocXml.doc_printer }).f)
(fun () signal -> Xmlm.output xml_out signal) () unit;
close_out oc;
let open CodocIndex in
let empty_sub = { CodocIndex.html_file = None; issues = [] } in
match CodocUnit.Substruct.(map_of_unit {
map_class = (fun _ _ -> empty_sub);
map_classtype = (fun _ _ -> empty_sub);
map_module = (fun _ _ -> empty_sub);
map_moduletype = (fun _ _ -> empty_sub);
} unit) with
| Some substructs ->
let substructs = CodocUnit.Substruct.to_name substructs in
let xml_file = rel_xml_out in
let unit_issues =
if CodocExtraction.is_cmti input
then []
else [ Non_cmti_source input ]
in
let hide = CodocExtraction.is_hidden input in
let unit = { name; xml_file; unit_issues; substructs; hide; } in
if not index then `Ok unit
else
TODO : FIXME this can raise
let index = read out_dir CodocConfig.rel_index_xml in
let units = StringMap.add name unit index.units in
let index = { index with units } in
write index;
`Ok unit
let run_dir ~force ~index in_dir out_dir package =
let extr = CodocCliListExtractions.collect in_dir in
Printf.printf "%s\n" (CodocExtraction.summarize extr);
let files = CodocExtraction.file_list extr in
match if force then [] else List.fold_left (fun errs file ->
match exists_package out_dir package (CodocExtraction.rel_xml file) with
| None -> errs
| Some path -> (Error.use_force path)::errs
) [] files with
| (_::_) as errs -> CodocCli.combine_errors errs
| [] -> match List.fold_left (fun (units,errs) file ->
let index = false in
let rel_xml = CodocExtraction.rel_xml file in
match extract ~force ~index file (out_dir / package) rel_xml with
| `Ok unit -> (unit::units, errs)
| `Error err -> (units, (`Error err)::errs)
) ([],[]) files with
| _, ((_::_) as errs) -> CodocCli.combine_errors errs
| [], [] -> `Ok (`Dir out_dir)
| units, [] -> if not index then `Ok (`Dir out_dir)
else
let open CodocIndex in
TODO : FIXME this can raise
let rel_index = CodocConfig.rel_index_xml in
let (pkg_path, pkg_index), pkg_parents =
traverse ~rel_index out_dir package
in
let units = List.fold_left (fun map unit ->
StringMap.add unit.name unit map
) pkg_index.units units in
let pkg_index = { pkg_index with units } in
write pkg_index;
List.iter (fun (_name, index) -> write index) pkg_parents;
`Ok (`Dir out_dir)
let extract_file ~force ~index file package out_dir rel_out =
if package = ""
then if index
then Error.no_file_index
else
CodocCli.map_ret (fun _ -> ())
(extract ~force ~index file out_dir rel_out)
else Error.no_file_package
let file in_file f =
let src = Filename.dirname in_file in
let rel = Filename.basename in_file in
match CodocExtraction.file ~src rel with
| None -> `Error (false, "source "^in_file^" is not a cmti, cmt, or cmi")
| Some file -> f file
let file_to_file ~force ~index in_file package out_file =
file in_file (fun file ->
let out_dir = Dir.name out_file in
CodocCli.map_ret
(fun () -> `File out_file)
(extract_file ~force ~index file package out_dir out_file)
)
let file_to_dir ~force ~index in_file package out_dir =
file in_file (fun file ->
let out_dir = out_dir / package in
let out = CodocExtraction.relocate out_dir file in
let rel_xml_out = CodocExtraction.rel_xml out in
CodocCli.map_ret (fun _ -> `Dir out_dir)
(extract ~force ~index file out_dir rel_xml_out)
)
let run ({ CodocCli.Common.force; index }) output path package =
match path, output with
| `Missing path, _ -> Error.source_missing path
| `File in_file, None -> file in_file (fun file ->
let out_dir = Dir.name in_file in
let xml_file = CodocExtraction.rel_xml file in
CodocCli.map_ret
(fun () -> `File xml_file)
(extract_file ~force ~index file package out_dir xml_file)
)
| `File in_file, Some (`File out_file) ->
file_to_file ~force ~index in_file package out_file
| `File in_file, Some (`Missing out_path) ->
if out_path.[String.length out_path - 1] = '/'
then file_to_dir ~force ~index in_file package out_path
else file_to_file ~force ~index in_file package out_path
| `File in_file, Some (`Dir out_dir) ->
file_to_dir ~force ~index in_file package out_dir
| `Dir in_dir, None -> run_dir ~force ~index in_dir in_dir package
| `Dir in_dir, Some (`Missing out_dir | `Dir out_dir) ->
run_dir ~force ~index in_dir out_dir package
| `Dir in_dir, Some (`File out_file) -> Error.dir_to_file in_dir out_file
|
d4d0ba4fe77bd07fa79924db79e4f1efb8318e4c316804520e0112b8c9add16f | McParen/croatoan | color.lisp | (in-package :de.anvi.ncurses)
;;; color
;;; curses color manipulation routines
;;; -island.net/ncurses/man/curs_color.3x.html
;;;
;;; C prototypes
int start_color(void ) ;
;; bool has_colors(void);
;; bool can_change_color(void);
;; int init_pair(short pair, short f, short b);
;; int init_color(short color, short r, short g, short b);
;; int pair_content(short pair, short *f, short *b);
;; int color_content(short color, short *r, short *g, short *b);
;; int init_extended_pair(int pair, int f, int b);
;; int init_extended_color(int color, int r, int g, int b);
;; int extended_pair_content(int pair, int *f, int *b);
;; int extended_color_content(int color, int *r, int *g, int *b);
void reset_color_pairs(void ) ;
;;; C macros
;; COLOR_PAIR(int n)
;; PAIR_NUMBER(attrs);
;;; Low-level CFFI wrappers
(cffi:defcfun ("start_color" start-color) :int)
(cffi:defcfun ("has_colors" has-colors) :boolean)
(cffi:defcfun ("can_change_color" can-change-color) :boolean)
(cffi:defcfun ("init_pair" init-pair) :int (pair :short) (f :short) (b :short))
(cffi:defcfun ("init_color" init-color) :int (color :short) (r :short) (g :short) (b :short))
(cffi:defcfun ("pair_content" pair-content) :int (pair :short) (f (:pointer :short)) (b (:pointer :short)))
(cffi:defcfun ("color_content" color-content) :int (color :short) (r (:pointer :short)) (g (:pointer :short)) (b (:pointer :short)))
(cffi:defcfun ("init_extended_pair" init-extended-pair) :int (pair :int) (f :int) (b :int))
(cffi:defcfun ("init_extended_color" init-extended-color) :int (color :int) (r :int) (g :int) (b :int))
(cffi:defcfun ("extended_pair_content" extended-pair-content) :int (pair :int) (f (:pointer :int)) (b (:pointer :int)))
(cffi:defcfun ("extended_color_content" extended-color-content) :int (color :int) (r (:pointer :int)) (g (:pointer :int)) (b (:pointer :int)))
(cffi:defcfun ("reset_color_pairs" reset-color-pairs) :void)
(cffi:defcfun ("COLOR_PAIR" color-pair) :int (n :int))
(cffi:defcfun ("PAIR_NUMBER" pair-number) :int (attrs :int))
(defconstant +COLOR-BLACK+ 0)
(defconstant +COLOR-RED+ 1)
(defconstant +COLOR-GREEN+ 2)
(defconstant +COLOR-YELLOW+ 3)
(defconstant +COLOR-BLUE+ 4)
(defconstant +COLOR-MAGENTA+ 5)
(defconstant +COLOR-CYAN+ 6)
(defconstant +COLOR-WHITE+ 7)
| null | https://raw.githubusercontent.com/McParen/croatoan/89014b041ff6d17005fa4e5210f9360a96550fdb/ncurses/color.lisp | lisp | color
curses color manipulation routines
-island.net/ncurses/man/curs_color.3x.html
C prototypes
bool has_colors(void);
bool can_change_color(void);
int init_pair(short pair, short f, short b);
int init_color(short color, short r, short g, short b);
int pair_content(short pair, short *f, short *b);
int color_content(short color, short *r, short *g, short *b);
int init_extended_pair(int pair, int f, int b);
int init_extended_color(int color, int r, int g, int b);
int extended_pair_content(int pair, int *f, int *b);
int extended_color_content(int color, int *r, int *g, int *b);
C macros
COLOR_PAIR(int n)
PAIR_NUMBER(attrs);
Low-level CFFI wrappers | (in-package :de.anvi.ncurses)
;; Initialize curses color handling (C: int start_color(void)).
(cffi:defcfun ("start_color" start-color) :int)

;; Predicates for terminal color capabilities.
(cffi:defcfun ("has_colors" has-colors) :boolean)
(cffi:defcfun ("can_change_color" can-change-color) :boolean)

;; Classic (short-range) pair/color definition routines.
(cffi:defcfun ("init_pair" init-pair) :int (pair :short) (f :short) (b :short))
(cffi:defcfun ("init_color" init-color) :int (color :short) (r :short) (g :short) (b :short))

;; Query routines; components come back through pointer out-parameters.
(cffi:defcfun ("pair_content" pair-content) :int (pair :short) (f (:pointer :short)) (b (:pointer :short)))
(cffi:defcfun ("color_content" color-content) :int (color :short) (r (:pointer :short)) (g (:pointer :short)) (b (:pointer :short)))

;; Extended variants taking full :int arguments instead of :short.
(cffi:defcfun ("init_extended_pair" init-extended-pair) :int (pair :int) (f :int) (b :int))
(cffi:defcfun ("init_extended_color" init-extended-color) :int (color :int) (r :int) (g :int) (b :int))
(cffi:defcfun ("extended_pair_content" extended-pair-content) :int (pair :int) (f (:pointer :int)) (b (:pointer :int)))
(cffi:defcfun ("extended_color_content" extended-color-content) :int (color :int) (r (:pointer :int)) (g (:pointer :int)) (b (:pointer :int)))

(cffi:defcfun ("reset_color_pairs" reset-color-pairs) :void)

;; The C macros COLOR_PAIR(n) / PAIR_NUMBER(attrs) wrapped as functions.
(cffi:defcfun ("COLOR_PAIR" color-pair) :int (n :int))
(cffi:defcfun ("PAIR_NUMBER" pair-number) :int (attrs :int))

;; The eight standard curses color numbers.
(defconstant +COLOR-BLACK+ 0)
(defconstant +COLOR-RED+ 1)
(defconstant +COLOR-GREEN+ 2)
(defconstant +COLOR-YELLOW+ 3)
(defconstant +COLOR-BLUE+ 4)
(defconstant +COLOR-MAGENTA+ 5)
(defconstant +COLOR-CYAN+ 6)
(defconstant +COLOR-WHITE+ 7)
|
98bde824695f5d911a9fae1b40cab4b6d7450747320c31e399a9b019d3cb0eae | kawasima/jagrid | index.clj | (ns jagrid.example.index
(:use [hiccup.core]
[jagrid.example.layout]))
;; Renders the example index page: an <h1> heading plus a list of links
;; to the demo pages, wrapped in the shared layout (view-layout comes
;; from jagrid.example.layout via the ns :use clause).
;; The user-visible text is Japanese; the heading reads roughly
;; "achieves a layout like Excel grid paper".
(defn view []
  (view-layout {:title "index"}
    [:h1 "Excel方眼紙のようなレイアウトを実現します"]
    [:ul
     [:li [:a {:href "basic.html"} "Excel方眼紙レイアウトの基本"]]          ; basics of the grid layout
     [:li [:a {:href "sales-report.html"} "組み合わせた例 (営業日報)"]]])) ; combined example (daily sales report)
| null | https://raw.githubusercontent.com/kawasima/jagrid/524b351c47ba2648f96ce8ef5ee431d0eb594d28/src/jagrid/example/index.clj | clojure | (ns jagrid.example.index
(:use [hiccup.core]
[jagrid.example.layout]))
;; Renders the example index page: an <h1> heading and a list of links
;; to the demo pages, produced through the shared view-layout helper.
;; Strings are Japanese; the <h1> translates to
;; "achieves a layout like Excel grid paper".
(defn view []
  (view-layout {:title "index"}
    [:h1 "Excel方眼紙のようなレイアウトを実現します"]
    [:ul
     [:li [:a {:href "basic.html"} "Excel方眼紙レイアウトの基本"]]          ; basics of the grid layout
     [:li [:a {:href "sales-report.html"} "組み合わせた例 (営業日報)"]]])) ; combined example (daily sales report)
| |
cfc6e8c49ca87d787ee8cb95cab29ed89093dc888f099e8be3c577d2cf64e317 | TerrorJack/ghc-alter | Pack.hs | # LANGUAGE Unsafe #
{-# LANGUAGE NoImplicitPrelude, MagicHash, UnboxedTuples #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Pack
Copyright : ( c ) The University of Glasgow 1997 - 2002
-- License : see libraries/base/LICENSE
--
-- Maintainer :
-- Stability : internal
Portability : non - portable ( GHC Extensions )
--
-- This module provides a small set of low-level functions for packing
-- and unpacking a chunk of bytes. Used by code emitted by the compiler
-- plus the prelude libraries.
--
The programmer level view of packed strings is provided by a GHC
system library PackedString .
--
-----------------------------------------------------------------------------
module GHC.Pack
(
-- (**) - emitted by compiler.
packCString#,
unpackCString,
unpackCString#,
unpackNBytes#,
unpackFoldrCString#, -- (**)
unpackAppendCString#, -- (**)
)
where
import GHC.Base
import GHC.List ( length )
import GHC.ST
import GHC.Ptr
-- | An immutable primitive byte array together with two index fields.
-- 'freeze_ps_array' below stores the bounds @0@ and the logical length
-- in them.
data ByteArray ix = ByteArray ix ix ByteArray#

-- | A mutable primitive byte array in state thread @s@, with the same
-- two index fields.  'new_ps_array' fills them with error thunks and
-- no code here ever forces them.
data MutableByteArray s ix = MutableByteArray ix ix (MutableByteArray# s)
-- | Unpack a C string into a Haskell 'String'.  A 'nullPtr' argument
-- yields the empty list; for any other pointer the raw address is
-- handed to 'unpackCString#'.
unpackCString :: Ptr a -> [Char]
unpackCString ptr@(Ptr addr#) =
  if ptr == nullPtr
    then []
    else unpackCString# addr#
-- | Pack a 'String' and return only the primitive 'ByteArray#',
-- discarding the bounds recorded by 'packString'.  The result is
-- unlifted, so it must be bound via @case@.
packCString# :: [Char] -> ByteArray#
packCString# chars =
  case packString chars of
    ByteArray _ _ frozen# -> frozen#
-- | Pure wrapper: pack a string into an immutable 'ByteArray' by
-- running the packing computation with 'runST'.  (Kept in pointed
-- style: composing with 'runST' directly would be ill-typed because
-- of its rank-2 type.)
packString :: [Char] -> ByteArray Int
packString cs = runST (packNBytesST (length cs) cs)
-- | Pack a finite string inside 'ST', delegating to 'packNBytesST'
-- with the string's own length.
packStringST :: [Char] -> ST s (ByteArray Int)
packStringST cs = packNBytesST (length cs) cs
-- | Pack @str@ into a freshly allocated byte array.  The first
-- argument must equal @length str@ (callers such as 'packStringST'
-- guarantee this); the buffer is allocated one byte larger so a
-- terminating NUL can be written after the data.
packNBytesST :: Int -> [Char] -> ST s (ByteArray Int)
packNBytesST (I# length#) str =
  {-
   allocate an array that will hold the string
   (not forgetting the NUL byte at the end)
  -}
  new_ps_array (length# +# 1#) >>= \ ch_array ->
  -- fill in packed string from "str" (plus the trailing NUL)
  fill_in ch_array 0# str >>
  -- freeze the mutable array into an immutable ByteArray
  freeze_ps_array ch_array length#
  where
    -- Walk the string writing one character per index; when the list
    -- is exhausted a NUL (chr# 0#) is written as terminator.
    fill_in :: MutableByteArray s Int -> Int# -> [Char] -> ST s ()
    fill_in arr_in# idx [] =
      write_ps_array arr_in# idx (chr# 0#) >>
      return ()
    fill_in arr_in# idx (C# c : cs) =
      write_ps_array arr_in# idx c >>
      fill_in arr_in# (idx +# 1#) cs
( Very :-) ` ` Specialised '' versions of some CharArray things ...
new_ps_array :: Int# -> ST s (MutableByteArray s Int)
write_ps_array :: MutableByteArray s Int -> Int# -> Char# -> ST s ()
freeze_ps_array :: MutableByteArray s Int -> Int# -> ST s (ByteArray Int)
new_ps_array size = ST $ \ s ->
case (newByteArray# size s) of { (# s2#, barr# #) ->
(# s2#, MutableByteArray bot bot barr# #) }
where
bot = errorWithoutStackTrace "new_ps_array"
write_ps_array (MutableByteArray _ _ barr#) n ch = ST $ \ s# ->
case writeCharArray# barr# n ch s# of { s2# ->
(# s2#, () #) }
-- same as unsafeFreezeByteArray
freeze_ps_array (MutableByteArray _ _ arr#) len# = ST $ \ s# ->
case unsafeFreezeByteArray# arr# s# of { (# s2#, frozen# #) ->
(# s2#, ByteArray 0 (I# len#) frozen# #) }
| null | https://raw.githubusercontent.com/TerrorJack/ghc-alter/db736f34095eef416b7e077f9b26fc03aa78c311/ghc-alter/boot-lib/base/GHC/Pack.hs | haskell | # OPTIONS_HADDOCK hide #
---------------------------------------------------------------------------
|
Module : GHC.Pack
License : see libraries/base/LICENSE
Maintainer :
Stability : internal
This module provides a small set of low-level functions for packing
and unpacking a chunk of bytes. Used by code emitted by the compiler
plus the prelude libraries.
---------------------------------------------------------------------------
(**) - emitted by compiler.
(**)
(**)
fill in packed string from "str"
freeze the puppy:
same as unsafeFreezeByteArray | # LANGUAGE Unsafe #
{-# LANGUAGE NoImplicitPrelude, MagicHash, UnboxedTuples #-}
-- Copyright   : (c) The University of Glasgow 1997-2002
-- Portability : non-portable (GHC Extensions)
-- The programmer level view of packed strings is provided by a GHC
-- system library PackedString.
module GHC.Pack
(
packCString#,
unpackCString,
unpackCString#,
unpackNBytes#,
)
where
import GHC.Base
import GHC.List ( length )
import GHC.ST
import GHC.Ptr
-- | Immutable primitive byte array plus two index fields; the freeze
-- step below stores bounds 0 and the logical length in them.
data ByteArray ix = ByteArray ix ix ByteArray#

-- | Mutable primitive byte array in state thread @s@ with the same
-- index fields.
data MutableByteArray s ix = MutableByteArray ix ix (MutableByteArray# s)

-- | Unpack a C string; 'nullPtr' maps to the empty list, any other
-- address is handed to 'unpackCString#'.
unpackCString :: Ptr a -> [Char]
unpackCString a@(Ptr addr)
  | a == nullPtr = []
  | otherwise = unpackCString# addr

-- | Pack a 'String' and return only the primitive 'ByteArray#'
-- (the bounds recorded by 'packString' are discarded).
packCString# :: [Char] -> ByteArray#
packCString# str = case (packString str) of { ByteArray _ _ bytes -> bytes }

-- | Pure wrapper: run 'packStringST' with 'runST'.
packString :: [Char] -> ByteArray Int
packString str = runST (packStringST str)

-- | Pack a finite string inside 'ST' using its own length.
packStringST :: [Char] -> ST s (ByteArray Int)
packStringST str =
  let len = length str in
  packNBytesST len str
-- | Pack @str@ into a freshly allocated byte array.  The first
-- argument must equal @length str@ (see 'packStringST'); the buffer
-- gets one extra byte for a trailing NUL.
packNBytesST :: Int -> [Char] -> ST s (ByteArray Int)
packNBytesST (I# length#) str =
  {-
   allocate an array that will hold the string
   (not forgetting the NUL byte at the end)
  -}
  new_ps_array (length# +# 1#) >>= \ ch_array ->
  -- copy the characters of "str" into the buffer, then the NUL
  fill_in ch_array 0# str >>
  -- freeze to an immutable ByteArray with upper bound length#
  freeze_ps_array ch_array length#
  where
    -- One character per index; a NUL (chr# 0#) terminates.
    fill_in :: MutableByteArray s Int -> Int# -> [Char] -> ST s ()
    fill_in arr_in# idx [] =
      write_ps_array arr_in# idx (chr# 0#) >>
      return ()
    fill_in arr_in# idx (C# c : cs) =
      write_ps_array arr_in# idx c >>
      fill_in arr_in# (idx +# 1#) cs
( Very :-) ` ` Specialised '' versions of some CharArray things ...
new_ps_array :: Int# -> ST s (MutableByteArray s Int)
write_ps_array :: MutableByteArray s Int -> Int# -> Char# -> ST s ()
freeze_ps_array :: MutableByteArray s Int -> Int# -> ST s (ByteArray Int)
new_ps_array size = ST $ \ s ->
case (newByteArray# size s) of { (# s2#, barr# #) ->
(# s2#, MutableByteArray bot bot barr# #) }
where
bot = errorWithoutStackTrace "new_ps_array"
write_ps_array (MutableByteArray _ _ barr#) n ch = ST $ \ s# ->
case writeCharArray# barr# n ch s# of { s2# ->
(# s2#, () #) }
freeze_ps_array (MutableByteArray _ _ arr#) len# = ST $ \ s# ->
case unsafeFreezeByteArray# arr# s# of { (# s2#, frozen# #) ->
(# s2#, ByteArray 0 (I# len#) frozen# #) }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.