_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
679bc2665c3ccb30acaeade0d1a39cf852bb65be22d330edff4b781182d6917b | glebec/haskell-programming-allen-moronuki | Main.hs | {-# LANGUAGE TypeApplications #-}
module Main where
import Test.Hspec
import Test.QuickCheck
import Control.Monad
import Data.Monoid
import MadLib
import Data.List.NonEmpty
-- Exercise: Optional Monoid
-- | Exercise: Optional Monoid.  A hand-rolled analogue of 'Maybe' used to
-- practice writing Semigroup and Monoid instances.
data Optional a
  = Nada
  | Only a
  deriving (Eq, Show)
-- | 'Nada' is absorbed by either side; two 'Only' values combine their
-- payloads with the underlying Semigroup.
instance Semigroup a => Semigroup (Optional a) where
  Nada <> y = y
  x <> Nada = x
  Only x <> Only y = Only (x <> y)
-- | The identity element for 'Optional' is 'Nada'; 'mappend' defaults to
-- the Semigroup (<>).
instance Monoid a => Monoid (Optional a) where
  mempty = Nada
genOptional :: Arbitrary a => Gen (Optional a)
genOptional = arbitrary >>= \a -> elements [Nada, Only a]
-- genOptional3 :: Arbitrary a => Gen (Optional a, Optional a, Optional a)
-- genOptional3 = (,,) <$> genOptional <*> genOptional <*> genOptional
-- uncurry3 :: (a -> b -> c -> d) -> (a, b, c) -> d
-- uncurry3 f (a, b, c) = f a b c
-- prop_assocOptional :: Property
-- prop_assocOptional = forAll (genOptional3 :: Gen (Optional String, Optional String, Optional String))
-- $ uncurry3 prop_associative
-- | Semigroup associativity law: the grouping of (<>) must not change the
-- result.
prop_associative :: (Semigroup a, Eq a) => a -> a -> a -> Bool
prop_associative x y z = x <> (y <> z) == (x <> y) <> z
-- I felt that my QC code above was overwrought. A reviewer suggested this:
-- | Generator for 'Optional': an even chance of 'Nada' or an 'Only'
-- wrapping an arbitrary payload.
instance Arbitrary a => Arbitrary (Optional a) where
  arbitrary = oneof [return Nada, fmap Only arbitrary]
-- Testing Asc
-- | Generic associativity check for an explicitly supplied binary
-- operation (no Semigroup constraint required).
asc :: Eq a => (a -> a -> a) -> a -> a -> a -> Bool
asc op x y z =
  (x `op` y) `op` z == x `op` (y `op` z)
-- Monoid Identity
-- | Monoid left identity law: @mempty <> a == a@.
prop_monoidLeftId :: (Monoid a, Eq a) => a -> Bool
prop_monoidLeftId x = (mempty <> x) == x
-- | Monoid right identity law: @a <> mempty == a@.
prop_monoidRightId :: (Monoid a, Eq a) => a -> Bool
prop_monoidRightId x = (x <> mempty) == x
-- Bad Monoid
-- | A two-value type used to demonstrate an unlawful Monoid instance.
data Bull
  = Fools
  | Twoo
  deriving (Eq, Show)
-- | Uniform generator over both 'Bull' constructors.
instance Arbitrary Bull where
  arbitrary =
    frequency
      [ (1, pure Fools)
      , (1, pure Twoo)
      ]
-- | Deliberately degenerate: every combination collapses to 'Fools'.
instance Semigroup Bull where
  _ <> _ = Fools
-- | A *false* monoid, kept on purpose: 'Fools' is not an identity for
-- (<>), because @Twoo <> Fools == Fools /= Twoo@.  The test suite below
-- uses 'expectFailure' to show the identity laws break.
instance Monoid Bull where
  mempty = Fools
type BullMappend = Bull -> Bull -> Bull -> Bool
-- badMonoid :: IO ()
-- badMonoid = do
--   let ma = prop_associative
-- mli = prop_monoidLeftId
-- mlr = prop_monoidRightId
-- quickCheck (ma :: BullMappend)
-- quickCheck (mli :: Bull -> Bool)
-- quickCheck (mlr :: Bull -> Bool)
-- Maybe Another Monoid
-- | Wrapper around 'Optional' whose Semigroup keeps the leftmost
-- non-'Nada' value (analogous to 'Data.Monoid.First').
newtype First' a = First'
  { getFirst' :: Optional a }
  deriving (Eq, Show)
-- | Generator: wraps either 'Nada' or an arbitrary 'Only' payload.
instance Arbitrary a => Arbitrary (First' a) where
  arbitrary =
    oneof
      [ return (First' Nada)
      , fmap (First' . Only) arbitrary
      ]
-- | Keep the first non-'Nada' operand; a leading 'Nada' defers to the
-- right-hand side.
instance Semigroup (First' a) where
  First' Nada <> y = y
  x <> _ = x
-- | The identity element wraps 'Nada'.
instance Monoid (First' a) where
  mempty = First' Nada
-- firstMappend :: First' a -> First' a -> First' a
-- firstMappend = mappend
type FirstMappend = First' String -> First' String -> First' String -> Bool
type FstId = First' String -> Bool
-- Semigroups
-- | Orphan instance (exercise code): draw an arbitrary tail first, then
-- an arbitrary head, mirroring the original generator's draw order.
instance Arbitrary a => Arbitrary (NonEmpty a) where
  arbitrary = do
    rest <- arbitrary
    front <- arbitrary
    pure (front :| rest)
-- | Hspec entry point: checks the Semigroup/Monoid laws for each type
-- defined above, and uses 'expectFailure' to demonstrate that the
-- deliberately unlawful 'Bull' instance violates the identity laws.
main :: IO ()
main = hspec $ do
  describe "Optional monoid" $ do
    it "is associative" $
      property (prop_associative @(Optional String))
    it "has a left identity" $
      property (prop_monoidLeftId @(Optional String))
    it "has a right identity" $
      property (prop_monoidRightId @(Optional String))
  describe "Bull" $ do
    it "is associative" $
      property (prop_associative @Bull)
    it "does not have a left identity" $
      expectFailure (prop_monoidLeftId @Bull)
    it "does not have a right identity" $
      expectFailure (prop_monoidRightId @Bull)
  describe "First'" $ do
    it "is associative" $
      property (prop_associative @(First' String))
    it "has a left identity" $
      property (prop_monoidLeftId @(First' String))
    it "has a right identity" $
      property (prop_monoidRightId @(First' String))
  describe "NonEmpty" $
    it "is associative" $
      property (prop_associative @(NonEmpty Char))
| null | https://raw.githubusercontent.com/glebec/haskell-programming-allen-moronuki/99bd232f523e426d18a5e096f1cf771228c55f52/15-monoid-semigroup/projects/p0-scratch/src/Main.hs | haskell | Exercise: Optional Monoid
genOptional3 :: Arbitrary a => Gen (Optional a, Optional a, Optional a)
genOptional3 = (,,) <$> genOptional <*> genOptional <*> genOptional
uncurry3 :: (a -> b -> c -> d) -> (a, b, c) -> d
uncurry3 f (a, b, c) = f a b c
prop_assocOptional :: Property
prop_assocOptional = forAll (genOptional3 :: Gen (Optional String, Optional String, Optional String))
$ uncurry3 prop_associative
Testing Asc
Monoid Identity
this is a false monoid!
badMonoid :: IO ()
mli = prop_monoidLeftId
mlr = prop_monoidRightId
quickCheck (ma :: BullMappend)
quickCheck (mli :: Bull -> Bool)
quickCheck (mlr :: Bull -> Bool)
Maybe Another Monoid
firstMappend = mappend
main = putStrLn "hi"
property prop_assocOptional | # LANGUAGE TypeApplications #
module Main where
import Test.Hspec
import Test.QuickCheck
import Control . Monad
import Data . Monoid
import MadLib
import Data.List.NonEmpty
data Optional a = Nada
| Only a
deriving (Eq, Show)
instance Semigroup a => Semigroup (Optional a) where
(<>) Nada x = x
(<>) x Nada = x
(<>) (Only a) (Only a') = Only (a <> a')
instance Monoid a => Monoid (Optional a) where
mempty = Nada
genOptional : : Arbitrary a = > Gen ( Optional a )
genOptional = arbitrary > > = \a - > elements [ , Only a ]
prop_associative :: (Semigroup a, Eq a) => a -> a -> a -> Bool
prop_associative a b c = (a <> b) <> c == a <> (b <> c)
I felt that my QC code above was overwrought . suggested this :
instance Arbitrary a => Arbitrary (Optional a) where
arbitrary = oneof [pure Nada, Only <$> arbitrary]
asc :: Eq a => (a -> a -> a) -> a -> a -> a -> Bool
asc (<>) a b c =
a <> (b <> c) == (a <> b) <> c
prop_monoidLeftId :: (Monoid a, Eq a) => a -> Bool
prop_monoidLeftId a = mempty <> a == a
prop_monoidRightId :: (Monoid a, Eq a) => a -> Bool
prop_monoidRightId a = a == a <> mempty
Bad Monoid
data Bull = Fools
| Twoo
deriving (Eq, Show)
instance Arbitrary Bull where
arbitrary = frequency [ (1, return Fools) , (1, return Twoo) ]
instance Semigroup Bull where
(<>) _ _ = Fools
mempty = Fools
type BullMappend = Bull - > Bull - > Bull - > Bool
= do
let = prop_associative
newtype First' a =
First' { getFirst' :: Optional a }
deriving (Eq, Show)
instance Arbitrary a => Arbitrary (First' a)
where arbitrary = oneof
[ pure $ First' Nada
, First' . Only <$> arbitrary ]
instance Semigroup (First' a) where
(<>) (First' Nada) x = x
(<>) x _ = x
instance Monoid (First' a) where
mempty = First' Nada
firstMappend : : First ' a - > First ' a - > First ' a
type FirstMappend = First ' String - > First ' String - > First ' String - > Bool
type FstId = First ' String - > Bool
Semigroups
instance Arbitrary a => Arbitrary (NonEmpty a) where
arbitrary = do
xs <- arbitrary
x <- arbitrary
return $ x :| xs
main :: IO ()
main = hspec $ do
describe "Optional monoid" $ do
it "is associative" $
property $ prop_associative @(Optional String)
it "has a left identity" $
property $ prop_monoidLeftId @(Optional String)
it "has a right identity" $
property $ prop_monoidRightId @(Optional String)
describe "Bull" $ do
it "is associative" $
property $ prop_associative @Bull
it "does not have a left identity" $
expectFailure $ prop_monoidLeftId @Bull
it "does not have a right identity" $
expectFailure $ prop_monoidRightId @Bull
describe "First'" $ do
it "is associative" $
property $ prop_associative @(First' String)
it "has a left identity" $
property $ prop_monoidLeftId @(First' String)
it "has a right identity" $
property $ prop_monoidRightId @(First' String)
describe "NonEmpty" $
it "is associative" $
property $ prop_associative @(NonEmpty Char)
|
d63cf6a207ecead10edbefd5b77122de6b7fea98c63d9ce8d58242ca0ddcf74c | toolslive/ordma | lwt_rsocket.mli | type lwt_rsocket
val socket : Unix.socket_domain -> Unix.socket_type -> int -> lwt_rsocket
(* val show : rsocket -> string *)
val identifier : lwt_rsocket -> int
val connect : lwt_rsocket -> Unix.sockaddr -> unit Lwt.t
val close : lwt_rsocket -> unit Lwt.t
val bind : lwt_rsocket -> Unix.sockaddr -> unit
val setsockopt : lwt_rsocket -> Unix.socket_bool_option -> bool -> unit
val listen : lwt_rsocket -> int -> unit
val recv : lwt_rsocket -> bytes -> int -> int -> Unix.msg_flag list -> int Lwt.t
val send : lwt_rsocket -> bytes -> int -> int -> Unix.msg_flag list -> int Lwt.t
val accept : lwt_rsocket -> (lwt_rsocket * Unix.sockaddr ) Lwt.t
open Bigarray
module Bytes : sig
type t = (char, int8_unsigned_elt, c_layout) Array1.t
val create : int -> t
val send : lwt_rsocket -> t -> int -> int -> Unix.msg_flag list -> int Lwt.t
val recv : lwt_rsocket -> t -> int -> int -> Unix.msg_flag list -> int Lwt.t
end
(* class rpoll : Lwt_engine.t *)
class rselect : Lwt_engine.t
| null | https://raw.githubusercontent.com/toolslive/ordma/e430cb48677a6c0c7847c00118c5eb4ebedebe2c/lwt_rsocket.mli | ocaml | val show : rsocket -> string
class rpoll : Lwt_engine.t | type lwt_rsocket
val socket : Unix.socket_domain -> Unix.socket_type -> int -> lwt_rsocket
val identifier : lwt_rsocket -> int
val connect : lwt_rsocket -> Unix.sockaddr -> unit Lwt.t
val close : lwt_rsocket -> unit Lwt.t
val bind : lwt_rsocket -> Unix.sockaddr -> unit
val setsockopt : lwt_rsocket -> Unix.socket_bool_option -> bool -> unit
val listen : lwt_rsocket -> int -> unit
val recv : lwt_rsocket -> bytes -> int -> int -> Unix.msg_flag list -> int Lwt.t
val send : lwt_rsocket -> bytes -> int -> int -> Unix.msg_flag list -> int Lwt.t
val accept : lwt_rsocket -> (lwt_rsocket * Unix.sockaddr ) Lwt.t
open Bigarray
module Bytes : sig
type t = (char, int8_unsigned_elt, c_layout) Array1.t
val create : int -> t
val send : lwt_rsocket -> t -> int -> int -> Unix.msg_flag list -> int Lwt.t
val recv : lwt_rsocket -> t -> int -> int -> Unix.msg_flag list -> int Lwt.t
end
class rselect : Lwt_engine.t
|
e126b3becc8ba034345ff650c71afade7d209f2e22fb3cdd2862c9938cb0523f | gretay-js/ocamlfdo | clusters.ml | Adaptation of [ 1 ] to basic blocks , influenced by [ 2 ] . The use of LBR
profile information for calculating basic - block level execution counts
used here is based on algorithms described in [ 3 ] . Collection and decoding
of profile information is inspired by [ 4 ] .
[ 1 ] Optimizing function placement for large - scale data - center
applications . and . In Proceedings of the
2017 International Symposium on Code Generation and Optimization ( CGO
2017 ) .
[ 2 ] BOLT : A Practical Binary Optimizer for Data Centers and Beyond .
, , , and . In Proceedings
of 2019 International Symposium on Code Generation and Optimization ( CGO
2019 ) .
[ 3 ] Taming Hardware Event Samples for Precise and Versatile Feedback
Directed Optimizations . , , ,
, , , Published in
IEEE Transactions on Computers 2013
[ 4 ] AutoFDO : automatic feedback - directed optimization for warehouse - scale
applications . , , and . 2016 . In
Proceedings of the 2016 International Symposium on Code Generation and
Optimization ( CGO ' 16 ) .
profile information for calculating basic-block level execution counts
used here is based on algorithms described in [3]. Collection and decoding
of profile information is inspired by [4].
[1] Optimizing function placement for large-scale data-center
applications. Guilherme Ottoni and Bertrand Maher. In Proceedings of the
2017 International Symposium on Code Generation and Optimization (CGO
2017).
[2] BOLT: A Practical Binary Optimizer for Data Centers and Beyond. Maksim
Panchenko, Rafael Auler, Bill Nell, and Guilherme Ottoni. In Proceedings
of 2019 International Symposium on Code Generation and Optimization (CGO
2019).
[3] Taming Hardware Event Samples for Precise and Versatile Feedback
Directed Optimizations. Dehao Chen, Neil Vachharajani, Robert Hundt,
Xinliang D. Li, Stéphane Eranian, Wenguang Chen, Weimin Zheng Published in
IEEE Transactions on Computers 2013
[4] AutoFDO: automatic feedback-directed optimization for warehouse-scale
applications. Dehao Chen, David Xinliang Li, and Tipp Moseley. 2016. In
Proceedings of the 2016 International Symposium on Code Generation and
Optimization (CGO '16). *)
open Core
(* CR-someday xclerc: the various operations in this files (sorting,
extracting the argmin/argmax, etc) seem to indicate that we might
want to switch from lists to e.g. heaps. However, the execution
profile shows that the tool does not spend much time in the clustering,
so this should be low-priority (or even just discarded). *)
let verbose = ref true
type clusterid = int [@@deriving compare]
type weight = int64 [@@deriving compare]
let entry_pos = 0
(* Invariant: the weight of a cluster is the sum of the weights of the data
it represents. *)
(* Invariant: weights must be non-negative. *)
(* Invariant: position of the cluster is the smallest position of any item it
represents. This is a heuristic, see [Note1] *)
type 'd cluster =
{ id : clusterid; (** unique id of the cluster *)
pos : int; (** the smallest, index in the original layout *)
weight : weight; (** weight *)
items : 'd list; (** data items represented by this cluster. *)
mutable can_be_merged : bool
(** [a.can_be_merged] is [false] means that [a] cannot be placed in a
cluster _after_ another [b] **)
}
type edge =
{ src : clusterid;
dst : clusterid;
weight : weight
}
[@@deriving compare]
(* Directed graph whose nodes are clusters. *)
type 'd t =
{ next_id : clusterid;
(* used to create unique cluster ids *)
clusters : 'd cluster list;
edges : edge list;
original_layout : 'd list
}
let id_to_cluster t id = List.find_exn t.clusters ~f:(fun c -> c.id = id)
let _find t data =
List.find t.clusters ~f:(fun c ->
List.mem c.items data ~equal:(fun d1 d2 -> d1 = d2))
let init_layout original_layout execounts =
let open Block_info in
(* Makes a singleton cluster. data, id and pos must be unique *)
let mk_node ~data ~weight ~pos ~id =
assert (Int64.(weight >= 0L));
(* Cluster that contains the entry position of the original layout cannot
be merged *after* another cluster. *)
let can_be_merged = not (pos = entry_pos) in
{ id; weight; items = [data]; pos; can_be_merged }
in
let mk_edge ~src ~dst ~weight =
assert (Int64.(weight >= 0L));
{ src; dst; weight }
in
Initialize each block in its own cluster : cluster i d is block 's position
in the original layout , data is block label , weight is block 's execution
count , or 0 if there is no info .
in the original layout, data is block label, weight is block's execution
count, or 0 if there is no info. *)
let clusters =
List.mapi original_layout ~f:(fun pos data ->
let weight =
match Cfg_info.get_block execounts data with
| None -> 0L
| Some block_info -> block_info.count
in
mk_node ~pos ~id:pos ~data ~weight)
in
(* Add all branch info *)
let label2pos =
List.foldi original_layout ~init:Int.Map.empty ~f:(fun i acc data ->
Int.Map.add_exn acc ~key:data ~data:i)
in
let find_pos label = Map.find_exn label2pos label in
let edges =
Cfg_info.fold execounts ~init:[] ~f:(fun ~key:_ ~data:block_info acc ->
let src = find_pos block_info.label in
List.fold block_info.branches ~init:acc ~f:(fun acc b ->
if b.intra then
let dst = find_pos (Option.value_exn b.target_label) in
let e =
mk_edge ~src ~dst ~weight:b.taken
(* CR-someday gyorsh: can we factor in mispredicted? *)
in
e :: acc
else acc))
in
{ next_id = List.length clusters; clusters; edges; original_layout }
(* Compare clusters using their weight, in descending order. Tie breaker
using their position in the orginal layout, if available. Otherwise, using
their ids which are unique. Clusters that cannot be merged are at the end,
ordered amongst them in the same way. *)
let cluster_compare_frozen c1 c2 =
if Bool.equal c1.can_be_merged c2.can_be_merged then
let res = compare_weight c2.weight c1.weight in
if res = 0 then
let res = Int.compare c1.pos c2.pos in
if res = 0 then Int.compare c1.id c2.id else res
else res
else if c1.can_be_merged then -1
else 1
let cluster_compare_pos c1 c2 =
let res = Int.compare c1.pos c2.pos in
if res = 0 then
let res = compare_weight c1.weight c2.weight in
if res = 0 then Int.compare c1.id c2.id else res
else res
let _get_cluster t id = List.find_exn t.clusters ~f:(fun c -> c.id = id)
(* Compare edges using weights, in descending order. Tie breaker on sources
first, then on destinations. *)
let edge_compare e1 e2 =
let res = compare_weight e2.weight e1.weight in
if res = 0 then
let res = compare_clusterid e1.src e2.src in
if res = 0 then compare_clusterid e1.dst e2.dst else res
else res
(* Merge two clusters, laying out their components in order as c1@c2. *)
let merge t c1 c2 =
assert c2.can_be_merged;
assert (c1.id <> c2.id);
let id = t.next_id in
let next_id = id + 1 in
The new pos is the minimal pos of the two clusters we merged .
let pos = min c1.pos c2.pos in
let can_be_merged = not (pos = entry_pos) in
let c =
{ id;
pos;
can_be_merged;
weight = Int64.(c1.weight + c2.weight);
(* For layout, preserve the order of the input items lists. *)
items = c1.items @ c2.items
}
in
(* Add the new cluster at the front and remove the c1 and c2. *)
let clusters =
c
:: List.filter t.clusters ~f:(fun c ->
not (c.id = c1.id || c.id = c2.id))
in
(* Find all edges with endpoints c1 and c2, and replace them with c. *)
let updated, preserved =
List.partition_map t.edges ~f:(fun edge ->
let s = edge.src in
let d = edge.dst in
let src = if s = c1.id || s = c2.id then c.id else s in
let dst = if d = c1.id || d = c2.id then c.id else d in
if src = edge.src && dst = edge.dst then Second edge
else First { src; dst; weight = edge.weight })
in
(* Update the weights of the edges whose endpoints were updated. *)
(* Preserve the invariant that the edges are unique pairs of (src,dst).
This is temporarily violated by the update above if for example the
edges c1->c3 and c2->c3 are both present in the input. Merge them by
adding their weights up. *)
let sorted = List.sort updated ~compare:compare_edge in
let merged =
List.fold sorted ~init:[] ~f:(fun acc e1 ->
if e1.src = e1.dst then (* remove self-loops *)
acc
else
match acc with
| e2 :: rest when e2.src = e1.src && e2.dst = e1.dst ->
let e =
{ src = e1.src;
dst = e1.dst;
weight = Int64.(e1.weight + e2.weight)
}
in
e :: rest
| _ -> e1 :: acc)
in
let edges = merged @ preserved in
{ t with edges; clusters; next_id }
let find_max_pred t c =
let max =
List.fold t.edges ~init:None ~f:(fun max e ->
if e.dst = c.id && (* ignore self loops *) not (e.src = c.id) then
match max with
| None -> Some e
| Some me -> if edge_compare me e < 0 then Some e else max
else max)
in
match max with
| None -> None
| Some max -> Some max.src
Order clusters by their execution counts , descending , tie breaker using
original layout , as in cluster 's compare function . Choose the cluster with
the highest weight . Find its " most likely predecessor " cluster i.e. , the
predecessor with the highest edge weight , tie breaker using original
layout . Merge the two clusters . Repeat until all clusters are merged into
a single cluster . Return its data .
original layout, as in cluster's compare function. Choose the cluster with
the highest weight. Find its "most likely predecessor" cluster i.e., the
predecessor with the highest edge weight, tie breaker using original
layout. Merge the two clusters. Repeat until all clusters are merged into
a single cluster. Return its data. *)
let optimize_layout original_layout execounts =
let t = init_layout original_layout execounts in
let len_clusters = List.length t.clusters in
let len_layout = List.length t.original_layout in
if not (len_layout = len_clusters) then
Report.user_error "layout length doesn't match cluster length."
else if len_layout = 0 then (
if !verbose then printf "Optimize layout called with empty layout.\n";
[] )
else
(* Invariant preserved: clusters that can be merged are ordered by weight
and pos, followed by clusters that cannot be merged. The new cluster
has the highest weight, amongst ones that can be merged, because it
takes the previously highest weight cluster cur and adds a
non-negative weight of pred to it. If a cluster has no predecessors,
it is moved to the end. *)
let clusters = List.sort t.clusters ~compare:cluster_compare_frozen in
let t = { t with clusters } in
CR - someday xclerc : it might be slightly more efficient to keep two
collections here : the clusters that can be merged and those that
can not .
collections here: the clusters that can be merged and those that
cannot. *)
let rec loop t step =
match t.clusters with
| [] -> []
| c :: rest ->
if c.can_be_merged then (
if !verbose then printf "Step %d: merging cluster %d\n" step c.id;
match find_max_pred t c with
| None ->
(* Cluster c is not reachable from within the function using
any of the edges that we have for clustering, i.e., edges
that have weights. Move c to the end of the layout, after
marking that it cannot be merged. *)
if !verbose then printf "No predecessor for %d\n" c.id;
c.can_be_merged <- false;
let t = { t with clusters = rest @ [c] } in
loop t (step + 1)
| Some pred_id ->
let pred = id_to_cluster t pred_id in
if !verbose then
printf "Found pred %d weight=%Ld\n" pred.id pred.weight;
let t = merge t pred c in
loop t (step + 1) )
else
(* Cannot merge any more clusters. Sort the remaining clusters in
original layout order. This guarantees that the entry is at
the front. *)
let print msg x =
let x = List.map x ~f:(fun c -> c.items) in
Printf.printf !"%s: %{sexp: int list list}\n" msg x
in
if !verbose then print "clusters" t.clusters;
let clusters =
List.sort t.clusters ~compare:cluster_compare_pos
in
(* Merge their lists *)
if !verbose then print "sorted" clusters;
let layout =
List.concat_map clusters ~f:(fun c ->
List.sort ~compare:Int.compare c.items)
in
if !verbose then printf "Finished in %d steps\n" step;
layout
in
loop t 0
[ Note1 ] Position of cluster .
This is a heuristic that orders merged clusters with equal weights
* in a way that respects the original layout
* of the data they contain as must as possible .
* The goal is to preserve original layout 's fallthroughs
* if there is no profile for them , i.e. , no strong indication
* that they should be reordered .
* An alternative is to set it to uninitialized_pos , which will order
* all merged clusters before original clusters
* because uninitialized_pos = -1 < pos from original layout .
* This will result in hotter blocks ordered earlier .
This is a heuristic that orders merged clusters with equal weights
* in a way that respects the original layout
* of the data they contain as must as possible.
* The goal is to preserve original layout's fallthroughs
* if there is no profile for them, i.e., no strong indication
* that they should be reordered.
* An alternative is to set it to uninitialized_pos, which will order
* all merged clusters before original clusters
* because uninitialized_pos = -1 < pos from original layout.
* This will result in hotter blocks ordered earlier. *)
| null | https://raw.githubusercontent.com/gretay-js/ocamlfdo/5866fe9c2bfea03bc7efb033cc7b91a3a25cf520/src/clusters.ml | ocaml | CR-someday xclerc: the various operations in this files (sorting,
extracting the argmin/argmax, etc) seem to indicate that we might
want to switch from lists to e.g. heaps. However, the execution
profile shows that the tool does not spend much time in the clustering,
so this should be low-priority (or even just discarded).
Invariant: the weight of a cluster is the sum of the weights of the data
it represents.
Invariant: weights must be non-negative.
Invariant: position of the cluster is the smallest position of any item it
represents. This is a heuristic, see [Note1]
* unique id of the cluster
* the smallest, index in the original layout
* weight
* data items represented by this cluster.
* [a.can_be_merged] is [false] means that [a] cannot be placed in a
cluster _after_ another [b] *
Directed graph whose nodes are clusters.
used to create unique cluster ids
Makes a singleton cluster. data, id and pos must be unique
Cluster that contains the entry position of the original layout cannot
be merged *after* another cluster.
Add all branch info
CR-someday gyorsh: can we factor in mispredicted?
Compare clusters using their weight, in descending order. Tie breaker
using their position in the orginal layout, if available. Otherwise, using
their ids which are unique. Clusters that cannot be merged are at the end,
ordered amongst them in the same way.
Compare edges using weights, in descending order. Tie breaker on sources
first, then on destinations.
Merge two clusters, laying out their components in order as c1@c2.
For layout, preserve the order of the input items lists.
Add the new cluster at the front and remove the c1 and c2.
Find all edges with endpoints c1 and c2, and replace them with c.
Update the weights of the edges whose endpoints were updated.
Preserve the invariant that the edges are unique pairs of (src,dst).
This is temporarily violated by the update above if for example the
edges c1->c3 and c2->c3 are both present in the input. Merge them by
adding their weights up.
remove self-loops
ignore self loops
Invariant preserved: clusters that can be merged are ordered by weight
and pos, followed by clusters that cannot be merged. The new cluster
has the highest weight, amongst ones that can be merged, because it
takes the previously highest weight cluster cur and adds a
non-negative weight of pred to it. If a cluster has no predecessors,
it is moved to the end.
Cluster c is not reachable from within the function using
any of the edges that we have for clustering, i.e., edges
that have weights. Move c to the end of the layout, after
marking that it cannot be merged.
Cannot merge any more clusters. Sort the remaining clusters in
original layout order. This guarantees that the entry is at
the front.
Merge their lists | Adaptation of [ 1 ] to basic blocks , influenced by [ 2 ] . The use of LBR
profile information for calculating basic - block level execution counts
used here is based on algorithms described in [ 3 ] . Collection and decoding
of profile information is inspired by [ 4 ] .
[ 1 ] Optimizing function placement for large - scale data - center
applications . and . In Proceedings of the
2017 International Symposium on Code Generation and Optimization ( CGO
2017 ) .
[ 2 ] BOLT : A Practical Binary Optimizer for Data Centers and Beyond .
, , , and . In Proceedings
of 2019 International Symposium on Code Generation and Optimization ( CGO
2019 ) .
[ 3 ] Taming Hardware Event Samples for Precise and Versatile Feedback
Directed Optimizations . , , ,
, , , Published in
IEEE Transactions on Computers 2013
[ 4 ] AutoFDO : automatic feedback - directed optimization for warehouse - scale
applications . , , and . 2016 . In
Proceedings of the 2016 International Symposium on Code Generation and
Optimization ( CGO ' 16 ) .
profile information for calculating basic-block level execution counts
used here is based on algorithms described in [3]. Collection and decoding
of profile information is inspired by [4].
[1] Optimizing function placement for large-scale data-center
applications. Guilherme Ottoni and Bertrand Maher. In Proceedings of the
2017 International Symposium on Code Generation and Optimization (CGO
2017).
[2] BOLT: A Practical Binary Optimizer for Data Centers and Beyond. Maksim
Panchenko, Rafael Auler, Bill Nell, and Guilherme Ottoni. In Proceedings
of 2019 International Symposium on Code Generation and Optimization (CGO
2019).
[3] Taming Hardware Event Samples for Precise and Versatile Feedback
Directed Optimizations. Dehao Chen, Neil Vachharajani, Robert Hundt,
Xinliang D. Li, Stéphane Eranian, Wenguang Chen, Weimin Zheng Published in
IEEE Transactions on Computers 2013
[4] AutoFDO: automatic feedback-directed optimization for warehouse-scale
applications. Dehao Chen, David Xinliang Li, and Tipp Moseley. 2016. In
Proceedings of the 2016 International Symposium on Code Generation and
Optimization (CGO '16). *)
open Core
let verbose = ref true
type clusterid = int [@@deriving compare]
type weight = int64 [@@deriving compare]
let entry_pos = 0
type 'd cluster =
mutable can_be_merged : bool
}
type edge =
{ src : clusterid;
dst : clusterid;
weight : weight
}
[@@deriving compare]
type 'd t =
{ next_id : clusterid;
clusters : 'd cluster list;
edges : edge list;
original_layout : 'd list
}
let id_to_cluster t id = List.find_exn t.clusters ~f:(fun c -> c.id = id)
let _find t data =
List.find t.clusters ~f:(fun c ->
List.mem c.items data ~equal:(fun d1 d2 -> d1 = d2))
let init_layout original_layout execounts =
let open Block_info in
let mk_node ~data ~weight ~pos ~id =
assert (Int64.(weight >= 0L));
let can_be_merged = not (pos = entry_pos) in
{ id; weight; items = [data]; pos; can_be_merged }
in
let mk_edge ~src ~dst ~weight =
assert (Int64.(weight >= 0L));
{ src; dst; weight }
in
Initialize each block in its own cluster : cluster i d is block 's position
in the original layout , data is block label , weight is block 's execution
count , or 0 if there is no info .
in the original layout, data is block label, weight is block's execution
count, or 0 if there is no info. *)
let clusters =
List.mapi original_layout ~f:(fun pos data ->
let weight =
match Cfg_info.get_block execounts data with
| None -> 0L
| Some block_info -> block_info.count
in
mk_node ~pos ~id:pos ~data ~weight)
in
let label2pos =
List.foldi original_layout ~init:Int.Map.empty ~f:(fun i acc data ->
Int.Map.add_exn acc ~key:data ~data:i)
in
let find_pos label = Map.find_exn label2pos label in
let edges =
Cfg_info.fold execounts ~init:[] ~f:(fun ~key:_ ~data:block_info acc ->
let src = find_pos block_info.label in
List.fold block_info.branches ~init:acc ~f:(fun acc b ->
if b.intra then
let dst = find_pos (Option.value_exn b.target_label) in
let e =
mk_edge ~src ~dst ~weight:b.taken
in
e :: acc
else acc))
in
{ next_id = List.length clusters; clusters; edges; original_layout }
let cluster_compare_frozen c1 c2 =
if Bool.equal c1.can_be_merged c2.can_be_merged then
let res = compare_weight c2.weight c1.weight in
if res = 0 then
let res = Int.compare c1.pos c2.pos in
if res = 0 then Int.compare c1.id c2.id else res
else res
else if c1.can_be_merged then -1
else 1
let cluster_compare_pos c1 c2 =
let res = Int.compare c1.pos c2.pos in
if res = 0 then
let res = compare_weight c1.weight c2.weight in
if res = 0 then Int.compare c1.id c2.id else res
else res
let _get_cluster t id = List.find_exn t.clusters ~f:(fun c -> c.id = id)
let edge_compare e1 e2 =
let res = compare_weight e2.weight e1.weight in
if res = 0 then
let res = compare_clusterid e1.src e2.src in
if res = 0 then compare_clusterid e1.dst e2.dst else res
else res
let merge t c1 c2 =
assert c2.can_be_merged;
assert (c1.id <> c2.id);
let id = t.next_id in
let next_id = id + 1 in
The new pos is the minimal pos of the two clusters we merged .
let pos = min c1.pos c2.pos in
let can_be_merged = not (pos = entry_pos) in
let c =
{ id;
pos;
can_be_merged;
weight = Int64.(c1.weight + c2.weight);
items = c1.items @ c2.items
}
in
let clusters =
c
:: List.filter t.clusters ~f:(fun c ->
not (c.id = c1.id || c.id = c2.id))
in
let updated, preserved =
List.partition_map t.edges ~f:(fun edge ->
let s = edge.src in
let d = edge.dst in
let src = if s = c1.id || s = c2.id then c.id else s in
let dst = if d = c1.id || d = c2.id then c.id else d in
if src = edge.src && dst = edge.dst then Second edge
else First { src; dst; weight = edge.weight })
in
let sorted = List.sort updated ~compare:compare_edge in
let merged =
List.fold sorted ~init:[] ~f:(fun acc e1 ->
acc
else
match acc with
| e2 :: rest when e2.src = e1.src && e2.dst = e1.dst ->
let e =
{ src = e1.src;
dst = e1.dst;
weight = Int64.(e1.weight + e2.weight)
}
in
e :: rest
| _ -> e1 :: acc)
in
let edges = merged @ preserved in
{ t with edges; clusters; next_id }
let find_max_pred t c =
let max =
List.fold t.edges ~init:None ~f:(fun max e ->
match max with
| None -> Some e
| Some me -> if edge_compare me e < 0 then Some e else max
else max)
in
match max with
| None -> None
| Some max -> Some max.src
Order clusters by their execution counts , descending , tie breaker using
original layout , as in cluster 's compare function . Choose the cluster with
the highest weight . Find its " most likely predecessor " cluster i.e. , the
predecessor with the highest edge weight , tie breaker using original
layout . Merge the two clusters . Repeat until all clusters are merged into
a single cluster . Return its data .
original layout, as in cluster's compare function. Choose the cluster with
the highest weight. Find its "most likely predecessor" cluster i.e., the
predecessor with the highest edge weight, tie breaker using original
layout. Merge the two clusters. Repeat until all clusters are merged into
a single cluster. Return its data. *)
let optimize_layout original_layout execounts =
let t = init_layout original_layout execounts in
let len_clusters = List.length t.clusters in
let len_layout = List.length t.original_layout in
if not (len_layout = len_clusters) then
Report.user_error "layout length doesn't match cluster length."
else if len_layout = 0 then (
if !verbose then printf "Optimize layout called with empty layout.\n";
[] )
else
let clusters = List.sort t.clusters ~compare:cluster_compare_frozen in
let t = { t with clusters } in
CR - someday xclerc : it might be slightly more efficient to keep two
collections here : the clusters that can be merged and those that
can not .
collections here: the clusters that can be merged and those that
cannot. *)
let rec loop t step =
match t.clusters with
| [] -> []
| c :: rest ->
if c.can_be_merged then (
if !verbose then printf "Step %d: merging cluster %d\n" step c.id;
match find_max_pred t c with
| None ->
if !verbose then printf "No predecessor for %d\n" c.id;
c.can_be_merged <- false;
let t = { t with clusters = rest @ [c] } in
loop t (step + 1)
| Some pred_id ->
let pred = id_to_cluster t pred_id in
if !verbose then
printf "Found pred %d weight=%Ld\n" pred.id pred.weight;
let t = merge t pred c in
loop t (step + 1) )
else
let print msg x =
let x = List.map x ~f:(fun c -> c.items) in
Printf.printf !"%s: %{sexp: int list list}\n" msg x
in
if !verbose then print "clusters" t.clusters;
let clusters =
List.sort t.clusters ~compare:cluster_compare_pos
in
if !verbose then print "sorted" clusters;
let layout =
List.concat_map clusters ~f:(fun c ->
List.sort ~compare:Int.compare c.items)
in
if !verbose then printf "Finished in %d steps\n" step;
layout
in
loop t 0
[ Note1 ] Position of cluster .
This is a heuristic that orders merged clusters with equal weights
* in a way that respects the original layout
* of the data they contain as must as possible .
* The goal is to preserve original layout 's fallthroughs
* if there is no profile for them , i.e. , no strong indication
* that they should be reordered .
* An alternative is to set it to uninitialized_pos , which will order
* all merged clusters before original clusters
* because uninitialized_pos = -1 < pos from original layout .
* This will result in hotter blocks ordered earlier .
This is a heuristic that orders merged clusters with equal weights
* in a way that respects the original layout
* of the data they contain as must as possible.
* The goal is to preserve original layout's fallthroughs
* if there is no profile for them, i.e., no strong indication
* that they should be reordered.
* An alternative is to set it to uninitialized_pos, which will order
* all merged clusters before original clusters
* because uninitialized_pos = -1 < pos from original layout.
* This will result in hotter blocks ordered earlier. *)
|
7a05ce33634bd0a580ca36e6645f0ab1b259896588d50ff683f87c7df39b35f4 | onedata/op-worker | storage_import_delete_and_links_test_SUITE.erl | %%%--------------------------------------------------------------------
@author
( C ) 2016 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
%%% @end
%%%--------------------------------------------------------------------
%%% @doc This module tests storage import
%%% @end
%%%-------------------------------------------------------------------
-module(storage_import_delete_and_links_test_SUITE).
-author("Jakub Kudzia").
-include("modules/fslogic/fslogic_common.hrl").
-include_lib("ctool/include/test/performance.hrl").
%% export for ct
-export([all/0, init_per_suite/1, end_per_suite/1, init_per_testcase/2,
end_per_testcase/2]).
%% tests
-export([
delete_empty_directory_update_test/1,
delete_non_empty_directory_update_test/1,
sync_works_properly_after_delete_test/1,
delete_and_update_files_simultaneously_update_test/1,
delete_file_update_test/1,
delete_file_in_dir_update_test/1,
delete_many_subfiles_test/1,
create_delete_race_test/1,
create_list_race_test/1,
properly_handle_hardlink_when_file_and_hardlink_are_not_deleted/1,
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_not_deleted/1,
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_not_deleted/1,
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted_when_opened/1,
properly_handle_hardlink_when_file_and_hardlink_are_deleted_when_opened/1,
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_deleted_when_opened/1,
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted/1,
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_deleted/1,
properly_handle_hardlink_when_file_and_hardlink_are_deleted/1,
symlink_is_ignored_by_initial_scan/1,
symlink_is_ignored_by_continuous_scan/1
]).
-define(TEST_CASES, [
delete_empty_directory_update_test,
delete_non_empty_directory_update_test,
sync_works_properly_after_delete_test,
delete_and_update_files_simultaneously_update_test,
delete_file_update_test,
delete_file_in_dir_update_test,
delete_many_subfiles_test,
create_delete_race_test,
create_list_race_test,
properly_handle_hardlink_when_file_and_hardlink_are_not_deleted,
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_not_deleted,
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_not_deleted,
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted_when_opened,
properly_handle_hardlink_when_file_and_hardlink_are_deleted_when_opened,
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_deleted_when_opened,
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted,
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_deleted,
properly_handle_hardlink_when_file_and_hardlink_are_deleted,
symlink_is_ignored_by_initial_scan,
symlink_is_ignored_by_continuous_scan
]).
all() -> ?ALL(?TEST_CASES).
%%%==================================================================
%%% Test functions
%%%===================================================================
delete_empty_directory_update_test(Config) ->
storage_import_test_base:delete_empty_directory_update_test(Config).
delete_non_empty_directory_update_test(Config) ->
storage_import_test_base:delete_non_empty_directory_update_test(Config).
sync_works_properly_after_delete_test(Config) ->
storage_import_test_base:sync_works_properly_after_delete_test(Config).
delete_and_update_files_simultaneously_update_test(Config) ->
storage_import_test_base:delete_and_update_files_simultaneously_update_test(Config).
delete_file_update_test(Config) ->
storage_import_test_base:delete_file_update_test(Config).
delete_file_in_dir_update_test(Config) ->
storage_import_test_base:delete_file_in_dir_update_test(Config).
delete_many_subfiles_test(Config) ->
storage_import_test_base:delete_many_subfiles_test(Config).
create_delete_race_test(Config) ->
storage_import_test_base:create_delete_race_test(Config, ?POSIX_HELPER_NAME).
create_list_race_test(Config) ->
storage_import_test_base:create_list_race_test(Config).
properly_handle_hardlink_when_file_and_hardlink_are_not_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_and_hardlink_are_not_deleted(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_not_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_not_deleted(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_not_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_not_deleted(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted_when_opened(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted_when_opened(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_and_hardlink_are_deleted_when_opened(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_and_hardlink_are_deleted_when_opened(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_deleted_when_opened(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_deleted_when_opened(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_deleted(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_and_hardlink_are_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_and_hardlink_are_deleted(Config, ?POSIX_HELPER_NAME).
symlink_is_ignored_by_initial_scan(Config) ->
storage_import_test_base:symlink_is_ignored_by_initial_scan(Config).
symlink_is_ignored_by_continuous_scan(Config) ->
storage_import_test_base:symlink_is_ignored_by_continuous_scan(Config, ?POSIX_HELPER_NAME).
%===================================================================
SetUp and TearDown functions
%===================================================================
init_per_suite(Config) ->
storage_import_test_base:init_per_suite(Config).
end_per_suite(Config) ->
storage_import_test_base:end_per_suite(Config).
init_per_testcase(Case, Config) ->
storage_import_test_base:init_per_testcase(Case, Config).
end_per_testcase(_Case, Config) ->
storage_import_test_base:end_per_testcase(_Case, Config). | null | https://raw.githubusercontent.com/onedata/op-worker/74254143340d6783cc90a51d971bca67536267ff/test_distributed/storage_import_delete_and_links_test_SUITE.erl | erlang | --------------------------------------------------------------------
@end
--------------------------------------------------------------------
@doc This module tests storage import
@end
-------------------------------------------------------------------
export for ct
tests
==================================================================
Test functions
===================================================================
===================================================================
=================================================================== | @author
( C ) 2016 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(storage_import_delete_and_links_test_SUITE).
-author("Jakub Kudzia").
-include("modules/fslogic/fslogic_common.hrl").
-include_lib("ctool/include/test/performance.hrl").
-export([all/0, init_per_suite/1, end_per_suite/1, init_per_testcase/2,
end_per_testcase/2]).
-export([
delete_empty_directory_update_test/1,
delete_non_empty_directory_update_test/1,
sync_works_properly_after_delete_test/1,
delete_and_update_files_simultaneously_update_test/1,
delete_file_update_test/1,
delete_file_in_dir_update_test/1,
delete_many_subfiles_test/1,
create_delete_race_test/1,
create_list_race_test/1,
properly_handle_hardlink_when_file_and_hardlink_are_not_deleted/1,
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_not_deleted/1,
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_not_deleted/1,
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted_when_opened/1,
properly_handle_hardlink_when_file_and_hardlink_are_deleted_when_opened/1,
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_deleted_when_opened/1,
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted/1,
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_deleted/1,
properly_handle_hardlink_when_file_and_hardlink_are_deleted/1,
symlink_is_ignored_by_initial_scan/1,
symlink_is_ignored_by_continuous_scan/1
]).
-define(TEST_CASES, [
delete_empty_directory_update_test,
delete_non_empty_directory_update_test,
sync_works_properly_after_delete_test,
delete_and_update_files_simultaneously_update_test,
delete_file_update_test,
delete_file_in_dir_update_test,
delete_many_subfiles_test,
create_delete_race_test,
create_list_race_test,
properly_handle_hardlink_when_file_and_hardlink_are_not_deleted,
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_not_deleted,
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_not_deleted,
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted_when_opened,
properly_handle_hardlink_when_file_and_hardlink_are_deleted_when_opened,
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_deleted_when_opened,
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted,
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_deleted,
properly_handle_hardlink_when_file_and_hardlink_are_deleted,
symlink_is_ignored_by_initial_scan,
symlink_is_ignored_by_continuous_scan
]).
all() -> ?ALL(?TEST_CASES).
delete_empty_directory_update_test(Config) ->
storage_import_test_base:delete_empty_directory_update_test(Config).
delete_non_empty_directory_update_test(Config) ->
storage_import_test_base:delete_non_empty_directory_update_test(Config).
sync_works_properly_after_delete_test(Config) ->
storage_import_test_base:sync_works_properly_after_delete_test(Config).
delete_and_update_files_simultaneously_update_test(Config) ->
storage_import_test_base:delete_and_update_files_simultaneously_update_test(Config).
delete_file_update_test(Config) ->
storage_import_test_base:delete_file_update_test(Config).
delete_file_in_dir_update_test(Config) ->
storage_import_test_base:delete_file_in_dir_update_test(Config).
delete_many_subfiles_test(Config) ->
storage_import_test_base:delete_many_subfiles_test(Config).
create_delete_race_test(Config) ->
storage_import_test_base:create_delete_race_test(Config, ?POSIX_HELPER_NAME).
create_list_race_test(Config) ->
storage_import_test_base:create_list_race_test(Config).
properly_handle_hardlink_when_file_and_hardlink_are_not_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_and_hardlink_are_not_deleted(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_not_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_not_deleted(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_not_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_not_deleted(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted_when_opened(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted_when_opened(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_and_hardlink_are_deleted_when_opened(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_and_hardlink_are_deleted_when_opened(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_deleted_when_opened(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_deleted_and_hardlink_is_deleted_when_opened(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_not_deleted_and_hardlink_is_deleted(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_is_deleted_when_opened_and_hardlink_is_deleted(Config, ?POSIX_HELPER_NAME).
properly_handle_hardlink_when_file_and_hardlink_are_deleted(Config) ->
storage_import_test_base:properly_handle_hardlink_when_file_and_hardlink_are_deleted(Config, ?POSIX_HELPER_NAME).
symlink_is_ignored_by_initial_scan(Config) ->
storage_import_test_base:symlink_is_ignored_by_initial_scan(Config).
symlink_is_ignored_by_continuous_scan(Config) ->
storage_import_test_base:symlink_is_ignored_by_continuous_scan(Config, ?POSIX_HELPER_NAME).
SetUp and TearDown functions
init_per_suite(Config) ->
storage_import_test_base:init_per_suite(Config).
end_per_suite(Config) ->
storage_import_test_base:end_per_suite(Config).
init_per_testcase(Case, Config) ->
storage_import_test_base:init_per_testcase(Case, Config).
end_per_testcase(_Case, Config) ->
storage_import_test_base:end_per_testcase(_Case, Config). |
221b39f3a1c5840c7911f5fb817ce1492ec57eabb2d5d77831bc6d8ccd9effd2 | khibino/haskell-relational-record | SqlSyntax.hs | -- |
-- Module : Database.Relational.SqlSyntax
Copyright : 2017 - 2019
-- License : BSD3
--
-- Maintainer :
-- Stability : experimental
-- Portability : unknown
--
-- This module is integrated module of sql-syntax.
module Database.Relational.SqlSyntax (
module Database.Relational.SqlSyntax.Types,
module Database.Relational.SqlSyntax.Join,
module Database.Relational.SqlSyntax.Aggregate,
module Database.Relational.SqlSyntax.Query,
module Database.Relational.SqlSyntax.Fold,
module Database.Relational.SqlSyntax.Updates,
) where
import Database.Relational.SqlSyntax.Types
import Database.Relational.SqlSyntax.Join (growProduct, restrictProduct, )
import Database.Relational.SqlSyntax.Aggregate
import Database.Relational.SqlSyntax.Query
import Database.Relational.SqlSyntax.Fold
import Database.Relational.SqlSyntax.Updates
| null | https://raw.githubusercontent.com/khibino/haskell-relational-record/759b3d7cea207e64d2bd1cf195125182f73d2a52/relational-query/src/Database/Relational/SqlSyntax.hs | haskell | |
Module : Database.Relational.SqlSyntax
License : BSD3
Maintainer :
Stability : experimental
Portability : unknown
This module is integrated module of sql-syntax. | Copyright : 2017 - 2019
module Database.Relational.SqlSyntax (
module Database.Relational.SqlSyntax.Types,
module Database.Relational.SqlSyntax.Join,
module Database.Relational.SqlSyntax.Aggregate,
module Database.Relational.SqlSyntax.Query,
module Database.Relational.SqlSyntax.Fold,
module Database.Relational.SqlSyntax.Updates,
) where
import Database.Relational.SqlSyntax.Types
import Database.Relational.SqlSyntax.Join (growProduct, restrictProduct, )
import Database.Relational.SqlSyntax.Aggregate
import Database.Relational.SqlSyntax.Query
import Database.Relational.SqlSyntax.Fold
import Database.Relational.SqlSyntax.Updates
|
c422a816c54b65b9d9b549092188b8eb98fa1fcfa05b4e313748785499a71684 | hasura/pg-client-hs | Interrupt.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DerivingStrategies #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
module Interrupt (specInterrupt) where
-------------------------------------------------------------------------------
import Control.Concurrent
( MVar,
newEmptyMVar,
putMVar,
threadDelay,
tryReadMVar,
)
import Control.Concurrent.Interrupt (interruptOnAsyncException)
import Control.Exception.Safe (Exception, onException, throwIO, uninterruptibleMask_)
import Control.Monad (liftM2, unless)
import Data.Bifunctor (first)
import Data.IORef (IORef, atomicModifyIORef', newIORef, readIORef)
import Data.Kind (Type)
import Data.Maybe (isJust)
import Data.Time (NominalDiffTime, diffUTCTime, getCurrentTime)
import System.Timeout (timeout)
import Test.Hspec (Spec, describe, it, shouldBe, shouldThrow)
import Prelude hiding (log)
-------------------------------------------------------------------------------
specInterrupt :: Spec
specInterrupt = do
describe "without interrupt" $ do
it "logging etc works" $ do
events <- withLogger $ \log -> do
let action = trace log "sleep" $ sleep (1000 * ms)
res <- timeout (500 * ms) action
log "done"
res `shouldBe` Nothing
roundLog (100 * ms) events
`shouldBe` [ (0, "sleep start"),
(500 * ms, "sleep exception"),
(500 * ms, "done")
]
it "cancellable sleep is like sleep without cancelling" $ do
events <- withLogger $ \log -> do
let action = trace log "sleep" $ cancellableSleep (1000 * ms) (pure False)
res <- timeout (500 * ms) action
log "done"
res `shouldBe` Nothing
roundLog (100 * ms) events
`shouldBe` [ (0, "sleep start"),
(500 * ms, "sleep exception"),
(500 * ms, "done")
]
it "uninterruptible sleep doesn't time out" $ do
events <- withLogger $ \log -> do
let action = trace log "outer" $ do
uninterruptibleMask_ $ trace log "sleep" $ cancellableSleep (1000 * ms) (pure False)
-- add an extra action so the timeout is delivered reliably
sleep (500 * ms)
res <- timeout (500 * ms) action
log "done"
res `shouldBe` Nothing
roundLog (100 * ms) events
`shouldBe` [ (0, "outer start"),
(0, "sleep start"),
(1000 * ms, "sleep end"),
(1000 * ms, "outer exception"),
(1000 * ms, "done")
]
describe "interruptOnAsyncException" $ do
it "behaves like baseline without cancelling" $ do
events <- withLogger $ \log -> do
let action = interruptOnAsyncException (pure ()) $ trace log "sleep" $ sleep (1000 * ms)
res <- timeout (500 * ms) action
log "done"
res `shouldBe` Nothing
roundLog (100 * ms) events
`shouldBe` [ (0, "sleep start"),
(500 * ms, "sleep exception"),
(500 * ms, "done")
]
it "allows interrupting a blocking action" $ do
(cancel, cancelled) <- getCancel
events <- withLogger $ \log -> do
let action = trace log "outer" $ do
interruptOnAsyncException cancel $ uninterruptibleMask_ $ trace log "sleep" $ cancellableSleep (1000 * ms) cancelled
res <- timeout (500 * ms) action
log "done"
res `shouldBe` Nothing
roundLog (100 * ms) events
`shouldBe` [ (0, "outer start"),
(0, "sleep start"),
(500 * ms, "sleep end"),
(500 * ms, "outer exception"),
(500 * ms, "done")
]
it "waits for the thread and bubbles the exception if cancel only throws" $ do
(_cancel, cancelled) <- getCancel
let cancel = throwIO CancelException
events <- withLogger $ \log -> do
let action = trace log "outer" $ do
interruptOnAsyncException cancel $ uninterruptibleMask_ $ trace log "sleep" $ cancellableSleep (1000 * ms) cancelled
timeout (500 * ms) action `shouldThrow` (== CancelException)
log "done"
-- the important property is that we always get "sleep"'s end/exception before "outer"'s end/exception
roundLog (100 * ms) events
`shouldBe` [ (0, "outer start"),
(0, "sleep start"),
(1000 * ms, "sleep end"),
(1000 * ms, "outer exception"),
(1000 * ms, "done")
]
it "bubbles an exception that occurs before cancelling" $ do
(cancel, cancelled) <- getCancel
events <- withLogger $ \log -> do
let action = trace log "outer" $ do
interruptOnAsyncException cancel $
uninterruptibleMask_ $
trace log "sleep" $ do
sleep (200 * ms)
throwIO ActionException :: IO ()
cancellableSleep (800 * ms) cancelled
timeout (500 * ms) action `shouldThrow` (== ActionException)
log "done"
roundLog (100 * ms) events
`shouldBe` [ (0, "outer start"),
(0, "sleep start"),
(200 * ms, "sleep exception"),
(200 * ms, "outer exception"),
(200 * ms, "done")
]
it "bubbles an exception that occurs after cancelling" $ do
(cancel, cancelled) <- getCancel
events <- withLogger $ \log -> do
let action = trace log "outer" $ do
interruptOnAsyncException cancel $
uninterruptibleMask_ $
trace log "sleep" $ do
cancellableSleep (1000 * ms) cancelled
throwIO ActionException
timeout (500 * ms) action `shouldThrow` (== ActionException)
log "done"
roundLog (100 * ms) events
`shouldBe` [ (0, "outer start"),
(0, "sleep start"),
(500 * ms, "sleep exception"),
(500 * ms, "outer exception"),
(500 * ms, "done")
]
-- millisecond in microseconds
ms :: Int
ms = 1000
second in microseconds
s :: Int
s = 1000000
sleep :: Int -> IO ()
sleep = threadDelay
cancellableSleep :: Int -> IO Bool -> IO ()
cancellableSleep t cancelled = do
t0 <- getCurrentTime
let done = do
t1 <- getCurrentTime
return $ diffUTCTime t1 t0 * fromIntegral s >= fromIntegral t
spinUntil (liftM2 (||) done cancelled)
where
spinUntil cond = do
stop <- cond
unless stop $ do
threadDelay (1 * ms)
spinUntil cond
getCancel :: IO (IO (), IO Bool)
getCancel = do
c :: MVar () <- newEmptyMVar
let cancel = putMVar c ()
cancelled = isJust <$> tryReadMVar c
return (cancel, cancelled)
type CancelException :: Type
data CancelException = CancelException
deriving stock (Eq, Show)
deriving anyclass (Exception)
type ActionException :: Type
data ActionException = ActionException
deriving stock (Eq, Show)
deriving anyclass (Exception)
type Log :: Type
type Log = [(NominalDiffTime, String)]
roundTo :: Int -> NominalDiffTime -> Int
roundTo interval t = round (t * fromIntegral s / fromIntegral interval) * interval
roundLog :: Int -> Log -> [(Int, String)]
roundLog interval = map (first (roundTo interval))
withLogger :: ((String -> IO ()) -> IO ()) -> IO Log
withLogger f = do
ref :: IORef Log <- newIORef []
t0 <- getCurrentTime
let log event = do
t <- getCurrentTime
atomicModifyIORef' ref (\events -> ((diffUTCTime t t0, event) : events, ()))
f log
reverse <$> readIORef ref
trace :: (String -> IO ()) -> String -> IO () -> IO ()
trace log label action = do
log $ label <> " start"
action `onException` log (label <> " exception")
log $ label <> " end"
| null | https://raw.githubusercontent.com/hasura/pg-client-hs/5793e998c20358eef6ca86b5d480956e08b7e07a/test/Interrupt.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE OverloadedStrings #
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
add an extra action so the timeout is delivered reliably
the important property is that we always get "sleep"'s end/exception before "outer"'s end/exception
millisecond in microseconds | # LANGUAGE DerivingStrategies #
# LANGUAGE ScopedTypeVariables #
module Interrupt (specInterrupt) where
import Control.Concurrent
( MVar,
newEmptyMVar,
putMVar,
threadDelay,
tryReadMVar,
)
import Control.Concurrent.Interrupt (interruptOnAsyncException)
import Control.Exception.Safe (Exception, onException, throwIO, uninterruptibleMask_)
import Control.Monad (liftM2, unless)
import Data.Bifunctor (first)
import Data.IORef (IORef, atomicModifyIORef', newIORef, readIORef)
import Data.Kind (Type)
import Data.Maybe (isJust)
import Data.Time (NominalDiffTime, diffUTCTime, getCurrentTime)
import System.Timeout (timeout)
import Test.Hspec (Spec, describe, it, shouldBe, shouldThrow)
import Prelude hiding (log)
specInterrupt :: Spec
specInterrupt = do
describe "without interrupt" $ do
it "logging etc works" $ do
events <- withLogger $ \log -> do
let action = trace log "sleep" $ sleep (1000 * ms)
res <- timeout (500 * ms) action
log "done"
res `shouldBe` Nothing
roundLog (100 * ms) events
`shouldBe` [ (0, "sleep start"),
(500 * ms, "sleep exception"),
(500 * ms, "done")
]
it "cancellable sleep is like sleep without cancelling" $ do
events <- withLogger $ \log -> do
let action = trace log "sleep" $ cancellableSleep (1000 * ms) (pure False)
res <- timeout (500 * ms) action
log "done"
res `shouldBe` Nothing
roundLog (100 * ms) events
`shouldBe` [ (0, "sleep start"),
(500 * ms, "sleep exception"),
(500 * ms, "done")
]
it "uninterruptible sleep doesn't time out" $ do
events <- withLogger $ \log -> do
let action = trace log "outer" $ do
uninterruptibleMask_ $ trace log "sleep" $ cancellableSleep (1000 * ms) (pure False)
sleep (500 * ms)
res <- timeout (500 * ms) action
log "done"
res `shouldBe` Nothing
roundLog (100 * ms) events
`shouldBe` [ (0, "outer start"),
(0, "sleep start"),
(1000 * ms, "sleep end"),
(1000 * ms, "outer exception"),
(1000 * ms, "done")
]
describe "interruptOnAsyncException" $ do
it "behaves like baseline without cancelling" $ do
events <- withLogger $ \log -> do
let action = interruptOnAsyncException (pure ()) $ trace log "sleep" $ sleep (1000 * ms)
res <- timeout (500 * ms) action
log "done"
res `shouldBe` Nothing
roundLog (100 * ms) events
`shouldBe` [ (0, "sleep start"),
(500 * ms, "sleep exception"),
(500 * ms, "done")
]
it "allows interrupting a blocking action" $ do
(cancel, cancelled) <- getCancel
events <- withLogger $ \log -> do
let action = trace log "outer" $ do
interruptOnAsyncException cancel $ uninterruptibleMask_ $ trace log "sleep" $ cancellableSleep (1000 * ms) cancelled
res <- timeout (500 * ms) action
log "done"
res `shouldBe` Nothing
roundLog (100 * ms) events
`shouldBe` [ (0, "outer start"),
(0, "sleep start"),
(500 * ms, "sleep end"),
(500 * ms, "outer exception"),
(500 * ms, "done")
]
it "waits for the thread and bubbles the exception if cancel only throws" $ do
(_cancel, cancelled) <- getCancel
let cancel = throwIO CancelException
events <- withLogger $ \log -> do
let action = trace log "outer" $ do
interruptOnAsyncException cancel $ uninterruptibleMask_ $ trace log "sleep" $ cancellableSleep (1000 * ms) cancelled
timeout (500 * ms) action `shouldThrow` (== CancelException)
log "done"
roundLog (100 * ms) events
`shouldBe` [ (0, "outer start"),
(0, "sleep start"),
(1000 * ms, "sleep end"),
(1000 * ms, "outer exception"),
(1000 * ms, "done")
]
it "bubbles an exception that occurs before cancelling" $ do
(cancel, cancelled) <- getCancel
events <- withLogger $ \log -> do
let action = trace log "outer" $ do
interruptOnAsyncException cancel $
uninterruptibleMask_ $
trace log "sleep" $ do
sleep (200 * ms)
throwIO ActionException :: IO ()
cancellableSleep (800 * ms) cancelled
timeout (500 * ms) action `shouldThrow` (== ActionException)
log "done"
roundLog (100 * ms) events
`shouldBe` [ (0, "outer start"),
(0, "sleep start"),
(200 * ms, "sleep exception"),
(200 * ms, "outer exception"),
(200 * ms, "done")
]
it "bubbles an exception that occurs after cancelling" $ do
(cancel, cancelled) <- getCancel
events <- withLogger $ \log -> do
let action = trace log "outer" $ do
interruptOnAsyncException cancel $
uninterruptibleMask_ $
trace log "sleep" $ do
cancellableSleep (1000 * ms) cancelled
throwIO ActionException
timeout (500 * ms) action `shouldThrow` (== ActionException)
log "done"
roundLog (100 * ms) events
`shouldBe` [ (0, "outer start"),
(0, "sleep start"),
(500 * ms, "sleep exception"),
(500 * ms, "outer exception"),
(500 * ms, "done")
]
ms :: Int
ms = 1000
second in microseconds
s :: Int
s = 1000000
sleep :: Int -> IO ()
sleep = threadDelay
cancellableSleep :: Int -> IO Bool -> IO ()
cancellableSleep t cancelled = do
t0 <- getCurrentTime
let done = do
t1 <- getCurrentTime
return $ diffUTCTime t1 t0 * fromIntegral s >= fromIntegral t
spinUntil (liftM2 (||) done cancelled)
where
spinUntil cond = do
stop <- cond
unless stop $ do
threadDelay (1 * ms)
spinUntil cond
getCancel :: IO (IO (), IO Bool)
getCancel = do
c :: MVar () <- newEmptyMVar
let cancel = putMVar c ()
cancelled = isJust <$> tryReadMVar c
return (cancel, cancelled)
type CancelException :: Type
data CancelException = CancelException
deriving stock (Eq, Show)
deriving anyclass (Exception)
type ActionException :: Type
data ActionException = ActionException
deriving stock (Eq, Show)
deriving anyclass (Exception)
type Log :: Type
type Log = [(NominalDiffTime, String)]
roundTo :: Int -> NominalDiffTime -> Int
roundTo interval t = round (t * fromIntegral s / fromIntegral interval) * interval
roundLog :: Int -> Log -> [(Int, String)]
roundLog interval = map (first (roundTo interval))
withLogger :: ((String -> IO ()) -> IO ()) -> IO Log
withLogger f = do
ref :: IORef Log <- newIORef []
t0 <- getCurrentTime
let log event = do
t <- getCurrentTime
atomicModifyIORef' ref (\events -> ((diffUTCTime t t0, event) : events, ()))
f log
reverse <$> readIORef ref
trace :: (String -> IO ()) -> String -> IO () -> IO ()
trace log label action = do
log $ label <> " start"
action `onException` log (label <> " exception")
log $ label <> " end"
|
64e33d59a0d436c3d8a236086d36d4030ab18f8392d1c48f25b920bb1fe92888 | cardmagic/lucash | ekko.scm | #!/usr/local/bin/scsh -s
!#
(define (main args)
(for-each (lambda (f) (display f) (write-char #\ ))
args)
(newline))
(define (ekko)
(main command-line-arguments)
)
(ekko)
| null | https://raw.githubusercontent.com/cardmagic/lucash/0452d410430d12140c14948f7f583624f819cdad/reference/scsh-0.6.6/scsh/ekko.scm | scheme | #!/usr/local/bin/scsh -s
!#
(define (main args)
(for-each (lambda (f) (display f) (write-char #\ ))
args)
(newline))
(define (ekko)
(main command-line-arguments)
)
(ekko)
| |
36bd0880698f96b05c4c4e5fc544df7d35d19616ea37199568774321252c66f0 | yapsterapp/deferst | monad_trans.cljc | (ns cats.labs.monad-trans
(:require
[cats.context :as ctx]))
(defprotocol MonadTrans
"A monad transformer abstraction."
(-lift [m mv] "Lift a value from the parameterized monad to the transformer."))
(defn lift
"Lift a value from the inner monad of a monad transformer
into a value of the monad transformer."
([mv] (-lift ctx/*context* mv))
([m mv] (-lift m mv)))
| null | https://raw.githubusercontent.com/yapsterapp/deferst/13ef02499211b42b1876e3b170ca27e9f721ad2b/src/cats/labs/monad_trans.cljc | clojure | (ns cats.labs.monad-trans
(:require
[cats.context :as ctx]))
(defprotocol MonadTrans
"A monad transformer abstraction."
(-lift [m mv] "Lift a value from the parameterized monad to the transformer."))
(defn lift
"Lift a value from the inner monad of a monad transformer
into a value of the monad transformer."
([mv] (-lift ctx/*context* mv))
([m mv] (-lift m mv)))
| |
5f56d881056f954457605fc1bdc4427dd6ada3e738308b51715b37ed0fa7545d | NorfairKing/smos | PostBackupSpec.hs | module Smos.Server.Handler.PostBackupSpec
( spec,
)
where
import Smos.Client
import Smos.Data.Gen ()
import Smos.Server.InterestingStore
import Smos.Server.TestUtils
import Test.Syd
import Test.Syd.Validity
spec :: Spec
spec =
describe "PostBackup" $ do
serverSpec $ do
it "produces a valid smos directory forest for any interesting store" $ \cenv ->
forAllValid $ \store ->
withNewUser cenv $ \t -> testClient cenv $ do
setupInterestingStore t store
backupUuid <- clientPostBackup t
liftIO $ shouldBeValid backupUuid
it "doesn't change the stored files" $ \cenv ->
forAllValid $ \store ->
withNewUser cenv $ \t -> testClient cenv $ do
setupInterestingStore t store
filesBefore <- clientGetListSmosFiles t
_ <- clientPostBackup t
filesAfter <- clientGetListSmosFiles t
liftIO $ filesAfter `shouldBe` filesBefore
| null | https://raw.githubusercontent.com/NorfairKing/smos/a3ed8fdbcb8f298bc49866bf6d0ee59a964ba0da/smos-server-gen/test/Smos/Server/Handler/PostBackupSpec.hs | haskell | module Smos.Server.Handler.PostBackupSpec
( spec,
)
where
import Smos.Client
import Smos.Data.Gen ()
import Smos.Server.InterestingStore
import Smos.Server.TestUtils
import Test.Syd
import Test.Syd.Validity
spec :: Spec
spec =
describe "PostBackup" $ do
serverSpec $ do
it "produces a valid smos directory forest for any interesting store" $ \cenv ->
forAllValid $ \store ->
withNewUser cenv $ \t -> testClient cenv $ do
setupInterestingStore t store
backupUuid <- clientPostBackup t
liftIO $ shouldBeValid backupUuid
it "doesn't change the stored files" $ \cenv ->
forAllValid $ \store ->
withNewUser cenv $ \t -> testClient cenv $ do
setupInterestingStore t store
filesBefore <- clientGetListSmosFiles t
_ <- clientPostBackup t
filesAfter <- clientGetListSmosFiles t
liftIO $ filesAfter `shouldBe` filesBefore
| |
e16e48bb8a227dbb05521b863bbb9840ae359e6f78a01c9ad20bbcda0da3e163 | imrehg/ypsilon | mount.scm | #!nobacktrace
Ypsilon Scheme System
Copyright ( c ) 2004 - 2009 Y.FUJITA / LittleWing Company Limited .
See license.txt for terms and conditions of use .
(library (ypsilon gtk mount)
(export gtk_mount_operation_get_parent
gtk_mount_operation_get_screen
gtk_mount_operation_get_type
gtk_mount_operation_is_showing
gtk_mount_operation_new
gtk_mount_operation_set_parent
gtk_mount_operation_set_screen)
(import (rnrs) (ypsilon ffi))
(define lib-name
(cond (on-linux "libgtk-x11-2.0.so.0")
(on-sunos "libgtk-x11-2.0.so.0")
(on-freebsd "libgtk-x11-2.0.so.0")
(on-openbsd "libgtk-x11-2.0.so.0")
(on-darwin "Gtk.framework/Gtk")
(on-windows "libgtk-win32-2.0-0.dll")
(else
(assertion-violation #f "can not locate GTK library, unknown operating system"))))
(define lib (load-shared-object lib-name))
(define-syntax define-function
(syntax-rules ()
((_ ret name args)
(define name (c-function lib lib-name ret name args)))))
(define-syntax define-function/va_list
(syntax-rules ()
((_ ret name args)
(define name (lambda x (assertion-violation 'name "va_list argument not supported"))))))
GtkWindow * gtk_mount_operation_get_parent ( GtkMountOperation * op )
(define-function void* gtk_mount_operation_get_parent (void*))
GdkScreen * gtk_mount_operation_get_screen ( GtkMountOperation * op )
(define-function void* gtk_mount_operation_get_screen (void*))
GType gtk_mount_operation_get_type ( void )
(define-function unsigned-long gtk_mount_operation_get_type ())
gboolean gtk_mount_operation_is_showing ( GtkMountOperation * op )
(define-function int gtk_mount_operation_is_showing (void*))
GMountOperation * ( GtkWindow * parent )
(define-function void* gtk_mount_operation_new (void*))
void gtk_mount_operation_set_parent ( GtkMountOperation * op , GtkWindow * parent )
(define-function void gtk_mount_operation_set_parent (void* void*))
void gtk_mount_operation_set_screen ( GtkMountOperation * op , GdkScreen * screen )
(define-function void gtk_mount_operation_set_screen (void* void*))
) ;[end]
| null | https://raw.githubusercontent.com/imrehg/ypsilon/e57a06ef5c66c1a88905b2be2fa791fa29848514/sitelib/ypsilon/gtk/mount.scm | scheme | [end] | #!nobacktrace
Ypsilon Scheme System
Copyright ( c ) 2004 - 2009 Y.FUJITA / LittleWing Company Limited .
See license.txt for terms and conditions of use .
(library (ypsilon gtk mount)
(export gtk_mount_operation_get_parent
gtk_mount_operation_get_screen
gtk_mount_operation_get_type
gtk_mount_operation_is_showing
gtk_mount_operation_new
gtk_mount_operation_set_parent
gtk_mount_operation_set_screen)
(import (rnrs) (ypsilon ffi))
(define lib-name
(cond (on-linux "libgtk-x11-2.0.so.0")
(on-sunos "libgtk-x11-2.0.so.0")
(on-freebsd "libgtk-x11-2.0.so.0")
(on-openbsd "libgtk-x11-2.0.so.0")
(on-darwin "Gtk.framework/Gtk")
(on-windows "libgtk-win32-2.0-0.dll")
(else
(assertion-violation #f "can not locate GTK library, unknown operating system"))))
(define lib (load-shared-object lib-name))
(define-syntax define-function
(syntax-rules ()
((_ ret name args)
(define name (c-function lib lib-name ret name args)))))
(define-syntax define-function/va_list
(syntax-rules ()
((_ ret name args)
(define name (lambda x (assertion-violation 'name "va_list argument not supported"))))))
GtkWindow * gtk_mount_operation_get_parent ( GtkMountOperation * op )
(define-function void* gtk_mount_operation_get_parent (void*))
GdkScreen * gtk_mount_operation_get_screen ( GtkMountOperation * op )
(define-function void* gtk_mount_operation_get_screen (void*))
GType gtk_mount_operation_get_type ( void )
(define-function unsigned-long gtk_mount_operation_get_type ())
gboolean gtk_mount_operation_is_showing ( GtkMountOperation * op )
(define-function int gtk_mount_operation_is_showing (void*))
GMountOperation * ( GtkWindow * parent )
(define-function void* gtk_mount_operation_new (void*))
void gtk_mount_operation_set_parent ( GtkMountOperation * op , GtkWindow * parent )
(define-function void gtk_mount_operation_set_parent (void* void*))
void gtk_mount_operation_set_screen ( GtkMountOperation * op , GdkScreen * screen )
(define-function void gtk_mount_operation_set_screen (void* void*))
|
9f539cde5b8b6c523217c87abac7cae4f86ca963995ccd74e7ef7d3986933ec9 | cedlemo/OCaml-GI-ctypes-bindings-generator | Socket_msg_flags.mli | open Ctypes
type t = None | Oob | Peek | Dontroute
type t_list = t list
val of_value:
Unsigned.uint32 -> t
val to_value:
t -> Unsigned.uint32
val list_of_value:
Unsigned.uint32 -> t_list
val list_to_value:
t_list -> Unsigned.uint32
val t_list_view : t_list typ
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gio/Socket_msg_flags.mli | ocaml | open Ctypes
type t = None | Oob | Peek | Dontroute
type t_list = t list
val of_value:
Unsigned.uint32 -> t
val to_value:
t -> Unsigned.uint32
val list_of_value:
Unsigned.uint32 -> t_list
val list_to_value:
t_list -> Unsigned.uint32
val t_list_view : t_list typ
| |
dc891c1cd3b5988c0cec0ffaaabfd0fa397230cc87716c58cb67ed46c4e449fc | phadej/staged | Directory.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
module Staged.Stream.Directory (
listDirectory,
recursiveListDirectory,
files,
) where
import Control.Monad.IO.Class (MonadIO (..))
import System.FilePath ((</>))
import qualified System.Directory as Dir
import Staged.Commons
import Staged.Stream.Type
import Staged.Stream.Combinators (bfsTreeM, fromListM, filterM)
-- | List directory.
--
-- 'listDirectory' is defined using 'M.fromListM' of corresponding
-- function in the @directory@ package.
--
listDirectory :: MonadIO m => StreamM m FilePath FilePath
listDirectory = fromListM $ \dir ->
sliftIO $ toCode [|| fmap (map ($$(fromCode dir) </>)) (Dir.listDirectory $$(fromCode dir)) ||]
-- | Recursively (breath-first) walk through the directory structure.
recursiveListDirectory :: MonadIO m => StreamM m FilePath FilePath
recursiveListDirectory = bfsTreeM listDirectory $ \fp ->
toCode [|| liftIO . Dir.doesDirectoryExist ||] @@ fp
-- | Filter with 'Dir.doesFileExist', i.e. return only existing files.
files :: MonadIO m => StreamM m a FilePath -> StreamM m a FilePath
files = filterM (\x -> sliftIO (toCode [|| Dir.doesFileExist ||] @@ x))
| null | https://raw.githubusercontent.com/phadej/staged/b51c8c508af71ddb2aca4a75030da9b2c4f9e3dd/staged-streams/src/Staged/Stream/Directory.hs | haskell | # LANGUAGE DeriveAnyClass #
| List directory.
'listDirectory' is defined using 'M.fromListM' of corresponding
function in the @directory@ package.
| Recursively (breath-first) walk through the directory structure.
| Filter with 'Dir.doesFileExist', i.e. return only existing files. | # LANGUAGE DeriveGeneric #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
module Staged.Stream.Directory (
listDirectory,
recursiveListDirectory,
files,
) where
import Control.Monad.IO.Class (MonadIO (..))
import System.FilePath ((</>))
import qualified System.Directory as Dir
import Staged.Commons
import Staged.Stream.Type
import Staged.Stream.Combinators (bfsTreeM, fromListM, filterM)
listDirectory :: MonadIO m => StreamM m FilePath FilePath
listDirectory = fromListM $ \dir ->
sliftIO $ toCode [|| fmap (map ($$(fromCode dir) </>)) (Dir.listDirectory $$(fromCode dir)) ||]
recursiveListDirectory :: MonadIO m => StreamM m FilePath FilePath
recursiveListDirectory = bfsTreeM listDirectory $ \fp ->
toCode [|| liftIO . Dir.doesDirectoryExist ||] @@ fp
files :: MonadIO m => StreamM m a FilePath -> StreamM m a FilePath
files = filterM (\x -> sliftIO (toCode [|| Dir.doesFileExist ||] @@ x))
|
ef5e552e8a46084a715175cb1ea6d644d5b634a24ac1bfc32323f6b5b714f8d5 | opencog/learn | gram-class-api.scm | ;
; gram-class-api.scm
;
; Representing word-classes as vectors of (pseudo-)connector-sets.
;
Copyright ( c ) 2017 , 2019 Linas Vepstas
;
; ---------------------------------------------------------------------
; OVERVIEW
; --------
; This file provides the `matrix-object` API that allows grammatical
; classes of words to be treated as vectors of connector-sets (vectors
; of disjuncts; vectors of Sections).
;
; This is meant to be used as a wrapper around `make-pseudo-cset-api`,
; extending it so that both `WordNode`s and `WordClassNode`s are handled.
;
; ---------------------------------------------------------------------
;
(use-modules (srfi srfi-1))
(use-modules (opencog))
(use-modules (opencog persist))
(use-modules (opencog matrix))
; ---------------------------------------------------------------------
; This class kind-of resembles a direct sum (as coded in the
; `direct-sum` object) but its ad hoc, hard-coded, not generic.
(define-public (add-gram-class-api LLOBJ)
"
add-gram-class-api LLOBJ -- Enable (WordClass, disjunct) pairs.
This will take LLOBJ and extend it's native `left-type with the
`WordClass` type, so that the left type can be either. It also
provides methods for managing the `MemberLink`s that indicate
membership of the 'left-type in the `WordClass`.
The membership of a word to a WordClass is denoted as
(MemberLink (WordNode \"foo\") (WordClassNode \"bar\"))
Keep in mind that a word might belong to more than one WordClass.
Contributions to the class are stored as counts on the MemberLink.
See the `pseudo-csets.scm` file for a general overview.
Provided methods:
'left-type -- returns (TypeChoice (LLOBJ 'left-type) (Type 'WordClassNode))
'store-aux -- Store the MemberLinks above.
'fetch-pairs -- Fetch both WordClassNodes and MemberLinks.
'cluster-type -- returns (Type 'WordClassNode)
'get-clusters -- return all left-basis elements that are of
cluster type.
'make-cluster WA WB -- Creates a WordClassNode
"
(define (get-left-type)
(TypeChoice (LLOBJ 'left-type) (Type 'WordClassNode)))
(define any-left (AnyNode "gram-class"))
(define (get-left-wildcard DJ) (ListLink any-left DJ))
; Recycle the right wildcard from the parent class.
; XXX FIXME: this won't work for some classes, which store
; marginals in a different format than pairs. That is, the
; 'right-element method will work correctly on pairs only,
; not on marginals. For example, direct-sum is like that.
; Perhaps we should blame the classes for mis-handling marginals?
(define any-right (LLOBJ 'right-element (LLOBJ 'wild-wild)))
(define (get-wild-wild) (ListLink any-left any-right))
Fetch ( from the database ) all Sections that have a WordClass
; on the left-hand-side. Fetch the marginals, too.
(define (fetch-disjuncts)
; Let the base object do the heavy lifting.
(LLOBJ 'fetch-pairs)
(define start-time (current-time))
; Fetch all MemberLinks, as these indicate which Words
belong to which WordClasses . Sections have already been
; fetched by the LLOBJ, so we won't do anything more, here.
(load-atoms-of-type 'WordClassNode)
(for-each
(lambda (wcl)
(fetch-incoming-by-type wcl 'MemberLink))
(cog-get-atoms 'WordClassNode))
; Load marginals, too. These are specific to this class.
(fetch-incoming-set any-left)
(format #t "Elapsed time to load grammatical classes: ~A secs\n"
(- (current-time) start-time)))
; Store into the database the "auxiliary" MemberLinks between
; WordClassNodes and WordNodes. Without this, the dataset is
; incomplete.
(define (store-aux)
(for-each
lambda over lists of MemberLink 's
(lambda (memb-list)
(for-each
lambda over MemberLinks
(lambda (memb)
If the right kind of MemberLink
(if (eq? 'WordNode (cog-type (gar memb)))
(store-atom memb)))
memb-list))
Get all MemberLinks that this WordClass belongs to .
(map (lambda (wrdcls) (cog-incoming-by-type wrdcls 'MemberLink))
(cog-get-atoms 'WordClassNode))))
;-------------------------------------------
; Custom methods specific to this object.
(define (get-cluster-type) (Type 'WordClassNode))
; Get the clusters appearing in the left-basis.
(define (get-clusters)
; In current usage, LLOBJ doesn't have stars on it.
; At any rate, we want a fresh search for the basis
; each time we are called, as the basis may have changed.
(define stars (add-pair-stars LLOBJ))
(filter (lambda (W) (equal? 'WordClassNode (cog-type W)))
(stars 'left-basis)))
Create a word - class out of two words , or just extend an
; existing word class. Here, "extend" means "do nothing",
return the existing class . If this is called a second time
; with the same arguments, then a new, unique name is generated!
; Therefore, this should never be called than once!
; XXX FIXME the semantics of this thing is ugly, and should be
moved to the caller . We should n't have to second - guess the
; callers dsired behavior!
(define (make-cluster A-ATOM B-ATOM)
(define is-a-class (eq? 'WordClassNode (cog-type A-ATOM)))
(define is-b-class (eq? 'WordClassNode (cog-type B-ATOM)))
(cond
(is-a-class A-ATOM)
(is-b-class B-ATOM)
(else (let
((cluname (string-join
(list (cog-name A-ATOM) (cog-name B-ATOM)))))
; If `cluname` already exists, then append "(dup)" to the
; end of it, and try again. Keep repeating. In the real
; world, this should never happen more than once, maybe
twice , unimaginable three times . So that iota is safe .
(every
(lambda (N)
(if (nil? (cog-node 'WordClassNode cluname)) #f
(begin
(set! cluname (string-append cluname " (dup)"))
#t)))
(iota 10000))
(WordClassNode cluname)))))
;-------------------------------------------
(define (describe)
(display (procedure-property add-gram-class-api 'documentation)))
;-------------------------------------------
; Explain the non-default provided methods.
(define (provides meth)
(case meth
((left-type) get-left-type)
((store-aux) store-aux)
(else #f)
))
; Methods on the object
(lambda (message . args)
(apply (case message
((name) (lambda () "WordClass-Disjunct Pairs"))
((id) (lambda () "gram-class"))
((left-type) get-left-type)
((left-wildcard) get-left-wildcard)
((wild-wild) get-wild-wild)
((fetch-pairs) fetch-disjuncts)
((store-aux) store-aux)
((cluster-type) get-cluster-type)
((get-clusters) get-clusters)
((make-cluster) make-cluster)
((provides) provides)
((filters?) (lambda () #f))
((describe) describe)
((help) describe)
((obj) (lambda () "add-gram-class-api"))
((base) (lambda () LLOBJ))
(else (lambda ( . rest ) (apply LLOBJ (cons message args))))
) args))
)
; ---------------------------------------------------------------------
| null | https://raw.githubusercontent.com/opencog/learn/ffd86cccfdcba360433d9a79a84ce7eb1a8f255c/scm/gram-class/gram-class-api.scm | scheme |
gram-class-api.scm
Representing word-classes as vectors of (pseudo-)connector-sets.
---------------------------------------------------------------------
OVERVIEW
--------
This file provides the `matrix-object` API that allows grammatical
classes of words to be treated as vectors of connector-sets (vectors
of disjuncts; vectors of Sections).
This is meant to be used as a wrapper around `make-pseudo-cset-api`,
extending it so that both `WordNode`s and `WordClassNode`s are handled.
---------------------------------------------------------------------
---------------------------------------------------------------------
This class kind-of resembles a direct sum (as coded in the
`direct-sum` object) but its ad hoc, hard-coded, not generic.
Recycle the right wildcard from the parent class.
XXX FIXME: this won't work for some classes, which store
marginals in a different format than pairs. That is, the
'right-element method will work correctly on pairs only,
not on marginals. For example, direct-sum is like that.
Perhaps we should blame the classes for mis-handling marginals?
on the left-hand-side. Fetch the marginals, too.
Let the base object do the heavy lifting.
Fetch all MemberLinks, as these indicate which Words
fetched by the LLOBJ, so we won't do anything more, here.
Load marginals, too. These are specific to this class.
Store into the database the "auxiliary" MemberLinks between
WordClassNodes and WordNodes. Without this, the dataset is
incomplete.
-------------------------------------------
Custom methods specific to this object.
Get the clusters appearing in the left-basis.
In current usage, LLOBJ doesn't have stars on it.
At any rate, we want a fresh search for the basis
each time we are called, as the basis may have changed.
existing word class. Here, "extend" means "do nothing",
with the same arguments, then a new, unique name is generated!
Therefore, this should never be called than once!
XXX FIXME the semantics of this thing is ugly, and should be
callers dsired behavior!
If `cluname` already exists, then append "(dup)" to the
end of it, and try again. Keep repeating. In the real
world, this should never happen more than once, maybe
-------------------------------------------
-------------------------------------------
Explain the non-default provided methods.
Methods on the object
--------------------------------------------------------------------- | Copyright ( c ) 2017 , 2019 Linas Vepstas
(use-modules (srfi srfi-1))
(use-modules (opencog))
(use-modules (opencog persist))
(use-modules (opencog matrix))
(define-public (add-gram-class-api LLOBJ)
"
add-gram-class-api LLOBJ -- Enable (WordClass, disjunct) pairs.
This will take LLOBJ and extend it's native `left-type with the
`WordClass` type, so that the left type can be either. It also
provides methods for managing the `MemberLink`s that indicate
membership of the 'left-type in the `WordClass`.
The membership of a word to a WordClass is denoted as
(MemberLink (WordNode \"foo\") (WordClassNode \"bar\"))
Keep in mind that a word might belong to more than one WordClass.
Contributions to the class are stored as counts on the MemberLink.
See the `pseudo-csets.scm` file for a general overview.
Provided methods:
'left-type -- returns (TypeChoice (LLOBJ 'left-type) (Type 'WordClassNode))
'store-aux -- Store the MemberLinks above.
'fetch-pairs -- Fetch both WordClassNodes and MemberLinks.
'cluster-type -- returns (Type 'WordClassNode)
'get-clusters -- return all left-basis elements that are of
cluster type.
'make-cluster WA WB -- Creates a WordClassNode
"
(define (get-left-type)
(TypeChoice (LLOBJ 'left-type) (Type 'WordClassNode)))
(define any-left (AnyNode "gram-class"))
(define (get-left-wildcard DJ) (ListLink any-left DJ))
(define any-right (LLOBJ 'right-element (LLOBJ 'wild-wild)))
(define (get-wild-wild) (ListLink any-left any-right))
Fetch ( from the database ) all Sections that have a WordClass
(define (fetch-disjuncts)
(LLOBJ 'fetch-pairs)
(define start-time (current-time))
belong to which WordClasses . Sections have already been
(load-atoms-of-type 'WordClassNode)
(for-each
(lambda (wcl)
(fetch-incoming-by-type wcl 'MemberLink))
(cog-get-atoms 'WordClassNode))
(fetch-incoming-set any-left)
(format #t "Elapsed time to load grammatical classes: ~A secs\n"
(- (current-time) start-time)))
(define (store-aux)
(for-each
lambda over lists of MemberLink 's
(lambda (memb-list)
(for-each
lambda over MemberLinks
(lambda (memb)
If the right kind of MemberLink
(if (eq? 'WordNode (cog-type (gar memb)))
(store-atom memb)))
memb-list))
Get all MemberLinks that this WordClass belongs to .
(map (lambda (wrdcls) (cog-incoming-by-type wrdcls 'MemberLink))
(cog-get-atoms 'WordClassNode))))
(define (get-cluster-type) (Type 'WordClassNode))
(define (get-clusters)
(define stars (add-pair-stars LLOBJ))
(filter (lambda (W) (equal? 'WordClassNode (cog-type W)))
(stars 'left-basis)))
Create a word - class out of two words , or just extend an
return the existing class . If this is called a second time
moved to the caller . We should n't have to second - guess the
(define (make-cluster A-ATOM B-ATOM)
(define is-a-class (eq? 'WordClassNode (cog-type A-ATOM)))
(define is-b-class (eq? 'WordClassNode (cog-type B-ATOM)))
(cond
(is-a-class A-ATOM)
(is-b-class B-ATOM)
(else (let
((cluname (string-join
(list (cog-name A-ATOM) (cog-name B-ATOM)))))
twice , unimaginable three times . So that iota is safe .
(every
(lambda (N)
(if (nil? (cog-node 'WordClassNode cluname)) #f
(begin
(set! cluname (string-append cluname " (dup)"))
#t)))
(iota 10000))
(WordClassNode cluname)))))
(define (describe)
(display (procedure-property add-gram-class-api 'documentation)))
(define (provides meth)
(case meth
((left-type) get-left-type)
((store-aux) store-aux)
(else #f)
))
(lambda (message . args)
(apply (case message
((name) (lambda () "WordClass-Disjunct Pairs"))
((id) (lambda () "gram-class"))
((left-type) get-left-type)
((left-wildcard) get-left-wildcard)
((wild-wild) get-wild-wild)
((fetch-pairs) fetch-disjuncts)
((store-aux) store-aux)
((cluster-type) get-cluster-type)
((get-clusters) get-clusters)
((make-cluster) make-cluster)
((provides) provides)
((filters?) (lambda () #f))
((describe) describe)
((help) describe)
((obj) (lambda () "add-gram-class-api"))
((base) (lambda () LLOBJ))
(else (lambda ( . rest ) (apply LLOBJ (cons message args))))
) args))
)
|
c888f72d90a8a80779e8117c4968dc7bdc5790024ad5ddbe83dee40556d3adc9 | wargrey/w3s | text-decor.rkt | #lang typed/racket/base
-text-decor
(provide (all-defined-out))
(require racket/list)
(require "syntax/digicore.rkt")
(require "syntax/misc.rkt")
(require "../recognizer.rkt")
(require "color.rkt")
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define css-text-decor-line-options : (Listof Symbol) '(underline overline line-through blink))
(define css-text-decor-skip-options : (Listof Symbol) '(objects spaces ink edges box-decoration))
(define css-text-decor-style-option : (Listof Symbol) '(solid double dotted dashed wavy))
(define css-fold-decoration-line : CSS-Longhand-Update
(lambda [_ old-options option]
(if (list? old-options) (cons option old-options) (list option))))
(define <:text-decoration:> : CSS-Shorthand+Parser
;;; -text-decor/#text-decoration-color-property
(cons (CSS<*> (CSS<+> #:any
(CSS:<^> (<css-keyword> 'none) 'text-decoration-line)
(CSS:<^> (<css-keyword> css-text-decor-line-options) 'text-decoration-line css-fold-decoration-line)
(CSS:<^> (<css-keyword> css-text-decor-style-option) 'text-decoration-style)
(CSS:<^> (<css-color>) 'text-decoration-color)))
'(text-decoration-line text-decoration-color text-decoration-style)))
(define css->text-decor-lines : (CSS->Racket (Listof Symbol))
(lambda [[_ : Symbol] [value : Any]]
(cond [(list? value) (reverse (remove-duplicates (reverse (filter symbol? value))))]
[else null])))
| null | https://raw.githubusercontent.com/wargrey/w3s/9a716932d946a51a1105c8913f3c532eb99c823b/css/digitama/text-decor.rkt | racket |
-text-decor/#text-decoration-color-property | #lang typed/racket/base
-text-decor
(provide (all-defined-out))
(require racket/list)
(require "syntax/digicore.rkt")
(require "syntax/misc.rkt")
(require "../recognizer.rkt")
(require "color.rkt")
(define css-text-decor-line-options : (Listof Symbol) '(underline overline line-through blink))
(define css-text-decor-skip-options : (Listof Symbol) '(objects spaces ink edges box-decoration))
(define css-text-decor-style-option : (Listof Symbol) '(solid double dotted dashed wavy))
(define css-fold-decoration-line : CSS-Longhand-Update
(lambda [_ old-options option]
(if (list? old-options) (cons option old-options) (list option))))
(define <:text-decoration:> : CSS-Shorthand+Parser
(cons (CSS<*> (CSS<+> #:any
(CSS:<^> (<css-keyword> 'none) 'text-decoration-line)
(CSS:<^> (<css-keyword> css-text-decor-line-options) 'text-decoration-line css-fold-decoration-line)
(CSS:<^> (<css-keyword> css-text-decor-style-option) 'text-decoration-style)
(CSS:<^> (<css-color>) 'text-decoration-color)))
'(text-decoration-line text-decoration-color text-decoration-style)))
(define css->text-decor-lines : (CSS->Racket (Listof Symbol))
(lambda [[_ : Symbol] [value : Any]]
(cond [(list? value) (reverse (remove-duplicates (reverse (filter symbol? value))))]
[else null])))
|
5d6ee0ff56249dce5a00b33496bde95c85bbb4fc20f96753feddd3bf2fc2f420 | haskell-tools/haskell-tools | Transitive.hs | # LANGUAGE NoImplicitPrelude #
module Refactor.OrganizeImports.InstanceCarry.Transitive where
import Data.List ()
import Data.Foldable ()
| null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/builtin-refactorings/examples/Refactor/OrganizeImports/InstanceCarry/Transitive.hs | haskell | # LANGUAGE NoImplicitPrelude #
module Refactor.OrganizeImports.InstanceCarry.Transitive where
import Data.List ()
import Data.Foldable ()
| |
35603ea522c254b511df7589e3aa2f1faa1b6fc31661763eae79749912469d15 | karetsu/xmonad-aloysius | Commands.hs | -- | Commands to include in prompts
TODO
module Data.Commands where
import XMonad
import XMonad.Actions.Commands
commands :: X [(String, X ())]
commands = defaultCommands
| null | https://raw.githubusercontent.com/karetsu/xmonad-aloysius/9910c8db4bb75600fc9af51b9e64ab5704e8126c/app/Data/Commands.hs | haskell | | Commands to include in prompts | TODO
module Data.Commands where
import XMonad
import XMonad.Actions.Commands
commands :: X [(String, X ())]
commands = defaultCommands
|
88615676a841942c4676c8a9ae41b6e76a6b71b37e6450c0e69bef0bd7f11608 | qfpl/sv | Type.hs | # LANGUAGE GeneralizedNewtypeDeriving #
|
Module : Data . Sv . Encode . Type
Copyright : ( C ) CSIRO 2017 - 2019
License : : < >
Stability : experimental
Portability : non - portable
The core type for encoding
Module : Data.Sv.Encode.Type
Copyright : (C) CSIRO 2017-2019
License : BSD3
Maintainer : George Wilson <>
Stability : experimental
Portability : non-portable
The core type for encoding
-}
module Data.Sv.Encode.Type (
Encode (Encode, getEncode)
, NameEncode (..)
) where
import Control.Applicative (liftA2)
import Control.Monad.Writer (Writer)
import Data.Bifoldable (bifoldMap)
import Data.ByteString.Builder (Builder)
import Data.Functor.Contravariant (Contravariant (contramap))
import Data.Functor.Contravariant.Compose (ComposeFC (ComposeFC))
import Data.Functor.Contravariant.Divisible (Divisible (divide, conquer), Decidable (choose, lose))
import Data.Semigroup (Semigroup ((<>)))
import Data.Sequence (Seq)
import Data.Void (absurd)
import Data.Sv.Encode.Options
| An ' Encode ' converts its argument into one or more textual fields , to be
-- written out as CSV.
--
It is ' Semigroup ' , ' Monoid ' , ' ' , ' Divisible ' , and ' ' ,
allowing for composition of these values to build bigger ' Encode 's
-- from smaller ones.
newtype Encode a =
Encode { getEncode :: EncodeOptions -> a -> Seq Builder }
deriving (Semigroup, Monoid)
instance Contravariant Encode where
contramap f (Encode g) = Encode $ fmap (. f) g
instance Divisible Encode where
conquer = Encode mempty
divide f (Encode x) (Encode y) =
Encode $ \e a -> bifoldMap (x e) (y e) (f a)
instance Decidable Encode where
lose f = Encode (const (absurd . f))
choose f (Encode x) (Encode y) =
Encode $ \e a -> either (x e) (y e) (f a)
| A ' NameEncode ' is an ' Encode ' with an attached column name .
--
It is ' Semigroup ' , ' Monoid ' , ' ' , and ' Divisible ' , allowing
for composition of these values to build bigger ' NameEncode 's
-- from smaller ones.
--
Notably , ' NameEncode ' is not ' ' , since taking the sum of column
-- names does not make sense.
newtype NameEncode a =
NameEncode { unNamedE :: ComposeFC (Writer (Seq Builder)) Encode a}
intentionally not
instance Semigroup (NameEncode a) where
NameEncode (ComposeFC a) <> NameEncode (ComposeFC b) =
NameEncode (ComposeFC (liftA2 (<>) a b))
instance Monoid (NameEncode a) where
mappend = (<>)
mempty = NameEncode (ComposeFC (pure mempty))
| null | https://raw.githubusercontent.com/qfpl/sv/74aa56bcdcc0d7f1d7b5783bf59e3878dfbb64bc/sv-core/src/Data/Sv/Encode/Type.hs | haskell | written out as CSV.
from smaller ones.
from smaller ones.
names does not make sense. | # LANGUAGE GeneralizedNewtypeDeriving #
|
Module : Data . Sv . Encode . Type
Copyright : ( C ) CSIRO 2017 - 2019
License : : < >
Stability : experimental
Portability : non - portable
The core type for encoding
Module : Data.Sv.Encode.Type
Copyright : (C) CSIRO 2017-2019
License : BSD3
Maintainer : George Wilson <>
Stability : experimental
Portability : non-portable
The core type for encoding
-}
module Data.Sv.Encode.Type (
Encode (Encode, getEncode)
, NameEncode (..)
) where
import Control.Applicative (liftA2)
import Control.Monad.Writer (Writer)
import Data.Bifoldable (bifoldMap)
import Data.ByteString.Builder (Builder)
import Data.Functor.Contravariant (Contravariant (contramap))
import Data.Functor.Contravariant.Compose (ComposeFC (ComposeFC))
import Data.Functor.Contravariant.Divisible (Divisible (divide, conquer), Decidable (choose, lose))
import Data.Semigroup (Semigroup ((<>)))
import Data.Sequence (Seq)
import Data.Void (absurd)
import Data.Sv.Encode.Options
| An ' Encode ' converts its argument into one or more textual fields , to be
It is ' Semigroup ' , ' Monoid ' , ' ' , ' Divisible ' , and ' ' ,
allowing for composition of these values to build bigger ' Encode 's
newtype Encode a =
Encode { getEncode :: EncodeOptions -> a -> Seq Builder }
deriving (Semigroup, Monoid)
instance Contravariant Encode where
contramap f (Encode g) = Encode $ fmap (. f) g
instance Divisible Encode where
conquer = Encode mempty
divide f (Encode x) (Encode y) =
Encode $ \e a -> bifoldMap (x e) (y e) (f a)
instance Decidable Encode where
lose f = Encode (const (absurd . f))
choose f (Encode x) (Encode y) =
Encode $ \e a -> either (x e) (y e) (f a)
| A ' NameEncode ' is an ' Encode ' with an attached column name .
It is ' Semigroup ' , ' Monoid ' , ' ' , and ' Divisible ' , allowing
for composition of these values to build bigger ' NameEncode 's
Notably , ' NameEncode ' is not ' ' , since taking the sum of column
newtype NameEncode a =
NameEncode { unNamedE :: ComposeFC (Writer (Seq Builder)) Encode a}
intentionally not
instance Semigroup (NameEncode a) where
NameEncode (ComposeFC a) <> NameEncode (ComposeFC b) =
NameEncode (ComposeFC (liftA2 (<>) a b))
instance Monoid (NameEncode a) where
mappend = (<>)
mempty = NameEncode (ComposeFC (pure mempty))
|
1acfea7c75e866131e2ecc87b1ffc25f07b8d435ec85b8454956a987b22e5603 | bldl/magnolisp | test-foreign-1.rkt | #lang magnolisp/2014
(typedef int (#:annos foreign))
(begin-racket 1 2 3 (void))
(function (holds? x)
(#:annos [type (fn int Bool)] foreign)
(begin-racket (begin 1 #f)))
(function (f x)
(#:annos export [type (fn int int)])
(begin-racket 4 5 6)
(if (holds? x) 1 2))
(f 5)
| null | https://raw.githubusercontent.com/bldl/magnolisp/191d529486e688e5dda2be677ad8fe3b654e0d4f/tests/test-foreign-1.rkt | racket | #lang magnolisp/2014
(typedef int (#:annos foreign))
(begin-racket 1 2 3 (void))
(function (holds? x)
(#:annos [type (fn int Bool)] foreign)
(begin-racket (begin 1 #f)))
(function (f x)
(#:annos export [type (fn int int)])
(begin-racket 4 5 6)
(if (holds? x) 1 2))
(f 5)
| |
4049fac581c40faa3709bb9e302cd4c472a902a6e014ad29ccf5402e3e0e4392 | rescript-lang/rescript-compiler | super_typecore.ml | open Misc
open
open Parsetree
open Types
open Typedtree
open Btype
open Asttypes
open Parsetree
open Types
open Typedtree
open Btype *)
open Ctype
let fprintf = Format.fprintf
let sprintf = Format.sprintf
let longident = Printtyp.longident
let super_report_unification_error = Printtyp.super_report_unification_error
let reset_and_mark_loops = Printtyp.reset_and_mark_loops
let type_expr = Printtyp.type_expr
let rec bottom_aliases = function
| (_, one) :: (_, two) :: rest -> begin match bottom_aliases rest with
| Some types -> Some types
| None -> Some (one, two)
end
| _ -> None
let simple_conversions = [
(("float", "int"), "Belt.Float.toInt");
(("float", "string"), "Belt.Float.toString");
(("int", "float"), "Belt.Int.toFloat");
(("int", "string"), "Belt.Int.toString");
(("string", "float"), "Belt.Float.fromString");
(("string", "int"), "Belt.Int.fromString");
]
let print_simple_conversion ppf (actual, expected) =
try (
let converter = List.assoc (actual, expected) simple_conversions in
fprintf ppf "@,@,@[<v 2>You can convert @{<info>%s@} to @{<info>%s@} with @{<info>%s@}.@]" actual expected converter
) with | Not_found -> ()
let print_simple_message ppf = function
| ("float", "int") -> fprintf ppf "@ If this is a literal, try a number without a trailing dot (e.g. @{<info>20@})."
| ("int", "float") -> fprintf ppf "@ If this is a literal, try a number with a trailing dot (e.g. @{<info>20.@})."
| _ -> ()
let show_extra_help ppf _env trace = begin
match bottom_aliases trace with
| Some ({Types.desc = Tconstr (actualPath, actualArgs, _)}, {desc = Tconstr (expectedPath, expextedArgs, _)}) -> begin
match (actualPath, actualArgs, expectedPath, expextedArgs) with
| (Pident {name = actualName}, [], Pident {name = expectedName}, []) -> begin
print_simple_conversion ppf (actualName, expectedName);
print_simple_message ppf (actualName, expectedName);
end
| _ -> ()
end;
| _ -> ();
end
given type1 is foo = > bar = > baz(qux ) and type 2 is bar = > ) , return Some(foo )
let rec collect_missing_arguments env type1 type2 = match type1 with
why do we use Ctype.matches here ? Please see
| {Types.desc=Tarrow (label, argtype, typ, _)} when Ctype.matches env typ type2 ->
Some [(label, argtype)]
| {desc=Tarrow (label, argtype, typ, _)} -> begin
match collect_missing_arguments env typ type2 with
| Some res -> Some ((label, argtype) :: res)
| None -> None
end
| _ -> None
let print_expr_type_clash env trace ppf = begin
(* this is the most frequent error. We should do whatever we can to provide
specific guidance to this generic error before giving up *)
let bottom_aliases_result = bottom_aliases trace in
let missing_arguments = match bottom_aliases_result with
| Some (actual, expected) -> collect_missing_arguments env actual expected
| None -> assert false
in
let print_arguments =
Format.pp_print_list
~pp_sep:(fun ppf _ -> fprintf ppf ",@ ")
(fun ppf (label, argtype) ->
match label with
| Asttypes.Nolabel -> fprintf ppf "@[%a@]" type_expr argtype
| Labelled label ->
fprintf ppf "@[(~%s: %a)@]" label type_expr argtype
| Optional label ->
fprintf ppf "@[(?%s: %a)@]" label type_expr argtype
)
in
match missing_arguments with
| Some [singleArgument] ->
(* btw, you can't say "final arguments". Intermediate labeled
arguments might be the ones missing *)
fprintf ppf "@[@{<info>This call is missing an argument@} of type@ %a@]"
print_arguments [singleArgument]
| Some arguments ->
fprintf ppf "@[<hv>@{<info>This call is missing arguments@} of type:@ %a@]"
print_arguments arguments
| None ->
let missing_parameters = match bottom_aliases_result with
| Some (actual, expected) -> collect_missing_arguments env expected actual
| None -> assert false
in
begin match missing_parameters with
| Some [singleParameter] ->
fprintf ppf "@[This value might need to be @{<info>wrapped in a function@ that@ takes@ an@ extra@ parameter@}@ of@ type@ %a@]@,@,"
print_arguments [singleParameter];
fprintf ppf "@[@{<info>Here's the original error message@}@]@,"
| Some arguments ->
fprintf ppf "@[This value seems to @{<info>need to be wrapped in a function that takes extra@ arguments@}@ of@ type:@ @[<hv>%a@]@]@,@,"
print_arguments arguments;
fprintf ppf "@[@{<info>Here's the original error message@}@]@,"
| None -> ()
end;
super_report_unification_error ppf env trace
(function ppf ->
fprintf ppf "This has type:")
(function ppf ->
fprintf ppf "Somewhere wanted:");
show_extra_help ppf env trace;
end
let reportArityMismatch ~arityA ~arityB ppf =
fprintf ppf "This function expected @{<info>%s@} %s, but got @{<error>%s@}"
arityB
(if arityB = "1" then "argument" else "arguments")
arityA
(* Pasted from typecore.ml. Needed for some cases in report_error below *)
Records
let label_of_kind kind =
if kind = "record" then "field" else "constructor"
let spellcheck ppf unbound_name valid_names =
Misc.did_you_mean ppf (fun () ->
Misc.spellcheck valid_names unbound_name
)
taken from -lang/ocaml/blob/d4144647d1bf9bc7dc3aadc24c25a7efa3a67915/typing/typecore.ml#L3769
(* modified branches are commented *)
let report_error env ppf = function
| Typecore.Constructor_arity_mismatch(lid, expected, provided) ->
(* modified *)
fprintf ppf
"@[This variant constructor, %a, expects %i %s; here, we've %sfound %i.@]"
longident lid expected (if expected == 1 then "argument" else "arguments") (if provided < expected then "only " else "") provided
| Label_mismatch(lid, trace) ->
(* modified *)
super_report_unification_error ppf env trace
(function ppf ->
fprintf ppf "The record field %a@ belongs to the type"
longident lid)
(function ppf ->
fprintf ppf "but is mixed here with fields of type")
| Pattern_type_clash trace ->
(* modified *)
super_report_unification_error ppf env trace
(function ppf ->
fprintf ppf "This pattern matches values of type")
(function ppf ->
fprintf ppf "but a pattern was expected which matches values of type")
| Or_pattern_type_clash (id, trace) ->
(* modified *)
super_report_unification_error ppf env trace
(function ppf ->
fprintf ppf "The variable %s on the left-hand side of this or-pattern has type" (Ident.name id))
(function ppf ->
fprintf ppf "but on the right-hand side it has type")
| Expr_type_clash (
(_, {desc = Tarrow _}) ::
(_, {desc = Tconstr (Pident {name = "function$"},_,_)}) :: _
) ->
fprintf ppf "This function is a curried function where an uncurried function is expected"
| Expr_type_clash (
(_, {desc = Tconstr (Pident {name = "function$"}, [{desc=Tvar _}; _],_)}) ::
(_, {desc = Tarrow _}) :: _
) ->
fprintf ppf "This function is an uncurried function where a curried function is expected"
| Expr_type_clash (
(_, {desc = Tconstr (Pident {name = "function$"},[_; tA],_)}) ::
(_, {desc = Tconstr (Pident {name = "function$"},[_; tB],_)}) :: _
) when Ast_uncurried.type_to_arity tA <> Ast_uncurried.type_to_arity tB ->
let arityA = Ast_uncurried.type_to_arity tA |> string_of_int in
let arityB = Ast_uncurried.type_to_arity tB |> string_of_int in
reportArityMismatch ~arityA ~arityB ppf
| Expr_type_clash (
(_, {desc = Tconstr (Pdot (Pdot(Pident {name = "Js_OO"},"Meth",_),a,_),_,_)}) ::
(_, {desc = Tconstr (Pdot (Pdot(Pident {name = "Js_OO"},"Meth",_),b,_),_,_)}) :: _
) when a <> b ->
fprintf ppf "This method has %s but was expected %s" a b
| Expr_type_clash trace ->
(* modified *)
fprintf ppf "@[<v>";
print_expr_type_clash env trace ppf;
fprintf ppf "@]"
| Apply_non_function typ ->
(* modified *)
reset_and_mark_loops typ;
begin match (repr typ).desc with
Tarrow (_, _inputType, returnType, _) ->
let rec countNumberOfArgs count {Types.desc} = match desc with
| Tarrow (_, _inputType, returnType, _) -> countNumberOfArgs (count + 1) returnType
| _ -> count
in
let countNumberOfArgs = countNumberOfArgs 1 in
let acceptsCount = countNumberOfArgs returnType in
fprintf ppf "@[<v>@[<2>This function has type@ @{<info>%a@}@]"
type_expr typ;
fprintf ppf "@ @[It only accepts %i %s; here, it's called with more.@]@]"
acceptsCount (if acceptsCount == 1 then "argument" else "arguments")
| _ ->
fprintf ppf "@[<v>@[<2>This expression has type@ %a@]@ %s@]"
type_expr typ
"It is not a function."
end
| Coercion_failure (ty, ty', trace, b) ->
(* modified *)
super_report_unification_error ppf env trace
(function ppf ->
let ty, ty' = Printtyp.prepare_expansion (ty, ty') in
fprintf ppf
"This expression cannot be coerced to type@;<1 2>%a;@ it has type"
(Printtyp.type_expansion ty) ty')
(function ppf ->
fprintf ppf "but is here used with type");
if b then
fprintf ppf ".@.@[<hov>%s@ %s@]"
"This simple coercion was not fully general."
"Consider using a double coercion."
| Too_many_arguments (in_function, ty) ->
(* modified *)
reset_and_mark_loops ty;
if in_function then begin
fprintf ppf "@[This function expects too many arguments,@ ";
fprintf ppf "it should have type@ %a@]"
type_expr ty
end else begin
match ty with
| {desc = Tconstr (Pident {name = "function$"},_,_)} ->
fprintf ppf "This expression is expected to have an uncurried function"
| _ ->
fprintf ppf "@[This expression should not be a function,@ ";
fprintf ppf "the expected type is@ %a@]"
type_expr ty
end
| Less_general (kind, trace) ->
(* modified *)
super_report_unification_error ppf env trace
(fun ppf -> fprintf ppf "This %s has type" kind)
(fun ppf -> fprintf ppf "which is less general than")
| Recursive_local_constraint trace ->
(* modified *)
super_report_unification_error ppf env trace
(function ppf ->
fprintf ppf "Recursive local constraint when unifying")
(function ppf ->
fprintf ppf "with")
| Wrong_name (eorp, ty, kind, p, name, valid_names) ->
(* modified *)
reset_and_mark_loops ty;
if Path.is_constructor_typath p then begin
fprintf ppf "@[The field %s is not part of the record \
argument for the %a constructor@]"
name
Printtyp.path p;
end else begin
fprintf ppf "@[@[<2>%s type@ @{<info>%a@}@]@ "
eorp type_expr ty;
fprintf ppf "The %s @{<error>%s@} does not belong to type @{<info>%a@}@]"
(label_of_kind kind)
name (*kind*) Printtyp.path p;
end;
spellcheck ppf name valid_names;
| anythingElse ->
Typecore.super_report_error_no_wrap_printing_env env ppf anythingElse
let report_error env ppf err =
Printtyp.wrap_printing_env env (fun () -> report_error env ppf err)
(* This will be called in super_main. This is how you'd override the default error printer from the compiler & register new error_of_exn handlers *)
let setup () =
Location.register_error_of_exn
(function
| Typecore.Error (loc, env, err) ->
Some (Super_location.error_of_printer loc (report_error env) err)
| Typecore.Error_forward err ->
Some err
| _ ->
None
)
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/beebcfff28aba13d833fab557e4fd5fc8ea87336/jscomp/super_errors/super_typecore.ml | ocaml | this is the most frequent error. We should do whatever we can to provide
specific guidance to this generic error before giving up
btw, you can't say "final arguments". Intermediate labeled
arguments might be the ones missing
Pasted from typecore.ml. Needed for some cases in report_error below
modified branches are commented
modified
modified
modified
modified
modified
modified
modified
modified
modified
modified
modified
kind
This will be called in super_main. This is how you'd override the default error printer from the compiler & register new error_of_exn handlers | open Misc
open
open Parsetree
open Types
open Typedtree
open Btype
open Asttypes
open Parsetree
open Types
open Typedtree
open Btype *)
open Ctype
let fprintf = Format.fprintf
let sprintf = Format.sprintf
let longident = Printtyp.longident
let super_report_unification_error = Printtyp.super_report_unification_error
let reset_and_mark_loops = Printtyp.reset_and_mark_loops
let type_expr = Printtyp.type_expr
let rec bottom_aliases = function
| (_, one) :: (_, two) :: rest -> begin match bottom_aliases rest with
| Some types -> Some types
| None -> Some (one, two)
end
| _ -> None
let simple_conversions = [
(("float", "int"), "Belt.Float.toInt");
(("float", "string"), "Belt.Float.toString");
(("int", "float"), "Belt.Int.toFloat");
(("int", "string"), "Belt.Int.toString");
(("string", "float"), "Belt.Float.fromString");
(("string", "int"), "Belt.Int.fromString");
]
let print_simple_conversion ppf (actual, expected) =
try (
let converter = List.assoc (actual, expected) simple_conversions in
fprintf ppf "@,@,@[<v 2>You can convert @{<info>%s@} to @{<info>%s@} with @{<info>%s@}.@]" actual expected converter
) with | Not_found -> ()
let print_simple_message ppf = function
| ("float", "int") -> fprintf ppf "@ If this is a literal, try a number without a trailing dot (e.g. @{<info>20@})."
| ("int", "float") -> fprintf ppf "@ If this is a literal, try a number with a trailing dot (e.g. @{<info>20.@})."
| _ -> ()
let show_extra_help ppf _env trace = begin
match bottom_aliases trace with
| Some ({Types.desc = Tconstr (actualPath, actualArgs, _)}, {desc = Tconstr (expectedPath, expextedArgs, _)}) -> begin
match (actualPath, actualArgs, expectedPath, expextedArgs) with
| (Pident {name = actualName}, [], Pident {name = expectedName}, []) -> begin
print_simple_conversion ppf (actualName, expectedName);
print_simple_message ppf (actualName, expectedName);
end
| _ -> ()
end;
| _ -> ();
end
given type1 is foo = > bar = > baz(qux ) and type 2 is bar = > ) , return Some(foo )
let rec collect_missing_arguments env type1 type2 = match type1 with
why do we use Ctype.matches here ? Please see
| {Types.desc=Tarrow (label, argtype, typ, _)} when Ctype.matches env typ type2 ->
Some [(label, argtype)]
| {desc=Tarrow (label, argtype, typ, _)} -> begin
match collect_missing_arguments env typ type2 with
| Some res -> Some ((label, argtype) :: res)
| None -> None
end
| _ -> None
let print_expr_type_clash env trace ppf = begin
let bottom_aliases_result = bottom_aliases trace in
let missing_arguments = match bottom_aliases_result with
| Some (actual, expected) -> collect_missing_arguments env actual expected
| None -> assert false
in
let print_arguments =
Format.pp_print_list
~pp_sep:(fun ppf _ -> fprintf ppf ",@ ")
(fun ppf (label, argtype) ->
match label with
| Asttypes.Nolabel -> fprintf ppf "@[%a@]" type_expr argtype
| Labelled label ->
fprintf ppf "@[(~%s: %a)@]" label type_expr argtype
| Optional label ->
fprintf ppf "@[(?%s: %a)@]" label type_expr argtype
)
in
match missing_arguments with
| Some [singleArgument] ->
fprintf ppf "@[@{<info>This call is missing an argument@} of type@ %a@]"
print_arguments [singleArgument]
| Some arguments ->
fprintf ppf "@[<hv>@{<info>This call is missing arguments@} of type:@ %a@]"
print_arguments arguments
| None ->
let missing_parameters = match bottom_aliases_result with
| Some (actual, expected) -> collect_missing_arguments env expected actual
| None -> assert false
in
begin match missing_parameters with
| Some [singleParameter] ->
fprintf ppf "@[This value might need to be @{<info>wrapped in a function@ that@ takes@ an@ extra@ parameter@}@ of@ type@ %a@]@,@,"
print_arguments [singleParameter];
fprintf ppf "@[@{<info>Here's the original error message@}@]@,"
| Some arguments ->
fprintf ppf "@[This value seems to @{<info>need to be wrapped in a function that takes extra@ arguments@}@ of@ type:@ @[<hv>%a@]@]@,@,"
print_arguments arguments;
fprintf ppf "@[@{<info>Here's the original error message@}@]@,"
| None -> ()
end;
super_report_unification_error ppf env trace
(function ppf ->
fprintf ppf "This has type:")
(function ppf ->
fprintf ppf "Somewhere wanted:");
show_extra_help ppf env trace;
end
let reportArityMismatch ~arityA ~arityB ppf =
fprintf ppf "This function expected @{<info>%s@} %s, but got @{<error>%s@}"
arityB
(if arityB = "1" then "argument" else "arguments")
arityA
Records
let label_of_kind kind =
if kind = "record" then "field" else "constructor"
let spellcheck ppf unbound_name valid_names =
Misc.did_you_mean ppf (fun () ->
Misc.spellcheck valid_names unbound_name
)
taken from -lang/ocaml/blob/d4144647d1bf9bc7dc3aadc24c25a7efa3a67915/typing/typecore.ml#L3769
let report_error env ppf = function
| Typecore.Constructor_arity_mismatch(lid, expected, provided) ->
fprintf ppf
"@[This variant constructor, %a, expects %i %s; here, we've %sfound %i.@]"
longident lid expected (if expected == 1 then "argument" else "arguments") (if provided < expected then "only " else "") provided
| Label_mismatch(lid, trace) ->
super_report_unification_error ppf env trace
(function ppf ->
fprintf ppf "The record field %a@ belongs to the type"
longident lid)
(function ppf ->
fprintf ppf "but is mixed here with fields of type")
| Pattern_type_clash trace ->
super_report_unification_error ppf env trace
(function ppf ->
fprintf ppf "This pattern matches values of type")
(function ppf ->
fprintf ppf "but a pattern was expected which matches values of type")
| Or_pattern_type_clash (id, trace) ->
super_report_unification_error ppf env trace
(function ppf ->
fprintf ppf "The variable %s on the left-hand side of this or-pattern has type" (Ident.name id))
(function ppf ->
fprintf ppf "but on the right-hand side it has type")
| Expr_type_clash (
(_, {desc = Tarrow _}) ::
(_, {desc = Tconstr (Pident {name = "function$"},_,_)}) :: _
) ->
fprintf ppf "This function is a curried function where an uncurried function is expected"
| Expr_type_clash (
(_, {desc = Tconstr (Pident {name = "function$"}, [{desc=Tvar _}; _],_)}) ::
(_, {desc = Tarrow _}) :: _
) ->
fprintf ppf "This function is an uncurried function where a curried function is expected"
| Expr_type_clash (
(_, {desc = Tconstr (Pident {name = "function$"},[_; tA],_)}) ::
(_, {desc = Tconstr (Pident {name = "function$"},[_; tB],_)}) :: _
) when Ast_uncurried.type_to_arity tA <> Ast_uncurried.type_to_arity tB ->
let arityA = Ast_uncurried.type_to_arity tA |> string_of_int in
let arityB = Ast_uncurried.type_to_arity tB |> string_of_int in
reportArityMismatch ~arityA ~arityB ppf
| Expr_type_clash (
(_, {desc = Tconstr (Pdot (Pdot(Pident {name = "Js_OO"},"Meth",_),a,_),_,_)}) ::
(_, {desc = Tconstr (Pdot (Pdot(Pident {name = "Js_OO"},"Meth",_),b,_),_,_)}) :: _
) when a <> b ->
fprintf ppf "This method has %s but was expected %s" a b
| Expr_type_clash trace ->
fprintf ppf "@[<v>";
print_expr_type_clash env trace ppf;
fprintf ppf "@]"
| Apply_non_function typ ->
reset_and_mark_loops typ;
begin match (repr typ).desc with
Tarrow (_, _inputType, returnType, _) ->
let rec countNumberOfArgs count {Types.desc} = match desc with
| Tarrow (_, _inputType, returnType, _) -> countNumberOfArgs (count + 1) returnType
| _ -> count
in
let countNumberOfArgs = countNumberOfArgs 1 in
let acceptsCount = countNumberOfArgs returnType in
fprintf ppf "@[<v>@[<2>This function has type@ @{<info>%a@}@]"
type_expr typ;
fprintf ppf "@ @[It only accepts %i %s; here, it's called with more.@]@]"
acceptsCount (if acceptsCount == 1 then "argument" else "arguments")
| _ ->
fprintf ppf "@[<v>@[<2>This expression has type@ %a@]@ %s@]"
type_expr typ
"It is not a function."
end
| Coercion_failure (ty, ty', trace, b) ->
super_report_unification_error ppf env trace
(function ppf ->
let ty, ty' = Printtyp.prepare_expansion (ty, ty') in
fprintf ppf
"This expression cannot be coerced to type@;<1 2>%a;@ it has type"
(Printtyp.type_expansion ty) ty')
(function ppf ->
fprintf ppf "but is here used with type");
if b then
fprintf ppf ".@.@[<hov>%s@ %s@]"
"This simple coercion was not fully general."
"Consider using a double coercion."
| Too_many_arguments (in_function, ty) ->
reset_and_mark_loops ty;
if in_function then begin
fprintf ppf "@[This function expects too many arguments,@ ";
fprintf ppf "it should have type@ %a@]"
type_expr ty
end else begin
match ty with
| {desc = Tconstr (Pident {name = "function$"},_,_)} ->
fprintf ppf "This expression is expected to have an uncurried function"
| _ ->
fprintf ppf "@[This expression should not be a function,@ ";
fprintf ppf "the expected type is@ %a@]"
type_expr ty
end
| Less_general (kind, trace) ->
super_report_unification_error ppf env trace
(fun ppf -> fprintf ppf "This %s has type" kind)
(fun ppf -> fprintf ppf "which is less general than")
| Recursive_local_constraint trace ->
super_report_unification_error ppf env trace
(function ppf ->
fprintf ppf "Recursive local constraint when unifying")
(function ppf ->
fprintf ppf "with")
| Wrong_name (eorp, ty, kind, p, name, valid_names) ->
reset_and_mark_loops ty;
if Path.is_constructor_typath p then begin
fprintf ppf "@[The field %s is not part of the record \
argument for the %a constructor@]"
name
Printtyp.path p;
end else begin
fprintf ppf "@[@[<2>%s type@ @{<info>%a@}@]@ "
eorp type_expr ty;
fprintf ppf "The %s @{<error>%s@} does not belong to type @{<info>%a@}@]"
(label_of_kind kind)
end;
spellcheck ppf name valid_names;
| anythingElse ->
Typecore.super_report_error_no_wrap_printing_env env ppf anythingElse
let report_error env ppf err =
Printtyp.wrap_printing_env env (fun () -> report_error env ppf err)
let setup () =
Location.register_error_of_exn
(function
| Typecore.Error (loc, env, err) ->
Some (Super_location.error_of_printer loc (report_error env) err)
| Typecore.Error_forward err ->
Some err
| _ ->
None
)
|
ae7ddc9cb84cd82e18c5f5c3c65db66ff4923d04931113bc2056812891f93441 | haroldcarr/learn-haskell-coq-ml-etc | Lib.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveGeneric #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE FunctionalDependencies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE TemplateHaskell #-}
module Lib where
import Control.Concurrent
import Control.Lens
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer.Strict
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Serialize (Serialize)
import qualified Data.Serialize as S
import Data.Word (Word64)
import GHC.Generics
import GHC.Int (Int64)
------------------------------------------------------------------------------
type ShardId = Int64
type MsgNum = Int
type Port = Word64
data Role
= Follower
| Candidate
| Leader
| Inactive
deriving (Show, Generic, Eq)
newtype Term = Term Int
deriving (Show, Read, Eq, Enum, Num, Ord, Generic, Serialize)
data NodeId = NodeId { _host :: !String, _port :: !Port, _fullAddr :: !String }
deriving (Eq,Ord,Read,Generic,Show)
newtype LogIndex = LogIndex Int
deriving (Show, Read, Eq, Ord, Enum, Num, Real, Integral, Generic, Serialize)
data RaftState a = RaftState
{ _nodeRole :: !Role
, _term :: !Term
, _currentLeader :: !(Maybe NodeId)
, _ignoreLeader :: !Bool
, _commitIndex :: !LogIndex
, _timerThread :: !(Maybe ThreadId)
, _timeSinceLastAER :: !Int
, _lNextIndex :: !(Map NodeId LogIndex)
, _lMatchIndex :: !(Map NodeId LogIndex)
, _lRATS :: !a
, _shardLeader :: !(Map ShardId NodeId)
, _numTimeouts :: !Int
, _nextMsgNum :: !MsgNum
, _logSenderSend :: !Bool
} deriving Show
makeLenses ''RaftState
class RNodeRole a where
rNodeRole :: a -> Role
instance RNodeRole (RaftState a) where
rNodeRole s = s^.nodeRole
class WNodeRole a where
wNodeRole :: Role -> a -> a
instance WNodeRole (RaftState a) where
wNodeRole r s = s {_nodeRole = r}
wNodeRole' :: (RNodeRole a, WNodeRole a, MonadState a m) => Role -> m ()
wNodeRole' r = get >>= put . wNodeRole r
class RTerm a where
rTerm :: a -> Term
instance RTerm (RaftState a) where
rTerm s = s^.term
class RTerm a => WTerm a where
wTerm :: Term -> a -> a
instance WTerm (RaftState a) where
wTerm t s = s {_term = t}
wTerm' :: (RTerm t, WTerm t, MonadState t m) => Term -> m ()
wTerm' t = get >>= put . wTerm t
type RNodeRoleRWTerm x = (RNodeRole x, RTerm x, WTerm x)
------------------------------------------------------------------------------
someFunc :: IO ()
someFunc = do
let r = RaftState
Follower
(Term 0)
Nothing
False
(LogIndex 0)
Nothing
0
Map.empty
Map.empty
(3::Int)
Map.empty
0
0
True
bar r
( ( ( Role , Term , RaftState Int ) , [ String ] ) , RaftState Int )
(((r, t, s0), msgs), s1) <- runStateT (runReaderT (runWriterT xxx) r) r
print r
print t
print s0
print msgs
print s1
bar :: RNodeRole r => r -> IO ()
bar r = do
putStrLn "\n"
print (rNodeRole r)
xxx
::
( RNodeRole r , RTerm t , WTerm t , MonadWriter [ String ] m , MonadReader r m , MonadState t m )
(RNodeRoleRWTerm x , MonadWriter [String] m, MonadReader x m, MonadState x m)
=> m (Role, Term, x)
xxx = do
r <- asks rNodeRole
tell [show r]
t <- gets rTerm
tell [show t]
s <- get
put $ wTerm (t + 1) s
t' <- gets rTerm
tell [show t']
wTerm' (t' + 1)
t'' <- gets rTerm
tell [show t'']
return (r, t, s)
| null | https://raw.githubusercontent.com/haroldcarr/learn-haskell-coq-ml-etc/b4e83ec7c7af730de688b7376497b9f49dc24a0e/haskell/topic/monads/hc-has-field/src/Lib.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE TemplateHaskell #
----------------------------------------------------------------------------
---------------------------------------------------------------------------- | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE FunctionalDependencies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
module Lib where
import Control.Concurrent
import Control.Lens
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer.Strict
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Serialize (Serialize)
import qualified Data.Serialize as S
import Data.Word (Word64)
import GHC.Generics
import GHC.Int (Int64)
type ShardId = Int64
type MsgNum = Int
type Port = Word64
data Role
= Follower
| Candidate
| Leader
| Inactive
deriving (Show, Generic, Eq)
newtype Term = Term Int
deriving (Show, Read, Eq, Enum, Num, Ord, Generic, Serialize)
data NodeId = NodeId { _host :: !String, _port :: !Port, _fullAddr :: !String }
deriving (Eq,Ord,Read,Generic,Show)
newtype LogIndex = LogIndex Int
deriving (Show, Read, Eq, Ord, Enum, Num, Real, Integral, Generic, Serialize)
data RaftState a = RaftState
{ _nodeRole :: !Role
, _term :: !Term
, _currentLeader :: !(Maybe NodeId)
, _ignoreLeader :: !Bool
, _commitIndex :: !LogIndex
, _timerThread :: !(Maybe ThreadId)
, _timeSinceLastAER :: !Int
, _lNextIndex :: !(Map NodeId LogIndex)
, _lMatchIndex :: !(Map NodeId LogIndex)
, _lRATS :: !a
, _shardLeader :: !(Map ShardId NodeId)
, _numTimeouts :: !Int
, _nextMsgNum :: !MsgNum
, _logSenderSend :: !Bool
} deriving Show
makeLenses ''RaftState
class RNodeRole a where
rNodeRole :: a -> Role
instance RNodeRole (RaftState a) where
rNodeRole s = s^.nodeRole
class WNodeRole a where
wNodeRole :: Role -> a -> a
instance WNodeRole (RaftState a) where
wNodeRole r s = s {_nodeRole = r}
wNodeRole' :: (RNodeRole a, WNodeRole a, MonadState a m) => Role -> m ()
wNodeRole' r = get >>= put . wNodeRole r
class RTerm a where
rTerm :: a -> Term
instance RTerm (RaftState a) where
rTerm s = s^.term
class RTerm a => WTerm a where
wTerm :: Term -> a -> a
instance WTerm (RaftState a) where
wTerm t s = s {_term = t}
wTerm' :: (RTerm t, WTerm t, MonadState t m) => Term -> m ()
wTerm' t = get >>= put . wTerm t
type RNodeRoleRWTerm x = (RNodeRole x, RTerm x, WTerm x)
someFunc :: IO ()
someFunc = do
let r = RaftState
Follower
(Term 0)
Nothing
False
(LogIndex 0)
Nothing
0
Map.empty
Map.empty
(3::Int)
Map.empty
0
0
True
bar r
( ( ( Role , Term , RaftState Int ) , [ String ] ) , RaftState Int )
(((r, t, s0), msgs), s1) <- runStateT (runReaderT (runWriterT xxx) r) r
print r
print t
print s0
print msgs
print s1
bar :: RNodeRole r => r -> IO ()
bar r = do
putStrLn "\n"
print (rNodeRole r)
xxx
::
( RNodeRole r , RTerm t , WTerm t , MonadWriter [ String ] m , MonadReader r m , MonadState t m )
(RNodeRoleRWTerm x , MonadWriter [String] m, MonadReader x m, MonadState x m)
=> m (Role, Term, x)
xxx = do
r <- asks rNodeRole
tell [show r]
t <- gets rTerm
tell [show t]
s <- get
put $ wTerm (t + 1) s
t' <- gets rTerm
tell [show t']
wTerm' (t' + 1)
t'' <- gets rTerm
tell [show t'']
return (r, t, s)
|
e1d9172b3f00cdd9a83865ab0e0aa5a44f445710ed4565e95e42c317b83630c1 | racket/redex | stlc-tests-lib.rkt | #lang racket/base
(require redex/reduction-semantics)
(provide stlc-tests
consistent-with?)
(define (consistent-with? t1 t2)
(define table (make-hash))
(let loop ([t1 t1]
[t2 t2])
(cond
[(and (pair? t1) (pair? t2))
(and (loop (car t1) (car t2))
(loop (cdr t1) (cdr t2)))]
[(and (symbol? t1)
(symbol? t2)
(not (equal? t1 t2))
(same-first-char-or-empty-and-numbers? t1 t2))
(cond
[(equal? t1 t2) #t]
[else
(define bound (hash-ref table t1 #f))
(cond
[bound (equal? bound t2)]
[else
(hash-set! table t1 t2)
#t])])]
[else (equal? t1 t2)])))
(define (same-first-char-or-empty-and-numbers? t1 t2)
(define (first-char s) (string-ref (symbol->string s) 0))
(cond
[(equal? t1 '||)
(regexp-match #rx"[^[0-9]*$" (symbol->string t2))]
[(equal? t2 '||)
(regexp-match #rx"[^[0-9]*$" (symbol->string t1))]
[else
(equal? (first-char t1)
(first-char t2))]))
(define-syntax-rule
(stlc-tests uses-bound-var?
typeof
red
reduction-step-count
Eval
subst)
(begin
(test-equal (term (uses-bound-var? () 5))
#f)
(test-equal (term (uses-bound-var? () nil))
#f)
(test-equal (term (uses-bound-var? () (λ (x int) x)))
#t)
(test-equal (term (uses-bound-var? () (λ (x int) y)))
#f)
(test-equal (term (uses-bound-var? () ((λ (x int) x) 5)))
#t)
(test-equal (term (uses-bound-var? () ((λ (x int) xy) 5)))
#f)
(test-equal (consistent-with? '(λ (z1 int) (λ (z2 int) z2))
'(λ (z int) (λ (z1 int) z)))
#f)
(test-equal (consistent-with? '(λ (z1 int) (λ (z2 int) z2))
'(λ (z int) (λ (z1 int) z1)))
#t)
(test-equal (term (subst ((+ 1) 1) x 2))
(term ((+ 1) 1)))
(test-equal (term (subst ((+ x) x) x 2))
(term ((+ 2) 2)))
(test-equal (term (subst ((+ y) x) x 2))
(term ((+ y) 2)))
(test-equal (term (subst ((+ y) z) x 2))
(term ((+ y) z)))
(test-equal (term (subst ((λ (x int) x) x) x 2))
(term ((λ (x int) x) 2)))
(test-equal (consistent-with? (term (subst ((λ (y int) x) x) x 2))
(term ((λ (y int) 2) 2)))
#t)
(test-equal (consistent-with? (term (subst ((λ (y int) x) x) x (λ (q int) z)))
(term ((λ (y int) (λ (q int) z)) (λ (q int) z))))
#t)
(test-equal (consistent-with? (term (subst ((λ (y int) x) x) x (λ (q int) y)))
(term ((λ (y2 int) (λ (q int) y)) (λ (q int) y))))
#t)
(test-equal (consistent-with? (term (subst (λ (z int) (λ (z1 int) z)) q 1))
(term (λ (z int) (λ (z1 int) z))))
#t)
(test-equal (judgment-holds (typeof • 5 τ) τ)
(list (term int)))
(test-equal (judgment-holds (typeof • nil τ) τ)
(list (term (list int))))
(test-equal (judgment-holds (typeof • (cons 1) τ) τ)
(list (term ((list int) → (list int)))))
(test-equal (judgment-holds (typeof • ((cons 1) nil) τ) τ)
(list (term (list int))))
(test-equal (judgment-holds (typeof • (λ (x int) x) τ) τ)
(list (term (int → int))))
(test-equal (judgment-holds (typeof • (λ (x (int → int)) (λ (y int) x)) τ) τ)
(list (term ((int → int) → (int → (int → int))))))
(test-equal (judgment-holds (typeof • ((+ ((+ 1) 2)) ((+ 3) 4)) τ) τ)
(list (term int)))
(test-->> red (term ((λ (x int) x) 7)) (term 7))
(test-->> red (term (((λ (x int) (λ (x int) x)) 2) 1)) (term 1))
(test-->> red (term (((λ (x int) (λ (y int) x)) 2) 1)) (term 2))
(test-->> red
(term ((λ (x int) ((cons x) nil)) 11))
(term ((cons 11) nil)))
(test-->> red
(term ((λ (x int) ((cons x) nil)) 11))
(term ((cons 11) nil)))
(test-->> red
(term ((cons ((λ (x int) x) 11)) nil))
(term ((cons 11) nil)))
(test-->> red
(term (cons ((λ (x int) x) 1)))
(term (cons 1)))
(test-->> red
(term ((cons ((λ (x int) x) 1)) nil))
(term ((cons 1) nil)))
(test-->> red
(term (hd ((λ (x int) ((cons x) nil)) 11)))
(term 11))
(test-->> red
(term (tl ((λ (x int) ((cons x) nil)) 11)))
(term nil))
(test-->> red
(term (tl nil))
"error")
(test-->> red
(term (hd nil))
"error")
(test-->> red
(term ((+ 1) (hd nil)))
"error")
(test-->> red
(term ((+ ((+ 1) 2)) ((+ 3) 4)))
(term 10))
(test-->> red
(term ((λ (f (int → (list int))) (f 3)) (cons 1)))
(term ((cons 1) 3)))
(test-->> red
(term ((λ (f (int → int)) (f 3)) (+ 1)))
(term 4))
(test-equal (Eval (term ((λ (x int) x) 3)))
(term 3))
(test-equal (reduction-step-count (term (λ (x int) x)))
0)
(test-equal (reduction-step-count (term ((λ (x int) x) 1)))
1)
(test-equal (reduction-step-count (term ((λ (x int) x) 1)))
1)
(test-equal (reduction-step-count (term ((cons 1) nil)))
0)
(test-equal (reduction-step-count (term (hd ((cons 1) nil))))
1)
(test-equal (reduction-step-count (term (hd nil)))
1)
(test-equal (reduction-step-count (term ((λ (x int) x) (hd ((cons 1) nil)))))
2)
(test-results))) | null | https://raw.githubusercontent.com/racket/redex/4c2dc96d90cedeb08ec1850575079b952c5ad396/redex-examples/redex/examples/stlc-tests-lib.rkt | racket | #lang racket/base
(require redex/reduction-semantics)
(provide stlc-tests
consistent-with?)
(define (consistent-with? t1 t2)
(define table (make-hash))
(let loop ([t1 t1]
[t2 t2])
(cond
[(and (pair? t1) (pair? t2))
(and (loop (car t1) (car t2))
(loop (cdr t1) (cdr t2)))]
[(and (symbol? t1)
(symbol? t2)
(not (equal? t1 t2))
(same-first-char-or-empty-and-numbers? t1 t2))
(cond
[(equal? t1 t2) #t]
[else
(define bound (hash-ref table t1 #f))
(cond
[bound (equal? bound t2)]
[else
(hash-set! table t1 t2)
#t])])]
[else (equal? t1 t2)])))
(define (same-first-char-or-empty-and-numbers? t1 t2)
(define (first-char s) (string-ref (symbol->string s) 0))
(cond
[(equal? t1 '||)
(regexp-match #rx"[^[0-9]*$" (symbol->string t2))]
[(equal? t2 '||)
(regexp-match #rx"[^[0-9]*$" (symbol->string t1))]
[else
(equal? (first-char t1)
(first-char t2))]))
(define-syntax-rule
(stlc-tests uses-bound-var?
typeof
red
reduction-step-count
Eval
subst)
(begin
(test-equal (term (uses-bound-var? () 5))
#f)
(test-equal (term (uses-bound-var? () nil))
#f)
(test-equal (term (uses-bound-var? () (λ (x int) x)))
#t)
(test-equal (term (uses-bound-var? () (λ (x int) y)))
#f)
(test-equal (term (uses-bound-var? () ((λ (x int) x) 5)))
#t)
(test-equal (term (uses-bound-var? () ((λ (x int) xy) 5)))
#f)
(test-equal (consistent-with? '(λ (z1 int) (λ (z2 int) z2))
'(λ (z int) (λ (z1 int) z)))
#f)
(test-equal (consistent-with? '(λ (z1 int) (λ (z2 int) z2))
'(λ (z int) (λ (z1 int) z1)))
#t)
(test-equal (term (subst ((+ 1) 1) x 2))
(term ((+ 1) 1)))
(test-equal (term (subst ((+ x) x) x 2))
(term ((+ 2) 2)))
(test-equal (term (subst ((+ y) x) x 2))
(term ((+ y) 2)))
(test-equal (term (subst ((+ y) z) x 2))
(term ((+ y) z)))
(test-equal (term (subst ((λ (x int) x) x) x 2))
(term ((λ (x int) x) 2)))
(test-equal (consistent-with? (term (subst ((λ (y int) x) x) x 2))
(term ((λ (y int) 2) 2)))
#t)
(test-equal (consistent-with? (term (subst ((λ (y int) x) x) x (λ (q int) z)))
(term ((λ (y int) (λ (q int) z)) (λ (q int) z))))
#t)
(test-equal (consistent-with? (term (subst ((λ (y int) x) x) x (λ (q int) y)))
(term ((λ (y2 int) (λ (q int) y)) (λ (q int) y))))
#t)
(test-equal (consistent-with? (term (subst (λ (z int) (λ (z1 int) z)) q 1))
(term (λ (z int) (λ (z1 int) z))))
#t)
(test-equal (judgment-holds (typeof • 5 τ) τ)
(list (term int)))
(test-equal (judgment-holds (typeof • nil τ) τ)
(list (term (list int))))
(test-equal (judgment-holds (typeof • (cons 1) τ) τ)
(list (term ((list int) → (list int)))))
(test-equal (judgment-holds (typeof • ((cons 1) nil) τ) τ)
(list (term (list int))))
(test-equal (judgment-holds (typeof • (λ (x int) x) τ) τ)
(list (term (int → int))))
(test-equal (judgment-holds (typeof • (λ (x (int → int)) (λ (y int) x)) τ) τ)
(list (term ((int → int) → (int → (int → int))))))
(test-equal (judgment-holds (typeof • ((+ ((+ 1) 2)) ((+ 3) 4)) τ) τ)
(list (term int)))
(test-->> red (term ((λ (x int) x) 7)) (term 7))
(test-->> red (term (((λ (x int) (λ (x int) x)) 2) 1)) (term 1))
(test-->> red (term (((λ (x int) (λ (y int) x)) 2) 1)) (term 2))
(test-->> red
(term ((λ (x int) ((cons x) nil)) 11))
(term ((cons 11) nil)))
(test-->> red
(term ((λ (x int) ((cons x) nil)) 11))
(term ((cons 11) nil)))
(test-->> red
(term ((cons ((λ (x int) x) 11)) nil))
(term ((cons 11) nil)))
(test-->> red
(term (cons ((λ (x int) x) 1)))
(term (cons 1)))
(test-->> red
(term ((cons ((λ (x int) x) 1)) nil))
(term ((cons 1) nil)))
(test-->> red
(term (hd ((λ (x int) ((cons x) nil)) 11)))
(term 11))
(test-->> red
(term (tl ((λ (x int) ((cons x) nil)) 11)))
(term nil))
(test-->> red
(term (tl nil))
"error")
(test-->> red
(term (hd nil))
"error")
(test-->> red
(term ((+ 1) (hd nil)))
"error")
(test-->> red
(term ((+ ((+ 1) 2)) ((+ 3) 4)))
(term 10))
(test-->> red
(term ((λ (f (int → (list int))) (f 3)) (cons 1)))
(term ((cons 1) 3)))
(test-->> red
(term ((λ (f (int → int)) (f 3)) (+ 1)))
(term 4))
(test-equal (Eval (term ((λ (x int) x) 3)))
(term 3))
(test-equal (reduction-step-count (term (λ (x int) x)))
0)
(test-equal (reduction-step-count (term ((λ (x int) x) 1)))
1)
(test-equal (reduction-step-count (term ((λ (x int) x) 1)))
1)
(test-equal (reduction-step-count (term ((cons 1) nil)))
0)
(test-equal (reduction-step-count (term (hd ((cons 1) nil))))
1)
(test-equal (reduction-step-count (term (hd nil)))
1)
(test-equal (reduction-step-count (term ((λ (x int) x) (hd ((cons 1) nil)))))
2)
(test-results))) | |
26ef3814b6c336cb315035aefcf05a7f0ba8f57069cedb4ad64fe2e5a4bb8e52 | ml4tp/tcoq | eqschemes.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(** This file builds schemes relative to equality inductive types *)
open Names
open Term
open Environ
open Ind_tables
(** Builds a left-to-right rewriting scheme for an equality type *)
val rew_l2r_dep_scheme_kind : individual scheme_kind
val rew_l2r_scheme_kind : individual scheme_kind
val rew_r2l_forward_dep_scheme_kind : individual scheme_kind
val rew_l2r_forward_dep_scheme_kind : individual scheme_kind
val rew_r2l_dep_scheme_kind : individual scheme_kind
val rew_r2l_scheme_kind : individual scheme_kind
val build_r2l_rew_scheme : bool -> env -> inductive -> sorts_family ->
constr Evd.in_evar_universe_context
val build_l2r_rew_scheme : bool -> env -> inductive -> sorts_family ->
constr Evd.in_evar_universe_context * Safe_typing.private_constants
val build_r2l_forward_rew_scheme :
bool -> env -> inductive -> sorts_family -> constr Evd.in_evar_universe_context
val build_l2r_forward_rew_scheme :
bool -> env -> inductive -> sorts_family -> constr Evd.in_evar_universe_context
(** Builds a symmetry scheme for a symmetrical equality type *)
val build_sym_scheme : env -> inductive -> constr Evd.in_evar_universe_context
val sym_scheme_kind : individual scheme_kind
val build_sym_involutive_scheme : env -> inductive ->
constr Evd.in_evar_universe_context * Safe_typing.private_constants
val sym_involutive_scheme_kind : individual scheme_kind
(** Builds a congruence scheme for an equality type *)
val congr_scheme_kind : individual scheme_kind
val build_congr : env -> constr * constr * Univ.universe_context_set -> inductive ->
constr Evd.in_evar_universe_context
| null | https://raw.githubusercontent.com/ml4tp/tcoq/7a78c31df480fba721648f277ab0783229c8bece/tactics/eqschemes.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* This file builds schemes relative to equality inductive types
* Builds a left-to-right rewriting scheme for an equality type
* Builds a symmetry scheme for a symmetrical equality type
* Builds a congruence scheme for an equality type | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Names
open Term
open Environ
open Ind_tables
val rew_l2r_dep_scheme_kind : individual scheme_kind
val rew_l2r_scheme_kind : individual scheme_kind
val rew_r2l_forward_dep_scheme_kind : individual scheme_kind
val rew_l2r_forward_dep_scheme_kind : individual scheme_kind
val rew_r2l_dep_scheme_kind : individual scheme_kind
val rew_r2l_scheme_kind : individual scheme_kind
val build_r2l_rew_scheme : bool -> env -> inductive -> sorts_family ->
constr Evd.in_evar_universe_context
val build_l2r_rew_scheme : bool -> env -> inductive -> sorts_family ->
constr Evd.in_evar_universe_context * Safe_typing.private_constants
val build_r2l_forward_rew_scheme :
bool -> env -> inductive -> sorts_family -> constr Evd.in_evar_universe_context
val build_l2r_forward_rew_scheme :
bool -> env -> inductive -> sorts_family -> constr Evd.in_evar_universe_context
val build_sym_scheme : env -> inductive -> constr Evd.in_evar_universe_context
val sym_scheme_kind : individual scheme_kind
val build_sym_involutive_scheme : env -> inductive ->
constr Evd.in_evar_universe_context * Safe_typing.private_constants
val sym_involutive_scheme_kind : individual scheme_kind
val congr_scheme_kind : individual scheme_kind
val build_congr : env -> constr * constr * Univ.universe_context_set -> inductive ->
constr Evd.in_evar_universe_context
|
09b85caced5405300f9de6a3fdf3b6030e85fa44a94904f43cde1be326f3e82a | ghcjs/ghcjs-dom | HTMLHyperlinkElementUtils.hs | # LANGUAGE PatternSynonyms #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE JavaScriptFFI #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
module GHCJS.DOM.JSFFI.Generated.HTMLHyperlinkElementUtils
(js_setHref, setHref, js_getHref, getHref, js_getOrigin, getOrigin,
js_setProtocol, setProtocol, js_getProtocol, getProtocol,
js_setUsername, setUsername, js_getUsername, getUsername,
js_setPassword, setPassword, js_getPassword, getPassword,
js_setHost, setHost, js_getHost, getHost, js_setHostname,
setHostname, js_getHostname, getHostname, js_setPort, setPort,
js_getPort, getPort, js_setPathname, setPathname, js_getPathname,
getPathname, js_setSearch, setSearch, js_getSearch, getSearch,
js_setHash, setHash, js_getHash, getHash,
HTMLHyperlinkElementUtils(..), gTypeHTMLHyperlinkElementUtils,
IsHTMLHyperlinkElementUtils, toHTMLHyperlinkElementUtils)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"href\"] = $2;" js_setHref ::
HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.href Mozilla HTMLHyperlinkElementUtils.href documentation >
setHref ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setHref self val
= liftIO
(js_setHref (toHTMLHyperlinkElementUtils self) (toJSString val))
foreign import javascript unsafe "$1[\"href\"]" js_getHref ::
HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.href Mozilla HTMLHyperlinkElementUtils.href documentation >
getHref ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getHref self
= liftIO
(fromJSString <$> (js_getHref (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"origin\"]" js_getOrigin ::
HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.origin Mozilla HTMLHyperlinkElementUtils.origin documentation >
getOrigin ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getOrigin self
= liftIO
(fromJSString <$>
(js_getOrigin (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"protocol\"] = $2;"
js_setProtocol :: HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.protocol Mozilla HTMLHyperlinkElementUtils.protocol documentation >
setProtocol ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setProtocol self val
= liftIO
(js_setProtocol (toHTMLHyperlinkElementUtils self)
(toJSString val))
foreign import javascript unsafe "$1[\"protocol\"]" js_getProtocol
:: HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.protocol Mozilla HTMLHyperlinkElementUtils.protocol documentation >
getProtocol ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getProtocol self
= liftIO
(fromJSString <$>
(js_getProtocol (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"username\"] = $2;"
js_setUsername :: HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.username Mozilla HTMLHyperlinkElementUtils.username documentation >
setUsername ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setUsername self val
= liftIO
(js_setUsername (toHTMLHyperlinkElementUtils self)
(toJSString val))
foreign import javascript unsafe "$1[\"username\"]" js_getUsername
:: HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.username Mozilla HTMLHyperlinkElementUtils.username documentation >
getUsername ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getUsername self
= liftIO
(fromJSString <$>
(js_getUsername (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"password\"] = $2;"
js_setPassword :: HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.password Mozilla HTMLHyperlinkElementUtils.password documentation >
setPassword ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setPassword self val
= liftIO
(js_setPassword (toHTMLHyperlinkElementUtils self)
(toJSString val))
foreign import javascript unsafe "$1[\"password\"]" js_getPassword
:: HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.password Mozilla HTMLHyperlinkElementUtils.password documentation >
getPassword ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getPassword self
= liftIO
(fromJSString <$>
(js_getPassword (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"host\"] = $2;" js_setHost ::
HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.host Mozilla HTMLHyperlinkElementUtils.host documentation >
setHost ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setHost self val
= liftIO
(js_setHost (toHTMLHyperlinkElementUtils self) (toJSString val))
foreign import javascript unsafe "$1[\"host\"]" js_getHost ::
HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.host Mozilla HTMLHyperlinkElementUtils.host documentation >
getHost ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getHost self
= liftIO
(fromJSString <$> (js_getHost (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"hostname\"] = $2;"
js_setHostname :: HTMLHyperlinkElementUtils -> JSString -> IO ()
-- | <-US/docs/Web/API/HTMLHyperlinkElementUtils.hostname Mozilla HTMLHyperlinkElementUtils.hostname documentation>
setHostname ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setHostname self val
= liftIO
(js_setHostname (toHTMLHyperlinkElementUtils self)
(toJSString val))
foreign import javascript unsafe "$1[\"hostname\"]" js_getHostname
:: HTMLHyperlinkElementUtils -> IO JSString
-- | <-US/docs/Web/API/HTMLHyperlinkElementUtils.hostname Mozilla HTMLHyperlinkElementUtils.hostname documentation>
getHostname ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getHostname self
= liftIO
(fromJSString <$>
(js_getHostname (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"port\"] = $2;" js_setPort ::
HTMLHyperlinkElementUtils -> JSString -> IO ()
-- | <-US/docs/Web/API/HTMLHyperlinkElementUtils.port Mozilla HTMLHyperlinkElementUtils.port documentation>
setPort ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setPort self val
= liftIO
(js_setPort (toHTMLHyperlinkElementUtils self) (toJSString val))
foreign import javascript unsafe "$1[\"port\"]" js_getPort ::
HTMLHyperlinkElementUtils -> IO JSString
-- | <-US/docs/Web/API/HTMLHyperlinkElementUtils.port Mozilla HTMLHyperlinkElementUtils.port documentation>
getPort ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getPort self
= liftIO
(fromJSString <$> (js_getPort (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"pathname\"] = $2;"
js_setPathname :: HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.pathname Mozilla HTMLHyperlinkElementUtils.pathname documentation >
setPathname ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setPathname self val
= liftIO
(js_setPathname (toHTMLHyperlinkElementUtils self)
(toJSString val))
foreign import javascript unsafe "$1[\"pathname\"]" js_getPathname
:: HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.pathname Mozilla HTMLHyperlinkElementUtils.pathname documentation >
getPathname ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getPathname self
= liftIO
(fromJSString <$>
(js_getPathname (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"search\"] = $2;"
js_setSearch :: HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.search Mozilla HTMLHyperlinkElementUtils.search documentation >
setSearch ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setSearch self val
= liftIO
(js_setSearch (toHTMLHyperlinkElementUtils self) (toJSString val))
foreign import javascript unsafe "$1[\"search\"]" js_getSearch ::
HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.search Mozilla HTMLHyperlinkElementUtils.search documentation >
getSearch ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getSearch self
= liftIO
(fromJSString <$>
(js_getSearch (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"hash\"] = $2;" js_setHash ::
HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.hash Mozilla HTMLHyperlinkElementUtils.hash documentation >
setHash ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setHash self val
= liftIO
(js_setHash (toHTMLHyperlinkElementUtils self) (toJSString val))
foreign import javascript unsafe "$1[\"hash\"]" js_getHash ::
HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.hash Mozilla HTMLHyperlinkElementUtils.hash documentation >
getHash ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getHash self
= liftIO
(fromJSString <$> (js_getHash (toHTMLHyperlinkElementUtils self))) | null | https://raw.githubusercontent.com/ghcjs/ghcjs-dom/749963557d878d866be2d0184079836f367dd0ea/ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/HTMLHyperlinkElementUtils.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #
| <-US/docs/Web/API/HTMLHyperlinkElementUtils.hostname Mozilla HTMLHyperlinkElementUtils.hostname documentation>
| <-US/docs/Web/API/HTMLHyperlinkElementUtils.hostname Mozilla HTMLHyperlinkElementUtils.hostname documentation>
| <-US/docs/Web/API/HTMLHyperlinkElementUtils.port Mozilla HTMLHyperlinkElementUtils.port documentation>
| <-US/docs/Web/API/HTMLHyperlinkElementUtils.port Mozilla HTMLHyperlinkElementUtils.port documentation> | # LANGUAGE PatternSynonyms #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE JavaScriptFFI #
module GHCJS.DOM.JSFFI.Generated.HTMLHyperlinkElementUtils
(js_setHref, setHref, js_getHref, getHref, js_getOrigin, getOrigin,
js_setProtocol, setProtocol, js_getProtocol, getProtocol,
js_setUsername, setUsername, js_getUsername, getUsername,
js_setPassword, setPassword, js_getPassword, getPassword,
js_setHost, setHost, js_getHost, getHost, js_setHostname,
setHostname, js_getHostname, getHostname, js_setPort, setPort,
js_getPort, getPort, js_setPathname, setPathname, js_getPathname,
getPathname, js_setSearch, setSearch, js_getSearch, getSearch,
js_setHash, setHash, js_getHash, getHash,
HTMLHyperlinkElementUtils(..), gTypeHTMLHyperlinkElementUtils,
IsHTMLHyperlinkElementUtils, toHTMLHyperlinkElementUtils)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"href\"] = $2;" js_setHref ::
HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.href Mozilla HTMLHyperlinkElementUtils.href documentation >
setHref ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setHref self val
= liftIO
(js_setHref (toHTMLHyperlinkElementUtils self) (toJSString val))
foreign import javascript unsafe "$1[\"href\"]" js_getHref ::
HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.href Mozilla HTMLHyperlinkElementUtils.href documentation >
getHref ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getHref self
= liftIO
(fromJSString <$> (js_getHref (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"origin\"]" js_getOrigin ::
HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.origin Mozilla HTMLHyperlinkElementUtils.origin documentation >
getOrigin ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getOrigin self
= liftIO
(fromJSString <$>
(js_getOrigin (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"protocol\"] = $2;"
js_setProtocol :: HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.protocol Mozilla HTMLHyperlinkElementUtils.protocol documentation >
setProtocol ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setProtocol self val
= liftIO
(js_setProtocol (toHTMLHyperlinkElementUtils self)
(toJSString val))
foreign import javascript unsafe "$1[\"protocol\"]" js_getProtocol
:: HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.protocol Mozilla HTMLHyperlinkElementUtils.protocol documentation >
getProtocol ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getProtocol self
= liftIO
(fromJSString <$>
(js_getProtocol (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"username\"] = $2;"
js_setUsername :: HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.username Mozilla HTMLHyperlinkElementUtils.username documentation >
setUsername ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setUsername self val
= liftIO
(js_setUsername (toHTMLHyperlinkElementUtils self)
(toJSString val))
foreign import javascript unsafe "$1[\"username\"]" js_getUsername
:: HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.username Mozilla HTMLHyperlinkElementUtils.username documentation >
getUsername ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getUsername self
= liftIO
(fromJSString <$>
(js_getUsername (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"password\"] = $2;"
js_setPassword :: HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.password Mozilla HTMLHyperlinkElementUtils.password documentation >
setPassword ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setPassword self val
= liftIO
(js_setPassword (toHTMLHyperlinkElementUtils self)
(toJSString val))
foreign import javascript unsafe "$1[\"password\"]" js_getPassword
:: HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.password Mozilla HTMLHyperlinkElementUtils.password documentation >
getPassword ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getPassword self
= liftIO
(fromJSString <$>
(js_getPassword (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"host\"] = $2;" js_setHost ::
HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.host Mozilla HTMLHyperlinkElementUtils.host documentation >
setHost ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setHost self val
= liftIO
(js_setHost (toHTMLHyperlinkElementUtils self) (toJSString val))
foreign import javascript unsafe "$1[\"host\"]" js_getHost ::
HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.host Mozilla HTMLHyperlinkElementUtils.host documentation >
getHost ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getHost self
= liftIO
(fromJSString <$> (js_getHost (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"hostname\"] = $2;"
js_setHostname :: HTMLHyperlinkElementUtils -> JSString -> IO ()
setHostname ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setHostname self val
= liftIO
(js_setHostname (toHTMLHyperlinkElementUtils self)
(toJSString val))
foreign import javascript unsafe "$1[\"hostname\"]" js_getHostname
:: HTMLHyperlinkElementUtils -> IO JSString
getHostname ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getHostname self
= liftIO
(fromJSString <$>
(js_getHostname (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"port\"] = $2;" js_setPort ::
HTMLHyperlinkElementUtils -> JSString -> IO ()
setPort ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setPort self val
= liftIO
(js_setPort (toHTMLHyperlinkElementUtils self) (toJSString val))
foreign import javascript unsafe "$1[\"port\"]" js_getPort ::
HTMLHyperlinkElementUtils -> IO JSString
getPort ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getPort self
= liftIO
(fromJSString <$> (js_getPort (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"pathname\"] = $2;"
js_setPathname :: HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.pathname Mozilla HTMLHyperlinkElementUtils.pathname documentation >
setPathname ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setPathname self val
= liftIO
(js_setPathname (toHTMLHyperlinkElementUtils self)
(toJSString val))
foreign import javascript unsafe "$1[\"pathname\"]" js_getPathname
:: HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.pathname Mozilla HTMLHyperlinkElementUtils.pathname documentation >
getPathname ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getPathname self
= liftIO
(fromJSString <$>
(js_getPathname (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"search\"] = $2;"
js_setSearch :: HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.search Mozilla HTMLHyperlinkElementUtils.search documentation >
setSearch ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setSearch self val
= liftIO
(js_setSearch (toHTMLHyperlinkElementUtils self) (toJSString val))
foreign import javascript unsafe "$1[\"search\"]" js_getSearch ::
HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.search Mozilla HTMLHyperlinkElementUtils.search documentation >
getSearch ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getSearch self
= liftIO
(fromJSString <$>
(js_getSearch (toHTMLHyperlinkElementUtils self)))
foreign import javascript unsafe "$1[\"hash\"] = $2;" js_setHash ::
HTMLHyperlinkElementUtils -> JSString -> IO ()
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.hash Mozilla HTMLHyperlinkElementUtils.hash documentation >
setHash ::
(MonadIO m, IsHTMLHyperlinkElementUtils self, ToJSString val) =>
self -> val -> m ()
setHash self val
= liftIO
(js_setHash (toHTMLHyperlinkElementUtils self) (toJSString val))
foreign import javascript unsafe "$1[\"hash\"]" js_getHash ::
HTMLHyperlinkElementUtils -> IO JSString
| < -US/docs/Web/API/HTMLHyperlinkElementUtils.hash Mozilla HTMLHyperlinkElementUtils.hash documentation >
getHash ::
(MonadIO m, IsHTMLHyperlinkElementUtils self,
FromJSString result) =>
self -> m result
getHash self
= liftIO
(fromJSString <$> (js_getHash (toHTMLHyperlinkElementUtils self))) |
119ad543a5812bcad162ee48778d13c676301a77382fee35faabad7620e76f16 | mejgun/haskell-tdlib | OptionValue.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Data.OptionValue where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
-- | Represents the value of an option
data OptionValue
  = -- | Represents a boolean option. @value@ is the value of the option.
    OptionValueBoolean
      { -- | boolean payload
        value :: Maybe Bool
      }
  | -- | Represents an unknown option or an option which has a default value
    OptionValueEmpty
  | -- | Represents an integer option. @value@ is the value of the option.
    OptionValueInteger
      { -- | integer payload (carried as a JSON string on the wire)
        _value :: Maybe Int
      }
  | -- | Represents a string option. @value@ is the value of the option.
    OptionValueString
      { -- | string payload
        __value :: Maybe String
      }
  deriving (Eq)
-- | Render each variant as its constructor name followed by its fields,
-- using the project's 'U.cc' / 'U.p' pretty-printing helpers.
instance Show OptionValue where
  show (OptionValueBoolean value_) =
    "OptionValueBoolean" ++ U.cc [U.p "value" value_]
  show OptionValueEmpty =
    "OptionValueEmpty" ++ U.cc []
  show (OptionValueInteger _value_) =
    "OptionValueInteger" ++ U.cc [U.p "_value" _value_]
  show (OptionValueString __value_) =
    "OptionValueString" ++ U.cc [U.p "__value" __value_]
-- | Decode by dispatching on the TDLib @\@type@ discriminator field;
-- an unrecognized type (or a non-object value) fails the parser via
-- 'mempty'.
instance T.FromJSON OptionValue where
  parseJSON v@(T.Object obj) = do
    t <- obj A..: "@type" :: T.Parser String

    case t of
      "optionValueBoolean" -> parseOptionValueBoolean v
      "optionValueEmpty" -> parseOptionValueEmpty v
      "optionValueInteger" -> parseOptionValueInteger v
      "optionValueString" -> parseOptionValueString v
      _ -> mempty
    where
      parseOptionValueBoolean :: A.Value -> T.Parser OptionValue
      parseOptionValueBoolean = A.withObject "OptionValueBoolean" $ \o -> do
        value_ <- o A..:? "value"
        return $ OptionValueBoolean {value = value_}

      parseOptionValueEmpty :: A.Value -> T.Parser OptionValue
      parseOptionValueEmpty = A.withObject "OptionValueEmpty" $ \_ -> return OptionValueEmpty

      parseOptionValueInteger :: A.Value -> T.Parser OptionValue
      parseOptionValueInteger = A.withObject "OptionValueInteger" $ \o -> do
        -- "value" arrives as a JSON string; U.rm converts
        -- Maybe String -> Maybe Int (mirrored by U.toS in ToJSON below).
        _value_ <- U.rm <$> (o A..:? "value" :: T.Parser (Maybe String)) :: T.Parser (Maybe Int)
        return $ OptionValueInteger {_value = _value_}

      parseOptionValueString :: A.Value -> T.Parser OptionValue
      parseOptionValueString = A.withObject "OptionValueString" $ \o -> do
        __value_ <- o A..:? "value"
        return $ OptionValueString {__value = __value_}
  parseJSON _ = mempty
-- | Encode each variant as an object tagged with the TDLib @\@type@
-- discriminator, the inverse of the FromJSON instance above.
instance T.ToJSON OptionValue where
  toJSON
    OptionValueBoolean
      { value = value_
      } =
      A.object
        [ "@type" A..= T.String "optionValueBoolean",
          "value" A..= value_
        ]
  toJSON OptionValueEmpty =
    A.object
      [ "@type" A..= T.String "optionValueEmpty"
      ]
  toJSON
    OptionValueInteger
      { _value = _value_
      } =
      A.object
        [ "@type" A..= T.String "optionValueInteger",
          -- the integer is serialized as a JSON string via U.toS
          "value" A..= U.toS _value_
        ]
  toJSON
    OptionValueString
      { __value = __value_
      } =
      A.object
        [ "@type" A..= T.String "optionValueString",
          "value" A..= __value_
        ]
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/9bd82101be6e6218daf816228f6141fe89d97e8b/src/TD/Data/OptionValue.hs | haskell | # LANGUAGE OverloadedStrings #
|
| Represents the value of an option
|
| Represents an unknown option or an option which has a default value
|
| |
module TD.Data.OptionValue where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
data OptionValue
| Represents a boolean option @value The value of the option
OptionValueBoolean
value :: Maybe Bool
}
OptionValueEmpty
| Represents an integer option @value The value of the option
OptionValueInteger
_value :: Maybe Int
}
| Represents a string option @value The value of the option
OptionValueString
__value :: Maybe String
}
deriving (Eq)
instance Show OptionValue where
show
OptionValueBoolean
{ value = value_
} =
"OptionValueBoolean"
++ U.cc
[ U.p "value" value_
]
show OptionValueEmpty =
"OptionValueEmpty"
++ U.cc
[]
show
OptionValueInteger
{ _value = _value_
} =
"OptionValueInteger"
++ U.cc
[ U.p "_value" _value_
]
show
OptionValueString
{ __value = __value_
} =
"OptionValueString"
++ U.cc
[ U.p "__value" __value_
]
instance T.FromJSON OptionValue where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"optionValueBoolean" -> parseOptionValueBoolean v
"optionValueEmpty" -> parseOptionValueEmpty v
"optionValueInteger" -> parseOptionValueInteger v
"optionValueString" -> parseOptionValueString v
_ -> mempty
where
parseOptionValueBoolean :: A.Value -> T.Parser OptionValue
parseOptionValueBoolean = A.withObject "OptionValueBoolean" $ \o -> do
value_ <- o A..:? "value"
return $ OptionValueBoolean {value = value_}
parseOptionValueEmpty :: A.Value -> T.Parser OptionValue
parseOptionValueEmpty = A.withObject "OptionValueEmpty" $ \_ -> return OptionValueEmpty
parseOptionValueInteger :: A.Value -> T.Parser OptionValue
parseOptionValueInteger = A.withObject "OptionValueInteger" $ \o -> do
_value_ <- U.rm <$> (o A..:? "value" :: T.Parser (Maybe String)) :: T.Parser (Maybe Int)
return $ OptionValueInteger {_value = _value_}
parseOptionValueString :: A.Value -> T.Parser OptionValue
parseOptionValueString = A.withObject "OptionValueString" $ \o -> do
__value_ <- o A..:? "value"
return $ OptionValueString {__value = __value_}
parseJSON _ = mempty
instance T.ToJSON OptionValue where
toJSON
OptionValueBoolean
{ value = value_
} =
A.object
[ "@type" A..= T.String "optionValueBoolean",
"value" A..= value_
]
toJSON OptionValueEmpty =
A.object
[ "@type" A..= T.String "optionValueEmpty"
]
toJSON
OptionValueInteger
{ _value = _value_
} =
A.object
[ "@type" A..= T.String "optionValueInteger",
"value" A..= U.toS _value_
]
toJSON
OptionValueString
{ __value = __value_
} =
A.object
[ "@type" A..= T.String "optionValueString",
"value" A..= __value_
]
|
0ac18c7fe61e7d2409be017336981c68b5b8ea231e6aff508c1fed320fb0ca68 | Frama-C/Frama-C-snapshot | alarmset.mli | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
(** Map from alarms to status.
    Returned by the abstract semantics to report the possible undefined
    behaviors. *)

(** An alarm is a guard against an undesirable behavior. If the status of an
    assertion is true, then its corresponding undesirable behavior never
    occurs. Otherwise, the undesirable behavior may occur (unknown status) or
    definitely happens if the program point is reachable (false status). *)

(** The maps are partial. Missing assertions are implicitly bound to a default
    status. There are two kinds of alarm maps:
    - closed maps [Just s], where all missing assertions are considered as
      true: [s] contains the only alarms that can occur.
    - open maps [AllBut s], where all missing assertions are considered as
      unknown: [s] contains the only alarms whose status is known. *)
type s
type t = private Just of s | AllBut of s

type alarm = Alarms.t
type status = Abstract_interp.Comp.result = True | False | Unknown
type 'a if_consistent = [ `Value of 'a | `Inconsistent ]

(* Logical status bound to assertions. *)
module Status : sig
  include Datatype.S_with_collections with type t := status
  val join: status -> status -> status
  val join_list: status list -> status
  val inter: status -> status -> status if_consistent
end

(** no alarms: all potential assertions have a True status.
    = Just empty *)
val none : t

(** all alarms: all potential assertions have a Unknown status.
    = AllBut empty *)
val all : t

(** [set alarm status t] binds the [alarm] to the [status] in the map [t]. *)
val set : alarm -> status -> t -> t

(** Returns the status of a given alarm. *)
val find : alarm -> t -> status

(** Are two maps equal? *)
val equal : t -> t -> bool

(** Is there an assertion with a non True status? *)
val is_empty : t -> bool

(** [singleton ?status alarm] creates the map [set alarm status none]:
    [alarm] has by default an unknown status (which can be overridden through
    [status]), and all others have a True status. *)
val singleton : ?status:status -> alarm -> t

(** Combines two alarm maps carrying different sets of alarms. If [t1] and
    [t2] are sound alarm maps for the evaluation in the same state of the
    expressions [e1] and [e2] respectively, then [combine t1 t2] is a sound
    alarm map for both evaluations of [e1] and [e2]. *)
val combine: t -> t -> t

(** Pointwise union of property status: the least precise status is kept.
    If [t1] and [t2] are sound alarm maps for a same expression [e] in states
    [s1] and [s2] respectively, then [union t1 t2] is a sound alarm map for [e]
    in states [s1] and [s2]. *)
val union: t -> t -> t

(** Pointwise intersection of property status: the most precise status is kept.
    May return Inconsistent in case of incompatible status bound to an alarm.
    If [t1] and [t2] are both sound alarm maps for a same expression [e] in the
    same state, then [inter t1 t2] is also a sound alarm map for [e]. *)
val inter: t -> t -> t if_consistent

val exists: (alarm -> status -> bool) -> default:(status -> bool) -> t -> bool
val for_all: (alarm -> status -> bool) -> default:(status -> bool) -> t -> bool
val iter: (alarm -> status -> unit) -> t -> unit

(** Emits the alarms according to the given warn mode, at the given
    instruction. *)
val emit: Cil_types.kinstr -> t -> unit

(** Calls the functions registered in the [warn_mode] according to the
    set of alarms. *)
val notify: CilE.warn_mode -> t -> unit

val pretty : Format.formatter -> t -> unit
val pretty_status : Format.formatter -> status -> unit

(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/value/alarmset.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* Map from alarms to status.
Returned by the abstract semantics to report the possible undefined
behaviors.
* An alarm is a guard against an undesirable behavior. If the status of an
assertion is true, then its corresponding undesirable behavior never
occurs. Otherwise, the undesirable behavior may occur (unknown status) or
definitely happens if the program point is reachable (false status).
Logical status bound to assertions.
* no alarms: all potential assertions have a True status.
= Just empty
* all alarms: all potential assertions have a Unknown status.
= AllBut empty
* [set alarm status t] binds the [alarm] to the [status] in the map [t].
* Returns the status of a given alarm.
* Is there an assertion with a non True status ?
* [singleton ?status alarm] creates the map [set alarm status none]:
[alarm] has a by default an unkown status (which can be overridden through
[status]), and all others have a True status.
* Pointwise union of property status: the least precise status is kept.
If [t1] and [t2] are sound alarm maps for a same expression [e] in states
[s1] and [s2] respectively, then [union t1 t2] is a sound alarm map for [e]
in states [s1] and [s2].
* Pointwise intersection of property status: the most precise status is kept.
May return Inconsistent in case of incompatible status bound to an alarm.
If [t1] and [t2] are both sound alarm maps for a same expression [e] in the
same state, then [inter t1 t2] is also a sound alarm map for [e].
* Emits the alarms according to the given warn mode, at the given
instruction.
* Calls the functions registered in the [warn_mode] according to the
set of alarms.
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
* The maps are partial . Missing assertions are implicitly bound to a default
status . There are two kinds of alarm maps :
- closed maps [ Just s ] , where all missing assertions are considered as true :
[ s ] contains the only alarms that can occur .
- open maps [ AllBut s ] , where all missing assertions are considered as
unknown : [ s ] contains the only alarms whose status is known .
status. There are two kinds of alarm maps:
- closed maps [Just s], where all missing assertions are considered as true:
[s] contains the only alarms that can occur.
- open maps [AllBut s], where all missing assertions are considered as
unknown: [s] contains the only alarms whose status is known. *)
type s
type t = private Just of s | AllBut of s
type alarm = Alarms.t
type status = Abstract_interp.Comp.result = True | False | Unknown
type 'a if_consistent = [ `Value of 'a | `Inconsistent ]
module Status : sig
include Datatype.S_with_collections with type t := status
val join: status -> status -> status
val join_list: status list -> status
val inter: status -> status -> status if_consistent
end
val none : t
val all : t
val set : alarm -> status -> t -> t
val find : alarm -> t -> status
* Are two maps equal ?
val equal : t -> t -> bool
val is_empty : t -> bool
val singleton : ?status:status -> alarm -> t
* Combines two alarm maps carrying different sets of alarms . If [ t1 ] and [ t2 ]
are sound alarm maps for the evaluation in the same state of the expressions
[ e1 ] and [ e2 ] respectively , then [ combine t1 t2 ] is a sound alarm map for
both evaluations of [ e1 ] and [ e2 ] .
are sound alarm maps for the evaluation in the same state of the expressions
[e1] and [e2] respectively, then [combine t1 t2] is a sound alarm map for
both evaluations of [e1] and [e2]. *)
val combine: t -> t -> t
val union: t -> t -> t
val inter: t -> t -> t if_consistent
val exists: (alarm -> status -> bool) -> default:(status -> bool) -> t -> bool
val for_all: (alarm -> status -> bool) -> default:(status -> bool) -> t -> bool
val iter: (alarm -> status -> unit) -> t -> unit
val emit: Cil_types.kinstr -> t -> unit
val notify: CilE.warn_mode -> t -> unit
val pretty : Format.formatter -> t -> unit
val pretty_status : Format.formatter -> status -> unit
|
fc4658ec8864481fdcf774d27241576d14e576ca0f14fe1000036d314dd0f746 | Clozure/ccl | native-activity.lisp | ;;;-*-Mode: LISP; Package: CL-USER -*-
;;;
Copyright ( C ) 2012 Clozure Associates
This file is part of Clozure CL .
;;;
Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
License , known as the LLGPL and distributed with Clozure CL as the
;;; file "LICENSE". The LLGPL consists of a preamble and the LGPL,
which is distributed with Clozure CL as the file " LGPL " . Where these
;;; conflict, the preamble takes precedence.
;;;
;;; Clozure CL is referenced in the preamble as the "LIBRARY."
;;;
;;; The LLGPL is also available online at
;;;
(in-package "CL-USER")
;;; This is supposed to be a transliteration of the code from the
file native_activity / jni / main.c from the Android 's samples
;;; directory. It doesn't do much, but is intended to show how
;;; the native app glue works.
;;; Saved application state persisted across activity restarts: the
;;; current animation angle and the last touch position.
(def-foreign-type nil
  (:struct :saved_state
   (:angle :float)
   (:x :int32_t)
   (:y :int32_t)))
;;; Per-app "engine" record: the android_app glue pointer, sensor
;;; plumbing, the EGL display/surface/context triple, the surface
;;; dimensions, and the embedded saved_state.
(def-foreign-type nil
  (:struct :engine
   (#>app (:* (:struct :android_app)))
   (#>sensorManager (:* #>ASensorManager))
   (#>accelerometerSensor (:* #>ASensor))
   (#>sensorEventQueue (:* #>ASensorEventQueue))
   (#>animating :int)             ; non-zero while frames should be drawn
   (#>display #>EGLDisplay)
   (#>surface #>EGLSurface)
   (#>context #>EGLContext)
   (#>width #>int32_t)
   (#>height #>int32_t)
   (#>state (:struct #>saved_state))))
;;;
;;; Initialize an EGL context for the current display.
;;; Returns T on success, NIL if eglMakeCurrent failed.
;;;
(defun engine-init-display (engine)
  ;; initialize OpenGL ES and EGL
  (rletZ ((attribs (:array #>EGLint 9)))
    ;; Fill the EGL attribute list (key/value pairs terminated by
    ;; EGL_NONE).  BUG FIX: the original loop never incremented I, so
    ;; every attribute was written to element 0 and the list handed to
    ;; eglChooseConfig was effectively garbage.
    (let* ((i 0))
      (declare (fixnum i))
      (dolist (attrib '(#$EGL_SURFACE_TYPE #$EGL_WINDOW_BIT
                        #$EGL_BLUE_SIZE 8
                        #$EGL_GREEN_SIZE 8
                        #$EGL_RED_SIZE 8
                        #$EGL_NONE))
        (setf (paref attribs #>EGLint i) attrib)
        (incf i)))
    (let* ((display (#_eglGetDisplay #$EGL_DEFAULT_DISPLAY)))
      (#_eglInitialize display (%null-ptr) (%null-ptr))
      (rlet ((pconfig #>EGLConfig)
             (pnumconfigs #>EGLint)
             (pformat #>EGLint))
        ;; Pick the first matching framebuffer configuration.
        (#_eglChooseConfig display attribs pconfig 1 pnumconfigs)
        (let* ((config (pref pconfig #>EGLConfig)))
          (#_eglGetConfigAttrib display config #$EGL_NATIVE_VISUAL_ID pformat)
          (let* ((format (pref pformat #>EGLint))
                 (window (pref engine :engine.app.window)))
            ;; Match the native window's buffer format to the EGL config.
            (#_ANativeWindow_setBuffersGeometry window 0 0 format)
            (let* ((surface (#_eglCreateWindowSurface display config window (%null-ptr)))
                   (context (#_eglCreateContext display config (%null-ptr) (%null-ptr))))
              (unless (eql (#_eglMakeCurrent display surface surface context)
                           #$EGL_FALSE)
                ;; Query the surface size and record everything in ENGINE.
                (rlet ((pw #>EGLint)
                       (ph #>EGLint))
                  (#_eglQuerySurface display surface #$EGL_WIDTH pw)
                  (#_eglQuerySurface display surface #$EGL_HEIGHT ph)
                  (setf (pref engine :engine.display) display
                        (pref engine :engine.context) context
                        (pref engine :engine.surface) surface
                        (pref engine :engine.width) (pref pw #>EGLint)
                        (pref engine :engine.height) (pref ph #>EGLint)
                        (pref engine :engine.state.angle) 0)
                  ;; Fixed GL state for this sample.
                  (#_glHint #$GL_PERSPECTIVE_CORRECTION_HINT #$GL_FASTEST)
                  (#_glEnable #$GL_CULL_FACE)
                  (#_glShadeModel #$GL_SMOOTH)
                  (#_glDisable #$GL_DEPTH_TEST)
                  t)))))))))
;;; Draw one frame: clear the screen to a color derived from the last
;;; touch position and the animation angle, then swap buffers.  A null
;;; display means EGL has not been initialized yet, so do nothing.
(defun engine-draw-frame (engine)
  (let* ((display (pref engine :engine.display)))
    (when (not (%null-ptr-p display))
      (let* ((red (/ (float (pref engine :engine.state.x))
                     (float (pref engine :engine.width))))
             (green (float (pref engine :engine.state.angle)))
             (blue (/ (float (pref engine :engine.state.y))
                      (float (pref engine :engine.height)))))
        (#_glClearColor red green blue 1.0f0)
        (#_glClear #$GL_COLOR_BUFFER_BIT)
        (#_eglSwapBuffers display (pref engine :engine.surface))))))
;;; Tear down the EGL context.  Order matters: unbind with
;;; eglMakeCurrent first, then destroy the context and surface, then
;;; terminate the display connection.  Finally reset the engine fields
;;; so a later engine-init-display starts from a clean slate.
(defun engine-term-display (engine)
  (let* ((display (pref engine :engine.display))
         (context (pref engine :engine.context))
         (surface (pref engine :engine.surface)))
    (unless (eql display #$EGL_NO_DISPLAY)
      (#_eglMakeCurrent display #$EGL_NO_SURFACE #$EGL_NO_SURFACE #$EGL_NO_CONTEXT)
      (unless (eql context #$EGL_NO_CONTEXT)
        (#_eglDestroyContext display context))
      (unless (eql surface #$EGL_NO_SURFACE)
        (#_eglDestroySurface display surface))
      (#_eglTerminate display))
    ;; Stop animating and mark all EGL handles as absent.
    (setf (pref engine :engine.animating) 0
          (pref engine :engine.display) #$EGL_NO_DISPLAY
          (pref engine :engine.context) #$EGL_NO_CONTEXT
          (pref engine :engine.surface) #$EGL_NO_SURFACE)))
;;; Input callback installed on the android_app.  A motion (touch) event
;;; records the touch position in the engine state and starts animating,
;;; returning 1 (handled); anything else returns 0 (not handled).
(defcallback engine-handle-input ((:* (:struct #>android_app)) app
                                  (:* #>AInputEvent) event
                                  :int32_t)
  (if (eql (#_AInputEvent_getType event) #$AINPUT_EVENT_TYPE_MOTION)
      (let* ((engine (pref app #>android_app.userData)))
        (setf (pref engine :engine.animating) 1
              (pref engine :engine.state.x) (#_AMotionEvent_getX event 0)
              (pref engine :engine.state.y) (#_AMotionEvent_getY event 0))
        1)
      0))
;;; App-command callback installed on the android_app; dispatches on the
;;; lifecycle command delivered by the native-app glue.
(defcallback engine-handle-cmd ((:* (:struct #>android_app)) app
                                :int32_t cmd)
  (let* ((engine (pref app #>android_app.userData)))
    (case cmd
      (#.#$APP_CMD_SAVE_STATE
       ;; Hand the system a malloc'd copy of our saved_state to persist.
       ;; NOTE(review): assumes (pref engine :engine.state) yields a
       ;; pointer to the embedded struct suitable for #_memcpy -- confirm
       ;; against CCL FFI semantics for struct-typed fields.
       (let* ((new (#_malloc (ccl::record-length :saved_state))))
         (#_memcpy new (pref engine :engine.state) (ccl::record-length :saved_state))
         (setf (pref app #>android_app.savedState) new
               (pref app #>android_app.savedStateSize) (ccl::record-length :saved_state))))
      (#.#$APP_CMD_INIT_WINDOW
       ;; The window is ready: bring up EGL and draw the first frame.
       (unless (%null-ptr-p (pref app #>android_app.window))
         (engine-init-display engine)
         (engine-draw-frame engine)))
      (#.#$APP_CMD_TERM_WINDOW
       ;; The window is going away: tear EGL down.
       (engine-term-display engine))
      (#.#$APP_CMD_GAINED_FOCUS
       ;; Start monitoring the accelerometer at ~60 events per second.
       (unless (%null-ptr-p (pref engine #>engine.accelerometerSensor))
         (#_ASensorEventQueue_enableSensor
          (pref engine #>engine.sensorEventQueue)
          (pref engine #>engine.accelerometerSensor))
         (#_ASensorEventQueue_setEventRate
          (pref engine #>engine.sensorEventQueue)
          (pref engine #>engine.accelerometerSensor)
          (round (* 1000 (/ 1000 60))))))
      (#.#$APP_CMD_LOST_FOCUS
       ;; Stop monitoring the accelerometer and stop animating to save
       ;; battery; draw one final frame showing the stopped state.
       (unless (%null-ptr-p (pref engine #>engine.accelerometerSensor))
         (#_ASensorEventQueue_disableSensor
          (pref engine #>engine.sensorEventQueue)
          (pref engine #>engine.accelerometerSensor)))
       (setf (pref engine #>engine.animating) 0)
       (engine-draw-frame engine)))))
;;; This function implements android_main(). It needs to be called by this
;;; distinguished and funny name. (It'll always need to be called by a
;;; distinguished name, but that name may be less funny in the future.)
;;; NOTE: the name ccl::%os-init-function% is magic -- the runtime looks
;;; it up by this exact name, so it must not be renamed.
(defun ccl::%os-init-function% (state)
  (rletz ((engine :engine))
    ;; Wire the engine record and the android_app glue together, and set
    ;; up the sensor event queue on the app's looper.
    (setf (pref state #>android_app.userData) engine
          (pref state #>android_app.onAppCmd) engine-handle-cmd
          (pref state #>android_app.onInputEvent) engine-handle-input
          (pref engine #>engine.app) state
          (pref engine #>engine.sensorManager) (#_ASensorManager_getInstance)
          (pref engine #>engine.accelerometerSensor) (#_ASensorManager_getDefaultSensor (pref engine #>engine.sensorManager) #$ASENSOR_TYPE_ACCELEROMETER)
          (pref engine #>engine.sensorEventQueue) (#_ASensorManager_createEventQueue (pref engine #>engine.sensorManager) (pref state #>android_app.looper) #$LOOPER_ID_USER (%null-ptr) (%null-ptr)))
    ;; Restore previously saved state, if the system handed us any.
    (unless (%null-ptr-p (pref state #>android_app.savedState))
      (#_memcpy (pref engine #>engine.state)
                (pref state #>android_app.savedState)
                (ccl::record-length :saved_state)))
    ;; Main event loop: poll the looper (blocking when not animating,
    ;; non-blocking when animating), dispatch pending events, then draw.
    (block event-loop
      (loop
        (let* ((ident -1))
          (rlet ((psource :address)
                 (pevents :int))
            (loop
              (setq ident (#_ALooper_pollAll (if (zerop (pref engine :engine.animating)) -1 0) (%null-ptr) pevents psource))
              ;; No more pending events.
              (when (< ident 0) (return))
              (let* ((source (pref psource :address)))
                ;; Let the glue process app-cmd / input events.
                (unless (%null-ptr-p source)
                  (ff-call (pref source :android_poll_source.process)
                           :address state
                           :address source))
                ;; Drain (and discard) queued accelerometer events.
                (when (eql ident #$LOOPER_ID_USER)
                  (unless (%null-ptr-p (pref engine #>engine.accelerometerSensor))
                    (rlet ((event #>ASensorEvent))
                      (loop
                        (unless (> (#_ASensorEventQueue_getEvents
                                    (pref engine #>engine.sensorEventQueue)
                                    event
                                    1)
                                   0)
                          (return)))))))
              ;; The system asked us to shut down.
              (unless (eql (pref state #>android_app.destroyRequested) 0)
                (engine-term-display engine)
                (return-from event-loop nil)))
            ;; Advance the animation and render the next frame.
            (unless (eql 0 (pref engine #>engine.animating))
              (when (> (incf (pref engine :engine.state.angle) 0.1f0) 1)
                (setf (pref engine :engine.state.angle) 0.0f0))
              (engine-draw-frame engine))))))))
file "LICENSE". The LLGPL consists of a preamble and the LGPL,
conflict, the preamble takes precedence.
Clozure CL is referenced in the preamble as the "LIBRARY."
The LLGPL is also available online at
This is supposed to be a transliteration of the code from the
directory. It doesn't do much, but is intended to show how
the native app glue works.
Not the best way to initialize a foreign array, but
not the worst ...
This function implements android_main(). It needs to be called by this
distinguished and funny name. (It'll always need to be called by a
distinguished name, but that name may be less funny in the future.) | Copyright ( C ) 2012 Clozure Associates
This file is part of Clozure CL .
Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
License , known as the LLGPL and distributed with Clozure CL as the
which is distributed with Clozure CL as the file " LGPL " . Where these
(in-package "CL-USER")
file native_activity / jni / main.c from the Android 's samples
(def-foreign-type nil
(:struct :saved_state
(:angle :float)
(:x :int32_t)
(:y :int32_t)))
(def-foreign-type nil
(:struct :engine
(#>app (:* (:struct :android_app)))
(#>sensorManager (:* #>ASensorManager))
(#>accelerometerSensor (:* #>ASensor))
(#>sensorEventQueue (:* #>ASensorEventQueue))
(#>animating :int)
(#>display #>EGLDisplay)
(#>surface #>EGLSurface)
(#>context #>EGLContext)
(#>width #>int32_t)
(#>height #>int32_t)
(#>state (:struct #>saved_state))))
Initialize an EGL context for the current display .
(defun engine-init-display (engine)
initialize OpenGL ES and EGL
(rletZ ((attribs (:array #>EGLint 9)))
(let* ((i 0))
(declare (fixnum i))
(dolist (attrib '(#$EGL_SURFACE_TYPE #$EGL_WINDOW_BIT
#$EGL_BLUE_SIZE 8
#$EGL_GREEN_SIZE 8
#$EGL_RED_SIZE 8
#$EGL_NONE))
(setf (paref attribs #>EGLint i) attrib)))
(let* ((display (#_eglGetDisplay #$EGL_DEFAULT_DISPLAY)))
(#_eglInitialize display (%null-ptr) (%null-ptr))
(rlet ((pconfig #>EGLConfig)
(pnumconfigs #>EGLint)
(pformat #>EGLint))
(#_eglChooseConfig display attribs pconfig 1 pnumconfigs)
(let* ((config (pref pconfig #>EGLConfig)))
(#_eglGetConfigAttrib display config #$EGL_NATIVE_VISUAL_ID pformat)
(let* ((format (pref pformat #>EGLint))
(window (pref engine :engine.app.window)))
(#_ANativeWindow_setBuffersGeometry window 0 0 format)
(let* ((surface (#_eglCreateWindowSurface display config window (%null-ptr)))
(context (#_eglCreateContext display config (%null-ptr) (%null-ptr))))
(unless (eql (#_eglMakeCurrent display surface surface context)
#$EGL_FALSE)
(rlet ((pw #>EGLint)
(ph #>EGLint))
(#_eglQuerySurface display surface #$EGL_WIDTH pw)
(#_eglQuerySurface display surface #$EGL_HEIGHT ph)
(setf (pref engine :engine.display) display
(pref engine :engine.context) context
(pref engine :engine.surface) surface
(pref engine :engine.width) (pref pw #>EGLint)
(pref engine :engine.height) (pref ph #>EGLint)
(pref engine :engine.state.angle) 0)
(#_glHint #$GL_PERSPECTIVE_CORRECTION_HINT #$GL_FASTEST)
(#_glEnable #$GL_CULL_FACE)
(#_glShadeModel #$GL_SMOOTH)
(#_glDisable #$GL_DEPTH_TEST)
t)))))))))
(defun engine-draw-frame (engine)
(let* ((display (pref engine :engine.display)))
(unless (%null-ptr-p display)
(#_glClearColor (/ (float (pref engine :engine.state.x))
(float (pref engine :engine.width)))
(float (pref engine :engine.state.angle))
(/ (float (pref engine :engine.state.y))
(float (pref engine :engine.height)))
1.0f0)
(#_glClear #$GL_COLOR_BUFFER_BIT)
(#_eglSwapBuffers display (pref engine :engine.surface)))))
(defun engine-term-display (engine)
(let* ((display (pref engine :engine.display))
(context (pref engine :engine.context))
(surface (pref engine :engine.surface)))
(unless (eql display #$EGL_NO_DISPLAY)
(#_eglMakeCurrent display #$EGL_NO_SURFACE #$EGL_NO_SURFACE #$EGL_NO_CONTEXT)
(unless (eql context #$EGL_NO_CONTEXT)
(#_eglDestroyContext display context))
(unless (eql surface #$EGL_NO_SURFACE)
(#_eglDestroySurface display surface))
(#_eglTerminate display))
(setf (pref engine :engine.animating) 0
(pref engine :engine.display) #$EGL_NO_DISPLAY
(pref engine :engine.context) #$EGL_NO_CONTEXT
(pref engine :engine.surface) #$EGL_NO_SURFACE)))
(defcallback engine-handle-input ((:* (:struct #>android_app)) app
(:* #>AInputEvent) event
:int32_t)
(cond ((eql (#_AInputEvent_getType event) #$AINPUT_EVENT_TYPE_MOTION)
(let* ((engine (pref app #>android_app.userData)))
(setf (pref engine :engine.animating) 1
(pref engine :engine.state.x) (#_AMotionEvent_getX event 0)
(pref engine :engine.state.y) (#_AMotionEvent_getY event 0))
1))
(t 0)))
(defcallback engine-handle-cmd ((:* (:struct #>android_app)) app
:int32_t cmd)
(let* ((engine (pref app #>android_app.userData)))
(case cmd
(#.#$APP_CMD_SAVE_STATE
(let* ((new (#_malloc (ccl::record-length :saved_state))))
(#_memcpy new (pref engine :engine.state) (ccl::record-length :saved_state))
(setf (pref app #>android_app.savedState) new
(pref app #>android_app.savedStateSize) (ccl::record-length :saved_state))))
(#.#$APP_CMD_INIT_WINDOW
(unless (%null-ptr-p (pref app #>android_app.window))
(engine-init-display engine)
(engine-draw-frame engine)))
(#.#$APP_CMD_TERM_WINDOW
(engine-term-display engine))
(#.#$APP_CMD_GAINED_FOCUS
(unless (%null-ptr-p (pref engine #>engine.accelerometerSensor))
(#_ASensorEventQueue_enableSensor
(pref engine #>engine.sensorEventQueue)
(pref engine #>engine.accelerometerSensor))
(#_ASensorEventQueue_setEventRate
(pref engine #>engine.sensorEventQueue)
(pref engine #>engine.accelerometerSensor)
(round (* 1000 (/ 1000 60))))))
(#.#$APP_CMD_LOST_FOCUS
(unless (%null-ptr-p (pref engine #>engine.accelerometerSensor))
(#_ASensorEventQueue_disableSensor
(pref engine #>engine.sensorEventQueue)
(pref engine #>engine.accelerometerSensor)))
(setf (pref engine #>engine.animating) 0)
(engine-draw-frame engine)))))
(defun ccl::%os-init-function% (state)
(rletz ((engine :engine))
(setf (pref state #>android_app.userData) engine
(pref state #>android_app.onAppCmd) engine-handle-cmd
(pref state #>android_app.onInputEvent) engine-handle-input
(pref engine #>engine.app) state
(pref engine #>engine.sensorManager) (#_ASensorManager_getInstance)
(pref engine #>engine.accelerometerSensor) (#_ASensorManager_getDefaultSensor (pref engine #>engine.sensorManager) #$ASENSOR_TYPE_ACCELEROMETER)
(pref engine #>engine.sensorEventQueue) (#_ASensorManager_createEventQueue (pref engine #>engine.sensorManager) (pref state #>android_app.looper) #$LOOPER_ID_USER (%null-ptr) (%null-ptr)))
(unless (%null-ptr-p (pref state #>android_app.savedState))
(#_memcpy (pref engine #>engine.state)
(pref state #>android_app.savedState)
(ccl::record-length :saved_state)))
(block event-loop
(loop
(let* ((ident -1))
(rlet ((psource :address)
(pevents :int))
(loop
(setq ident (#_ALooper_pollAll (if (zerop (pref engine :engine.animating)) -1 0) (%null-ptr) pevents psource))
(when (< ident 0) (return))
(let* ((source (pref psource :address)))
(unless (%null-ptr-p source)
(ff-call (pref source :android_poll_source.process)
:address state
:address source))
(when (eql ident #$LOOPER_ID_USER)
(unless (%null-ptr-p (pref engine #>engine.accelerometerSensor))
(rlet ((event #>ASensorEvent))
(loop
(unless (> (#_ASensorEventQueue_getEvents
(pref engine #>engine.sensorEventQueue)
event
1)
0)
(return)))))))
(unless (eql (pref state #>android_app.destroyRequested) 0)
(engine-term-display engine)
(return-from event-loop nil)))
(unless (eql 0 (pref engine #>engine.animating))
(when (> (incf (pref engine :engine.state.angle) 0.1f0) 1)
(setf (pref engine :engine.state.angle) 0.0f0))
(engine-draw-frame engine)))))))) |
474b235d5876c5e22be1d60cb9b9b6e83d72fc3aa326c4a9381e11f1d7c0dfcb | stevebleazard/ocaml-json-of-jsonm | json_of_jsonm_monad.mli | type json =
[ `Null
| `Bool of bool
| `Float of float
| `String of string
| `List of json list
| `Assoc of (string * json) list
]
(** Minimal monad signature the encoder/decoder is parameterised over:
    a type constructor with [return] and bind ([>>=]). *)
module type IO = sig
  type 'a t
  val return : 'a -> 'a t
  val (>>=) : 'a t -> ('a -> 'b t) -> 'b t
end
module type Json_encoder_decoder = sig
module IO : IO
type nonrec json = json
  (** [decode] decodes the byte stream provided by [reader]. [reader buf len]
      reads up to [len] bytes into [buf] and returns the number of bytes
      read. *)
val decode : reader:(Bytes.t -> int -> int IO.t) -> (json, string) result IO.t
  (** [decode_exn] - the same as [decode] but raises on error *)
val decode_exn : reader:(Bytes.t -> int -> int IO.t) -> json IO.t
(** [decode_string] - decode a [string] to a [json] type *)
val decode_string : string -> (json, string) result IO.t
(** [decode_string_exn] - the same as [decode_sting] but raises on error *)
val decode_string_exn : string -> json IO.t
  (** [encode] encodes the supplied [json] type using [writer] to output the
      text. [writer buf len] writes [len] bytes from [buf].
      Returns an error if a float [NaN] or [Inf] is encountered in the [json]
      type. *)
val encode : writer:(Bytes.t -> int -> unit IO.t) -> json -> (unit, string) result IO.t
(** [encode_exn] - the same as [encode] but raises on error *)
val encode_exn : writer:(Bytes.t -> int -> unit IO.t) -> json -> unit IO.t
(** [encode_string] - encode a [json] type to a [string] *)
val encode_string : json -> (string, string) result IO.t
(** [encode_string_exn] - the same as [encode_string] but raises on error *)
val encode_string_exn : json -> string IO.t
(** [encode_string_hum] - same as [encode_string] but formats the output for
humans to read *)
val encode_string_hum : json -> (string, string) result IO.t
end
module Make(IO : IO) : Json_encoder_decoder with type 'a IO.t = 'a IO.t
| null | https://raw.githubusercontent.com/stevebleazard/ocaml-json-of-jsonm/595b90e19c5399316fcd013aa9ad6a2df48a3d73/src/json_of_jsonm_monad.mli | ocaml | * [decode_string] - decode a [string] to a [json] type
* [decode_string_exn] - the same as [decode_sting] but raises on error
* [encode_exn] - the same as [encode] but raises on error
* [encode_string] - encode a [json] type to a [string]
* [encode_string_exn] - the same as [encode_string] but raises on error
* [encode_string_hum] - same as [encode_string] but formats the output for
humans to read | type json =
[ `Null
| `Bool of bool
| `Float of float
| `String of string
| `List of json list
| `Assoc of (string * json) list
]
module type IO = sig
type 'a t
val return : 'a -> 'a t
val (>>=) : 'a t -> ('a -> 'b t) -> 'b t
end
module type Json_encoder_decoder = sig
module IO : IO
type nonrec json = json
* [ decode ] decodes the byte stream provided by [ reader ] . [ reader ] reads
up to [ len ] bytes into [ buf ] and returns the number of bytes read .
up to [len] bytes into [buf] and returns the number of bytes read. *)
val decode : reader:(Bytes.t -> int -> int IO.t) -> (json, string) result IO.t
* [ decode_exn ] - the same as [ decode ] but raises on error
val decode_exn : reader:(Bytes.t -> int -> int IO.t) -> json IO.t
val decode_string : string -> (json, string) result IO.t
val decode_string_exn : string -> json IO.t
* [ encode ] encodes the supplied [ json ] type using [ writer [ to output the text .
[ writer ] writes [ len ] bytes from [ buf ] and returns [ init ] .
returns and error if a float [ NaN ] or [ Inf ] is encountered in the [ json ]
type
[writer buf len] writes [len] bytes from [buf] and returns [init].
returns and error if a float [NaN] or [Inf] is encountered in the [json]
type *)
val encode : writer:(Bytes.t -> int -> unit IO.t) -> json -> (unit, string) result IO.t
val encode_exn : writer:(Bytes.t -> int -> unit IO.t) -> json -> unit IO.t
val encode_string : json -> (string, string) result IO.t
val encode_string_exn : json -> string IO.t
val encode_string_hum : json -> (string, string) result IO.t
end
module Make(IO : IO) : Json_encoder_decoder with type 'a IO.t = 'a IO.t
|
361d6845e317f9b1d5bad83b7ba8567225fd37376b7b01d3b74d0305012b7356 | tisnik/clojure-examples | core_test.clj | (ns sqltest3.core-test
(:require [clojure.test :refer :all]
[sqltest3.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/984af4a3e20d994b4f4989678ee1330e409fdae3/sqltest3/test/sqltest3/core_test.clj | clojure | (ns sqltest3.core-test
(:require [clojure.test :refer :all]
[sqltest3.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| |
23a4ee8b8e0cd2a1d0a0392206ff04fb6be629dd6981261c27d3c022b3cb6f4c | williamleferrand/accretio | ys_dummy.ml |
* Accretio is an API , a sandbox and a runtime for social playbooks
*
* Copyright ( C ) 2015
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation , either version 3 of the
* License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Affero General Public License for more details .
*
* You should have received a copy of the GNU Affero General Public License
* along with this program . If not , see < / > .
* Accretio is an API, a sandbox and a runtime for social playbooks
*
* Copyright (C) 2015 William Le Ferrand
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see </>.
*)
(* for future use *)
open Eliom_content.Html5.D
let pcdata_i18n = pcdata
| null | https://raw.githubusercontent.com/williamleferrand/accretio/394f855e9c2a6a18f0c2da35058d5a01aacf6586/library/client/ys_dummy.ml | ocaml | for future use |
* Accretio is an API , a sandbox and a runtime for social playbooks
*
* Copyright ( C ) 2015
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation , either version 3 of the
* License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Affero General Public License for more details .
*
* You should have received a copy of the GNU Affero General Public License
* along with this program . If not , see < / > .
* Accretio is an API, a sandbox and a runtime for social playbooks
*
* Copyright (C) 2015 William Le Ferrand
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see </>.
*)
open Eliom_content.Html5.D
let pcdata_i18n = pcdata
|
0577d236c6ddf5ae392375796e37931a9122a72b9ed3139ad437bcc3ae42f68b | buntine/Simply-Scheme-Exercises | 12-2.scm | ;Fix the bug in the following definition:
;(define (acronym sent) ;; wrong
( if (= ( count sent ) 1 )
( first sent )
( word ( first ( first sent ) )
; (acronym (bf sent)))))
(define (acronym sent)
(if (= (count sent) 1)
(first (first sent))
(word (first (first sent))
(acronym (bf sent)))))
| null | https://raw.githubusercontent.com/buntine/Simply-Scheme-Exercises/c6cbf0bd60d6385b506b8df94c348ac5edc7f646/12-the-leap-of-faith/12-2.scm | scheme | Fix the bug in the following definition:
(define (acronym sent) ;; wrong
(acronym (bf sent))))) |
( if (= ( count sent ) 1 )
( first sent )
( word ( first ( first sent ) )
(define (acronym sent)
(if (= (count sent) 1)
(first (first sent))
(word (first (first sent))
(acronym (bf sent)))))
|
994c8eecd5c4caba3f47f16b69ad481dc2c633237e508992bf79d1a265c8f2f8 | mokus0/splines | Tabulate.hs | module Tabulate where
import Text.Printf
import Data.VectorSpace
-- quick and dirty "tabulation" function - takes a function and a range and
-- prints a table of values of the function to the console in a format suitable
for copying and pasting into Excel or Grapher.app
tabulate :: Int -> (Double -> [Double]) -> Double -> Double -> IO ()
tabulate n f x0 x1 = sequence_
[ doubleRow (x : f x)
| i <- [1..n]
, let x = lerp x0 x1 (fromIntegral (i - 1) / fromIntegral (n - 1))
]
doubleRow :: [Double] -> IO ()
doubleRow = loop False
where
loop sep [] = printf "\n"
loop False (x:xs) = printf "%g" x >> loop True xs
loop True (x:xs) = printf "\t%g" x >> loop True xs
| null | https://raw.githubusercontent.com/mokus0/splines/2c1b370d7602a2287017811e9e4a2ec4b5ec1491/test/Tabulate.hs | haskell | quick and dirty "tabulation" function - takes a function and a range and
prints a table of values of the function to the console in a format suitable | module Tabulate where
import Text.Printf
import Data.VectorSpace
for copying and pasting into Excel or Grapher.app
tabulate :: Int -> (Double -> [Double]) -> Double -> Double -> IO ()
tabulate n f x0 x1 = sequence_
[ doubleRow (x : f x)
| i <- [1..n]
, let x = lerp x0 x1 (fromIntegral (i - 1) / fromIntegral (n - 1))
]
doubleRow :: [Double] -> IO ()
doubleRow = loop False
where
loop sep [] = printf "\n"
loop False (x:xs) = printf "%g" x >> loop True xs
loop True (x:xs) = printf "\t%g" x >> loop True xs
|
8bf0dcc214d1130581bff8c37e2a1a5db82d35d073e05a3ae562f3278c3bbdcb | kostmo/circleci-failure-tracker | gist.hs | # OPTIONS_GHC -Wall -O2 -threaded -with - rtsopts="-N " #
-- | Found here:
import Control.Concurrent
import Control.Concurrent.Async
import Control.Lens
import Control.Monad
import Data.ByteString.Lazy hiding (replicate)
import Data.List.Split
import Network.Wreq hiding (getWith)
import Network.Wreq.Session
data ThreadOptions = ThreadOptions { wreqOptions :: Options, interval :: Int }
data GlobalOptions = GlobalOptions { threadOptions :: ThreadOptions, threadCount :: Int }
defaultWreqOptions :: Options
defaultWreqOptions = defaults
defaultThreadOptions :: ThreadOptions
defaultThreadOptions = ThreadOptions { wreqOptions = defaultWreqOptions, interval = 0 }
defaultGlobalOptions :: GlobalOptions
defaultGlobalOptions = GlobalOptions { threadOptions = defaultThreadOptions, threadCount = 1 }
thread :: ThreadOptions -> [String] -> IO [ByteString]
thread options urls = withSession $ \session ->
mapM (\url -> const . flip (^.) responseBody <$>
getWith (wreqOptions options) session url
<*> threadDelay (interval options)) urls
threads :: GlobalOptions -> [String] -> IO [ByteString]
threads options urls = join <$> mapConcurrently (thread (threadOptions options)) (chunksOf (threadCount options) urls)
build_ids :: [Int]
build_ids = [
1424142,
1424273,
1424416,
1424425,
1427282,
1427935,
1429912,
1429920,
1430393,
1434710,
1436562,
1436818,
1437355,
1437359,
1437366,
1437374,
1424196,
1424285,
1424417,
1424428,
1427285,
1429386,
1429914,
1429937,
1430960,
1435252,
1436589,
1436889,
1437356,
1437360,
1437369,
1437379,
1424197,
1424404,
1424424,
1424429,
1427526,
1429818,
1429917,
1429940,
1433329,
1435265,
1436769,
1437161,
1437358,
1437364,
1437373,
1437381
]
url_lists = Prelude.map (\x -> ("/" ++ show x)) build_ids
main :: IO ()
main = do
contents < - threads defaultGlobalOptions ( replicate 8 " / " )
contents <- threads defaultGlobalOptions url_lists
print contents
| null | https://raw.githubusercontent.com/kostmo/circleci-failure-tracker/393d10a72080bd527fdb159da6ebfea23fcd52d1/app/threading-experiment/gist.hs | haskell | | Found here: | # OPTIONS_GHC -Wall -O2 -threaded -with - rtsopts="-N " #
import Control.Concurrent
import Control.Concurrent.Async
import Control.Lens
import Control.Monad
import Data.ByteString.Lazy hiding (replicate)
import Data.List.Split
import Network.Wreq hiding (getWith)
import Network.Wreq.Session
data ThreadOptions = ThreadOptions { wreqOptions :: Options, interval :: Int }
data GlobalOptions = GlobalOptions { threadOptions :: ThreadOptions, threadCount :: Int }
defaultWreqOptions :: Options
defaultWreqOptions = defaults
defaultThreadOptions :: ThreadOptions
defaultThreadOptions = ThreadOptions { wreqOptions = defaultWreqOptions, interval = 0 }
defaultGlobalOptions :: GlobalOptions
defaultGlobalOptions = GlobalOptions { threadOptions = defaultThreadOptions, threadCount = 1 }
thread :: ThreadOptions -> [String] -> IO [ByteString]
thread options urls = withSession $ \session ->
mapM (\url -> const . flip (^.) responseBody <$>
getWith (wreqOptions options) session url
<*> threadDelay (interval options)) urls
threads :: GlobalOptions -> [String] -> IO [ByteString]
threads options urls = join <$> mapConcurrently (thread (threadOptions options)) (chunksOf (threadCount options) urls)
build_ids :: [Int]
build_ids = [
1424142,
1424273,
1424416,
1424425,
1427282,
1427935,
1429912,
1429920,
1430393,
1434710,
1436562,
1436818,
1437355,
1437359,
1437366,
1437374,
1424196,
1424285,
1424417,
1424428,
1427285,
1429386,
1429914,
1429937,
1430960,
1435252,
1436589,
1436889,
1437356,
1437360,
1437369,
1437379,
1424197,
1424404,
1424424,
1424429,
1427526,
1429818,
1429917,
1429940,
1433329,
1435265,
1436769,
1437161,
1437358,
1437364,
1437373,
1437381
]
url_lists = Prelude.map (\x -> ("/" ++ show x)) build_ids
main :: IO ()
main = do
contents < - threads defaultGlobalOptions ( replicate 8 " / " )
contents <- threads defaultGlobalOptions url_lists
print contents
|
7e1e0db754f567876b6bc957b7045096cc49a90e8056f5ab77fc80754cccbd6f | CIFASIS/QuickFuzz | PrintGrammar.hs | # OPTIONS_GHC -fno - warn - incomplete - patterns #
module Test.QuickFuzz.Gen.Bnfc.PrintGrammar where
pretty - printer generated by the BNF converter
import Test.QuickFuzz.Gen.Bnfc.AbsGrammar
import Data.Char
-- the top-level printing method
printTree :: Print a => a -> String
printTree = render . prt 0
type Doc = [ShowS] -> [ShowS]
doc :: ShowS -> Doc
doc = (:)
render :: Doc -> String
render d = rend 0 (map ($ "") $ d []) "" where
rend i ss = case ss of
"[" :ts -> showChar '[' . rend i ts
"(" :ts -> showChar '(' . rend i ts
"{" :ts -> showChar '{' . new (i+1) . rend (i+1) ts
"}" : ";":ts -> new (i-1) . space "}" . showChar ';' . new (i-1) . rend (i-1) ts
"}" :ts -> new (i-1) . showChar '}' . new (i-1) . rend (i-1) ts
";" :ts -> showChar ';' . new i . rend i ts
t : "," :ts -> showString t . space "," . rend i ts
t : ")" :ts -> showString t . showChar ')' . rend i ts
t : "]" :ts -> showString t . showChar ']' . rend i ts
t :ts -> space t . rend i ts
_ -> id
new i = showChar '\n' . replicateS (2*i) (showChar ' ') . dropWhile isSpace
space t = showString t . (\s -> if null s then "" else (' ':s))
parenth :: Doc -> Doc
parenth ss = doc (showChar '(') . ss . doc (showChar ')')
concatS :: [ShowS] -> ShowS
concatS = foldr (.) id
concatD :: [Doc] -> Doc
concatD = foldr (.) id
replicateS :: Int -> ShowS -> ShowS
replicateS n f = concatS (replicate n f)
-- the printer class does the job
class Print a where
prt :: Int -> a -> Doc
prtList :: Int -> [a] -> Doc
prtList i = concatD . map (prt i)
instance Print a => Print [a] where
prt = prtList
instance Print Char where
prt _ s = doc (showChar '\'' . mkEsc '\'' s . showChar '\'')
prtList _ s = doc (showChar '"' . concatS (map (mkEsc '"') s) . showChar '"')
mkEsc :: Char -> Char -> ShowS
mkEsc q s = case s of
_ | s == q -> showChar '\\' . showChar s
'\\'-> showString "\\\\"
'\n' -> showString "\\n"
'\t' -> showString "\\t"
_ -> showChar s
prPrec :: Int -> Int -> Doc -> Doc
prPrec i j = if j<i then parenth else id
instance Print Integer where
prt _ x = doc (shows x)
instance Print Double where
prt _ x = doc (shows x)
instance Print Exp where
prt i e = case e of
EAdd exp1 exp2 -> prPrec i 0 (concatD [prt 0 exp1, doc (showString "+"), prt 1 exp2])
ESub exp1 exp2 -> prPrec i 0 (concatD [prt 0 exp1, doc (showString "-"), prt 1 exp2])
EMul exp1 exp2 -> prPrec i 1 (concatD [prt 1 exp1, doc (showString "*"), prt 2 exp2])
EDiv exp1 exp2 -> prPrec i 1 (concatD [prt 1 exp1, doc (showString "/"), prt 2 exp2])
EInt n -> prPrec i 2 (concatD [prt 0 n])
| null | https://raw.githubusercontent.com/CIFASIS/QuickFuzz/a1c69f028b0960c002cb83e8145f039ecc0e0a23/src/Test/QuickFuzz/Gen/Bnfc/PrintGrammar.hs | haskell | the top-level printing method
the printer class does the job | # OPTIONS_GHC -fno - warn - incomplete - patterns #
module Test.QuickFuzz.Gen.Bnfc.PrintGrammar where
pretty - printer generated by the BNF converter
import Test.QuickFuzz.Gen.Bnfc.AbsGrammar
import Data.Char
printTree :: Print a => a -> String
printTree = render . prt 0
type Doc = [ShowS] -> [ShowS]
doc :: ShowS -> Doc
doc = (:)
render :: Doc -> String
render d = rend 0 (map ($ "") $ d []) "" where
rend i ss = case ss of
"[" :ts -> showChar '[' . rend i ts
"(" :ts -> showChar '(' . rend i ts
"{" :ts -> showChar '{' . new (i+1) . rend (i+1) ts
"}" : ";":ts -> new (i-1) . space "}" . showChar ';' . new (i-1) . rend (i-1) ts
"}" :ts -> new (i-1) . showChar '}' . new (i-1) . rend (i-1) ts
";" :ts -> showChar ';' . new i . rend i ts
t : "," :ts -> showString t . space "," . rend i ts
t : ")" :ts -> showString t . showChar ')' . rend i ts
t : "]" :ts -> showString t . showChar ']' . rend i ts
t :ts -> space t . rend i ts
_ -> id
new i = showChar '\n' . replicateS (2*i) (showChar ' ') . dropWhile isSpace
space t = showString t . (\s -> if null s then "" else (' ':s))
parenth :: Doc -> Doc
parenth ss = doc (showChar '(') . ss . doc (showChar ')')
concatS :: [ShowS] -> ShowS
concatS = foldr (.) id
concatD :: [Doc] -> Doc
concatD = foldr (.) id
replicateS :: Int -> ShowS -> ShowS
replicateS n f = concatS (replicate n f)
class Print a where
prt :: Int -> a -> Doc
prtList :: Int -> [a] -> Doc
prtList i = concatD . map (prt i)
instance Print a => Print [a] where
prt = prtList
instance Print Char where
prt _ s = doc (showChar '\'' . mkEsc '\'' s . showChar '\'')
prtList _ s = doc (showChar '"' . concatS (map (mkEsc '"') s) . showChar '"')
mkEsc :: Char -> Char -> ShowS
mkEsc q s = case s of
_ | s == q -> showChar '\\' . showChar s
'\\'-> showString "\\\\"
'\n' -> showString "\\n"
'\t' -> showString "\\t"
_ -> showChar s
prPrec :: Int -> Int -> Doc -> Doc
prPrec i j = if j<i then parenth else id
instance Print Integer where
prt _ x = doc (shows x)
instance Print Double where
prt _ x = doc (shows x)
instance Print Exp where
prt i e = case e of
EAdd exp1 exp2 -> prPrec i 0 (concatD [prt 0 exp1, doc (showString "+"), prt 1 exp2])
ESub exp1 exp2 -> prPrec i 0 (concatD [prt 0 exp1, doc (showString "-"), prt 1 exp2])
EMul exp1 exp2 -> prPrec i 1 (concatD [prt 1 exp1, doc (showString "*"), prt 2 exp2])
EDiv exp1 exp2 -> prPrec i 1 (concatD [prt 1 exp1, doc (showString "/"), prt 2 exp2])
EInt n -> prPrec i 2 (concatD [prt 0 n])
|
51e1366314fd66694639c3ced855df4142cc73bad661668a6d5fb22f8160c8c0 | ryukinix/leraxandria | flipping-bits.lisp | Flipping Bits @ HackerRank Algorithms / Bit Manipulation
Solved by at 11/07/2017 05:05:05
(in-package :leraxandria/math)
(defun list-of-bits (integer)
"Given a integer i return a list of bits
Ex.: (integer-to-bits 3) => (1 1)
(integer-to-bits 5) => (1 0 1)"
(let ((bits '()))
(dotimes (index (integer-length integer) bits)
(push (if (logbitp index integer) 1 0) bits))
(or bits '(0))))
(defun bits-to-integer (bits)
(loop for x in (reverse bits)
for i from 0
when (= x 1)
sum (expt 2 i)))
(defun padding-bits (integer &optional (size 32))
(let ((bits (list-of-bits integer)))
(loop repeat (- size (length bits))
do (push 0 bits)
finally (return bits))))
(defun flip-bit (bit)
(if (= bit 1)
0
1))
(defun flip-bits (bits)
(mapcar #'flip-bit bits))
(defun flip-integer (integer)
(bits-to-integer (flip-bits (padding-bits integer))))
(eval-when (:execute)
(defun main ()
(loop repeat (read)
do (format t "~d~%" (flip-integer (read)))))
(main))
| null | https://raw.githubusercontent.com/ryukinix/leraxandria/e8c4d1f6d1e88072fbd58dd6c48b5d80577f3b62/src/math/flipping-bits.lisp | lisp | Flipping Bits @ HackerRank Algorithms / Bit Manipulation
Solved by at 11/07/2017 05:05:05
(in-package :leraxandria/math)
(defun list-of-bits (integer)
"Given a integer i return a list of bits
Ex.: (integer-to-bits 3) => (1 1)
(integer-to-bits 5) => (1 0 1)"
(let ((bits '()))
(dotimes (index (integer-length integer) bits)
(push (if (logbitp index integer) 1 0) bits))
(or bits '(0))))
(defun bits-to-integer (bits)
(loop for x in (reverse bits)
for i from 0
when (= x 1)
sum (expt 2 i)))
(defun padding-bits (integer &optional (size 32))
(let ((bits (list-of-bits integer)))
(loop repeat (- size (length bits))
do (push 0 bits)
finally (return bits))))
(defun flip-bit (bit)
(if (= bit 1)
0
1))
(defun flip-bits (bits)
(mapcar #'flip-bit bits))
(defun flip-integer (integer)
(bits-to-integer (flip-bits (padding-bits integer))))
(eval-when (:execute)
(defun main ()
(loop repeat (read)
do (format t "~d~%" (flip-integer (read)))))
(main))
| |
02c8f953a6276935385c62ef553f8df81541f5f6eef567da78e17701311a7284 | babashka/babashka | crypto_test.clj | (ns babashka.crypto-test
(:require [babashka.test-utils :as test-utils]
[clojure.edn :as edn]
[clojure.test :refer [deftest is]])
(:import (javax.crypto Mac)
(javax.crypto.spec SecretKeySpec)))
(defn bb [& exprs]
(edn/read-string (apply test-utils/bb nil (map str exprs))))
(defn hmac-sha-256 [key data]
(let [algo "HmacSHA256"
mac (Mac/getInstance algo)]
(.init mac (SecretKeySpec. key algo))
(.doFinal mac (.getBytes data "UTF-8"))))
(deftest hmac-sha-256-test
(let [key-s "some-key"
data "some-data"
expected-sha (String. (.encode (java.util.Base64/getEncoder)
(hmac-sha-256 (.getBytes key-s) data))
"utf-8")]
(prn expected-sha)
(is (= expected-sha (bb '(do (ns net
(:import (javax.crypto Mac)
(javax.crypto.spec SecretKeySpec)))
(defn hmac-sha-256 [key data]
(let [algo "HmacSHA256"
mac (Mac/getInstance algo)]
(.init mac (SecretKeySpec. key algo))
(.doFinal mac (.getBytes data "UTF-8"))))
(let [key-s "some-key"
data "some-data"]
(String. (.encode (java.util.Base64/getEncoder)
(hmac-sha-256 (.getBytes key-s) data))
"utf-8"))))))))
(deftest secretkey-test
(is (= 32 (bb '(do (import 'javax.crypto.SecretKeyFactory)
(import 'javax.crypto.spec.PBEKeySpec)
(defn gen-secret-key
"Generate secret key based on a given token string.
Returns bytes array 256-bit length."
[^String secret-token]
(let [salt (.getBytes "abcde")
factory (SecretKeyFactory/getInstance "PBKDF2WithHmacSHA256")
spec (PBEKeySpec. (.toCharArray secret-token) salt 10000 256)
secret (.generateSecret factory spec)]
(count (.getEncoded secret))))
(gen-secret-key "foo"))))))
| null | https://raw.githubusercontent.com/babashka/babashka/3ad043769c16162abf33c58ad7068fb8ebc6679f/test/babashka/crypto_test.clj | clojure | (ns babashka.crypto-test
(:require [babashka.test-utils :as test-utils]
[clojure.edn :as edn]
[clojure.test :refer [deftest is]])
(:import (javax.crypto Mac)
(javax.crypto.spec SecretKeySpec)))
(defn bb [& exprs]
(edn/read-string (apply test-utils/bb nil (map str exprs))))
(defn hmac-sha-256 [key data]
(let [algo "HmacSHA256"
mac (Mac/getInstance algo)]
(.init mac (SecretKeySpec. key algo))
(.doFinal mac (.getBytes data "UTF-8"))))
(deftest hmac-sha-256-test
(let [key-s "some-key"
data "some-data"
expected-sha (String. (.encode (java.util.Base64/getEncoder)
(hmac-sha-256 (.getBytes key-s) data))
"utf-8")]
(prn expected-sha)
(is (= expected-sha (bb '(do (ns net
(:import (javax.crypto Mac)
(javax.crypto.spec SecretKeySpec)))
(defn hmac-sha-256 [key data]
(let [algo "HmacSHA256"
mac (Mac/getInstance algo)]
(.init mac (SecretKeySpec. key algo))
(.doFinal mac (.getBytes data "UTF-8"))))
(let [key-s "some-key"
data "some-data"]
(String. (.encode (java.util.Base64/getEncoder)
(hmac-sha-256 (.getBytes key-s) data))
"utf-8"))))))))
(deftest secretkey-test
(is (= 32 (bb '(do (import 'javax.crypto.SecretKeyFactory)
(import 'javax.crypto.spec.PBEKeySpec)
(defn gen-secret-key
"Generate secret key based on a given token string.
Returns bytes array 256-bit length."
[^String secret-token]
(let [salt (.getBytes "abcde")
factory (SecretKeyFactory/getInstance "PBKDF2WithHmacSHA256")
spec (PBEKeySpec. (.toCharArray secret-token) salt 10000 256)
secret (.generateSecret factory spec)]
(count (.getEncoded secret))))
(gen-secret-key "foo"))))))
| |
4bdcc0fd5c102050201491782bc0ea45dd01945d9ef7a23da0c984c68e6e6b0d | mg289/summarizer | project.clj | (defproject summarizer "1.1.0-SNAPSHOT"
:description "Text summarizer"
:dependencies [[org.clojure/clojure "1.5.1"]
[net.sf.jwordnet/jwnl "1.4_rc3"]
[org.apache.opennlp/opennlp-tools "1.5.3"]
[org.jgrapht/jgrapht-jdk1.5 "0.7.3"]])
| null | https://raw.githubusercontent.com/mg289/summarizer/0696cad3b2d9c8f80fa7566f4fe910e068a6377b/project.clj | clojure | (defproject summarizer "1.1.0-SNAPSHOT"
:description "Text summarizer"
:dependencies [[org.clojure/clojure "1.5.1"]
[net.sf.jwordnet/jwnl "1.4_rc3"]
[org.apache.opennlp/opennlp-tools "1.5.3"]
[org.jgrapht/jgrapht-jdk1.5 "0.7.3"]])
| |
d662ceb421c2f34c880c36e59027b4aab91d53a8dc7fa6ea2e2331b8b00475c6 | monadbobo/ocaml-core | conv_test.ml | * Conv_test : module for testing automated S - expression conversions and
path substitutions
path substitutions *)
open Format
open Sexplib
open Sexp
open Conv
module Exc_test : sig
exception Test_exc of (string * int) with sexp
end = struct
exception Test_exc of (string * int) with sexp
end
(* Test each character. *)
let check_string s =
let s' =
match (Sexp.of_string (Sexp.to_string (Sexp.Atom s))) with
| Sexp.Atom s -> s
| _ -> assert false
in
assert (s = s')
let () =
for i = 0 to 255 do
check_string (String.make 1 (Char.chr i))
done
(* Test user specified conversion *)
type my_float = float
let sexp_of_my_float n = Atom (sprintf "%.4f" n)
let my_float_of_sexp = function
| Atom str -> float_of_string str
| _ -> failwith "my_float_of_sexp: atom expected"
(* Test simple sum of products *)
type foo = A | B of int * float
with sexp
(* Test polymorphic variants and deep module paths *)
module M = struct
module N = struct
type ('a, 'b) variant = [ `X of ('a, 'b) variant | `Y of 'a * 'b ]
with sexp
type test = [ `Test ]
with sexp
end
end
type 'a variant =
[ M.N.test | `V1 of [ `Z | ('a, string) M.N.variant ] option | `V2 ]
with sexp
(* Test empty types *)
type empty with sexp
(* Test variance annotations *)
module type S = sig
type +'a t with sexp
end
(* Test labeled arguments in functions *)
type labeled = string -> foo : unit -> ?bar : int -> float -> float with sexp
let f str ~foo:_ ?(bar = 3) n = float_of_string str +. n +. float bar
let labeled_sexp : Sexp.t = sexp_of_labeled f
let labeled : labeled lazy_t = lazy (labeled_of_sexp (labeled_sexp : Sexp.t))
type rec_labeled = { a : (foo : unit -> unit) } with sexp_of
(* Test recursive types *)
(* Test polymorphic record fields *)
type 'x poly =
{
p : 'a 'b. 'a list;
maybe_t : 'x t option;
}
(* Test records *)
and 'a t =
{
x : foo;
a : 'a variant;
foo : int;
bar : (my_float * string) list option;
sexp_option : int sexp_option;
sexp_list : int sexp_list;
sexp_bool : sexp_bool;
poly : 'a poly;
}
with sexp
type v = { t : int t }
(* Test manifest types *)
type u = v = { t : int t }
with sexp
(* Test types involving exceptions *)
type exn_test = int * exn
with sexp_of
(* Test function types *)
type fun_test = int -> unit with sexp_of
open Path
let main () =
let make_t a =
{
x = B (42, 3.1);
a = a;
foo = 3;
bar = Some [(3.1, "foo")];
sexp_option = None;
sexp_list = [];
sexp_bool = true;
poly =
{
p = [];
maybe_t = None;
};
}
in
let v = `B (5, 5) in
let v_sexp = <:sexp_of<[ `A | `B of int * int ] >> v in
assert (<:of_sexp< [ `A | `B of int * int ] >> v_sexp = v);
let u = { t = make_t (`V1 (Some (`X (`Y (7, "bla"))))) } in
let u_sexp = sexp_of_u u in
printf "Original: %a@\n@." pp u_sexp;
let u' = u_of_sexp u_sexp in
assert (u = u');
let foo_sexp = Sexp.of_string "A" in
let _foo = foo_of_sexp foo_sexp in
let path_str = ".[0].[1]" in
let path = Path.parse path_str in
let subst, el = subst_path u_sexp path in
printf "Pos(%s): %a -> SUBST1@\n" path_str pp el;
let dumb_sexp = subst (Atom "SUBST1") in
printf "Pos(%s): %a@\n@\n" path_str pp dumb_sexp;
let path_str = ".t.x.B[1]" in
let path = Path.parse path_str in
let subst, el = subst_path u_sexp path in
printf "Record(%s): %a -> SUBST2@\n" path_str pp el;
let u_sexp = subst (Atom "SUBST2") in
printf "Record(%s): %a@\n@\n" path_str pp u_sexp;
printf "SUCCESS!!!@."
let () =
try main (); raise (Exc_test.Test_exc ("expected exception", 42)) with
| exc -> eprintf "Exception: %s@." (Sexp.to_string_hum (sexp_of_exn exc))
| null | https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/sexplib/lib_test/conv_test.ml | ocaml | Test each character.
Test user specified conversion
Test simple sum of products
Test polymorphic variants and deep module paths
Test empty types
Test variance annotations
Test labeled arguments in functions
Test recursive types
Test polymorphic record fields
Test records
Test manifest types
Test types involving exceptions
Test function types | * Conv_test : module for testing automated S - expression conversions and
path substitutions
path substitutions *)
open Format
open Sexplib
open Sexp
open Conv
module Exc_test : sig
exception Test_exc of (string * int) with sexp
end = struct
exception Test_exc of (string * int) with sexp
end
let check_string s =
let s' =
match (Sexp.of_string (Sexp.to_string (Sexp.Atom s))) with
| Sexp.Atom s -> s
| _ -> assert false
in
assert (s = s')
let () =
for i = 0 to 255 do
check_string (String.make 1 (Char.chr i))
done
type my_float = float
let sexp_of_my_float n = Atom (sprintf "%.4f" n)
let my_float_of_sexp = function
| Atom str -> float_of_string str
| _ -> failwith "my_float_of_sexp: atom expected"
type foo = A | B of int * float
with sexp
module M = struct
module N = struct
type ('a, 'b) variant = [ `X of ('a, 'b) variant | `Y of 'a * 'b ]
with sexp
type test = [ `Test ]
with sexp
end
end
type 'a variant =
[ M.N.test | `V1 of [ `Z | ('a, string) M.N.variant ] option | `V2 ]
with sexp
type empty with sexp
module type S = sig
type +'a t with sexp
end
type labeled = string -> foo : unit -> ?bar : int -> float -> float with sexp
let f str ~foo:_ ?(bar = 3) n = float_of_string str +. n +. float bar
let labeled_sexp : Sexp.t = sexp_of_labeled f
let labeled : labeled lazy_t = lazy (labeled_of_sexp (labeled_sexp : Sexp.t))
type rec_labeled = { a : (foo : unit -> unit) } with sexp_of
type 'x poly =
{
p : 'a 'b. 'a list;
maybe_t : 'x t option;
}
and 'a t =
{
x : foo;
a : 'a variant;
foo : int;
bar : (my_float * string) list option;
sexp_option : int sexp_option;
sexp_list : int sexp_list;
sexp_bool : sexp_bool;
poly : 'a poly;
}
with sexp
type v = { t : int t }
type u = v = { t : int t }
with sexp
type exn_test = int * exn
with sexp_of
type fun_test = int -> unit with sexp_of
open Path
let main () =
let make_t a =
{
x = B (42, 3.1);
a = a;
foo = 3;
bar = Some [(3.1, "foo")];
sexp_option = None;
sexp_list = [];
sexp_bool = true;
poly =
{
p = [];
maybe_t = None;
};
}
in
let v = `B (5, 5) in
let v_sexp = <:sexp_of<[ `A | `B of int * int ] >> v in
assert (<:of_sexp< [ `A | `B of int * int ] >> v_sexp = v);
let u = { t = make_t (`V1 (Some (`X (`Y (7, "bla"))))) } in
let u_sexp = sexp_of_u u in
printf "Original: %a@\n@." pp u_sexp;
let u' = u_of_sexp u_sexp in
assert (u = u');
let foo_sexp = Sexp.of_string "A" in
let _foo = foo_of_sexp foo_sexp in
let path_str = ".[0].[1]" in
let path = Path.parse path_str in
let subst, el = subst_path u_sexp path in
printf "Pos(%s): %a -> SUBST1@\n" path_str pp el;
let dumb_sexp = subst (Atom "SUBST1") in
printf "Pos(%s): %a@\n@\n" path_str pp dumb_sexp;
let path_str = ".t.x.B[1]" in
let path = Path.parse path_str in
let subst, el = subst_path u_sexp path in
printf "Record(%s): %a -> SUBST2@\n" path_str pp el;
let u_sexp = subst (Atom "SUBST2") in
printf "Record(%s): %a@\n@\n" path_str pp u_sexp;
printf "SUCCESS!!!@."
let () =
try main (); raise (Exc_test.Test_exc ("expected exception", 42)) with
| exc -> eprintf "Exception: %s@." (Sexp.to_string_hum (sexp_of_exn exc))
|
9c5b3cc4a8bdafea263396cbfcc0f417c9517642114d8d1b278c64a44ccb1b4f | cloudant/chttpd | chttpd_sup.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
use this file except in compliance with the License . You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(chttpd_sup).
-behaviour(supervisor).
-export([init/1]).
-export([start_link/1]).
%% @doc Start the supervisor, registered locally as ?MODULE.
%% Args is passed through unchanged to init/1.
start_link(Args) ->
    supervisor:start_link({local,?MODULE}, ?MODULE, Args).
%% @doc Supervisor callback: a one_for_one tree with a single permanent
%% chttpd worker (shutdown timeout 100 ms), allowing at most 3 restarts
%% within 10 seconds.
init([]) ->
    Mod = chttpd,
    Spec = {Mod, {Mod,start_link,[]}, permanent, 100, worker, [Mod]},
    {ok, {{one_for_one, 3, 10}, [Spec]}}.
| null | https://raw.githubusercontent.com/cloudant/chttpd/d20e9b66b9e51ac400f468aa442af461fc85a96f/src/chttpd_sup.erl | erlang | the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
use this file except in compliance with the License . You may obtain a copy of
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(chttpd_sup).
-behaviour(supervisor).
-export([init/1]).
-export([start_link/1]).
start_link(Args) ->
supervisor:start_link({local,?MODULE}, ?MODULE, Args).
init([]) ->
Mod = chttpd,
Spec = {Mod, {Mod,start_link,[]}, permanent, 100, worker, [Mod]},
{ok, {{one_for_one, 3, 10}, [Spec]}}.
|
052a2a1a0db110b1733ddf6a9bed09c058db7eca5349c4bee8cbe9ebb6b374dd | roddyyaga/bs-swr | swr_options.ml | type t = {
errorRetryInterval: int option;
errorRetryCount: int option;
loadingTimeout: int option;
focusThrottleInterval: int option;
dedupingInterval: int option;
refreshInterval: int option;
refreshWhenHidden: bool option;
refreshWhenOffline: bool option;
revalidateOnFocus: bool option;
revalidateOnMount: bool option;
revalidateOnReconnect: bool option;
shouldRetryOnError: bool option;
suspense: bool option;
}
(* [make] builds an options record.  Every optional argument has a
   default value; [errorRetryCount] is the only one without a default
   and stays [None] unless supplied. *)
let make ?(suspense = false) ?(revalidateOnFocus = true)
    ?(revalidateOnReconnect = true) ?(revalidateOnMount = false)
    ?(refreshInterval = 0) ?(refreshWhenHidden = false)
    ?(refreshWhenOffline = false) ?(shouldRetryOnError = true)
    ?(dedupingInterval = 2000) ?(focusThrottleInterval = 5000)
    ?(loadingTimeout = 3000) ?(errorRetryInterval = 5000)
    ?errorRetryCount () =
  {
    errorRetryInterval = Some errorRetryInterval;
    errorRetryCount;
    loadingTimeout = Some loadingTimeout;
    focusThrottleInterval = Some focusThrottleInterval;
    dedupingInterval = Some dedupingInterval;
    refreshInterval = Some refreshInterval;
    refreshWhenHidden = Some refreshWhenHidden;
    refreshWhenOffline = Some refreshWhenOffline;
    revalidateOnFocus = Some revalidateOnFocus;
    revalidateOnMount = Some revalidateOnMount;
    revalidateOnReconnect = Some revalidateOnReconnect;
    shouldRetryOnError = Some shouldRetryOnError;
    suspense = Some suspense;
  }

(* Options record with every default applied. *)
let default = make ()
(* Convert a [t] - together with the non-record extras (initial data,
   event callbacks, comparator) - into the raw [Swr_raw.configInterface]
   value consumed by the binding layer. *)
let to_configInterface ?initialData ?onLoadingSlow ?onSuccess ?onError
    ?onErrorRetry ?compare
    {
      errorRetryInterval;
      errorRetryCount;
      loadingTimeout;
      focusThrottleInterval;
      dedupingInterval;
      refreshInterval;
      refreshWhenHidden;
      refreshWhenOffline;
      revalidateOnFocus;
      revalidateOnMount;
      revalidateOnReconnect;
      shouldRetryOnError;
      suspense;
    } =
  Swr_raw.configInterface ?errorRetryInterval ?errorRetryCount ?loadingTimeout
    ?focusThrottleInterval ?dedupingInterval ?refreshInterval ?refreshWhenHidden
    ?refreshWhenOffline ?revalidateOnFocus ?revalidateOnReconnect ?revalidateOnMount
    ?shouldRetryOnError ?suspense ?initialData ?onLoadingSlow ?onSuccess
    ?onError ?onErrorRetry ?compare ()
| null | https://raw.githubusercontent.com/roddyyaga/bs-swr/d284cc3f7d3a596c79c654eb3cc3204b6c7493e9/src/swr_options.ml | ocaml | type t = {
errorRetryInterval: int option;
errorRetryCount: int option;
loadingTimeout: int option;
focusThrottleInterval: int option;
dedupingInterval: int option;
refreshInterval: int option;
refreshWhenHidden: bool option;
refreshWhenOffline: bool option;
revalidateOnFocus: bool option;
revalidateOnMount: bool option;
revalidateOnReconnect: bool option;
shouldRetryOnError: bool option;
suspense: bool option;
}
let make ?(suspense = false) ?(revalidateOnFocus = true)
?(revalidateOnReconnect = true) ?(revalidateOnMount = false)
?(refreshInterval = 0) ?(refreshWhenHidden = false)
?(refreshWhenOffline = false) ?(shouldRetryOnError = true)
?(dedupingInterval = 2000) ?(focusThrottleInterval = 5000)
?(loadingTimeout = 3000) ?(errorRetryInterval = 5000)
?errorRetryCount () =
{
errorRetryInterval = Some errorRetryInterval;
errorRetryCount;
loadingTimeout = Some loadingTimeout;
focusThrottleInterval = Some focusThrottleInterval;
dedupingInterval = Some dedupingInterval;
refreshInterval = Some refreshInterval;
refreshWhenHidden = Some refreshWhenHidden;
refreshWhenOffline = Some refreshWhenOffline;
revalidateOnFocus = Some revalidateOnFocus;
revalidateOnMount= Some revalidateOnMount;
revalidateOnReconnect = Some revalidateOnReconnect;
shouldRetryOnError = Some shouldRetryOnError;
suspense = Some suspense;
}
let default = make ()
let to_configInterface ?initialData ?onLoadingSlow ?onSuccess ?onError
?onErrorRetry ?compare
{
errorRetryInterval;
errorRetryCount;
loadingTimeout;
focusThrottleInterval;
dedupingInterval;
refreshInterval;
refreshWhenHidden;
refreshWhenOffline;
revalidateOnFocus;
revalidateOnMount;
revalidateOnReconnect;
shouldRetryOnError;
suspense;
} =
Swr_raw.configInterface ?errorRetryInterval ?errorRetryCount ?loadingTimeout
?focusThrottleInterval ?dedupingInterval ?refreshInterval ?refreshWhenHidden
?refreshWhenOffline ?revalidateOnFocus ?revalidateOnReconnect ?revalidateOnMount
?shouldRetryOnError ?suspense ?initialData ?onLoadingSlow ?onSuccess
?onError ?onErrorRetry ?compare ()
| |
56a56e0de982a2f0abbcf35d09190b9bc53efa5c31003501c2f736b45ed2e2f7 | Dasudian/DSDIN | dsdc_block_genesis.erl | %%%-------------------------------------------------------------------
( C ) 2018 , Dasudian Technologies
@doc Genesis block definition .
%%%
%%% The genesis block does not follow the validation rules of the
%%% other blocks because:
%%% * Its state trees include preset accounts;
* It does not cointain a valid PoW ( it is unmined ) ;
* It implies genesis block can not be validated PoW wise ;
%%% * Note: the miner account specified in the genesis block is
%%% still rewarded as for the other blocks.
%%% * Its time is epoch i.e. much in the past;
* It implies the time difference between genesis block and first
%%% block is very large - that may be considered abnormal for
successive blocks ( e.g. between blocks 1 and 2 - with block 0
%%% being genesis).
%%% * The value of the hash of the (nonexistent) previous block is
special i.e. all zeros .
%%% * This means that validation function attempting to consider the
%%% hashes in a block needs to have a special case for genesis.
%%% @end
%%%-------------------------------------------------------------------
-module(dsdc_block_genesis).
%% API
-export([ genesis_header/0,
genesis_block_with_state/0,
populated_trees/0 ]).
-export([genesis_difficulty/0]).
-export([prev_hash/0,
height/0,
pow/0,
txs_hash/0,
transactions/0,
miner/0]).
-ifdef(TEST).
-export([genesis_block_with_state/1]).
-endif.
-include("blocks.hrl").
%% Since preset accounts are being loaded from a file - please use with caution
%% @doc Header of the genesis block, obtained by building the full
%% genesis block (with state trees) and converting it to a header.
genesis_header() ->
    {B, _S} = genesis_block_with_state(),
    dsdc_blocks:to_header(B).
%% @doc Previous-block hash of genesis: all zeros, as no block precedes it.
prev_hash() ->
    <<0:?BLOCK_HEADER_HASH_BYTES/unit:8>>.

%% @doc Transactions hash of the (empty) genesis transaction list.
txs_hash() ->
    txs_hash(transactions()).

%% The left-hand match asserts that the padded root hash of the
%% (empty) transaction tree is the all-zero value.
txs_hash(Txs) ->
    <<0:?TXS_HASH_BYTES/unit:8>> =
        dsdc_txs_trees:pad_empty(dsdc_txs_trees:root_hash(dsdc_txs_trees:from_txs(
            Txs))).

%% @doc Genesis carries no proof-of-work solution.
pow() ->
    no_value.

%% @doc Genesis contains no transactions.
transactions() ->
    [].

%% @doc Genesis miner: the all-zero public key.
miner() ->
    <<0:?MINER_PUB_BYTES/unit:8>>.
%% Returns the genesis block and the state trees.
%%
The current implementation of state trees causes a new Erlang term ,
%% representing the initial state trees, to be allocated in the
%% heap memory of the calling process.
%%
%% Since preset accounts are being loaded from a file - please use with caution
%% @doc Genesis block and its state trees, using the preset accounts
%% loaded from the genesis settings (performs file I/O - use with caution).
genesis_block_with_state() ->
    genesis_block_with_state(#{preset_accounts => dsdc_genesis_block_settings:preset_accounts()}).

%% @doc As above, but with the preset accounts (and optionally a state
%% tree) supplied via Map.  Applies the (empty) genesis transaction list
%% strictly and assembles the block from the resulting trees.
genesis_block_with_state(Map) ->
    Txs = transactions(),
    {ok, Txs, Trees} =
        dsdc_block_candidate:apply_block_txs_strict(Txs, miner(), populated_trees(Map),
                                                    height(), ?GENESIS_VERSION),
    Block = dsdc_blocks:new(height(), prev_hash(), dsdc_trees:hash(Trees),
                            txs_hash(Txs), Txs, ?HIGHEST_TARGET_SCI, 0, 0, %% time = 0 (epoch)
                            ?GENESIS_VERSION, miner()),
    {Block, Trees}.
%% Returns state trees at genesis block.
%%
%% It includes preset accounts.
%%
%% It does not include reward for miner account.
%% @doc State trees at genesis, with preset accounts loaded from the
%% genesis settings (performs file I/O).
populated_trees() ->
    populated_trees(#{preset_accounts => dsdc_genesis_block_settings:preset_accounts()}).

%% @doc Insert each preset {PubKey, Amount} account into the accounts
%% tree of the given (or fresh) state trees.  No miner reward is added.
populated_trees(Map) ->
    PresetAccounts = maps:get(preset_accounts, Map),
    StateTrees = maps:get(state_tree, Map, dsdc_trees:new()),
    PopulatedAccountsTree =
        lists:foldl(fun({PubKey, Amount}, T) ->
                            Account = dsdc_accounts:new(PubKey, Amount),
                            dsdc_accounts_trees:enter(Account, T)
                    end, dsdc_trees:accounts(StateTrees), PresetAccounts),
    dsdc_trees:set_accounts(StateTrees, PopulatedAccountsTree).

%% @doc Height of the genesis block.
height() ->
    ?GENESIS_HEIGHT.
%% Returns the difficulty of the genesis block meant to be used in the
%% computation of the chain difficulty.
genesis_difficulty() ->
Genesis block is unmined .
| null | https://raw.githubusercontent.com/Dasudian/DSDIN/b27a437d8deecae68613604fffcbb9804a6f1729/apps/dsdcore/src/dsdc_block_genesis.erl | erlang | -------------------------------------------------------------------
The genesis block does not follow the validation rules of the
other blocks because:
* Its state trees include preset accounts;
* Note: the miner account specified in the genesis block is
still rewarded as for the other blocks.
* Its time is epoch i.e. much in the past;
block is very large - that may be considered abnormal for
being genesis).
* The value of the hash of the (nonexistent) previous block is
* This means that validation function attempting to consider the
hashes in a block needs to have a special case for genesis.
@end
-------------------------------------------------------------------
API
Since preset accounts are being loaded from a file - please use with caution
Returns the genesis block and the state trees.
representing the initial state trees, to be allocated in the
heap memory of the calling process.
Since preset accounts are being loaded from a file - please use with caution
Epoch
Returns state trees at genesis block.
It includes preset accounts.
It does not include reward for miner account.
Returns the difficulty of the genesis block meant to be used in the
computation of the chain difficulty. | ( C ) 2018 , Dasudian Technologies
@doc Genesis block definition .
* It does not cointain a valid PoW ( it is unmined ) ;
* It implies genesis block can not be validated PoW wise ;
* It implies the time difference between genesis block and first
successive blocks ( e.g. between blocks 1 and 2 - with block 0
special i.e. all zeros .
-module(dsdc_block_genesis).
-export([ genesis_header/0,
genesis_block_with_state/0,
populated_trees/0 ]).
-export([genesis_difficulty/0]).
-export([prev_hash/0,
height/0,
pow/0,
txs_hash/0,
transactions/0,
miner/0]).
-ifdef(TEST).
-export([genesis_block_with_state/1]).
-endif.
-include("blocks.hrl").
genesis_header() ->
{B, _S} = genesis_block_with_state(),
dsdc_blocks:to_header(B).
prev_hash() ->
<<0:?BLOCK_HEADER_HASH_BYTES/unit:8>>.
txs_hash() ->
txs_hash(transactions()).
txs_hash(Txs) ->
<<0:?TXS_HASH_BYTES/unit:8>> =
dsdc_txs_trees:pad_empty(dsdc_txs_trees:root_hash(dsdc_txs_trees:from_txs(
Txs))).
pow() ->
no_value.
transactions() ->
[].
miner() ->
<<0:?MINER_PUB_BYTES/unit:8>>.
The current implementation of state trees causes a new Erlang term ,
genesis_block_with_state() ->
genesis_block_with_state(#{preset_accounts => dsdc_genesis_block_settings:preset_accounts()}).
genesis_block_with_state(Map) ->
Txs = transactions(),
{ok, Txs, Trees} =
dsdc_block_candidate:apply_block_txs_strict(Txs, miner(), populated_trees(Map),
height(), ?GENESIS_VERSION),
Block = dsdc_blocks:new(height(), prev_hash(), dsdc_trees:hash(Trees),
?GENESIS_VERSION, miner()),
{Block, Trees}.
populated_trees() ->
populated_trees(#{preset_accounts => dsdc_genesis_block_settings:preset_accounts()}).
populated_trees(Map) ->
PresetAccounts = maps:get(preset_accounts, Map),
StateTrees = maps:get(state_tree, Map, dsdc_trees:new()),
PopulatedAccountsTree =
lists:foldl(fun({PubKey, Amount}, T) ->
Account = dsdc_accounts:new(PubKey, Amount),
dsdc_accounts_trees:enter(Account, T)
end, dsdc_trees:accounts(StateTrees), PresetAccounts),
dsdc_trees:set_accounts(StateTrees, PopulatedAccountsTree).
height() ->
?GENESIS_HEIGHT.
genesis_difficulty() ->
Genesis block is unmined .
|
9a964321304b09fc40a553914842e320256916d81bc6f79d1de6da1e917aca4d | basho/riak_test | rt_cascading_mixed_clusters.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2013 - 2016 Basho Technologies , Inc.
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% Topology for this cascading replication test:
%% +-----+
%% | n12 |
%% +-----+
%% ^ \
%% / V
%% +-----+ +-----+
| | < - | n34 |
%% +-----+ +-----+
%%
This test is configurable for 1.3 versions of Riak , but off by default .
%% place the following config in ~/.riak_test_config to run:
%%
%% {run_rt_cascading_1_3_tests, true}
%% -------------------------------------------------------------------
-module(rt_cascading_mixed_clusters).
-behavior(riak_test).
%% API
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
-define(bucket, <<"objects">>).
%% @doc riak_test entry point.  The whole scenario is gated behind the
%% run_rt_cascading_1_3_tests config flag and is skipped (still passing)
%% when the flag is unset.
confirm() ->
    %% The test requires allow_mult = false because of rt:systest_read.
    rt:set_conf(all, [{"buckets.default.allow_mult", "false"}]),
    case rt_config:config_or_os_env(run_rt_cascading_1_3_tests, false) of
        false ->
            lager:info("mixed_version_clusters_test_ not configured to run!");
        _ ->
            State = mixed_version_clusters_setup(),
            _ = mixed_version_clusters_tests(State)
    end,
    pass.
%% @doc Deploy six 'previous'-version nodes and build three named
%% two-node clusters connected in a realtime-replication ring
%% (n12 -> n34 -> n56 -> n12).  Returns {too_old, Nodes} when the
%% deployed version predates 1.3.0, so the caller can skip the tests.
mixed_version_clusters_setup() ->
    Conf = rt_cascading:conf(),
    DeployConfs = [{previous, Conf} || _ <- lists:seq(1,6)],
    Nodes = rt:deploy_nodes(DeployConfs),
    [N1, N2, N3, N4, N5, N6] = Nodes,
    case rpc:call(N1, application, get_key, [riak_core, vsn]) of
        %% this is meant to test upgrading from early BNW aka
        %% Brave New World aka Advanced Repl aka version 3 repl to
        %% a cascading realtime repl. Other tests handle going from pre
        %% repl 3 to repl 3.
        {ok, Vsn} when Vsn < "1.3.0" ->
            {too_old, Nodes};
        _ ->
            N12 = [N1, N2],
            N34 = [N3, N4],
            N56 = [N5, N6],
            repl_util:make_cluster(N12),
            repl_util:make_cluster(N34),
            repl_util:make_cluster(N56),
            repl_util:name_cluster(N1, "n12"),
            repl_util:name_cluster(N3, "n34"),
            repl_util:name_cluster(N5, "n56"),
            [repl_util:wait_until_leader_converge(Cluster) || Cluster <- [N12, N34, N56]],
            rt_cascading:connect_rt(N1, rt_cascading:get_cluster_mgr_port(N3), "n34"),
            rt_cascading:connect_rt(N3, rt_cascading:get_cluster_mgr_port(N5), "n56"),
            rt_cascading:connect_rt(N5, rt_cascading:get_cluster_mgr_port(N1), "n12"),
            Nodes
    end.
%% @doc Run the mixed-version scenarios in order: writes do not cascade
%% while all clusters run the old version; replication still works after
%% upgrading a single source node; and cascading works everywhere once
%% every node runs 'current'.  A no-op when setup reported {too_old, _}.
mixed_version_clusters_tests({too_old, _Nodes}) ->
    ok;
mixed_version_clusters_tests(Nodes) ->
    [N1, N2, N3, N4, N5, N6] = Nodes,
    Tests = [
        {"no cascading at first 1", fun() ->
            Client = rt:pbc(N1),
            Bin = <<"no cascade yet">>,
            Obj = riakc_obj:new(?bucket, Bin, Bin),
            riakc_pb_socket:put(Client, Obj, [{w, 2}]),
            riakc_pb_socket:stop(Client),
            ?assertEqual({error, notfound}, rt_cascading:maybe_eventually_exists([N5, N6], ?bucket, Bin)),
            ?assertEqual(Bin, rt_cascading:maybe_eventually_exists([N3, N4], ?bucket, Bin))
        end},
        {"no cascading at first 2", fun() ->
            Client = rt:pbc(N2),
            Bin = <<"no cascade yet 2">>,
            Obj = riakc_obj:new(?bucket, Bin, Bin),
            riakc_pb_socket:put(Client, Obj, [{w, 2}]),
            riakc_pb_socket:stop(Client),
            ?assertEqual({error, notfound}, rt_cascading:maybe_eventually_exists([N5, N6], ?bucket, Bin)),
            ?assertEqual(Bin, rt_cascading:maybe_eventually_exists([N3, N4], ?bucket, Bin))
        end},
        %% Upgrade only N1; the now-mixed source cluster must still be
        %% able to send realtime replication to "n34".
        {"mixed source can send (setup)", fun() ->
            rt:upgrade(N1, current),
            repl_util:wait_until_leader_converge([N1, N2]),
            Running = fun(Node) ->
                RTStatus = rpc:call(Node, riak_repl2_rt, status, []),
                if
                    is_list(RTStatus) ->
                        SourcesList = proplists:get_value(sources, RTStatus, []),
                        Sources = [S || S <- SourcesList,
                            is_list(S),
                            proplists:get_value(connected, S, false),
                            proplists:get_value(source, S) =:= "n34"
                        ],
                        length(Sources) >= 1;
                    true ->
                        false
                end
            end,
            ?assertEqual(ok, rt:wait_until(N1, Running)),
            %% give the node further time to settle
            StatsNotEmpty = fun(Node) ->
                case rpc:call(Node, riak_repl_stats, get_stats, []) of
                    [] ->
                        false;
                    Stats ->
                        is_list(Stats)
                end
            end,
            ?assertEqual(ok, rt:wait_until(N1, StatsNotEmpty))
        end},
        {"node1 put", fun() ->
            Client = rt:pbc(N1),
            Bin = <<"rt after upgrade">>,
            Obj = riakc_obj:new(?bucket, Bin, Bin),
            riakc_pb_socket:put(Client, Obj, [{w, 2}]),
            riakc_pb_socket:stop(Client),
            ?assertEqual(Bin, rt_cascading:maybe_eventually_exists(N3, ?bucket, Bin, rt_cascading:timeout(100))),
            ?assertEqual({error, notfound}, rt_cascading:maybe_eventually_exists(N5, ?bucket, Bin, 100000))
        end},
        {"node2 put", fun() ->
            Client = rt:pbc(N2),
            Bin = <<"rt after upgrade 2">>,
            Obj = riakc_obj:new(?bucket, Bin, Bin),
            riakc_pb_socket:put(Client, Obj, [{w, 2}]),
            riakc_pb_socket:stop(Client),
            ?assertEqual({error, notfound}, rt_cascading:maybe_eventually_exists(N5, ?bucket, Bin)),
            ?assertEqual(Bin, rt_cascading:maybe_eventually_exists([N3,N4], ?bucket, Bin))
        end},
        %% Upgrade all remaining nodes; every write from every node must
        %% then be visible on all six nodes via cascading replication.
        {"upgrade the world, cascade starts working", fun() ->
            [N1 | NotUpgraded] = Nodes,
            [rt:upgrade(Node, current) || Node <- NotUpgraded],
            repl_util:wait_until_leader_converge([N1, N2]),
            repl_util:wait_until_leader_converge([N3, N4]),
            repl_util:wait_until_leader_converge([N5, N6]),
            ClusterMgrUp = fun(Node) ->
                case rpc:call(Node, erlang, whereis, [riak_core_cluster_manager]) of
                    P when is_pid(P) ->
                        true;
                    _ ->
                        fail
                end
            end,
            [rt:wait_until(N, ClusterMgrUp) || N <- Nodes],
            rt_cascading:maybe_reconnect_rt(N1, rt_cascading:get_cluster_mgr_port(N3), "n34"),
            rt_cascading:maybe_reconnect_rt(N3, rt_cascading:get_cluster_mgr_port(N5), "n56"),
            rt_cascading:maybe_reconnect_rt(N5, rt_cascading:get_cluster_mgr_port(N1), "n12"),
            ToB = fun
                (Atom) when is_atom(Atom) ->
                    list_to_binary(atom_to_list(Atom));
                (N) when is_integer(N) ->
                    list_to_binary(integer_to_list(N))
            end,
            ExistsEverywhere = fun(Key, LookupOrder) ->
                Reses = [rt_cascading:maybe_eventually_exists(Node, ?bucket, Key) || Node <- LookupOrder],
                ?debugFmt("Node and it's res:~n~p", [lists:zip(LookupOrder,
                    Reses)]),
                lists:all(fun(E) -> E =:= Key end, Reses)
            end,
            MakeTest = fun(Node, N) ->
                Name = "writing " ++ atom_to_list(Node) ++ "-write-" ++ integer_to_list(N),
                {NewTail, NewHead} = lists:splitwith(fun(E) ->
                    E =/= Node
                end, Nodes),
                ExistsLookup = NewHead ++ NewTail,
                Test = fun() ->
                    ?debugFmt("Running test ~p", [Name]),
                    Client = rt:pbc(Node),
                    Key = <<(ToB(Node))/binary, "-write-", (ToB(N))/binary>>,
                    Obj = riakc_obj:new(?bucket, Key, Key),
                    riakc_pb_socket:put(Client, Obj, [{w, 2}]),
                    riakc_pb_socket:stop(Client),
                    ?assert(ExistsEverywhere(Key, ExistsLookup))
                end,
                {Name, Test}
            end,
            NodeTests = [MakeTest(Node, N) || Node <- Nodes, N <- lists:seq(1, 3)],
            lists:foreach(fun({Name, Eval}) ->
                lager:info("===== mixed version cluster: upgrade world: ~s =====", [Name]),
                Eval()
            end, NodeTests)
        end},
        {"check pendings", fun() ->
            rt_cascading:wait_until_pending_count_zero(Nodes)
        end}
    ],
    lists:foreach(fun({Name, Eval}) ->
        lager:info("===== mixed version cluster: ~p =====", [Name]),
        Eval()
    end, Tests).
| null | https://raw.githubusercontent.com/basho/riak_test/8170137b283061ba94bc85bf42575021e26c929d/tests/rt_cascading_mixed_clusters.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
Topology for this cascading replication test:
+-----+
| n12 |
+-----+
^ \
/ V
+-----+ +-----+
+-----+ +-----+
place the following config in ~/.riak_test_config to run:
{run_rt_cascading_1_3_tests, true}
-------------------------------------------------------------------
API
a cascading realtime repl. Other tests handle going from pre
give the node further time to settle | Copyright ( c ) 2013 - 2016 Basho Technologies , Inc.
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
| | < - | n34 |
This test is configurable for 1.3 versions of Riak , but off by default .
-module(rt_cascading_mixed_clusters).
-behavior(riak_test).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
-define(bucket, <<"objects">>).
confirm() ->
test requires allow_mult = false b / c of rt : systest_read
rt:set_conf(all, [{"buckets.default.allow_mult", "false"}]),
case rt_config:config_or_os_env(run_rt_cascading_1_3_tests, false) of
false ->
lager:info("mixed_version_clusters_test_ not configured to run!");
_ ->
State = mixed_version_clusters_setup(),
_ = mixed_version_clusters_tests(State)
end,
pass.
mixed_version_clusters_setup() ->
Conf = rt_cascading:conf(),
DeployConfs = [{previous, Conf} || _ <- lists:seq(1,6)],
Nodes = rt:deploy_nodes(DeployConfs),
[N1, N2, N3, N4, N5, N6] = Nodes,
case rpc:call(N1, application, get_key, [riak_core, vsn]) of
this is meant to test upgrading from early BNW aka
Brave New World aka Advanced Repl aka version 3 repl to
repl 3 to repl 3 .
{ok, Vsn} when Vsn < "1.3.0" ->
{too_old, Nodes};
_ ->
N12 = [N1, N2],
N34 = [N3, N4],
N56 = [N5, N6],
repl_util:make_cluster(N12),
repl_util:make_cluster(N34),
repl_util:make_cluster(N56),
repl_util:name_cluster(N1, "n12"),
repl_util:name_cluster(N3, "n34"),
repl_util:name_cluster(N5, "n56"),
[repl_util:wait_until_leader_converge(Cluster) || Cluster <- [N12, N34, N56]],
rt_cascading:connect_rt(N1, rt_cascading:get_cluster_mgr_port(N3), "n34"),
rt_cascading:connect_rt(N3, rt_cascading:get_cluster_mgr_port(N5), "n56"),
rt_cascading:connect_rt(N5, rt_cascading:get_cluster_mgr_port(N1), "n12"),
Nodes
end.
mixed_version_clusters_tests({too_old, _Nodes}) ->
ok;
mixed_version_clusters_tests(Nodes) ->
[N1, N2, N3, N4, N5, N6] = Nodes,
Tests = [
{"no cascading at first 1", fun() ->
Client = rt:pbc(N1),
Bin = <<"no cascade yet">>,
Obj = riakc_obj:new(?bucket, Bin, Bin),
riakc_pb_socket:put(Client, Obj, [{w, 2}]),
riakc_pb_socket:stop(Client),
?assertEqual({error, notfound}, rt_cascading:maybe_eventually_exists([N5, N6], ?bucket, Bin)),
?assertEqual(Bin, rt_cascading:maybe_eventually_exists([N3, N4], ?bucket, Bin))
end},
{"no cascading at first 2", fun() ->
Client = rt:pbc(N2),
Bin = <<"no cascade yet 2">>,
Obj = riakc_obj:new(?bucket, Bin, Bin),
riakc_pb_socket:put(Client, Obj, [{w, 2}]),
riakc_pb_socket:stop(Client),
?assertEqual({error, notfound}, rt_cascading:maybe_eventually_exists([N5, N6], ?bucket, Bin)),
?assertEqual(Bin, rt_cascading:maybe_eventually_exists([N3, N4], ?bucket, Bin))
end},
{"mixed source can send (setup)", fun() ->
rt:upgrade(N1, current),
repl_util:wait_until_leader_converge([N1, N2]),
Running = fun(Node) ->
RTStatus = rpc:call(Node, riak_repl2_rt, status, []),
if
is_list(RTStatus) ->
SourcesList = proplists:get_value(sources, RTStatus, []),
Sources = [S || S <- SourcesList,
is_list(S),
proplists:get_value(connected, S, false),
proplists:get_value(source, S) =:= "n34"
],
length(Sources) >= 1;
true ->
false
end
end,
?assertEqual(ok, rt:wait_until(N1, Running)),
StatsNotEmpty = fun(Node) ->
case rpc:call(Node, riak_repl_stats, get_stats, []) of
[] ->
false;
Stats ->
is_list(Stats)
end
end,
?assertEqual(ok, rt:wait_until(N1, StatsNotEmpty))
end},
{"node1 put", fun() ->
Client = rt:pbc(N1),
Bin = <<"rt after upgrade">>,
Obj = riakc_obj:new(?bucket, Bin, Bin),
riakc_pb_socket:put(Client, Obj, [{w, 2}]),
riakc_pb_socket:stop(Client),
?assertEqual(Bin, rt_cascading:maybe_eventually_exists(N3, ?bucket, Bin, rt_cascading:timeout(100))),
?assertEqual({error, notfound}, rt_cascading:maybe_eventually_exists(N5, ?bucket, Bin, 100000))
end},
{"node2 put", fun() ->
Client = rt:pbc(N2),
Bin = <<"rt after upgrade 2">>,
Obj = riakc_obj:new(?bucket, Bin, Bin),
riakc_pb_socket:put(Client, Obj, [{w, 2}]),
riakc_pb_socket:stop(Client),
?assertEqual({error, notfound}, rt_cascading:maybe_eventually_exists(N5, ?bucket, Bin)),
?assertEqual(Bin, rt_cascading:maybe_eventually_exists([N3,N4], ?bucket, Bin))
end},
{"upgrade the world, cascade starts working", fun() ->
[N1 | NotUpgraded] = Nodes,
[rt:upgrade(Node, current) || Node <- NotUpgraded],
repl_util:wait_until_leader_converge([N1, N2]),
repl_util:wait_until_leader_converge([N3, N4]),
repl_util:wait_until_leader_converge([N5, N6]),
ClusterMgrUp = fun(Node) ->
case rpc:call(Node, erlang, whereis, [riak_core_cluster_manager]) of
P when is_pid(P) ->
true;
_ ->
fail
end
end,
[rt:wait_until(N, ClusterMgrUp) || N <- Nodes],
rt_cascading:maybe_reconnect_rt(N1, rt_cascading:get_cluster_mgr_port(N3), "n34"),
rt_cascading:maybe_reconnect_rt(N3, rt_cascading:get_cluster_mgr_port(N5), "n56"),
rt_cascading:maybe_reconnect_rt(N5, rt_cascading:get_cluster_mgr_port(N1), "n12"),
ToB = fun
(Atom) when is_atom(Atom) ->
list_to_binary(atom_to_list(Atom));
(N) when is_integer(N) ->
list_to_binary(integer_to_list(N))
end,
ExistsEverywhere = fun(Key, LookupOrder) ->
Reses = [rt_cascading:maybe_eventually_exists(Node, ?bucket, Key) || Node <- LookupOrder],
?debugFmt("Node and it's res:~n~p", [lists:zip(LookupOrder,
Reses)]),
lists:all(fun(E) -> E =:= Key end, Reses)
end,
MakeTest = fun(Node, N) ->
Name = "writing " ++ atom_to_list(Node) ++ "-write-" ++ integer_to_list(N),
{NewTail, NewHead} = lists:splitwith(fun(E) ->
E =/= Node
end, Nodes),
ExistsLookup = NewHead ++ NewTail,
Test = fun() ->
?debugFmt("Running test ~p", [Name]),
Client = rt:pbc(Node),
Key = <<(ToB(Node))/binary, "-write-", (ToB(N))/binary>>,
Obj = riakc_obj:new(?bucket, Key, Key),
riakc_pb_socket:put(Client, Obj, [{w, 2}]),
riakc_pb_socket:stop(Client),
?assert(ExistsEverywhere(Key, ExistsLookup))
end,
{Name, Test}
end,
NodeTests = [MakeTest(Node, N) || Node <- Nodes, N <- lists:seq(1, 3)],
lists:foreach(fun({Name, Eval}) ->
lager:info("===== mixed version cluster: upgrade world: ~s =====", [Name]),
Eval()
end, NodeTests)
end},
{"check pendings", fun() ->
rt_cascading:wait_until_pending_count_zero(Nodes)
end}
],
lists:foreach(fun({Name, Eval}) ->
lager:info("===== mixed version cluster: ~p =====", [Name]),
Eval()
end, Tests).
|
182722d23a2451eee81f4e6ec8acafa1fc3fa75fd80cd9e09252502f6a3a90e2 | s-expressionists/Cleavir | staple.ext.lisp | (in-package #:cleavir-documentation-generation)
(defmethod staple:packages ((sys (eql (asdf:find-system :cleavir-ast-to-bir))))
(list (find-package "CLEAVIR-AST-TO-BIR")))
(defmethod staple:page-type ((sys (eql (asdf:find-system :cleavir-ast-to-bir))))
'cleavir-page)
| null | https://raw.githubusercontent.com/s-expressionists/Cleavir/b211c882e075867b0e18f5ccfc8d38a021df7eb8/AST-to-BIR/staple.ext.lisp | lisp | (in-package #:cleavir-documentation-generation)
(defmethod staple:packages ((sys (eql (asdf:find-system :cleavir-ast-to-bir))))
(list (find-package "CLEAVIR-AST-TO-BIR")))
(defmethod staple:page-type ((sys (eql (asdf:find-system :cleavir-ast-to-bir))))
'cleavir-page)
| |
81910df601e67887c284f62c94849a88bcc83418c08e45cf74b69e55f07eb311 | Naproche-SAD/Naproche-SAD | Error.hs |
Authors: Steffen Frerix (2017 - 2018)

Message and Parse Error data type and core functions.
-}
module SAD.Parser.Error
( ParseError,
errorPos,
newErrorMessage,
newErrorUnknown,
(<+>),
(<++>),
setExpectMessage,
unexpectError,
newMessage,
newUnExpect,
newExpect,
newWfMsg )
where
import SAD.Core.SourcePos
import Data.List (nub, sort)
import Debug.Trace
-- | A parser diagnostic.  'ExpectMsg' records the unexpected token,
-- the expected alternatives, and free-form message texts; 'WfMsg'
-- carries well-formedness messages; 'Unknown' means no information yet.
data Message
  = ExpectMsg {unExpect :: String, expect :: [String], message :: [String]}
  | WfMsg {message :: [String]}  -- ^ well-formedness message
  | Unknown
  deriving Show
-- | Constructor predicates for 'Message'.
isUnknownMsg :: Message -> Bool
isUnknownMsg Unknown = True
isUnknownMsg _       = False

isExpectMsg :: Message -> Bool
isExpectMsg ExpectMsg{} = True
isExpectMsg _           = False

isWfMsg :: Message -> Bool
isWfMsg WfMsg{} = True
isWfMsg _       = False
-- | Smart constructors for the common message shapes.
newMessage :: String -> Message
newMessage msg = ExpectMsg {unExpect = "", expect = [], message = [msg]}

-- | Message reporting an unexpected token.
newUnExpect :: String -> Message
newUnExpect tok = ExpectMsg {unExpect = tok, expect = [], message = []}

-- | Message reporting a single expectation.
newExpect :: String -> Message
newExpect msg = ExpectMsg {unExpect = "", expect = [msg], message = []}

-- | Well-formedness message from a list of texts.
newWfMsg :: [String] -> Message
newWfMsg = WfMsg
-- | Only 'fromEnum' is meaningful: it ranks the constructors for the
-- 'Ord' instance below ('Unknown' < 'ExpectMsg' < 'WfMsg').
-- 'toEnum' is deliberately left undefined.
instance Enum Message where
  fromEnum Unknown     = 0
  fromEnum ExpectMsg{} = 1
  fromEnum WfMsg{}     = 2
  toEnum _ = error "toEnum is undefined for Message"
-- | Equality is derived from the 'Ord' instance: two messages are
-- equal exactly when they compare 'EQ'.
instance Eq Message where
  msg1 == msg2 = compare msg1 msg2 == EQ
-- | Messages are ordered by constructor rank (via 'fromEnum'); two
-- 'ExpectMsg's are further ordered by their unexpected token.
instance Ord Message where
  compare msg1 msg2 =
    case compare (fromEnum msg1) (fromEnum msg2) of
      GT -> GT
      LT -> LT
      EQ -> case msg1 of
        ExpectMsg{} -> compare (unExpect msg1) (unExpect msg2)
        _           -> EQ
-- | Merge two messages: the greater one (per 'Ord') wins outright; on
-- a tie the expectations and message texts are concatenated, with the
-- remaining fields taken from the left argument.
mergeMessage :: Message -> Message -> Message
mergeMessage msg1 msg2 =
  case compare msg1 msg2 of
    GT -> msg1
    LT -> msg2
    EQ -> mergeM msg1
  where
    mergeM ExpectMsg{} = msg1 {expect = expect msg1 ++ expect msg2,
                               message = message msg1 ++ message msg2}
    mergeM WfMsg{}     = msg1 {message = message msg1 ++ message msg2}
    mergeM _           = msg1
-- | A parse error: a source position together with a 'Message'.
data ParseError = ParseError {pePos :: SourcePos, peMsg :: Message} deriving Eq
-- | Errors carrying a well-formedness message dominate all others;
-- otherwise errors are ordered by source position.
instance Ord ParseError where
  compare (ParseError pos1 msg1) (ParseError pos2 msg2)
    | isWfMsg msg1 = if isWfMsg msg2 then compare pos1 pos2 else GT
    -- here msg1 is known not to be a WfMsg, so the inner test yields LT
    | isWfMsg msg2 = if isWfMsg msg1 then compare pos1 pos2 else LT
    | otherwise    = compare pos1 pos2
-- | Position at which the error occurred.
errorPos :: ParseError -> SourcePos
errorPos = pePos
-- | Build a 'ParseError' from a message and a position.
newErrorMessage :: Message -> SourcePos -> ParseError
newErrorMessage = flip ParseError
-- | Error carrying no information beyond its position.
newErrorUnknown :: SourcePos -> ParseError
newErrorUnknown = flip ParseError Unknown
-- | Combine two errors keeping the more important one (per the 'Ord'
-- instance); on a tie their messages are merged.
mostImportantMerge :: ParseError -> ParseError -> ParseError
mostImportantMerge e1 e2 = case compare e1 e2 of
  EQ -> e1 {peMsg = mergeMessage (peMsg e1) (peMsg e2)}
  GT -> e1
  LT -> e2
-- | Merge preferring the later source position.  At equal positions an
-- 'ExpectMsg' beats any other message kind, and two messages of the
-- same kind are merged via 'mergeMessage'.
firstSetMerge :: ParseError -> ParseError -> ParseError
firstSetMerge e1@(ParseError pos1 msg1) e2@(ParseError pos2 msg2) =
  case compare pos1 pos2 of
    GT -> e1
    LT -> e2
    EQ | isExpectMsg msg1 -> if isExpectMsg msg2
                             then e1 {peMsg = mergeMessage msg1 msg2}
                             else e1
       | isExpectMsg msg2 -> e2
       | otherwise        -> e1 {peMsg = mergeMessage msg1 msg2}
-- | Infix alias for 'firstSetMerge'.
(<+>) :: ParseError -> ParseError -> ParseError
(<+>) = firstSetMerge

-- | Infix alias for 'mostImportantMerge'.
(<++>) :: ParseError -> ParseError -> ParseError
(<++>) = mostImportantMerge
-- | Replace the expectation of an error: an unknown error becomes a
-- plain expectation, well-formedness errors are left untouched, and an
-- 'ExpectMsg' has its expectation list overwritten.
setExpectMessage :: String -> ParseError -> ParseError
setExpectMessage exp pe@(ParseError pos msg)
  | isUnknownMsg msg = ParseError pos $ newExpect exp
  | isWfMsg msg      = pe
  | otherwise        = ParseError pos $ msg {expect = [exp]}
-- | Error reporting an unexpected token at the given position.
unexpectError :: String -> SourcePos -> ParseError
unexpectError = newErrorMessage . newUnExpect
-- | Message carried by the error.
errorMessage :: ParseError -> Message
errorMessage = peMsg
-- | Render as @position: message@ with fixed English phrasing.
instance Show ParseError where
  show err
    = show (errorPos err) ++ ":" ++
      showErrorMessage "or" "unknown parse error"
        "expecting" "unexpected"
        (errorMessage err)
-- | Pretty-print a 'Message'.  The first four arguments supply the
-- connective ("or") and the introductory phrases used in the output;
-- unknown messages render as the fallback text, well-formedness
-- messages as a newline-prefixed list, and expectation messages as
-- "unexpected ..." / "expecting ..." lines.
showErrorMessage :: String -> String -> String -> String -> Message -> String
showErrorMessage msgOr msgUnknown msgExpecting msgUnExpected msg
  | isUnknownMsg msg = msgUnknown
  | isWfMsg msg      = '\n': (showMany "" $ message msg)
  | otherwise        = concat $ map ("\n"++) $ clean $
      [showUnExpect,showExpect,showMessages]
  where
    unExpected = unExpect msg
    expected   = expect msg
    messages   = message msg

    -- free-form messages, when present, suppress the expect/unexpect parts
    showExpect | not (null messages) = ""
               | otherwise = showMany msgExpecting expected
    showUnExpect | not (null messages) = ""
                 | otherwise = msgUnExpected ++ " " ++ unExpected
    showMessages = showMany "" messages

    -- helpers
    showMany pre msgs =
      case clean msgs of
        [] -> ""
        ms | null pre -> commasOr ms
           | otherwise -> pre ++ " " ++ commasOr ms
    commasOr []  = ""
    commasOr [m] = m
    commasOr ms  = commaSep (init ms) ++ " " ++ msgOr ++ " " ++ last ms
    commaSep = separate ", " . clean
    separate _ []  = ""
    separate _ [m] = m
    separate sep (m:ms) = m ++ sep ++ separate sep ms
    clean = nub . filter (not . null)
| null | https://raw.githubusercontent.com/Naproche-SAD/Naproche-SAD/da131a6eaf65d4e02e82082a50a4febb6d42db3d/src/SAD/Parser/Error.hs | haskell | Well-formedness message
helpers |
Authors : ( 2017 - 2018 )
Message and Error data type and core functions .
Authors: Steffen Frerix (2017 - 2018)
Message and Parse Error data type and core functions.
-}
module SAD.Parser.Error
( ParseError,
errorPos,
newErrorMessage,
newErrorUnknown,
(<+>),
(<++>),
setExpectMessage,
unexpectError,
newMessage,
newUnExpect,
newExpect,
newWfMsg )
where
import SAD.Core.SourcePos
import Data.List (nub, sort)
import Debug.Trace
data Message
= ExpectMsg {unExpect :: String, expect :: [String], message :: [String]}
| Unknown deriving Show
isUnknownMsg Unknown = True; isUnknownMsg _ = False
isExpectMsg ExpectMsg{} = True; isExpectMsg _ = False
isWfMsg WfMsg{} = True; isWfMsg _ = False
newMessage msg = ExpectMsg {unExpect = "" , expect = [] , message = [msg]}
newUnExpect tok = ExpectMsg {unExpect = tok, expect = [] , message = [] }
newExpect msg = ExpectMsg {unExpect = "" , expect = [msg], message = [] }
newWfMsg msgs = WfMsg msgs
instance Enum Message where
fromEnum Unknown = 0
fromEnum ExpectMsg{} = 1
fromEnum WfMsg{} = 2
toEnum _ = error "toEnum is undefined for Message"
instance Eq Message where
msg1 == msg2 = case compare msg1 msg2 of EQ -> True; _ -> False
instance Ord Message where
compare msg1 msg2 =
case compare (fromEnum msg1) (fromEnum msg2) of
GT -> GT
LT -> LT
EQ -> case msg1 of
ExpectMsg{} -> compare (unExpect msg1) (unExpect msg2)
_ -> EQ
mergeMessage :: Message -> Message -> Message
mergeMessage msg1 msg2 =
case compare msg1 msg2 of
GT -> msg1
LT -> msg2
EQ -> mergeM msg1
where
mergeM ExpectMsg{} = msg1 {expect = expect msg1 ++ expect msg2,
message = message msg1 ++ message msg2}
mergeM WfMsg{} = msg1 {message = message msg1 ++ message msg2}
mergeM _ = msg1
data ParseError = ParseError {pePos :: SourcePos, peMsg :: Message} deriving Eq
instance Ord ParseError where
compare (ParseError pos1 msg1) (ParseError pos2 msg2)
| isWfMsg msg1 = if isWfMsg msg2 then compare pos1 pos2 else GT
| isWfMsg msg2 = if isWfMsg msg1 then compare pos1 pos2 else LT
| otherwise = compare pos1 pos2
errorPos :: ParseError -> SourcePos
errorPos (ParseError pos _) = pos
newErrorMessage :: Message -> SourcePos -> ParseError
newErrorMessage msg pos = ParseError pos msg
newErrorUnknown :: SourcePos -> ParseError
newErrorUnknown pos
= ParseError pos Unknown
mostImportantMerge :: ParseError -> ParseError -> ParseError
mostImportantMerge e1 e2 = case compare e1 e2 of
EQ -> e1 {peMsg = mergeMessage (peMsg e1) (peMsg e2)}
GT -> e1
LT -> e2
firstSetMerge :: ParseError -> ParseError -> ParseError
firstSetMerge e1@(ParseError pos1 msg1) e2@(ParseError pos2 msg2) =
case compare pos1 pos2 of
GT -> e1
LT -> e2
EQ | isExpectMsg msg1 -> if isExpectMsg msg2
then e1 {peMsg = mergeMessage msg1 msg2}
else e1
| isExpectMsg msg2 -> e2
| otherwise -> e1 {peMsg = mergeMessage msg1 msg2}
(<+>) = firstSetMerge
(<++>) = mostImportantMerge
setExpectMessage :: String -> ParseError -> ParseError
setExpectMessage exp pe@(ParseError pos msg)
| isUnknownMsg msg = ParseError pos $ newExpect exp
| isWfMsg msg = pe
| otherwise = ParseError pos $ msg {expect = [exp]}
unexpectError :: String -> SourcePos -> ParseError
unexpectError uex pos = newErrorMessage (newUnExpect uex) pos
errorMessage :: ParseError -> Message
errorMessage (ParseError _ msg) = msg
instance Show ParseError where
show err
= show (errorPos err) ++ ":" ++
showErrorMessage "or" "unknown parse error"
"expecting" "unexpected"
(errorMessage err)
showErrorMessage :: String -> String -> String -> String -> Message -> String
showErrorMessage msgOr msgUnknown msgExpecting msgUnExpected msg
| isUnknownMsg msg = msgUnknown
| isWfMsg msg = '\n': (showMany "" $ message msg)
| otherwise = concat $ map ("\n"++) $ clean $
[showUnExpect,showExpect,showMessages]
where
unExpected = unExpect msg
expected = expect msg
messages = message msg
showExpect | not (null messages) = ""
| otherwise = showMany msgExpecting expected
showUnExpect | not (null messages) = ""
| otherwise = msgUnExpected ++ " " ++ unExpected
showMessages = showMany "" messages
showMany pre msgs =
case clean msgs of
[] -> ""
ms | null pre -> commasOr ms
| otherwise -> pre ++ " " ++ commasOr ms
commasOr [] = ""
commasOr [m] = m
commasOr ms = commaSep (init ms) ++ " " ++ msgOr ++ " " ++ last ms
commaSep = separate ", " . clean
separate _ [] = ""
separate _ [m] = m
separate sep (m:ms) = m ++ sep ++ separate sep ms
clean = nub . filter (not . null)
|
40a9b5446653037c1c5f0b5ef0d6758178259553c39e3f9d753fb5e9c6e12e08 | gfour/gic | Parfib.hs | | Parallel Fibonacci example , parallelism cutoff at parameter 11 .
module Parfib where
import Control.Parallel (par, pseq)
result :: Int
result = fib 31
fib :: Int -> Int
fib x = if x<2 then 1 else if x<11 then ((fib (x-1)) + (fib (x-2))) else addpar (fib (x-1)) (fib (x-2)) ;
addpar :: Int -> Int -> Int
addpar a b = (a `par` b) `pseq` (a+b)
| null | https://raw.githubusercontent.com/gfour/gic/d5f2e506b31a1a28e02ca54af9610b3d8d618e9a/Examples/Parallel/Parfib.hs | haskell | | Parallel Fibonacci example , parallelism cutoff at parameter 11 .
module Parfib where
import Control.Parallel (par, pseq)
result :: Int
result = fib 31
fib :: Int -> Int
fib x = if x<2 then 1 else if x<11 then ((fib (x-1)) + (fib (x-2))) else addpar (fib (x-1)) (fib (x-2)) ;
addpar :: Int -> Int -> Int
addpar a b = (a `par` b) `pseq` (a+b)
| |
2ea320fa4b229c983784e4691dd3b3e2c0bc51ade03cb9c3d564fe3b6761fdf7 | 3b/clws | buffer.lisp | (in-package #:ws)
;;; chunks stored by chunk-buffer class
(defclass buffer-chunk ()
((vector :reader buffer-vector :initarg :vector)
(start :reader buffer-start :initarg :start)
(end :reader buffer-end :initarg :end)))
(defmethod buffer-count ((buffer buffer-chunk))
(- (buffer-end buffer) (buffer-start buffer)))
;;; chunked buffer class
;;; stores a sequence of vectors + start/end
intent is that one chunked - buffer is a single logical block of data
;;; and will be consumed all at once after it is accumulated
;;; operations:
;;; add a chunk (vector+bounds)
;;; -- check last chunk and combine if contiguous
;;; append another buffer
-- combine last / first chunks if contiguous ?
;;; read an octet
;;; convert to a contiguous vector
( 32bit for websockets masking stuff ? maybe subclass ? )
;;; convert (as utf8) to string
;;; call thunk with contents as (binary or text) stream?
;;; -- or maybe return a stream once it is implemented directly
;;; as a gray stream rather than a pile of concatenated
;;l and flexi-streams?
;;; ? map over octets/characters?
;;; todo: versions of octet-vector and string that don't clear buffer?
;;; (mostly for debugging)
;;; todo: option to build octet vector with extra space at beginning/end?
;;; (for example to make a pong response from a ping body)
(defclass chunk-buffer ()
((buffer-size :accessor buffer-size :initform 0)
(chunks :accessor chunks :initform nil)
;; reference to last cons of chunks list, so we can append quickly
(end-of-chunks :accessor end-of-chunks :initform nil)))
(defmethod %get-chunks ((cb chunk-buffer))
(setf (end-of-chunks cb) nil)
(values (shiftf (chunks cb) nil)
(shiftf (buffer-size cb) 0)))
(defmethod add-chunk ((cb chunk-buffer) vector start end)
(if (chunks cb)
;; we already have some chunks, add at end
(let ((last (end-of-chunks cb)))
;; if we are continuing previous buffer, just combine them
(if (and (eq vector (buffer-vector (car last)))
(= start (buffer-end (car last))))
(setf (slot-value (car last) 'end) end)
;; else add new chunk
(progn
(push (make-instance 'buffer-chunk :vector vector
:start start :end end)
(cdr last))
(pop (end-of-chunks cb)))))
;; add initial chunk
(progn
(push (make-instance 'buffer-chunk :vector vector
:start start :end end)
(chunks cb))
(setf (end-of-chunks cb) (chunks cb))))
(incf (buffer-size cb) (- end start)))
;;; fixme: should this make a new chunk-buffer? not clear more? reuse chunk-buffers better?
(defmethod add-chunks ((cb chunk-buffer) (more chunk-buffer))
(loop for i in (%get-chunks more)
do (add-chunk cb (buffer-vector i) (buffer-start i) (buffer-end i))))
(defmethod peek-octet ((cb chunk-buffer))
fixme : decide how to handle EOF ?
(unless (chunks cb)
(return-from peek-octet nil))
(let* ((chunk (car (chunks cb))))
(aref (buffer-vector chunk) (buffer-start chunk))))
(defmethod read-octet ((cb chunk-buffer))
fixme : decide how to handle EOF ?
(unless (chunks cb)
(return-from read-octet nil))
(let* ((chunk (car (chunks cb)))
(octet (aref (buffer-vector chunk) (buffer-start chunk))))
(incf (slot-value chunk 'start))
(decf (buffer-size cb))
;; if we emptied a chunk, get rid of it
(when (= (buffer-start chunk) (buffer-end chunk))
(pop (chunks cb))
;; and clear end ref as well if no more buffers
(when (not (chunks cb))
(setf (end-of-chunks cb) nil)))
octet))
(defun call-with-buffer-as-stream (buffer thunk)
(let ((streams nil))
(unwind-protect
(progn
(setf streams
(loop for i in (%get-chunks buffer)
while i
collect (flex:make-in-memory-input-stream
(buffer-vector i)
:start (buffer-start i)
:end (buffer-end i))))
(with-open-stream (cs (apply #'make-concatenated-stream streams))
(funcall thunk cs)))
(map 'nil 'close streams))))
(defmacro with-buffer-as-stream ((buffer stream) &body body)
`(call-with-buffer-as-stream ,buffer
(lambda (,stream)
,@body)))
(defmethod get-octet-vector ((cb chunk-buffer))
(let* ((size (buffer-size cb))
(vector (make-array-ubyte8 size :initial-element 0))
(chunks (%get-chunks cb)))
(loop for c in chunks
for offset = 0 then (+ offset size)
for size = (buffer-count c)
for cv = (buffer-vector c)
for cs = (buffer-start c)
for ce = (buffer-end c)
do (replace vector cv :start1 offset :start2 cs :end2 ce))
vector))
(defmethod get-utf8-string ((cb chunk-buffer) &key (errorp t) octet-end)
(declare (ignorable errorp))
;; not sure if it would be faster to pull through flexistreams
;; or make a separate octet vector and convert that with babel?
;; (best would be converting directly... possibly check for partial
;; character at beginning of buffer, find beginning in previous buffer
;; and only pass the valid part to babel, and add in the split char
;; by hand? might need to watch out for split over multiple buffers
;; if we get tiny chunks? (only when searching forward though, since
we should see the partial char in the first tiny chunk ... )
( or maybe just implement our own converter since we only need ? ) )
(let* ((size (buffer-size cb))
(end (or octet-end size))
(vector (make-array-ubyte8 end :initial-element 0))
(chunks (%get-chunks cb)))
(loop for c in chunks
for offset = 0 then (+ offset size)
for size = (buffer-count c)
for cv of-type (simple-array (unsigned-byte 8) (*)) = (buffer-vector c)
for cs = (buffer-start c)
for ce = (buffer-end c)
while (< offset end)
do (replace vector cv :start1 offset :end1 end
:start2 cs :end2 ce))
;; todo: probably should wrap babel error in something that doesn't leak
;; implementation details (like use of babel)
#++(babel:octets-to-string vector :encoding :utf-8 :errorp errorp)
;; babel isn't picky enough for the Autobahn test suite (it lets
;; utf16 surrogates through, so using flexistreams for now...
(flex:octets-to-string vector :external-format :utf-8)))
this does n't really belong here , too lazy to make a websockets
;;; specific subclass for now though
(defmethod mask-octets ((cb chunk-buffer) mask)
(declare (type (simple-array (unsigned-byte 8) (*)) mask)
(optimize speed))
todo : declare types , optimize to run 32/64 bits at a time , etc ...
(loop with i of-type (integer 0 4) = 0
for chunk in (chunks cb)
for vec of-type (simple-array (unsigned-byte 8) (*)) = (buffer-vector chunk)
for start fixnum = (buffer-start chunk)
for end fixnum = (buffer-end chunk)
do (loop for j from start below end
do (setf (aref vec j)
(logxor (aref vec j)
(aref mask i))
i (mod (1+ i) 4)))))
#++
(flet ((test-buf ()
(let ((foo (make-instance 'chunk-buffer))
(buf (string-to-shareable-octets "_<continued-test>_")))
(add-chunk foo (string-to-shareable-octets "TEST" ) 0 4)
(add-chunk foo (string-to-shareable-octets "test2") 0 5)
(add-chunk foo buf 1 5)
(add-chunk foo buf 5 (1- (length buf)))
(add-chunk foo (string-to-shareable-octets "..test3") 2 7)
foo)))
(list
(with-buffer-as-stream ((test-buf) s)
(with-open-stream (s (flex:make-flexi-stream s))
(read-line s nil nil)))
(babel:octets-to-string (get-octet-vector (test-buf)))
(get-utf8-string (test-buf))))
#++
(let ((foo (make-instance 'chunk-buffer)))
(add-chunk foo #(1 2 3 4) 0 3)
(add-chunk foo #(10 11 12 13) 0 1)
(add-chunk foo #(20 21 22 23) 0 3)
(loop repeat 10 collect (read-octet foo)))
;;; buffered reader class
;;; reads from a socket (or stream?) until some condition is met
( N octets read , specific pattern read ( CR LF for example ) , etc )
;;; then calls a continuation callback, or calls error callback if
;;; connection closed, or too many octets read without condition being matched
(defclass buffered-reader ()
(;; partially filled vector if any, + position of next empty octet
(partial-vector :accessor partial-vector :initform nil)
(partial-vector-pos :accessor partial-vector-pos :initform 0)
;; list of arrays + start,end values (in reverse order)
(chunks :initform (make-instance 'chunk-buffer) :accessor chunks)
;; function to call with new data to determine if callback should
;; be called yet
(predicate :initarg :predicate :accessor predicate)
(callback :initarg :callback :accessor callback)
(error-callback :initarg :error-callback :accessor error-callback)))
;;; allow calling some chunk-buffer functions on the buffered-reader
;;; and redirect to the slot...
(defmethod %get-chunks ((b buffered-reader))
(%get-chunks (chunks b)))
(define-condition fail-the-websockets-connection (error)
((code :initarg :status-code :initform nil :reader status-code)
;; possibly should include a verbose message for logging as well?
(message :initarg :message :initform nil :reader status-message)))
;; should this be an error?
(define-condition close-from-peer (error)
((code :initarg :status-code :initform 1000 :reader status-code)
(message :initarg :message :initform nil :reader status-message)))
;;; low level implementations
;;; non-blocking iolib
;;; when buffer gets more data, it checks predicate and calls
;;; callback if matched. Callback sets new predicate+callback, and
;;; loop repeats until predicate doesn't match, at which point it
;;; waits for more input
(defun add-reader-to-client (client &key (init-function 'maybe-policy-file))
(declare (optimize debug))
(setf (client-reader client)
(let ((socket (client-socket client))
(buffer client))
(funcall init-function buffer)
(lambda (fd event exception)
(declare (ignore fd event exception))
(handler-bind
((error
(lambda (c)
(cond
(*debug-on-server-errors*
(invoke-debugger c))
(t
(ignore-errors
(lg "server error ~s, dropping connection~%" c))
(invoke-restart 'drop-connection))))))
(restart-case
(handler-case
(progn
(when (or (not (partial-vector buffer))
(> (partial-vector-pos buffer)
(- (length (partial-vector buffer)) 16)))
(setf (partial-vector buffer)
(make-array-ubyte8 2048)
(partial-vector-pos buffer) 0))
(multiple-value-bind (_octets count)
fixme : decide on good read chunk size
(receive-from socket :buffer (partial-vector buffer)
:start (partial-vector-pos buffer)
:end (length (partial-vector buffer)))
(declare (ignore _octets))
(when (zerop count)
(error 'end-of-file))
(let* ((start (partial-vector-pos buffer))
(end (+ start count))
(failed nil))
(loop for match = (funcall (predicate buffer)
(partial-vector buffer)
start end)
do
(add-chunk (chunks buffer)
(partial-vector buffer)
start (or match end))
(when match
(setf start match)
(funcall (callback buffer) buffer))
while (and (not failed) match (>= end start)))
;; todo: if we used up all the data that
;; was read, dump the buffer in a pool or
;; something so we don't hold a buffer in
;; ram for each client while waiting for
;; data
(setf (partial-vector-pos buffer) end))))
;; protocol errors
(fail-the-websockets-connection (e)
(when (eq (client-connection-state client) :connected)
;; probably can send directly since running from
;; server thread here?
(write-to-client-close client :code (status-code e)
:message (status-message e)))
(setf (client-connection-state client) :failed)
(client-enqueue-read client (list client :eof))
(lg "failed connection ~s / ~s : ~s ~s~%"
(client-host client) (client-port client)
(status-code e) (status-message e))
(client-disconnect client :read t
:write t))
(close-from-peer (e)
(when (eq (client-connection-state client) :connected)
(write-to-client-close client))
(lg "got close frame from peer: ~s / ~s~%"
(status-code e) (status-message e))
(setf (client-connection-state client) :cloed)
;; probably should send code/message to resource handlers?
(client-enqueue-read client (list client :eof))
(client-disconnect client :read t
:write t))
;; close connection on socket/read errors
(end-of-file ()
(client-enqueue-read client (list client :eof))
(lg "closed connection ~s / ~s~%" (client-host client)
(client-port client))
(client-disconnect client :read t
:write t))
(socket-connection-reset-error ()
(client-enqueue-read client (list client :eof))
(lg "connection reset by peer ~s / ~s~%"
(client-host client)
(client-port client))
(client-disconnect client :read t))
;; ... add error handlers
)
(drop-connection ()
(client-disconnect client :read t :write t :abort t)))))))
(client-enable-handler client :read t))
(defun next-reader-state (buffer predicate callback)
(setf (predicate buffer) predicate
(callback buffer) callback))
| null | https://raw.githubusercontent.com/3b/clws/b20799dd37d8385d312c371181d465bbee2f9e4f/buffer.lisp | lisp | chunks stored by chunk-buffer class
chunked buffer class
stores a sequence of vectors + start/end
and will be consumed all at once after it is accumulated
operations:
add a chunk (vector+bounds)
-- check last chunk and combine if contiguous
append another buffer
read an octet
convert to a contiguous vector
convert (as utf8) to string
call thunk with contents as (binary or text) stream?
-- or maybe return a stream once it is implemented directly
as a gray stream rather than a pile of concatenated
l and flexi-streams?
? map over octets/characters?
todo: versions of octet-vector and string that don't clear buffer?
(mostly for debugging)
todo: option to build octet vector with extra space at beginning/end?
(for example to make a pong response from a ping body)
reference to last cons of chunks list, so we can append quickly
we already have some chunks, add at end
if we are continuing previous buffer, just combine them
else add new chunk
add initial chunk
fixme: should this make a new chunk-buffer? not clear more? reuse chunk-buffers better?
if we emptied a chunk, get rid of it
and clear end ref as well if no more buffers
not sure if it would be faster to pull through flexistreams
or make a separate octet vector and convert that with babel?
(best would be converting directly... possibly check for partial
character at beginning of buffer, find beginning in previous buffer
and only pass the valid part to babel, and add in the split char
by hand? might need to watch out for split over multiple buffers
if we get tiny chunks? (only when searching forward though, since
todo: probably should wrap babel error in something that doesn't leak
implementation details (like use of babel)
babel isn't picky enough for the Autobahn test suite (it lets
utf16 surrogates through, so using flexistreams for now...
specific subclass for now though
buffered reader class
reads from a socket (or stream?) until some condition is met
then calls a continuation callback, or calls error callback if
connection closed, or too many octets read without condition being matched
partially filled vector if any, + position of next empty octet
list of arrays + start,end values (in reverse order)
function to call with new data to determine if callback should
be called yet
allow calling some chunk-buffer functions on the buffered-reader
and redirect to the slot...
possibly should include a verbose message for logging as well?
should this be an error?
low level implementations
non-blocking iolib
when buffer gets more data, it checks predicate and calls
callback if matched. Callback sets new predicate+callback, and
loop repeats until predicate doesn't match, at which point it
waits for more input
todo: if we used up all the data that
was read, dump the buffer in a pool or
something so we don't hold a buffer in
ram for each client while waiting for
data
protocol errors
probably can send directly since running from
server thread here?
probably should send code/message to resource handlers?
close connection on socket/read errors
... add error handlers | (in-package #:ws)
(defclass buffer-chunk ()
((vector :reader buffer-vector :initarg :vector)
(start :reader buffer-start :initarg :start)
(end :reader buffer-end :initarg :end)))
(defmethod buffer-count ((buffer buffer-chunk))
(- (buffer-end buffer) (buffer-start buffer)))
intent is that one chunked - buffer is a single logical block of data
-- combine last / first chunks if contiguous ?
( 32bit for websockets masking stuff ? maybe subclass ? )
(defclass chunk-buffer ()
((buffer-size :accessor buffer-size :initform 0)
(chunks :accessor chunks :initform nil)
(end-of-chunks :accessor end-of-chunks :initform nil)))
(defmethod %get-chunks ((cb chunk-buffer))
(setf (end-of-chunks cb) nil)
(values (shiftf (chunks cb) nil)
(shiftf (buffer-size cb) 0)))
(defmethod add-chunk ((cb chunk-buffer) vector start end)
(if (chunks cb)
(let ((last (end-of-chunks cb)))
(if (and (eq vector (buffer-vector (car last)))
(= start (buffer-end (car last))))
(setf (slot-value (car last) 'end) end)
(progn
(push (make-instance 'buffer-chunk :vector vector
:start start :end end)
(cdr last))
(pop (end-of-chunks cb)))))
(progn
(push (make-instance 'buffer-chunk :vector vector
:start start :end end)
(chunks cb))
(setf (end-of-chunks cb) (chunks cb))))
(incf (buffer-size cb) (- end start)))
(defmethod add-chunks ((cb chunk-buffer) (more chunk-buffer))
(loop for i in (%get-chunks more)
do (add-chunk cb (buffer-vector i) (buffer-start i) (buffer-end i))))
(defmethod peek-octet ((cb chunk-buffer))
fixme : decide how to handle EOF ?
(unless (chunks cb)
(return-from peek-octet nil))
(let* ((chunk (car (chunks cb))))
(aref (buffer-vector chunk) (buffer-start chunk))))
(defmethod read-octet ((cb chunk-buffer))
fixme : decide how to handle EOF ?
(unless (chunks cb)
(return-from read-octet nil))
(let* ((chunk (car (chunks cb)))
(octet (aref (buffer-vector chunk) (buffer-start chunk))))
(incf (slot-value chunk 'start))
(decf (buffer-size cb))
(when (= (buffer-start chunk) (buffer-end chunk))
(pop (chunks cb))
(when (not (chunks cb))
(setf (end-of-chunks cb) nil)))
octet))
(defun call-with-buffer-as-stream (buffer thunk)
(let ((streams nil))
(unwind-protect
(progn
(setf streams
(loop for i in (%get-chunks buffer)
while i
collect (flex:make-in-memory-input-stream
(buffer-vector i)
:start (buffer-start i)
:end (buffer-end i))))
(with-open-stream (cs (apply #'make-concatenated-stream streams))
(funcall thunk cs)))
(map 'nil 'close streams))))
(defmacro with-buffer-as-stream ((buffer stream) &body body)
`(call-with-buffer-as-stream ,buffer
(lambda (,stream)
,@body)))
(defmethod get-octet-vector ((cb chunk-buffer))
(let* ((size (buffer-size cb))
(vector (make-array-ubyte8 size :initial-element 0))
(chunks (%get-chunks cb)))
(loop for c in chunks
for offset = 0 then (+ offset size)
for size = (buffer-count c)
for cv = (buffer-vector c)
for cs = (buffer-start c)
for ce = (buffer-end c)
do (replace vector cv :start1 offset :start2 cs :end2 ce))
vector))
(defmethod get-utf8-string ((cb chunk-buffer) &key (errorp t) octet-end)
(declare (ignorable errorp))
we should see the partial char in the first tiny chunk ... )
( or maybe just implement our own converter since we only need ? ) )
(let* ((size (buffer-size cb))
(end (or octet-end size))
(vector (make-array-ubyte8 end :initial-element 0))
(chunks (%get-chunks cb)))
(loop for c in chunks
for offset = 0 then (+ offset size)
for size = (buffer-count c)
for cv of-type (simple-array (unsigned-byte 8) (*)) = (buffer-vector c)
for cs = (buffer-start c)
for ce = (buffer-end c)
while (< offset end)
do (replace vector cv :start1 offset :end1 end
:start2 cs :end2 ce))
#++(babel:octets-to-string vector :encoding :utf-8 :errorp errorp)
(flex:octets-to-string vector :external-format :utf-8)))
this does n't really belong here , too lazy to make a websockets
(defmethod mask-octets ((cb chunk-buffer) mask)
(declare (type (simple-array (unsigned-byte 8) (*)) mask)
(optimize speed))
todo : declare types , optimize to run 32/64 bits at a time , etc ...
(loop with i of-type (integer 0 4) = 0
for chunk in (chunks cb)
for vec of-type (simple-array (unsigned-byte 8) (*)) = (buffer-vector chunk)
for start fixnum = (buffer-start chunk)
for end fixnum = (buffer-end chunk)
do (loop for j from start below end
do (setf (aref vec j)
(logxor (aref vec j)
(aref mask i))
i (mod (1+ i) 4)))))
#++
(flet ((test-buf ()
(let ((foo (make-instance 'chunk-buffer))
(buf (string-to-shareable-octets "_<continued-test>_")))
(add-chunk foo (string-to-shareable-octets "TEST" ) 0 4)
(add-chunk foo (string-to-shareable-octets "test2") 0 5)
(add-chunk foo buf 1 5)
(add-chunk foo buf 5 (1- (length buf)))
(add-chunk foo (string-to-shareable-octets "..test3") 2 7)
foo)))
(list
(with-buffer-as-stream ((test-buf) s)
(with-open-stream (s (flex:make-flexi-stream s))
(read-line s nil nil)))
(babel:octets-to-string (get-octet-vector (test-buf)))
(get-utf8-string (test-buf))))
#++
(let ((foo (make-instance 'chunk-buffer)))
(add-chunk foo #(1 2 3 4) 0 3)
(add-chunk foo #(10 11 12 13) 0 1)
(add-chunk foo #(20 21 22 23) 0 3)
(loop repeat 10 collect (read-octet foo)))
( N octets read , specific pattern read ( CR LF for example ) , etc )
(defclass buffered-reader ()
(partial-vector :accessor partial-vector :initform nil)
(partial-vector-pos :accessor partial-vector-pos :initform 0)
(chunks :initform (make-instance 'chunk-buffer) :accessor chunks)
(predicate :initarg :predicate :accessor predicate)
(callback :initarg :callback :accessor callback)
(error-callback :initarg :error-callback :accessor error-callback)))
(defmethod %get-chunks ((b buffered-reader))
(%get-chunks (chunks b)))
(define-condition fail-the-websockets-connection (error)
((code :initarg :status-code :initform nil :reader status-code)
(message :initarg :message :initform nil :reader status-message)))
(define-condition close-from-peer (error)
((code :initarg :status-code :initform 1000 :reader status-code)
(message :initarg :message :initform nil :reader status-message)))
(defun add-reader-to-client (client &key (init-function 'maybe-policy-file))
(declare (optimize debug))
(setf (client-reader client)
(let ((socket (client-socket client))
(buffer client))
(funcall init-function buffer)
(lambda (fd event exception)
(declare (ignore fd event exception))
(handler-bind
((error
(lambda (c)
(cond
(*debug-on-server-errors*
(invoke-debugger c))
(t
(ignore-errors
(lg "server error ~s, dropping connection~%" c))
(invoke-restart 'drop-connection))))))
(restart-case
(handler-case
(progn
(when (or (not (partial-vector buffer))
(> (partial-vector-pos buffer)
(- (length (partial-vector buffer)) 16)))
(setf (partial-vector buffer)
(make-array-ubyte8 2048)
(partial-vector-pos buffer) 0))
(multiple-value-bind (_octets count)
fixme : decide on good read chunk size
(receive-from socket :buffer (partial-vector buffer)
:start (partial-vector-pos buffer)
:end (length (partial-vector buffer)))
(declare (ignore _octets))
(when (zerop count)
(error 'end-of-file))
(let* ((start (partial-vector-pos buffer))
(end (+ start count))
(failed nil))
(loop for match = (funcall (predicate buffer)
(partial-vector buffer)
start end)
do
(add-chunk (chunks buffer)
(partial-vector buffer)
start (or match end))
(when match
(setf start match)
(funcall (callback buffer) buffer))
while (and (not failed) match (>= end start)))
(setf (partial-vector-pos buffer) end))))
(fail-the-websockets-connection (e)
(when (eq (client-connection-state client) :connected)
(write-to-client-close client :code (status-code e)
:message (status-message e)))
(setf (client-connection-state client) :failed)
(client-enqueue-read client (list client :eof))
(lg "failed connection ~s / ~s : ~s ~s~%"
(client-host client) (client-port client)
(status-code e) (status-message e))
(client-disconnect client :read t
:write t))
(close-from-peer (e)
(when (eq (client-connection-state client) :connected)
(write-to-client-close client))
(lg "got close frame from peer: ~s / ~s~%"
(status-code e) (status-message e))
(setf (client-connection-state client) :cloed)
(client-enqueue-read client (list client :eof))
(client-disconnect client :read t
:write t))
(end-of-file ()
(client-enqueue-read client (list client :eof))
(lg "closed connection ~s / ~s~%" (client-host client)
(client-port client))
(client-disconnect client :read t
:write t))
(socket-connection-reset-error ()
(client-enqueue-read client (list client :eof))
(lg "connection reset by peer ~s / ~s~%"
(client-host client)
(client-port client))
(client-disconnect client :read t))
)
(drop-connection ()
(client-disconnect client :read t :write t :abort t)))))))
(client-enable-handler client :read t))
(defun next-reader-state (buffer predicate callback)
(setf (predicate buffer) predicate
(callback buffer) callback))
|
16afbb4cb4bb158752b9183c321db00f98861d3d1499762ea8fcb9550366ec83 | azimut/shiny | endings.lisp | (in-package #:shiny)
(defparameter *mf*
"/home/sendai/Downloads/Octopath_Traveler_-_Haanits_Theme.mscz.mid")
(defparameter *notes*
(subseq (get-measures-pair *mf* 999 1.5 0) 0))
(defparameter *sing*
(subseq (get-measures-pair *mf* 999 1.5 1) 0))
(defparameter *pc-follow*
(delete-duplicates
(sort (mapcar (lambda (x) (mod x 12))
(subseq (get-notes *mf* 1) 0 16))
#'<)))
(defun f ())
(let ((measures (make-cycle *notes*))
(sing (make-cycle *sing*)))
(defun f (time)
(let* ((measure (next measures))
(notes (first measure))
(durations (second measure)))
(play-midi-arp time notes 50 durations 0 (d2o durations)))
(let* ((n (pc-random 70 90 *pc-follow*))
(i (max 0.1 (+ -3 (cm:interp n 20f0 .1f0 100f0 5f0)))))
(at (+ time #[.5 b]) (lambda () (setf *scale* i)))
( play - midi ( + # [ .5 b ] time )
n 30 .2 0 )
)
(let* ((measure (next sing))
(notes (first measure))
(durations (second measure)))
(setf *uvs* (+ -.5 (random 1f0)))
(progn
(setf *rotcube* (drunk 5 5 :low 0 :high 30))
;; (play-midi time
( pc - relative ( + 12 ( first notes ) )
( 1 + ( random 11 ) )
;; *pc-follow*)
30 .3 0 )
)
( setf * rotcube * 0 )
(play-midi-arp time notes 1 durations 0 (d2o durations))
)
(aat (+ time #[1.5 b]) #'f it))
)
(setf *actors* nil)
(make-cubemap)
(make-thing)
(make-piso (v! 0 -2 0) (q:identity) -.2)
( make - piso ( v ! 0 -2 0 ) ( q : from - axis - angle ( v ! 0 1 0 ) ( radians -180 ) ) -.2 )
( make - piso ( v ! 0 2 0 ) ( q : from - axis - angle ( v ! 1 0 0 ) ( radians -180 ) ) -.2 )
(make-piso (v! 0 2 0) (q:*
(q:from-axis-angle (v! 0 1 0) (radians -180))
(q:from-axis-angle (v! 1 0 0) (radians -180)))
-.2)
(make-piso (v! 0 -2 0))
(f (now))
| null | https://raw.githubusercontent.com/azimut/shiny/774381a9bde21c4ec7e7092c7516dd13a5a50780/compositions/drafts/midifile/endings.lisp | lisp | (play-midi time
*pc-follow*) | (in-package #:shiny)
(defparameter *mf*
"/home/sendai/Downloads/Octopath_Traveler_-_Haanits_Theme.mscz.mid")
(defparameter *notes*
(subseq (get-measures-pair *mf* 999 1.5 0) 0))
(defparameter *sing*
(subseq (get-measures-pair *mf* 999 1.5 1) 0))
(defparameter *pc-follow*
(delete-duplicates
(sort (mapcar (lambda (x) (mod x 12))
(subseq (get-notes *mf* 1) 0 16))
#'<)))
(defun f ())
(let ((measures (make-cycle *notes*))
(sing (make-cycle *sing*)))
(defun f (time)
(let* ((measure (next measures))
(notes (first measure))
(durations (second measure)))
(play-midi-arp time notes 50 durations 0 (d2o durations)))
(let* ((n (pc-random 70 90 *pc-follow*))
(i (max 0.1 (+ -3 (cm:interp n 20f0 .1f0 100f0 5f0)))))
(at (+ time #[.5 b]) (lambda () (setf *scale* i)))
( play - midi ( + # [ .5 b ] time )
n 30 .2 0 )
)
(let* ((measure (next sing))
(notes (first measure))
(durations (second measure)))
(setf *uvs* (+ -.5 (random 1f0)))
(progn
(setf *rotcube* (drunk 5 5 :low 0 :high 30))
( pc - relative ( + 12 ( first notes ) )
( 1 + ( random 11 ) )
30 .3 0 )
)
( setf * rotcube * 0 )
(play-midi-arp time notes 1 durations 0 (d2o durations))
)
(aat (+ time #[1.5 b]) #'f it))
)
(setf *actors* nil)
(make-cubemap)
(make-thing)
(make-piso (v! 0 -2 0) (q:identity) -.2)
( make - piso ( v ! 0 -2 0 ) ( q : from - axis - angle ( v ! 0 1 0 ) ( radians -180 ) ) -.2 )
( make - piso ( v ! 0 2 0 ) ( q : from - axis - angle ( v ! 1 0 0 ) ( radians -180 ) ) -.2 )
(make-piso (v! 0 2 0) (q:*
(q:from-axis-angle (v! 0 1 0) (radians -180))
(q:from-axis-angle (v! 1 0 0) (radians -180)))
-.2)
(make-piso (v! 0 -2 0))
(f (now))
|
a0dd04f387a3bae7b589adf61e22d73f70de7e26ff48df597b3d42a42c755e71 | esl/MongooseIM | path_helper.erl | %% @doc Common filename functions
-module(path_helper).
%% Paths
-export([repo_dir/1]).
-export([test_dir/1]).
-export([ct_run_dir/1]).
-export([ct_run_dir_in_browser/1]).
-export([data_dir/2]).
%% Path transformation
-export([canonicalize_path/1]).
%% @doc Get repository root directory
repo_dir(_Config) ->
get_env_var("REPO_DIR").
%% @doc Get `big_tests/' directory
test_dir(_Config) ->
get_env_var("TEST_DIR").
%% @doc Returns`big_tests/ct_report/ct_run.*' directory
%% Run it from a test case functions only (not group or suite functions)
ct_run_dir(Config) ->
PrivDir = proplists:get_value(priv_dir, Config),
%% Remove: *SUITE.logs/run.*/log_private/
_RunDir = path_helper:test_dir(Config),
filename:absname(filename:join([PrivDir, "..", "..", ".."])).
%% @doc Returns path, corresponding to `ct_run_dir' in browser
ct_run_dir_in_browser(_Config) ->
"../..".
%% @doc Unsafe version of `filename:safe_relative_path/1'.
%% Lexically collapses `.' and `x/..' components, e.g.
%% "a/x/../b" -> "a/b".  Leading `..' components that have nothing to
%% cancel are preserved, e.g. "../../a" stays "../../a".
canonicalize_path(Path) -> canonicalize_path(filename:split(Path), []).
%% Walk the split components keeping already-canonicalized components
%% in Acc (reversed).  `..' cancels the previous component -- but never
%% another `..', otherwise "../../a" would wrongly collapse to "a".
canonicalize_path([], Acc) -> filename:join(lists:reverse(Acc));
canonicalize_path([".." | Path], [Prev | Acc]) when Prev =/= ".." ->
    canonicalize_path(Path, Acc);
canonicalize_path(["." | Path], Acc) -> canonicalize_path(Path, Acc);
canonicalize_path([Elem | Path], Acc) -> canonicalize_path(Path, [Elem | Acc]).
%% Fetch a required environment variable; abort the test run with a
%% descriptive common_test failure when it is not defined.
get_env_var(VarName) ->
    Fetched = os:getenv(VarName),
    if
        Fetched =:= false -> ct:fail({undefined_envvar, VarName});
        true -> Fetched
    end.
%% Hand-made data_dir, mirroring Common Test's <suite>_data convention:
%% returns big_tests/tests/<SuiteName>_data for the given suite atom.
data_dir(SuiteName, Config) ->
    filename:join([test_dir(Config), "tests", atom_to_list(SuiteName) ++ "_data"]).
| null | https://raw.githubusercontent.com/esl/MongooseIM/8b8c294b1b01dc178eed1b3b28ca0fbbd73f382c/big_tests/tests/path_helper.erl | erlang | @doc Common filename functions
Paths
Path transformation
@doc Get repository root directory
@doc Get `big_tests/' directory
@doc Returns`big_tests/ct_report/ct_run.*' directory
Run it from a test case functions only (not group or suite functions)
Remove: *SUITE.logs/run.*/log_private/
@doc Returns path, corresponding to `ct_run_dir' in browser
Hand-made data_dir from Common Tests | -module(path_helper).
-export([repo_dir/1]).
-export([test_dir/1]).
-export([ct_run_dir/1]).
-export([ct_run_dir_in_browser/1]).
-export([data_dir/2]).
-export([canonicalize_path/1]).
repo_dir(_Config) ->
get_env_var("REPO_DIR").
test_dir(_Config) ->
get_env_var("TEST_DIR").
ct_run_dir(Config) ->
PrivDir = proplists:get_value(priv_dir, Config),
_RunDir = path_helper:test_dir(Config),
filename:absname(filename:join([PrivDir, "..", "..", ".."])).
ct_run_dir_in_browser(_Config) ->
"../..".
@doc Unsafe version of ` filename : safe_relative_path/1 '
canonicalize_path(Path) -> canonicalize_path(filename:split(Path), []).
canonicalize_path([], Acc) -> filename:join(lists:reverse(Acc));
canonicalize_path([".." | Path], [_ | Acc]) -> canonicalize_path(Path, Acc);
canonicalize_path(["." | Path], Acc) -> canonicalize_path(Path, Acc);
canonicalize_path([Elem | Path], Acc) -> canonicalize_path(Path, [Elem | Acc]).
get_env_var(VarName) ->
case os:getenv(VarName) of
false ->
ct:fail({undefined_envvar, VarName});
Value ->
Value
end.
data_dir(SuiteName, Config) ->
filename:join([test_dir(Config), "tests", atom_to_list(SuiteName) ++ "_data"]).
|
b16395679a303ce41e19529d648eb63c287a65f398fcbaf110ad847660f832bc | ejgallego/coq-serapi | ser_sorts.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2016
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(************************************************************************)
(* Coq serialization API/Plugin *)
Copyright 2016 MINES ParisTech
(************************************************************************)
(* Status: Very Experimental *)
(************************************************************************)
(** Serializers (sexp / yojson / hash / compare) for [Sorts.t]. *)
include SerType.SJHC with type t = Sorts.t
(* Type aliases re-exported with derived serializers attached. *)
type family = Sorts.family [@@deriving sexp,yojson,hash,compare]
type relevance = Sorts.relevance [@@deriving sexp,yojson,hash,compare]
(** Serializers for [Sorts.QVar.t]. *)
module QVar : SerType.SJHC with type t = Sorts.QVar.t
| null | https://raw.githubusercontent.com/ejgallego/coq-serapi/4c18c49187603fd05554f22760fd3ef644dcd806/serlib/ser_sorts.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
**********************************************************************
Coq serialization API/Plugin
**********************************************************************
Status: Very Experimental
********************************************************************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2016
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Copyright 2016 MINES ParisTech
include SerType.SJHC with type t = Sorts.t
type family = Sorts.family [@@deriving sexp,yojson,hash,compare]
type relevance = Sorts.relevance [@@deriving sexp,yojson,hash,compare]
module QVar : SerType.SJHC with type t = Sorts.QVar.t
|
7ce690b3631bc088bfa395929c3d77c0c3cebe8ec906c7415ef212934863a74b | lagenorhynque/aqoursql | const.clj | (ns aqoursql.util.const)
;; Artist type discriminator codes (as persisted in the database).
(def ^:const artist-type-group 1)
(def ^:const artist-type-solo 2)
;; All valid artist type codes, for membership checks.
(def artist-types #{artist-type-group artist-type-solo})
;; Error code string reported on input validation failures.
(def error-code-validation
  "VALIDATION_ERROR")
(defn error-map
  "Build an error value carrying a human-readable message plus a
  machine-readable code under :extensions."
  [code message]
  (assoc {:message message}
         :extensions {:code code}))
| null | https://raw.githubusercontent.com/lagenorhynque/aqoursql/a7f00ee588ff7e1c1f10ac3f550aafea2ef661ab/src/aqoursql/util/const.clj | clojure | (ns aqoursql.util.const)
(def ^:const artist-type-group 1)
(def ^:const artist-type-solo 2)
(def artist-types #{artist-type-group artist-type-solo})
(def error-code-validation
"VALIDATION_ERROR")
(defn error-map [code message]
{:message message
:extensions {:code code}})
| |
6329b83490070af97f6cd8ffaab4444e871e9021f5b33f074f16d8a2d9f09b52 | rescript-lang/rescript-compiler | outcome_printer_ns.mli | Copyright ( C ) 2017 Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
val out_ident : Format.formatter -> string -> unit
(** Print an identifier to the formatter while reversing the
    namespace-printing encoding, so that internal namespaces do not
    leak into user-visible output. *)
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/edbe4efc5d8b3899f126a5efa563ae437cb41406/jscomp/outcome_printer/outcome_printer_ns.mli | ocaml | * This function is used to
reverse namespace printing to
avoid namespace leaking
| Copyright ( C ) 2017 Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
val out_ident : Format.formatter -> string -> unit
|
19e535fefd767efa961c535154d968e733803f1e48eea9a2906251eeff76a563 | calvis/cKanren | symbolo-numbero.rkt | #lang racket
(require
"../ck.rkt"
"../tree-unify.rkt"
"../attributes.rkt"
"../neq.rkt"
"../tester.rkt")
(provide test-symbol-number
test-symbol-number-long)
;; Exercises the interaction of the `symbol` and `number` attribute
;; constraints with unification (==) and disequality (=/=).  Each
;; `test` form compares the reified answers of a run* goal against the
;; expected answer list (`(var : constraints)` notation).
(define (test-symbol-number)
  ;; A term can never be both a symbol and a number, in any order,
  ;; whether constrained directly or via a unified fresh variable.
  (test
   (run* (q) (symbol q) (number q))
   '())
  (test
   (run* (q) (number q) (symbol q))
   '())
  (test
   (run* (q)
     (fresh (x)
       (number x)
       (symbol x)))
   '())
  (test
   (run* (q)
     (fresh (x)
       (symbol x)
       (number x)))
   '())
  (test
   (run* (q)
     (number q)
     (fresh (x)
       (symbol x)
       (== x q)))
   '())
  (test
   (run* (q)
     (symbol q)
     (fresh (x)
       (number x)
       (== x q)))
   '())
  (test
   (run* (q)
     (fresh (x)
       (number x)
       (== x q))
     (symbol q))
   '())
  (test
   (run* (q)
     (fresh (x)
       (symbol x)
       (== x q))
     (number q))
   '())
  (test
   (run* (q)
     (fresh (x)
       (== x q)
       (symbol x))
     (number q))
   '())
  (test
   (run* (q)
     (fresh (x)
       (== x q)
       (number x))
     (symbol q))
   '())
  ;; Constraints on unrelated fresh variables do not affect q.
  (test
   (run* (q)
     (symbol q)
     (fresh (x)
       (number x)))
   '((_.0 : (symbol _.0))))
  (test
   (run* (q)
     (number q)
     (fresh (x)
       (symbol x)))
   '((_.0 : (number _.0))))
  ;; Constraints survive being buried inside list structure.
  (test
   (run* (q)
     (fresh (x y)
       (symbol x)
       (== `(,x ,y) q)))
   '(((_.0 _.1) : (symbol _.0))))
  (test
   (run* (q)
     (fresh (x y)
       (number x)
       (== `(,x ,y) q)))
   '(((_.0 _.1) : (number _.0))))
  (test
   (run* (q)
     (fresh (x y)
       (number x)
       (symbol y)
       (== `(,x ,y) q)))
   '(((_.0 _.1) : (number _.0) (symbol _.1))))
  (test
   (run* (q)
     (fresh (x y)
       (number x)
       (== `(,x ,y) q)
       (symbol y)))
   '(((_.0 _.1) : (number _.0) (symbol _.1))))
  (test
   (run* (q)
     (fresh (x y)
       (== `(,x ,y) q)
       (number x)
       (symbol y)))
   '(((_.0 _.1) : (number _.0) (symbol _.1))))
  (test
   (run* (q)
     (fresh (x y)
       (== `(,x ,y) q)
       (number x)
       (symbol y))
     (fresh (w z)
       (== `(,w ,z) q)))
   '(((_.0 _.1) : (number _.0) (symbol _.1))))
  ;; Later unifications may instantiate the constrained positions.
  (test
   (run* (q)
     (fresh (x y)
       (== `(,x ,y) q)
       (number x)
       (symbol y))
     (fresh (w z)
       (== `(,w ,z) q)
       (== w 5)))
   '(((5 _.0) : (symbol _.0))))
  (test
   (run* (q)
     (fresh (x y)
       (== `(,x ,y) q)
       (number x)
       (symbol y))
     (fresh (w z)
       (== 'a z)
       (== `(,w ,z) q)))
   '(((_.0 a) : (number _.0))))
  (test
   (run* (q)
     (fresh (x y)
       (== `(,x ,y) q)
       (number x)
       (symbol y))
     (fresh (w z)
       (== `(,w ,z) q)
       (== 'a z)))
   '(((_.0 a) : (number _.0))))
  ;; Interaction with disequality: a type constraint can make (part of)
  ;; a =/= constraint redundant, so it disappears from the answer.
  (test
   (run* (q)
     (fresh (x y)
       (== `(,x ,y) q)
       (=/= `(5 a) q)))
   '(((_.0 _.1) : (=/= ((_.0 . 5) (_.1 . a))))))
  (test
   (run* (q)
     (fresh (x y)
       (== `(,x ,y) q)
       (=/= `(5 a) q)
       (symbol x)))
   '(((_.0 _.1) : (symbol _.0))))
  (test
   (run* (q)
     (fresh (x y)
       (== `(,x ,y) q)
       (symbol x)
       (=/= `(5 a) q)))
   '(((_.0 _.1) : (symbol _.0))))
  (test
   (run* (q)
     (fresh (x y)
       (symbol x)
       (== `(,x ,y) q)
       (=/= `(5 a) q)))
   '(((_.0 _.1) : (symbol _.0))))
  (test
   (run* (q)
     (fresh (x y)
       (=/= `(5 a) q)
       (symbol x)
       (== `(,x ,y) q)))
   '(((_.0 _.1) : (symbol _.0))))
  (test
   (run* (q)
     (fresh (x y)
       (=/= `(5 a) q)
       (== `(,x ,y) q)
       (symbol x)))
   '(((_.0 _.1) : (symbol _.0))))
  (test
   (run* (q)
     (fresh (x y)
       (== `(,x ,y) q)
       (=/= `(5 a) q)
       (number y)))
   '(((_.0 _.1) : (number _.1))))
  (test
   (run* (q)
     (fresh (x y)
       (== `(,x ,y) q)
       (number y)
       (=/= `(5 a) q)))
   '(((_.0 _.1) : (number _.1))))
  (test
   (run* (q)
     (fresh (x y)
       (number y)
       (== `(,x ,y) q)
       (=/= `(5 a) q)))
   '(((_.0 _.1) : (number _.1))))
  (test
   (run* (q)
     (fresh (x y)
       (=/= `(5 a) q)
       (number y)
       (== `(,x ,y) q)))
   '(((_.0 _.1) : (number _.1))))
  (test
   (run* (q)
     (fresh (x y)
       (=/= `(5 a) q)
       (== `(,x ,y) q)
       (number y)))
   '(((_.0 _.1) : (number _.1))))
  ;; A =/= against a pair of fresh variables never surfaces in the
  ;; reified answer for q, regardless of goal ordering.
  (test
   (run* (q)
     (fresh (x y)
       (=/= `(,x ,y) q)
       (number x)
       (symbol y)))
   '(_.0))
  (test
   (run* (q)
     (fresh (x y)
       (number x)
       (=/= `(,x ,y) q)
       (symbol y)))
   '(_.0))
  (test
   (run* (q)
     (fresh (x y)
       (number x)
       (symbol y)
       (=/= `(,x ,y) q)))
   '(_.0))
  )
;; Long-running variant; currently identical to the short suite.
(define (test-symbol-number-long)
  (test-symbol-number))
;; Run the full suite when this file is executed directly.
(module+ main
  (test-symbol-number-long))
| null | https://raw.githubusercontent.com/calvis/cKanren/8714bdd442ca03dbf5b1d6250904cbc5fd275e68/cKanren/tests/symbolo-numbero.rkt | racket | #lang racket
(require
"../ck.rkt"
"../tree-unify.rkt"
"../attributes.rkt"
"../neq.rkt"
"../tester.rkt")
(provide test-symbol-number
test-symbol-number-long)
(define (test-symbol-number)
(test
(run* (q) (symbol q) (number q))
'())
(test
(run* (q) (number q) (symbol q))
'())
(test
(run* (q)
(fresh (x)
(number x)
(symbol x)))
'())
(test
(run* (q)
(fresh (x)
(symbol x)
(number x)))
'())
(test
(run* (q)
(number q)
(fresh (x)
(symbol x)
(== x q)))
'())
(test
(run* (q)
(symbol q)
(fresh (x)
(number x)
(== x q)))
'())
(test
(run* (q)
(fresh (x)
(number x)
(== x q))
(symbol q))
'())
(test
(run* (q)
(fresh (x)
(symbol x)
(== x q))
(number q))
'())
(test
(run* (q)
(fresh (x)
(== x q)
(symbol x))
(number q))
'())
(test
(run* (q)
(fresh (x)
(== x q)
(number x))
(symbol q))
'())
(test
(run* (q)
(symbol q)
(fresh (x)
(number x)))
'((_.0 : (symbol _.0))))
(test
(run* (q)
(number q)
(fresh (x)
(symbol x)))
'((_.0 : (number _.0))))
(test
(run* (q)
(fresh (x y)
(symbol x)
(== `(,x ,y) q)))
'(((_.0 _.1) : (symbol _.0))))
(test
(run* (q)
(fresh (x y)
(number x)
(== `(,x ,y) q)))
'(((_.0 _.1) : (number _.0))))
(test
(run* (q)
(fresh (x y)
(number x)
(symbol y)
(== `(,x ,y) q)))
'(((_.0 _.1) : (number _.0) (symbol _.1))))
(test
(run* (q)
(fresh (x y)
(number x)
(== `(,x ,y) q)
(symbol y)))
'(((_.0 _.1) : (number _.0) (symbol _.1))))
(test
(run* (q)
(fresh (x y)
(== `(,x ,y) q)
(number x)
(symbol y)))
'(((_.0 _.1) : (number _.0) (symbol _.1))))
(test
(run* (q)
(fresh (x y)
(== `(,x ,y) q)
(number x)
(symbol y))
(fresh (w z)
(== `(,w ,z) q)))
'(((_.0 _.1) : (number _.0) (symbol _.1))))
(test
(run* (q)
(fresh (x y)
(== `(,x ,y) q)
(number x)
(symbol y))
(fresh (w z)
(== `(,w ,z) q)
(== w 5)))
'(((5 _.0) : (symbol _.0))))
(test
(run* (q)
(fresh (x y)
(== `(,x ,y) q)
(number x)
(symbol y))
(fresh (w z)
(== 'a z)
(== `(,w ,z) q)))
'(((_.0 a) : (number _.0))))
(test
(run* (q)
(fresh (x y)
(== `(,x ,y) q)
(number x)
(symbol y))
(fresh (w z)
(== `(,w ,z) q)
(== 'a z)))
'(((_.0 a) : (number _.0))))
(test
(run* (q)
(fresh (x y)
(== `(,x ,y) q)
(=/= `(5 a) q)))
'(((_.0 _.1) : (=/= ((_.0 . 5) (_.1 . a))))))
(test
(run* (q)
(fresh (x y)
(== `(,x ,y) q)
(=/= `(5 a) q)
(symbol x)))
'(((_.0 _.1) : (symbol _.0))))
(test
(run* (q)
(fresh (x y)
(== `(,x ,y) q)
(symbol x)
(=/= `(5 a) q)))
'(((_.0 _.1) : (symbol _.0))))
(test
(run* (q)
(fresh (x y)
(symbol x)
(== `(,x ,y) q)
(=/= `(5 a) q)))
'(((_.0 _.1) : (symbol _.0))))
(test
(run* (q)
(fresh (x y)
(=/= `(5 a) q)
(symbol x)
(== `(,x ,y) q)))
'(((_.0 _.1) : (symbol _.0))))
(test
(run* (q)
(fresh (x y)
(=/= `(5 a) q)
(== `(,x ,y) q)
(symbol x)))
'(((_.0 _.1) : (symbol _.0))))
(test
(run* (q)
(fresh (x y)
(== `(,x ,y) q)
(=/= `(5 a) q)
(number y)))
'(((_.0 _.1) : (number _.1))))
(test
(run* (q)
(fresh (x y)
(== `(,x ,y) q)
(number y)
(=/= `(5 a) q)))
'(((_.0 _.1) : (number _.1))))
(test
(run* (q)
(fresh (x y)
(number y)
(== `(,x ,y) q)
(=/= `(5 a) q)))
'(((_.0 _.1) : (number _.1))))
(test
(run* (q)
(fresh (x y)
(=/= `(5 a) q)
(number y)
(== `(,x ,y) q)))
'(((_.0 _.1) : (number _.1))))
(test
(run* (q)
(fresh (x y)
(=/= `(5 a) q)
(== `(,x ,y) q)
(number y)))
'(((_.0 _.1) : (number _.1))))
(test
(run* (q)
(fresh (x y)
(=/= `(,x ,y) q)
(number x)
(symbol y)))
'(_.0))
(test
(run* (q)
(fresh (x y)
(number x)
(=/= `(,x ,y) q)
(symbol y)))
'(_.0))
(test
(run* (q)
(fresh (x y)
(number x)
(symbol y)
(=/= `(,x ,y) q)))
'(_.0))
)
(define (test-symbol-number-long)
(test-symbol-number))
(module+ main
(test-symbol-number-long))
| |
4110194d0bafd82813c1a1dfa5defdbfae4e9d44bfb7f8937e1e9229f6dc6dac | ndmitchell/shake | FileName.hs | # LANGUAGE GeneralizedNewtypeDeriving , FlexibleInstances #
module Development.Shake.Internal.FileName(
FileName,
fileNameFromString, fileNameFromByteString,
fileNameToString, fileNameToByteString,
filepathNormalise
) where
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.UTF8 as UTF8
import Development.Shake.Classes
import qualified System.FilePath as Native
import General.Binary
import System.Info.Extra
import Data.List
---------------------------------------------------------------------
-- FileName newtype

-- | A file path stored as a UTF8-encoded 'BS.ByteString', kept in
--   normalised form (construct via 'fileNameFromString' /
--   'fileNameFromByteString', which apply 'filepathNormalise').
newtype FileName = FileName BS.ByteString
    deriving (Hashable, Binary, BinaryEx, Eq, NFData)

-- 'show' renders the decoded file path, not the constructor.
instance Show FileName where
    show = fileNameToString

-- Lists are serialised by unwrapping/rewrapping the newtype
-- elementwise, delegating to the ByteString-list instance.
instance BinaryEx [FileName] where
    putEx = putEx . map (\(FileName x) -> x)
    getEx = map FileName . getEx
-- | Decode a 'FileName' to a 'FilePath' ('String'), assuming UTF8.
fileNameToString :: FileName -> FilePath
fileNameToString = UTF8.toString . fileNameToByteString

-- | Unwrap the underlying UTF8 bytes (no copying).
fileNameToByteString :: FileName -> BS.ByteString
fileNameToByteString (FileName x) = x

-- | Encode a 'FilePath' as UTF8, normalise, and wrap it.
fileNameFromString :: FilePath -> FileName
fileNameFromString = fileNameFromByteString . UTF8.fromString

-- | Normalise the given UTF8 bytes (see 'filepathNormalise') and wrap.
fileNameFromByteString :: BS.ByteString -> FileName
fileNameFromByteString = FileName . filepathNormalise
---------------------------------------------------------------------
-- NORMALISATION

-- | Equivalent to @toStandard . normaliseEx@ from "Development.Shake.FilePath":
--   separators become @/@, @.@ components and @x/..@ pairs are
--   collapsed, and (on Windows) a path starting with two separators
--   keeps a leading double slash.
filepathNormalise :: BS.ByteString -> BS.ByteString
filepathNormalise xs
    -- On Windows two leading separators are significant (UNC-style),
    -- so one '/' is re-attached after normalising the remainder.
    | isWindows, Just (a,xs) <- BS.uncons xs, sep a, Just (b,_) <- BS.uncons xs, sep b = '/' `BS.cons` f xs
    | otherwise = f xs
    where
        sep = Native.isPathSeparator
        -- Split on separators, collapse components right-to-left with
        -- 'g', re-join with '/', then fix up edge slashes/dots.
        f o = deslash o $ BS.concat $ (slash:) $ intersperse slash $ reverse $ (BS.empty:) $ g 0 $ reverse $ split o
        -- Restore the presence/absence of leading and trailing
        -- separators of the original input 'o' on the rebuilt path 'x';
        -- a fully-collapsed result becomes "/", "/.", "./" or ".".
        deslash o x
            | x == slash = case (pre,pos) of
                (True,True) -> slash
                (True,False) -> BS.pack "/."
                (False,True) -> BS.pack "./"
                (False,False) -> dot
            | otherwise = (if pre then id else BS.tail) $ (if pos then id else BS.init) x
            where pre = not (BS.null o) && sep (BS.head o)
                  pos = not (BS.null o) && sep (BS.last o)
        -- 'g' walks components in reverse; 'i' counts ".." components
        -- still waiting to cancel a preceding real component.
        g i [] = replicate i dotDot
        g i (x:xs) | BS.null x = g i xs
        g i (x:xs) | x == dotDot = g (i+1) xs
        g i (x:xs) | x == dot = g i xs
        g 0 (x:xs) = x : g 0 xs
        g i (_:xs) = g (i-1) xs -- equivalent to eliminating ../x
        split = BS.splitWith sep
        dotDot = BS.pack ".."
        dot = BS.singleton '.'
        slash = BS.singleton '/'
| null | https://raw.githubusercontent.com/ndmitchell/shake/99c5a7a4dc1d5a069b13ed5c1bc8e4bc7f13f4a6/src/Development/Shake/Internal/FileName.hs | haskell | -------------------------------------------------------------------
FileName newtype
| UTF8 ByteString
-------------------------------------------------------------------
| Equivalent to @toStandard . normaliseEx@ from "Development.Shake.FilePath".
equivalent to eliminating ../x | # LANGUAGE GeneralizedNewtypeDeriving , FlexibleInstances #
module Development.Shake.Internal.FileName(
FileName,
fileNameFromString, fileNameFromByteString,
fileNameToString, fileNameToByteString,
filepathNormalise
) where
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.UTF8 as UTF8
import Development.Shake.Classes
import qualified System.FilePath as Native
import General.Binary
import System.Info.Extra
import Data.List
newtype FileName = FileName BS.ByteString
deriving (Hashable, Binary, BinaryEx, Eq, NFData)
instance Show FileName where
show = fileNameToString
instance BinaryEx [FileName] where
putEx = putEx . map (\(FileName x) -> x)
getEx = map FileName . getEx
fileNameToString :: FileName -> FilePath
fileNameToString = UTF8.toString . fileNameToByteString
fileNameToByteString :: FileName -> BS.ByteString
fileNameToByteString (FileName x) = x
fileNameFromString :: FilePath -> FileName
fileNameFromString = fileNameFromByteString . UTF8.fromString
fileNameFromByteString :: BS.ByteString -> FileName
fileNameFromByteString = FileName . filepathNormalise
NORMALISATION
filepathNormalise :: BS.ByteString -> BS.ByteString
filepathNormalise xs
| isWindows, Just (a,xs) <- BS.uncons xs, sep a, Just (b,_) <- BS.uncons xs, sep b = '/' `BS.cons` f xs
| otherwise = f xs
where
sep = Native.isPathSeparator
f o = deslash o $ BS.concat $ (slash:) $ intersperse slash $ reverse $ (BS.empty:) $ g 0 $ reverse $ split o
deslash o x
| x == slash = case (pre,pos) of
(True,True) -> slash
(True,False) -> BS.pack "/."
(False,True) -> BS.pack "./"
(False,False) -> dot
| otherwise = (if pre then id else BS.tail) $ (if pos then id else BS.init) x
where pre = not (BS.null o) && sep (BS.head o)
pos = not (BS.null o) && sep (BS.last o)
g i [] = replicate i dotDot
g i (x:xs) | BS.null x = g i xs
g i (x:xs) | x == dotDot = g (i+1) xs
g i (x:xs) | x == dot = g i xs
g 0 (x:xs) = x : g 0 xs
split = BS.splitWith sep
dotDot = BS.pack ".."
dot = BS.singleton '.'
slash = BS.singleton '/'
|
dd0ec5b95ace6c04213ba1b3112f75a3b21213465fd76da8503ea802f8c925ea | eslick/cl-registry | number.lisp | (in-package :registry)
;; Presentation for numeric form fields.  PRECISION, when non-nil,
;; fixes the number of decimal places used when rendering the value.
(defclass number-presentation (input-based-field-presentation)
  ((precision :accessor precision :initarg :precision :initform nil)))

;; Parse the client string into a number; empty input maps to :none.
(define-lisp-value-getter number-presentation (client-value)
  (or (when (> (length client-value) 0)
        (parse-number:parse-number client-value))
      :none))

;; Render the lisp value back to a string; :none renders as "" and
;; PRECISION (when set) selects fixed-point formatting.
(define-lisp-value-setter number-presentation (number precision)
  (if (equal number :none)
      ""
      (if precision
          (format nil "~,vF" precision number)
          (format nil "~D" number))))
;;;; * integer type
;; Presentation for integer-valued form fields.
(defclass integer-presentation (number-presentation)
  ())

;; Parse the client string as an integer; empty/unparsable input maps
;; to :none.  :junk-allowed is required: without it PARSE-INTEGER
;; signals an error on empty or malformed input instead of returning
;; NIL, which made the :none branch unreachable.
(define-lisp-value-getter integer-presentation (client-value)
  (or (parse-integer client-value :junk-allowed t) :none))

;; Render the integer in decimal.
(define-lisp-value-setter integer-presentation (integer)
  (format nil "~D" integer))
;;;; * range integer type
;; Presentation accepting either a single integer ("5") or an
;; inclusive range ("3-7").
(defclass range-integer-presentation (integer-presentation)
  ())

;; A range parses to a dotted pair (LOW . HIGH); a single value defers
;; to the integer-presentation getter; empty input maps to :none.
;; NOTE(review): the "[0-9]+" regex ignores signs, so negative bounds
;; inside a range are not supported -- confirm this is intended.
(define-lisp-value-getter range-integer-presentation (client-value)
  (if (equal client-value "") :none
      (let ((integers (cl-ppcre:all-matches-as-strings "[0-9]+" client-value)))
        (case (length integers)
          (1 (call-next-method))
          (2 (cons (parse-integer (first integers))
                   (parse-integer (second integers))))
          (t (error "Unrecognized range specification in ~A" client-value))))))

;; Render a (LOW . HIGH) pair as "LOW-HIGH"; any non-cons value is
;; rendered by the integer-presentation setter.
(define-lisp-value-setter range-integer-presentation (integer-pair)
  (if (consp integer-pair)
      (format nil "~D-~D" (car integer-pair) (cdr integer-pair))
      (call-next-method)))
;;;; * range limited integers
;; Base class for validators that compare a parsed lisp value against
;; a numeric bound.  The :around method below rejects non-numbers up
;; front (except the :none sentinel), so subclasses may assume their
;; lisp-value argument is a number.
(defclass number-validator (web-field-validator)
  ()
  (:documentation "Super class for all validators which compare a lisp-value to a number."))

(defmethod lisp-validate :around ((validator number-validator) (lisp-value t))
  (if (and (not (numberp lisp-value)) (not (equal lisp-value :none)))
      (fail-validation "~S is not a number." lisp-value)
      (call-next-method)))

;; Client-side (string) validation: non-empty input must parse as a
;; number; parse failures surface the parser's own reason when known.
(defmethod client-validate ((validator number-validator) (client-value string))
  (if (plusp (length client-value))
      (handler-case
          (parse-number:parse-number client-value)
        (parse-number:invalid-number (c)
          (fail-validation (parse-number:invalid-number-reason c)))
        (error (c)
          (declare (ignore c))
          (fail-validation "~S is not a number" client-value)))
      (fail-validation "No number provided.")))
;; Validator enforcing a lower bound (inclusive).
(defclass min-value-validator (number-validator)
  ((min-value :accessor min-value :initarg :min-value :initform nil)))

;; Passes when lisp-value >= min-value.  The failure message used to
;; state the success condition ("is greater than or equal to"); it now
;; reports the actual requirement that was violated.
(defmethod lisp-validate ((validator min-value-validator) (lisp-value number))
  (if (<= (min-value validator) lisp-value)
      t
      (fail-validation "~S must be greater than or equal to ~S." lisp-value (min-value validator))))
;; Validator enforcing an upper bound (inclusive).
(defclass max-value-validator (number-validator)
  ((max-value :accessor max-value :initarg :max-value)))

;; Passes when lisp-value <= max-value.  The failure message used to
;; state the success condition ("is less than or equal to"); it now
;; reports the actual requirement that was violated.
(defmethod lisp-validate ((validator max-value-validator) (lisp-value number))
  (if (>= (max-value validator) lisp-value)
      t
      (fail-validation "~S must be less than or equal to ~S." lisp-value (max-value validator))))
| null | https://raw.githubusercontent.com/eslick/cl-registry/d4015c400dc6abf0eeaf908ed9056aac956eee82/src/libs/presentations/number.lisp | lisp | * integer type
* range integer type
* range limited integers | (in-package :registry)
(defclass number-presentation (input-based-field-presentation)
((precision :accessor precision :initarg :precision :initform nil)))
(define-lisp-value-getter number-presentation (client-value)
(or (when (> (length client-value) 0)
(parse-number:parse-number client-value))
:none))
(define-lisp-value-setter number-presentation (number precision)
(if (equal number :none)
""
(if precision
(format nil "~,vF" precision number)
(format nil "~D" number))))
(defclass integer-presentation (number-presentation)
())
(define-lisp-value-getter integer-presentation (client-value)
(or (parse-integer client-value) :none))
(define-lisp-value-setter integer-presentation (integer)
(format nil "~D" integer))
(defclass range-integer-presentation (integer-presentation)
())
(define-lisp-value-getter range-integer-presentation (client-value)
(if (equal client-value "") :none
(let ((integers (cl-ppcre:all-matches-as-strings "[0-9]+" client-value)))
(case (length integers)
(1 (call-next-method))
(2 (cons (parse-integer (first integers))
(parse-integer (second integers))))
(t (error "Unrecognized range specification in ~A" client-value))))))
(define-lisp-value-setter range-integer-presentation (integer-pair)
(if (consp integer-pair)
(format nil "~D-~D" (car integer-pair) (cdr integer-pair))
(call-next-method)))
(defclass number-validator (web-field-validator)
()
(:documentation "Super class for all validotars which compare a lisp-value to a number."))
(defmethod lisp-validate :around ((validator number-validator) (lisp-value t))
(if (and (not (numberp lisp-value)) (not (equal lisp-value :none)))
(fail-validation "~S is not a number." lisp-value)
(call-next-method)))
(defmethod client-validate ((validator number-validator) (client-value string))
(if (plusp (length client-value))
(handler-case
(parse-number:parse-number client-value)
(parse-number:invalid-number (c)
(fail-validation (parse-number:invalid-number-reason c)))
(error (c)
(declare (ignore c))
(fail-validation "~S is not a number" client-value)))
(fail-validation "No number provided.")))
(defclass min-value-validator (number-validator)
((min-value :accessor min-value :initarg :min-value :initform nil)))
(defmethod lisp-validate ((validator min-value-validator) (lisp-value number))
(if (<= (min-value validator) lisp-value)
t
(fail-validation "~S is greater than or equal to ~S." lisp-value (min-value validator))))
(defclass max-value-validator (number-validator)
((max-value :accessor max-value :initarg :max-value)))
(defmethod lisp-validate ((validator max-value-validator) (lisp-value number))
(if (>= (max-value validator) lisp-value)
t
(fail-validation "~S is less than or equal to ~S." lisp-value (max-value validator))))
|
6b6dfa8a4e94f20cdcad73869b92c076b1380d8af2950ea06634db03152039b3 | vtsingaras/orct | mbn.clj | ;; Open Radio Calibration Toolkit
;; An enhanced Open Source Implementation to replace Qualcomm's QRCT
;;
;; The use and distribution terms for this software are covered by
;; the GNU General Public License
;;
;; (C) 2015
;;
;; mbn.clj - parser for modem configuration binary files (mbn)
(ns orct.mbn
(:use [orct.utils]
[orct.elf]
[clojure.java.io]
[orct.qcn-parser]
[orct.nv-xml]
[orct.qcn-printer])
(:require [clojure.string :as str]))
;; Fixed sizes (in bytes) of the MCFG file structures.
(def mcfg-header-len 16)
(def mcfg-ver-nv-header-len 4)
(def mcfg-version-len 4)
;; Offset of the first item record from the start of the MCFG blob.
(def mcfg-nv-data-offset (+ mcfg-header-len mcfg-ver-nv-header-len mcfg-version-len))
(def mcfg-prefix-len 8)
;; MCFG item type tags (values of the per-item prefix ::item_type field).
(def mcfg-int-nv-item 0x01)
(def mcfg-int-efs-file 0x02)
(def mcfg-int-sw-only 0x03)
(def mcfg-int-delete-nv-item 0x04)
(def mcfg-int-delete-efs-file 0x05)
(def mcfg-int-static-wo-efs-file 0x06)
(def mcfg-int-muxd-nv-item 0x07)
(def mcfg-int-muxd-sw-only 0x08)
(def mcfg-int-muxd-efs-file 0x09)
(def mcfg-int-muxd-sw-only-efs-file 0x0a)
(def mcfg-int-data-profile 0x0b)
(def mcfg-int-delete-data-profile 0x0c)
(def mcfg-int-static-wo-data-profile 0x0d)
(def mcfg-int-muxed-data-profile 0x0e)
(def mcfg-int-muxd-sw-only-data-profile 0x0f)
(def mcfg-int-config-item 0x10)
(defn- extract-mbn-elf32-prog-segment
"function retrieves the mbn item content from an elf32
byte sequence which e.g. read from a mbn file."
[s]
(let [h (parse-elf32-header s)
progs (parse-elf32-prog-headers h s)
        ;; TODO: filter for PT_LOAD segment types; currently the first segment is taken
prog (first progs)]
(take (:orct.elf/p_filesz prog) (drop (:orct.elf/p_offset prog) s))))
(defn- extract-mcfg-header
  "Extracts the fixed-size MCFG file header from byte sequence mbn.
   The :post conditions assert the expected magic number and a format
   version this parser understands (<= 3).
   returns: [ result-map rest-of-byte-sequence ]"
  [mbn]
  {:post [(= (-> % first ::magic1) 1195787085)
          (<= (-> % first ::mcfg_format_ver_num) 3)]}
  (proc-parse-struct-with-rest
   [{::magic1 rest-uint32-pair}
    {::mcfg_format_ver_num rest-uint16-pair}
    {::mcfg_type rest-uint16-pair}
    {::mcfg_num_items rest-uint32-pair}
    {::mcfg_muxd_carrier_index_info rest-uint16-pair}
    {::spare_crc rest-uint16-pair}]
   mbn))
(defn- extract-mcfg-version
  "Extracts the MCFG version record that directly follows the header.
   returns: [ result-map rest-of-byte-sequence ]"
  [mbn-after-header]
  (proc-parse-struct-with-rest
   [{::mcfg_version_item_type rest-uint16-pair}
    {::mcfg_version-item_length rest-uint16-pair}
    {::mcfg_version rest-uint32-pair}]
   mbn-after-header))
(defn- extract-mcfg-item-prefix
  "Extracts the common per-item prefix (length, type and attribute
   bytes) that precedes every MCFG item record.
   returns: [ result-map rest-of-byte-sequence ]"
  [mcg-item-start-seq]
  (proc-parse-struct-with-rest
   [{::item_length rest-uint32-pair}
    {::item_type rest-uint8-pair}
    {::attrib rest-uint8-pair}
    {::sp_ops rest-uint8-pair}
    {::spare1 rest-uint8-pair}]
   mcg-item-start-seq))
(defn- extract-mcfg-nv-item
  "extract legacy nv item content
   returns: [ result-map rest-of-byte-sequence ]
   The result maps ::nv-item to {<item-number-keyword> <raw-bytes>}."
  [r]
  (let [[header r] (proc-parse-struct-with-rest
                    [{::item_type rest-uint16-pair}
                     {::item_length rest-uint16-pair}]
                    r)
        ;; the payload directly follows the header, item_length bytes long
        [payload remaining] (split-at (::item_length header) r)
        item-key (keyword (str (::item_type header)))]
    [{::nv-item {item-key payload}} remaining]))
(defn- extract-mcfg-efs-item
  "extract efs item content: a length-prefixed file path chunk followed
   by a length-prefixed file content chunk.
   returns: [ result-map rest-of-byte-sequence ]"
  [r]
  (let [read-chunk (fn [r]
                     ;; parse one {type, len} header, then take len bytes
                     (let [[hdr r] (proc-parse-struct-with-rest
                                    [{::efs_header_type rest-uint16-pair}
                                     {::efs_header_len rest-uint16-pair}] r)]
                       (split-at (::efs_header_len hdr) r)))
        [path-body r] (read-chunk r)
        [content-body r] (read-chunk r)]
    [{::efs-item {(keyword (bytes2str path-body)) content-body}} r]))
(defn- extract-mcfg-item
  "extract one mcfg item (currently only legacy NV and EFS items are
   implemented); dispatches on the item type found in the item prefix.
   returns: [ result-map rest-of-byte-sequence ]"
  [nv-item-start-seq]
  (let [[mcfg-item-prefix r] (extract-mcfg-item-prefix nv-item-start-seq)
        type (::item_type mcfg-item-prefix)]
    (condp = type
      mcfg-int-nv-item (extract-mcfg-nv-item r)
      mcfg-int-efs-file (extract-mcfg-efs-item r)
      (throw (IllegalStateException. (format "mcfg item -> %s has wrong type %d!"
                                             (str mcfg-item-prefix) type))))))
(comment
(def content (extract-mcfg-item (drop mcfg-nv-data-offset mbn)))
(def p1 (first content))
(def r2 (second content))
)
(defn- extract-mcfg-items
  "extract all mcfg items (currently only legacy and efs items are implemented)
  returns: vector of per-item result maps (unlike the other extract-*
  helpers this returns only the maps, not a [result rest] pair -- the
  remaining byte sequence is discarded)"
  [num-mcfg-items stream]
  ;; NOTE(review): the counter starts at 1 and the loop stops when it
  ;; EQUALS num-mcfg-items, so only (num-mcfg-items - 1) items are
  ;; extracted.  Presumably this is because the version record (parsed
  ;; separately by extract-mcfg-version) is counted in ::mcfg_num_items
  ;; -- TODO confirm against the mbn format spec.
  (loop [mcfg-item-number 1
         resmap-rest [[] stream]]          ;; accumulator: [item-vector rest-of-bytes]
    (if (== mcfg-item-number num-mcfg-items)
      (first resmap-rest)
      (recur (inc mcfg-item-number)
             ;; wrap each extraction so a failure reports WHICH item broke
             (let [except-fmt-str "Error in transformation of %d-th mcfg-item occurred: %s\n"
                   [item r] (try
                              (extract-mcfg-item (second resmap-rest))
                              (catch Throwable t
                                (throw (IllegalStateException.
                                        (format except-fmt-str mcfg-item-number (.getMessage t))))))]
               [(conj (first resmap-rest) item) r])))))
(comment
(def item-a {::nv-item {:4 "four"}})
(def item-b {::nv-item {:5 "five"}})
(merge-with merge item-a item-b)
(def a (extract-mcfg-items 10 (drop mcfg-nv-data-offset mbn)))
(apply merge-with merge a)
)
(defn- parse-mbn-file-content
  "parses a byte sequence which has been read from an mbn file
  returns: vector with parsed items plus the header/version maps;
  currently only two content types are implemented
  ::nv-items  legacy nv item
  ::efs-items efs item"
  [byte-seq]
  (let [segment (extract-mbn-elf32-prog-segment byte-seq)
        [header r] (extract-mcfg-header segment)
        [version r] (extract-mcfg-version r)]
    ;; append the header and version maps to the extracted item vector
    (-> (extract-mcfg-items (::mcfg_num_items header) r)
        (conj {::mcfg_header header})
        (conj {::mcfg_version version}))))
(defn- decode-mbn-nv-item-content
  "decode legacy nv items out of mbn map
  return hash map with nv item number as keyed argument
  and parsed parameters as values; decode failures are collected
  under the :errors key."
  [nv-definition-schema flat-mbn-item-map]
  (let [decode-one
        (fn [[item-key item-val]]
          (let [item-index (first item-val)
                ;; first byte is presumably a subscriber/index id, not
                ;; part of the nv payload -- drop it before decoding
                payload (drop 1 item-val)
                schema (-> nv-definition-schema :nv-items item-key)
                decoded (if schema
                          {:params (decode-binary-nv-params (:content schema) payload)}
                          {:errors "missing schema"})]
            {item-key (assoc decoded
                             :data payload
                             :index item-index
                             :name (:name schema))}))
        resvec (map decode-one (::nv-item flat-mbn-item-map))
        ;; gather human-readable messages for every item that failed
        faulty (filter #(-> % vals first :errors) resvec)
        collected-errors (map #(format "nv-item %s: %s"
                                       (-> % keys first key2str)
                                       (-> % vals first :errors))
                              faulty)]
    (assoc (apply merge resvec) :errors collected-errors)))
(defn- decode-mbn-efs-item-content
  "decode efs items out of mbn map
  return hash map with efs path as keyed argument
  and parsed parameters as values; decode failures are collected
  under the :errors key."
  [nv-definition-schema flat-mbn-item-map]
  (let [decode-one
        (fn [[item-key item-val]]
          (let [item-index (first item-val)
                ;; first byte is presumably a subscriber/index id, not
                ;; part of the efs payload -- drop it before decoding
                payload (drop 1 item-val)
                schema (-> nv-definition-schema :efs-items item-key)
                decoded (if schema
                          {:params (decode-binary-nv-params (:content schema) payload)}
                          {:errors "missing schema"})]
            {item-key (assoc decoded
                             :data payload
                             :index item-index
                             :name (:name schema))}))
        resvec (map decode-one (::efs-item flat-mbn-item-map))
        ;; gather human-readable messages for every item that failed
        faulty (filter #(-> % vals first :errors) resvec)
        collected-errors (map #(format "%s: %s"
                                       (-> % keys first key2str)
                                       (-> % vals first :errors))
                              faulty)]
    (assoc (apply merge resvec) :errors collected-errors)))
(defn- map-mbn-efs-to-qcn-efs
  "transforms efs-item hash map of form
  {:path_item_1 {:params [] :data [], :path_item_2 { ... } } to
  legacy qcn representation of form
  {:number_1 { :path path_item_1 :params [] :data [] }, :number_2 { ...} }
  we can reuse the qcn_print function so"
  [items]
  ;; number the entries 1..n and fold the path into each item map
  (into {}
        (map-indexed
         (fn [idx entry]
           (let [path (first entry)
                 item (assoc (second entry) :path path)]
             [(keyword (str (inc idx))) item]))
         items)))
(defn parse-mbn-data
  "Parse Qualcomm's non-volatile modem configuration binary (mbn) file.
  nv-definition-schema: parsed schema xml file, refer e.g. to parse-nv-definition-file
  mbn-data-file-name: name of data mbn file, refer e.g. to samples/sample.mbn
  returns nested clojure hash map in same/similar format as generated in qcn_parser.clj:
    :NV_ITEM_ARRAY  -> provides legacy numbered nv item backup data
    :NV_Items       -> provides EFS nv item backup data
    :mcfg-version   -> integer mbn file version identifier
    :carrier-index-info -> integer with carrier index information
    :errors -> contains parsing errors."
  [nv-definition-schema mbn-data-file-name]
  (let [s (seq (read-file mbn-data-file-name))
        mbn-items (parse-mbn-file-content s)
        ;; collapse the vector of per-item maps into one nested map
        flat-item-map (apply merge-with merge mbn-items)
        nv-items (decode-mbn-nv-item-content nv-definition-schema flat-item-map)
        efs-items (decode-mbn-efs-item-content nv-definition-schema flat-item-map)
        ;; combine the error lists of both decoders before stripping them
        tot-errors (concat
                    (:errors nv-items)
                    (:errors efs-items))
        nv-items (dissoc nv-items :errors)
        efs-items (dissoc efs-items :errors)
        ;; renumber efs items into the legacy qcn shape for printing
        efs-items (map-mbn-efs-to-qcn-efs efs-items)]
    {:NV_ITEM_ARRAY nv-items
     :NV_Items efs-items
     :Provisioning_Item_Files {}
     :EFS_Backup {}
     :errors tot-errors
     :mcfg-version (-> flat-item-map ::mcfg_version ::mcfg_version)
     :carrier-index-info (-> flat-item-map ::mcfg_header ::mcfg_muxd_carrier_index_info)}))
(comment
(def s (seq (read-file "samples/mcfg_sw_dt.mbn")))
(def mbn (extract-mbn-elf32-prog-segment s))
(let [mbn (extract-mbn-elf32-prog-segment s)
[mcfg-header r] (extract-mcfg-header mbn)
[mcfg-version r] (extract-mcfg-version r)
num-mcfg-items (::mcfg_num_items mcfg-header)]
(def mcfg-header mcfg-header)
(def mcfg-version mcfg-version)
(def num-mcfg-items num-mcfg-items))
(::mcfg_muxd_carrier_index_info header)
(first (extract-mcfg-item-prefix (drop mcfg-nv-data-offset mbn)))
(first (extract-mcfg-item (drop mcfg-nv-data-offset mbn)))
(def items (parse-mbn-file-content s))
(def nv-definition-schema (parse-nv-definition-file "samples/NvDefinition.xml"))
(def test-item-key :850)
(def test-item-content (test-item-key (::nv-item flat-item-map)))
(def test-item-schema (test-item-key (:nv-items nv-definition-schema)))
(decode-binary-nv-params (:content test-item-schema) (drop 1 test-item-content))
(decode-mbn-efs-item-content nv-definition-schema flat-item-map)
(decode-mbn-nv-item-content nv-definition-schema flat-item-map)
(:errors (decode-mbn-nv-item-content nv-definition-schema flat-item-map))
(def items (parse-mbn-data nv-definition-schema "samples/mcfg_sw_dt.mbn"))
(println items)
(println (:NV_ITEM_ARRAY items))
(println (:NV_Items items))
(println (keys (:NV_ITEM_ARRAY items)))
(keys (dissoc (:NV_ITEM_ARRAY items) :errors))
(println (:882 (:NV_ITEM_ARRAY items)))
(print-nv-item-set nv-definition-schema items :efs-subst false)
(items :File_Version)
)
| null | https://raw.githubusercontent.com/vtsingaras/orct/1b4c2b2b16c62cd8a51db8c1ed6a805a4677779c/src/orct/mbn.clj | clojure | Open Radio Calibration Toolkit
The use and distribution terms for this software are covered by
mbn.cljs - parser for modem configuration binary files (mbn)
omit subscriber respectively index id, not part of efs
omit subscriber respectively index id, not part of efs | An enhanced Open Source Implementation to replace Qualcomm 's QRCT
the GNU General Public License
( C ) 2015 ,
(ns orct.mbn
(:use [orct.utils]
[orct.elf]
[clojure.java.io]
[orct.qcn-parser]
[orct.nv-xml]
[orct.qcn-printer])
(:require [clojure.string :as str]))
(def mcfg-header-len 16)
(def mcfg-ver-nv-header-len 4)
(def mcfg-version-len 4)
(def mcfg-nv-data-offset (+ mcfg-header-len mcfg-ver-nv-header-len mcfg-version-len))
(def mcfg-prefix-len 8)
(def mcfg-int-nv-item 0x01)
(def mcfg-int-efs-file 0x02)
(def mcfg-int-sw-only 0x03)
(def mcfg-int-delete-nv-item 0x04)
(def mcfg-int-delete-efs-file 0x05)
(def mcfg-int-static-wo-efs-file 0x06)
(def mcfg-int-muxd-nv-item 0x07)
(def mcfg-int-muxd-sw-only 0x08)
(def mcfg-int-muxd-efs-file 0x09)
(def mcfg-int-muxd-sw-only-efs-file 0x0a)
(def mcfg-int-data-profile 0x0b)
(def mcfg-int-delete-data-profile 0x0c)
(def mcfg-int-static-wo-data-profile 0x0d)
(def mcfg-int-muxed-data-profile 0x0e)
(def mcfg-int-muxd-sw-only-data-profile 0x0f)
(def mcfg-int-config-item 0x10)
(defn- extract-mbn-elf32-prog-segment
"function retrieves the mbn item content from an elf32
byte sequence which e.g. read from a mbn file."
[s]
(let [h (parse-elf32-header s)
progs (parse-elf32-prog-headers h s)
filter LT_LOAD seg types
prog (first progs)]
(take (:orct.elf/p_filesz prog) (drop (:orct.elf/p_offset prog) s))))
(defn- extract-mcfg-header
"extract mcfg header
returns: [ result-map rest-of-byte-sequence ]"
[mbn]
{:post [(= (-> % first ::magic1) 1195787085)
(<= (-> % first ::mcfg_format_ver_num) 3)]}
(proc-parse-struct-with-rest
[{::magic1 rest-uint32-pair}
{::mcfg_format_ver_num rest-uint16-pair}
{::mcfg_type rest-uint16-pair}
{::mcfg_num_items rest-uint32-pair}
{::mcfg_muxd_carrier_index_info rest-uint16-pair}
{::spare_crc rest-uint16-pair}]
mbn))
(defn- extract-mcfg-version
"extract mcfg version
returns: [ result-map rest-of-byte-sequence ]"
[mbn-after-header]
(proc-parse-struct-with-rest
[{::mcfg_version_item_type rest-uint16-pair}
{::mcfg_version-item_length rest-uint16-pair}
{::mcfg_version rest-uint32-pair}]
mbn-after-header))
(defn- extract-mcfg-item-prefix
"extract mcfg item prefix
returns: [ result-map rest-of-byte-sequence ]"
[mcg-item-start-seq]
(proc-parse-struct-with-rest
[{::item_length rest-uint32-pair}
{::item_type rest-uint8-pair}
{::attrib rest-uint8-pair}
{::sp_ops rest-uint8-pair}
{::spare1 rest-uint8-pair}]
mcg-item-start-seq))
(defn- extract-mcfg-nv-item
"extract legacy nv item content
returns: [ result-map rest-of-byte-sequence ]"
[r]
(let [[header r] (proc-parse-struct-with-rest
[{::item_type rest-uint16-pair}
{::item_length rest-uint16-pair}]
r)
len (::item_length header)
[content r] [(take len r) (drop len r)]]
[{::nv-item {(keyword (str (::item_type header))) content}} r]))
(defn- extract-mcfg-efs-item
"extract efs item content
returns: [ result-map rest-of-byte-sequence ]"
[r]
(let [[path-header r] (proc-parse-struct-with-rest
[{::efs_header_type rest-uint16-pair}
{::efs_header_len rest-uint16-pair}] r)
path-len (::efs_header_len path-header)
[path-body r] [(take path-len r) (drop path-len r)]
[content-header r] (proc-parse-struct-with-rest
[{::efs_header_type rest-uint16-pair}
{::efs_header_len rest-uint16-pair}] r)
content-len (::efs_header_len content-header)
[content-body r] [(take content-len r) (drop content-len r)]]
[{::efs-item {(keyword (bytes2str path-body)) content-body}} r]))
(defn- extract-mcfg-item
"extract one mcfg item (currently only legacy and efs items are implemented)
returns: [ result-map rest-of-byte-sequence ]"
[nv-item-start-seq]
(let [[mcfg-item-prefix r] (extract-mcfg-item-prefix nv-item-start-seq)
type (::item_type mcfg-item-prefix)]
(cond
(= type mcfg-int-nv-item)
(extract-mcfg-nv-item r)
(= type mcfg-int-efs-file)
(extract-mcfg-efs-item r)
:else (throw (IllegalStateException. (format "mcfg item -> %s has wrong type %d!"
(str mcfg-item-prefix) type))))))
(comment
(def content (extract-mcfg-item (drop mcfg-nv-data-offset mbn)))
(def p1 (first content))
(def r2 (second content))
)
(defn- extract-mcfg-items
"extract all mcfg items (currently only legacy and efs items are implemented)
returns: [ mcfg-map rest-of-byte-sequence ]"
[num-mcfg-items stream]
(loop [mcfg-item-number 1
resmap-rest [[] stream]]
(if (== mcfg-item-number num-mcfg-items)
(first resmap-rest)
(recur (inc mcfg-item-number)
(let [except-fmt-str "Error in transformation of %d-th mcfg-item occurred: %s\n"
[item r] (try
(extract-mcfg-item (second resmap-rest))
(catch Throwable t
(throw (IllegalStateException.
(format except-fmt-str mcfg-item-number (.getMessage t))))))]
[(conj (first resmap-rest) item) r])))))
(comment
(def item-a {::nv-item {:4 "four"}})
(def item-b {::nv-item {:5 "five"}})
(merge-with merge item-a item-b)
(def a (extract-mcfg-items 10 (drop mcfg-nv-data-offset mbn)))
(apply merge-with merge a)
)
(defn- parse-mbn-file-content
"parses a byte sequence which has been read from an mbn file
returns: array with parsed items, currently only two content
types are implemented
::nv-items legacy nv item
::efs-items efs item"
[s]
(let [mbn (extract-mbn-elf32-prog-segment s)
[mcfg-header r] (extract-mcfg-header mbn)
[mcfg-version r] (extract-mcfg-version r)
num-mcfg-items (::mcfg_num_items mcfg-header)]
(conj
(extract-mcfg-items num-mcfg-items r)
{::mcfg_header mcfg-header}
{::mcfg_version mcfg-version})))
(defn- decode-mbn-nv-item-content
"decode legacy nv items out of mbn map
return hash map with nv item number as keyed argument
and parsed parameters as values."
[nv-definition-schema flat-mbn-item-map]
(let [resvec
(map
(fn [nv-item]
(let [item-key (first nv-item)
item-val (second nv-item)
item-index (first item-val)
item-schema (-> nv-definition-schema :nv-items item-key)
decoded (if item-schema
{:params (decode-binary-nv-params (:content item-schema) item-val)}
{:errors "missing schema"})]
{item-key (assoc decoded :data item-val :index item-index :name (:name item-schema))}))
(::nv-item flat-mbn-item-map))
error-items (filter #(-> % vals first :errors) resvec)
collected-errors (map #(format "nv-item %s: %s"
(-> % keys first key2str)
(-> % vals first :errors)) error-items)
resmap (apply merge resvec)]
(assoc resmap :errors collected-errors)))
(defn- decode-mbn-efs-item-content
"decode legacy nv items out of mbn map
return hash map with nv item number as keyed argument
and parsed parameters as values."
[nv-definition-schema flat-mbn-item-map]
(let [resvec
(map
(fn [nv-item]
(let [item-key (first nv-item)
item-val (second nv-item)
item-index (first item-val)
item-schema (-> nv-definition-schema :efs-items item-key)
decoded (if item-schema
{:params (decode-binary-nv-params (:content item-schema) item-val)}
{:errors "missing schema"})]
{item-key (assoc decoded :data item-val :index item-index :name (:name item-schema))}))
(::efs-item flat-mbn-item-map))
error-items (filter #(-> % vals first :errors) resvec)
collected-errors (map #(format "%s: %s"
(-> % keys first key2str)
(-> % vals first :errors)) error-items)
resmap (apply merge resvec)]
(assoc resmap :errors collected-errors)))
(defn- map-mbn-efs-to-qcn-efs
"transforms efs-item hash map of form
{:path_item_1 {:params [] :data [], :path_item_2 { ... } } to
legacy qcn representation of form
{:number_1 { :path path_item_1 :params [] :data [] }, :number_2 { ...} }
we can reuse the qcn_print function so"
[items]
(first
(reduce
(fn [[res num] item]
(let [path (first item)
item (second item)
item (assoc item :path path)
]
[(assoc res (keyword (str num)) item)
(inc num)]))
[{} 1]
items)))
(defn parse-mbn-data
"Parse Qualcomm's non-volatile modem configuration binary (mbn) file.
nv-definition-schema: parsed schema xml file, refer e.g. to parse-nv-definition-file
mbn-data-file-name: name of data mbn file, refer e.g. to samples/sample.mbn
returns nested clojure hash map in same/similar format as generated in qcn_parser.clj:
:NV_ITEM_ARRAY -> provides legacy numbered nv item backup data
:NV_Items -> provides EFS nv item backup data
:mcfg-version -> integer mbn file version identifier
:carrier-index-info -> integer with carrier index information
:errors -> contains prarsing errors."
[nv-definition-schema mbn-data-file-name]
(let [s (seq (read-file mbn-data-file-name))
mbn-items (parse-mbn-file-content s)
flat-item-map (apply merge-with merge mbn-items)
nv-items (decode-mbn-nv-item-content nv-definition-schema flat-item-map)
efs-items (decode-mbn-efs-item-content nv-definition-schema flat-item-map)
tot-errors (concat
(:errors nv-items)
(:errors efs-items))
nv-items (dissoc nv-items :errors)
efs-items (dissoc efs-items :errors)
_ (def e (dissoc efs-items :errors))
efs-items (map-mbn-efs-to-qcn-efs efs-items)]
{:NV_ITEM_ARRAY nv-items
:NV_Items efs-items
:Provisioning_Item_Files {}
:EFS_Backup {}
:errors tot-errors
:mcfg-version (-> flat-item-map ::mcfg_version ::mcfg_version)
:carrier-index-info (-> flat-item-map ::mcfg_header ::mcfg_muxd_carrier_index_info)}))
(comment
(def s (seq (read-file "samples/mcfg_sw_dt.mbn")))
(def mbn (extract-mbn-elf32-prog-segment s))
(let [mbn (extract-mbn-elf32-prog-segment s)
[mcfg-header r] (extract-mcfg-header mbn)
[mcfg-version r] (extract-mcfg-version r)
num-mcfg-items (::mcfg_num_items mcfg-header)]
(def mcfg-header mcfg-header)
(def mcfg-version mcfg-version)
(def num-mcfg-items num-mcfg-items))
(::mcfg_muxd_carrier_index_info header)
(first (extract-mcfg-item-prefix (drop mcfg-nv-data-offset mbn)))
(first (extract-mcfg-item (drop mcfg-nv-data-offset mbn)))
(def items (parse-mbn-file-content s))
(def nv-definition-schema (parse-nv-definition-file "samples/NvDefinition.xml"))
(def test-item-key :850)
(def test-item-content (test-item-key (::nv-item flat-item-map)))
(def test-item-schema (test-item-key (:nv-items nv-definition-schema)))
(decode-binary-nv-params (:content test-item-schema) (drop 1 test-item-content))
(decode-mbn-efs-item-content nv-definition-schema flat-item-map)
(decode-mbn-nv-item-content nv-definition-schema flat-item-map)
(:errors (decode-mbn-nv-item-content nv-definition-schema flat-item-map))
(def items (parse-mbn-data nv-definition-schema "samples/mcfg_sw_dt.mbn"))
(println items)
(println (:NV_ITEM_ARRAY items))
(println (:NV_Items items))
(println (keys (:NV_ITEM_ARRAY items)))
(keys (dissoc (:NV_ITEM_ARRAY items) :errors))
(println (:882 (:NV_ITEM_ARRAY items)))
(print-nv-item-set nv-definition-schema items :efs-subst false)
(items :File_Version)
)
|
d7d33f2cc898cebffe03445c8d13cad2c56257d2f741e01fa7c81c03876a42f0 | spartango/CS153 | mips_sim.ml | open Mips_ast
open Byte
open Binary_ops
exception TODO
exception FatalError
exception UnalignedAccessError
exception InvalidInstruction
Register file definitions . A register file is a map from a register
number to a 32 - bit quantity .
number to a 32-bit quantity. *)
module IntMap = Map.Make(struct type t = int let compare = compare end)
type regfile = int32 IntMap.t
let empty_rf = IntMap.empty
let rf_update (r : int) (v : int32) (rf : regfile) : regfile =
IntMap.add r v rf
let rec rf_update_many ( targets : (int * int32) list ) (rf : regfile) : regfile =
match targets with
| [] -> rf
| (r, v) :: rest -> (rf_update_many rest (rf_update r v rf))
let rf_lookup (r : int) (rf : regfile) : int32 =
try IntMap.find r rf with Not_found -> Int32.zero
let string_of_rf (rf : regfile) : string =
IntMap.fold (fun key v s ->
s^(string_of_int key)^" -> "^(Int32.to_string v)^"\n") rf ""
let compare_rf (rf_src : regfile) (rf_dest: regfile) : string =
let src_keys =
IntMap.fold
(fun key v rf -> (rf_update key 0l rf))
rf_src
empty_rf
in
let union_keys =
IntMap.fold
(fun key v rf -> (rf_update key 0l rf))
rf_dest
src_keys
in
(IntMap.fold
(fun key v s ->
let src_val = (rf_lookup key rf_src) in
let dest_val = (rf_lookup key rf_dest) in
if src_val = dest_val
then s^""
else s^(reg2str (ind2reg (Int32.of_int key)))^": "
^(Int32.to_string src_val)
^" vs "^(Int32.to_string dest_val))
union_keys
"")
Memory definitions . A memory is a map from 32 - bit addresses to bytes .
module Int32Map = Map.Make(struct type t = int32 let compare = Int32.compare end)
type memory = byte Int32Map.t
let empty_mem = Int32Map.empty
let mem_update (a : int32) (v : byte) (m : memory) : memory =
Int32Map.add a v m
let rec mem_update_many (targets : (int32 * byte) list) (m : memory) : memory =
match targets with
| [] -> m
| (a, v) :: rest -> (mem_update_many rest (mem_update a v m))
let mem_lookup (a : int32) (m : memory) : byte =
try (Int32Map.find a m) with Not_found -> mk_byte Int32.zero
let string_of_mem (m : memory) : string =
Int32Map.fold (fun key v s ->
s^(Int32.to_string key)^" -> "^(Int32.to_string (b2i32 v))^"\n") m ""
let compare_mem (mem_src : memory) (mem_dest: memory) : string =
let zero_byte = Byte.mk_byte 0l in
let src_keys =
Int32Map.fold
(fun key v mem -> (mem_update key zero_byte mem))
mem_src
empty_mem
in
let union_keys =
Int32Map.fold
(fun key v mem -> (mem_update key zero_byte mem))
mem_dest
src_keys
in
(Int32Map.fold
(fun key v s ->
let src_val = (mem_lookup key mem_src) in
let dest_val = (mem_lookup key mem_dest) in
if src_val = dest_val
then s
else s^(Int32.to_string key)^": "
^(Int32.to_string (Byte.b2i32 src_val))
^" vs "^(Int32.to_string (Byte.b2i32 dest_val)) ^ "; ")
union_keys
"")
State
type state = { r : regfile; pc : int32; m : memory }
let empty_state = { m = empty_mem; pc = 0l; r = empty_rf}
let string_of_state (s : state) : string =
"Memory:\n" ^(string_of_mem s.m)
^"---\nRegisters:\n"^(string_of_rf s.r)
^"---\nPc: " ^(Int32.to_string s.pc)
Copies a 32 bit object into adjacent memory locations
let word_mem_update (word : int32) (offset : int32) (m : memory) : memory =
(* Split into parts by shifting *)
(* Insert parts into slots from offset *)
let mem_1 = (mem_update offset (Byte.mk_byte (Int32.shift_right_logical word 24)) m) in
let mem_2 = (mem_update (Int32.add offset 1l) (Byte.mk_byte (Int32.shift_right_logical word 16)) mem_1) in
let mem_3 = (mem_update (Int32.add offset 2l) (Byte.mk_byte (Int32.shift_right_logical word 8)) mem_2) in
(mem_update (Int32.add offset 3l) (Byte.mk_byte word) mem_3)
(* Reads a word starting from the offset in memory *)
let word_mem_lookup (offset : int32) (m : memory) : int32 =
left_shift_or [ ((b2i32 (mem_lookup offset m)), 24);
((b2i32 (mem_lookup (Int32.add offset 1l) m)), 16);
((b2i32 (mem_lookup (Int32.add offset 2l) m)), 8);
((b2i32 (mem_lookup (Int32.add offset 3l) m)), 0) ]
(* Translates an instruction to binary and copies it into memory, resolving pseudoinstructions *)
let rec inst_update_mem (target : inst) (s : state) : state =
(* Pick out pseudoinstructions *)
match target with
| Li(rs, imm) ->
First put an Lui for the upper half of the immediate
let new_state = (inst_update_mem (Lui(R1, (int32_upper imm))) s) in
Then tack on an Ori for the lower half
(inst_update_mem (Ori(rs, R1, (int32_lower imm))) new_state)
(* Do a binary translate & update *)
| t_inst -> { r = s.r; m = (word_mem_update (inst_to_bin target) s.pc s.m); pc = (Int32.add s.pc 4l)}
Map a program , a list of Mips assembly instructions , down to a starting
state . You can start the PC at any address you wish . Just make sure that
you put the generated machine code where you started the PC in memory !
state. You can start the PC at any address you wish. Just make sure that
you put the generated machine code where you started the PC in memory! *)
let rec assem (prog : program) : state =
(* A nice helper function to accumulate state and move across memory space as it is updated *)
let rec assem_r (prog : program) (machine_s : state) : state =
(* Grab the next instruction *)
match prog with
(* If we're at the end, move the PC to the beginning and return*)
| [] -> {m = machine_s.m; pc = 0l; r = machine_s.r}
(* For real instructions *)
| t_inst :: rest ->
(* Encode the part in binary and push the binary into memory at the next free address(es) *)
let new_state = (inst_update_mem t_inst machine_s) in
(* assemble the rest of the program *)
(assem_r rest new_state)
in
let init_state = empty_state in
(assem_r prog init_state)
Disassembles a binary word into a MIPS instruction
let disassem (binary : int32) : inst =
match (get_opcode binary) with
| 0x00l -> (match (get_opcode2 binary) with
| 0x08l -> Jr(get_reg1 binary)
| 0x20l -> Add((get_reg3 binary), (get_reg1 binary), (get_reg2 binary))
| _ -> raise NotRegister)
(* Left-shift target, as target must be word aligned *)
| 0x03l -> Jal(Int32.shift_left (Int32.logand binary (masker 26 6)) 2)
| 0x04l -> Beq((get_reg1 binary), (get_reg2 binary), int16_to_int32 binary)
| 0x0dl -> Ori((get_reg2 binary), (get_reg1 binary), int16_to_int32 binary)
| 0x0fl -> Lui((get_reg2 binary), int32_lower binary)
| 0x23l -> Lw((get_reg2 binary), (get_reg1 binary), int16_to_int32 binary)
| 0x2bl -> Sw((get_reg2 binary), (get_reg1 binary), int16_to_int32 binary)
| _ -> raise InvalidInstruction
(* Checks for word alignment of address *)
let check_word_aligned (target_addr : int32) : int32 =
if (Int32.rem target_addr 4l) = 0l
then target_addr
else raise UnalignedAccessError
(* Increments the PC of a state *)
let increment_pc (machine_s : state) : state =
{ pc = (Int32.add 4l machine_s.pc);
m = machine_s.m;
r = machine_s.r }
Executes a on a given state , branching if equal
let exec_beq (rs : reg) (rt : reg) (label : int32) (machine_s : state) : state =
if (rf_lookup (reg2ind rs) machine_s.r) = (rf_lookup (reg2ind rt) machine_s.r)
then { pc = (Int32.add machine_s.pc (Int32.mul label 4l));
m = machine_s.m;
r = machine_s.r }
else (increment_pc machine_s)
(* Executes a Jr on a given state, jumping to the address stored in rs *)
let exec_jr (rs : reg) (machine_s : state) : state =
{ pc = (check_word_aligned (rf_lookup (reg2ind rs) machine_s.r));
m = machine_s.m;
r = machine_s.r }
(* Executes a Jal on a given state, jumping to a target and linking the return address *)
let exec_jal (target : int32) (machine_s : state) : state =
{ pc = (check_word_aligned target);
m = machine_s.m;
r = (rf_update (reg2ind R31) (Int32.add machine_s.pc 4l) (machine_s.r)) }
Executes a Lui on a given state , loading an immediate into the upper half of a register
let exec_lui (rt : reg) (imm : int32) (machine_s : state) : state =
increment_pc { pc = machine_s.pc;
m = machine_s.m;
r = (rf_update (reg2ind rt) (Int32.shift_left imm 16) (machine_s.r)) }
Executes a Ori on a given state , OR - ing an immediate
let exec_ori (rt : reg) (rs : reg) (imm : int32) (machine_s : state) : state =
increment_pc { pc = machine_s.pc;
m = machine_s.m;
r = (rf_update (reg2ind rt)
(Int32.logor imm (rf_lookup (reg2ind rs) machine_s.r))
machine_s.r ) }
Executes a Lw on a given state , loading a word
let exec_lw (rt : reg) (rs : reg) (offset : int32) (machine_s : state) : state =
let target_addr = (Int32.add (rf_lookup (reg2ind rs) machine_s.r) offset) in
increment_pc { pc = machine_s.pc;
m = machine_s.m;
r = (rf_update (reg2ind rt)
(word_mem_lookup
(check_word_aligned target_addr)
machine_s.m)
machine_s.r ) }
Executes a Sw on a given state , storing a word
let exec_sw (rt : reg) (rs : reg) (offset : int32) (machine_s : state) : state =
let target_addr = (Int32.add (rf_lookup (reg2ind rs) machine_s.r) offset) in
increment_pc { pc = machine_s.pc;
m = (word_mem_update (rf_lookup (reg2ind rt) machine_s.r)
(check_word_aligned target_addr)
machine_s.m);
r = machine_s.r }
(* Executes an Add on a given state, adding the targeted registers*)
let exec_add (rd : reg) (rs : reg) (rt : reg) (machine_s : state) : state =
increment_pc { pc = machine_s.pc;
m = machine_s.m;
r = (rf_update (reg2ind rd)
(Int32.add (rf_lookup (reg2ind rs) machine_s.r)
(rf_lookup (reg2ind rt) machine_s.r))
machine_s.r) }
Executes a Li on a given state , loading a 32 bit
let exec_li (rs : reg) (imm : int32) (machine_s : state) : state =
increment_pc { pc = machine_s.pc;
m = machine_s.m;
r = (rf_update (reg2ind rs) imm (machine_s.r))}
let exec (target : inst) (machine_s : state) : state =
(* Match against possible ops *)
match target with
(* Branch by offset if rs == rt *)
| Beq(rs, rt, label) -> (exec_beq rs rt label machine_s)
(* Jump to the address specified in rs*)
| Jr(rs) -> (exec_jr rs machine_s)
(* Jump to instruction at target, save address in RA*)
| Jal(target) -> (exec_jal target machine_s)
Load immediate into upper half of register
| Lui(rt, imm) -> (exec_lui rt imm machine_s)
(* rs |imm -> rt *)
| Ori(rt, rs, imm) -> (exec_ori rt rs imm machine_s)
(* Load (word) at address into register rt.*)
| Lw(rt, rs, offset) -> (exec_lw rt rs offset machine_s)
(* Store word from rt at address *)
| Sw(rt, rs, offset) -> (exec_sw rt rs offset machine_s)
(* rs + rt -> rd*)
| Add(rd, rs, rt) -> (exec_add rd rs rt machine_s)
| Li (rs, imm) -> (exec_li rs imm machine_s) (* This shouldn't get called with the dissambler in the pipe, but is good for testing *)
Given a starting state , simulate the Mips machine code to get a final state
let rec interp (init_state : state) : state =
(* Grab instruction binary from addresses, concatenating as we go *)
let bin_inst = (word_mem_lookup init_state.pc init_state.m ) in
match bin_inst with
| 0l -> (* Noop -> Done *) init_state
| _ ->
Disassemble
let t_inst = (disassem bin_inst) in
Exec
let new_state = (exec t_inst init_state) in
Handoff state
(interp new_state)
| null | https://raw.githubusercontent.com/spartango/CS153/16faf133889f1b287cb95c1ea1245d76c1d8db49/ps0/mips_sim.ml | ocaml | Split into parts by shifting
Insert parts into slots from offset
Reads a word starting from the offset in memory
Translates an instruction to binary and copies it into memory, resolving pseudoinstructions
Pick out pseudoinstructions
Do a binary translate & update
A nice helper function to accumulate state and move across memory space as it is updated
Grab the next instruction
If we're at the end, move the PC to the beginning and return
For real instructions
Encode the part in binary and push the binary into memory at the next free address(es)
assemble the rest of the program
Left-shift target, as target must be word aligned
Checks for word alignment of address
Increments the PC of a state
Executes a Jr on a given state, jumping to the address stored in rs
Executes a Jal on a given state, jumping to a target and linking the return address
Executes an Add on a given state, adding the targeted registers
Match against possible ops
Branch by offset if rs == rt
Jump to the address specified in rs
Jump to instruction at target, save address in RA
rs |imm -> rt
Load (word) at address into register rt.
Store word from rt at address
rs + rt -> rd
This shouldn't get called with the dissambler in the pipe, but is good for testing
Grab instruction binary from addresses, concatenating as we go
Noop -> Done | open Mips_ast
open Byte
open Binary_ops
exception TODO
exception FatalError
exception UnalignedAccessError
exception InvalidInstruction
Register file definitions . A register file is a map from a register
number to a 32 - bit quantity .
number to a 32-bit quantity. *)
module IntMap = Map.Make(struct type t = int let compare = compare end)
type regfile = int32 IntMap.t
let empty_rf = IntMap.empty
let rf_update (r : int) (v : int32) (rf : regfile) : regfile =
IntMap.add r v rf
let rec rf_update_many ( targets : (int * int32) list ) (rf : regfile) : regfile =
match targets with
| [] -> rf
| (r, v) :: rest -> (rf_update_many rest (rf_update r v rf))
let rf_lookup (r : int) (rf : regfile) : int32 =
try IntMap.find r rf with Not_found -> Int32.zero
let string_of_rf (rf : regfile) : string =
IntMap.fold (fun key v s ->
s^(string_of_int key)^" -> "^(Int32.to_string v)^"\n") rf ""
let compare_rf (rf_src : regfile) (rf_dest: regfile) : string =
let src_keys =
IntMap.fold
(fun key v rf -> (rf_update key 0l rf))
rf_src
empty_rf
in
let union_keys =
IntMap.fold
(fun key v rf -> (rf_update key 0l rf))
rf_dest
src_keys
in
(IntMap.fold
(fun key v s ->
let src_val = (rf_lookup key rf_src) in
let dest_val = (rf_lookup key rf_dest) in
if src_val = dest_val
then s^""
else s^(reg2str (ind2reg (Int32.of_int key)))^": "
^(Int32.to_string src_val)
^" vs "^(Int32.to_string dest_val))
union_keys
"")
Memory definitions . A memory is a map from 32 - bit addresses to bytes .
module Int32Map = Map.Make(struct type t = int32 let compare = Int32.compare end)
type memory = byte Int32Map.t
let empty_mem = Int32Map.empty
let mem_update (a : int32) (v : byte) (m : memory) : memory =
Int32Map.add a v m
let rec mem_update_many (targets : (int32 * byte) list) (m : memory) : memory =
match targets with
| [] -> m
| (a, v) :: rest -> (mem_update_many rest (mem_update a v m))
let mem_lookup (a : int32) (m : memory) : byte =
try (Int32Map.find a m) with Not_found -> mk_byte Int32.zero
let string_of_mem (m : memory) : string =
Int32Map.fold (fun key v s ->
s^(Int32.to_string key)^" -> "^(Int32.to_string (b2i32 v))^"\n") m ""
(* Compare two memories and render every address whose contents differ
   as an "addr: src vs dest; " entry; "" means the memories agree on
   every address written in either map. *)
let compare_mem (mem_src : memory) (mem_dest: memory) : string =
  let zero_byte = Byte.mk_byte 0l in
  (* Build the union of both key sets (values are placeholders). *)
  let src_keys =
    Int32Map.fold
      (fun key v mem -> (mem_update key zero_byte mem))
      mem_src
      empty_mem
  in
  let union_keys =
    Int32Map.fold
      (fun key v mem -> (mem_update key zero_byte mem))
      mem_dest
      src_keys
  in
  (Int32Map.fold
    (fun key v s ->
      let src_val = (mem_lookup key mem_src) in
      let dest_val = (mem_lookup key mem_dest) in
      if src_val = dest_val
      then s
      else s^(Int32.to_string key)^": "
        ^(Int32.to_string (Byte.b2i32 src_val))
        ^" vs "^(Int32.to_string (Byte.b2i32 dest_val)) ^ "; ")
    union_keys
    "")
(* State *)
(* Complete machine state: registers, program counter, and memory. *)
type state = { r : regfile; pc : int32; m : memory }
let empty_state = { m = empty_mem; pc = 0l; r = empty_rf}
(* Human-readable dump of a machine state (memory, registers, pc). *)
let string_of_state (s : state) : string =
  "Memory:\n" ^(string_of_mem s.m)
  ^"---\nRegisters:\n"^(string_of_rf s.r)
  ^"---\nPc: " ^(Int32.to_string s.pc)
(* Copies a 32 bit object into adjacent memory locations *)
(* Store a 32-bit word big-endian across the four bytes starting at
   offset (most-significant byte at the lowest address). *)
let word_mem_update (word : int32) (offset : int32) (m : memory) : memory =
  let mem_1 = (mem_update offset (Byte.mk_byte (Int32.shift_right_logical word 24)) m) in
  let mem_2 = (mem_update (Int32.add offset 1l) (Byte.mk_byte (Int32.shift_right_logical word 16)) mem_1) in
  let mem_3 = (mem_update (Int32.add offset 2l) (Byte.mk_byte (Int32.shift_right_logical word 8)) mem_2) in
  (mem_update (Int32.add offset 3l) (Byte.mk_byte word) mem_3)
(* Fetch a big-endian 32-bit word from the four bytes starting at offset;
   inverse of word_mem_update. *)
let word_mem_lookup (offset : int32) (m : memory) : int32 =
  left_shift_or [ ((b2i32 (mem_lookup offset m)), 24);
                  ((b2i32 (mem_lookup (Int32.add offset 1l) m)), 16);
                  ((b2i32 (mem_lookup (Int32.add offset 2l) m)), 8);
                  ((b2i32 (mem_lookup (Int32.add offset 3l) m)), 0) ]
(* Encode instruction [target] into memory at the current pc and advance
   the pc past it.  The pseudo-instruction Li expands to a Lui/Ori pair
   (two words, using R1 as scratch); every other instruction occupies one
   4-byte word.  Two stripped comment markers restored below. *)
let rec inst_update_mem (target : inst) (s : state) : state =
  match target with
  | Li(rs, imm) ->
    (* First put an Lui for the upper half of the immediate *)
    let new_state = (inst_update_mem (Lui(R1, (int32_upper imm))) s) in
    (* Then tack on an Ori for the lower half *)
    (inst_update_mem (Ori(rs, R1, (int32_lower imm))) new_state)
  | t_inst -> { r = s.r; m = (word_mem_update (inst_to_bin target) s.pc s.m); pc = (Int32.add s.pc 4l)}
(* Map a program, a list of Mips assembly instructions, down to a starting
   state.  You can start the PC at any address you wish.  Just make sure that
   you put the generated machine code where you started the PC in memory! *)
(* Assemble prog into an initial machine state.  Code is laid down from
   pc = 0; the returned state's pc is reset to 0 so execution starts at
   the first assembled instruction.
   NOTE(review): the outer 'rec' is unnecessary — only assem_r recurses. *)
let rec assem (prog : program) : state =
  let rec assem_r (prog : program) (machine_s : state) : state =
    match prog with
    | [] -> {m = machine_s.m; pc = 0l; r = machine_s.r}
    | t_inst :: rest ->
      let new_state = (inst_update_mem t_inst machine_s) in
      (assem_r rest new_state)
  in
  let init_state = empty_state in
  (assem_r prog init_state)
(* Disassembles a binary word into a MIPS instruction *)
(* Decode one 32-bit word into an inst.  Opcode 0x00 is R-format and is
   further dispatched on the function field; all other recognized opcodes
   are I/J-format.  Unknown encodings raise. *)
let disassem (binary : int32) : inst =
  match (get_opcode binary) with
  | 0x00l -> (match (get_opcode2 binary) with
    | 0x08l -> Jr(get_reg1 binary)
    | 0x20l -> Add((get_reg3 binary), (get_reg1 binary), (get_reg2 binary))
    | _ -> raise NotRegister)
  | 0x03l -> Jal(Int32.shift_left (Int32.logand binary (masker 26 6)) 2)
  | 0x04l -> Beq((get_reg1 binary), (get_reg2 binary), int16_to_int32 binary)
  | 0x0dl -> Ori((get_reg2 binary), (get_reg1 binary), int16_to_int32 binary)
  | 0x0fl -> Lui((get_reg2 binary), int32_lower binary)
  | 0x23l -> Lw((get_reg2 binary), (get_reg1 binary), int16_to_int32 binary)
  | 0x2bl -> Sw((get_reg2 binary), (get_reg1 binary), int16_to_int32 binary)
  | _ -> raise InvalidInstruction
(* Return target_addr unchanged if it is a multiple of 4, otherwise
   raise UnalignedAccessError. *)
let check_word_aligned (target_addr : int32) : int32 =
  if (Int32.rem target_addr 4l) <> 0l
  then raise UnalignedAccessError
  else target_addr
(* Advance the program counter by one 4-byte instruction word. *)
let increment_pc (machine_s : state) : state =
  { machine_s with pc = (Int32.add 4l machine_s.pc) }
(* Executes a Beq on a given state, branching if equal *)
(* Branch when rs and rt hold equal values.  The taken target is
   pc + 4*label relative to the current pc.
   NOTE(review): unlike hardware MIPS there is no +4 for the fall-through
   pc here — confirm this matches the encoder's offset convention. *)
let exec_beq (rs : reg) (rt : reg) (label : int32) (machine_s : state) : state =
  if (rf_lookup (reg2ind rs) machine_s.r) = (rf_lookup (reg2ind rt) machine_s.r)
  then { pc = (Int32.add machine_s.pc (Int32.mul label 4l));
         m = machine_s.m;
         r = machine_s.r }
  else (increment_pc machine_s)
(* Jump to the (word-aligned) address held in rs. *)
let exec_jr (rs : reg) (machine_s : state) : state =
  { machine_s with
    pc = (check_word_aligned (rf_lookup (reg2ind rs) machine_s.r)) }
(* Jump-and-link: jump to the word-aligned target and store the return
   address (pc + 4) in R31. *)
let exec_jal (target : int32) (machine_s : state) : state =
  { pc = (check_word_aligned target);
    m = machine_s.m;
    r = (rf_update (reg2ind R31) (Int32.add machine_s.pc 4l) (machine_s.r)) }
(* Executes a Lui on a given state, loading an immediate into the upper half of a register *)
(* Load imm into the upper 16 bits of rt (lower bits cleared), then
   advance the pc. *)
let exec_lui (rt : reg) (imm : int32) (machine_s : state) : state =
  increment_pc { pc = machine_s.pc;
                 m = machine_s.m;
                 r = (rf_update (reg2ind rt) (Int32.shift_left imm 16) (machine_s.r)) }
(* Executes an Ori on a given state, OR-ing an immediate *)
(* rt := rs | imm, then advance the pc. *)
let exec_ori (rt : reg) (rs : reg) (imm : int32) (machine_s : state) : state =
  increment_pc { pc = machine_s.pc;
                 m = machine_s.m;
                 r = (rf_update (reg2ind rt)
                       (Int32.logor imm (rf_lookup (reg2ind rs) machine_s.r))
                       machine_s.r ) }
(* Executes a Lw on a given state, loading a word *)
(* rt := memory[rs + offset] (word-aligned access), then advance the pc. *)
let exec_lw (rt : reg) (rs : reg) (offset : int32) (machine_s : state) : state =
  let target_addr = (Int32.add (rf_lookup (reg2ind rs) machine_s.r) offset) in
  increment_pc { pc = machine_s.pc;
                 m = machine_s.m;
                 r = (rf_update (reg2ind rt)
                       (word_mem_lookup
                          (check_word_aligned target_addr)
                          machine_s.m)
                       machine_s.r ) }
(* Executes a Sw on a given state, storing a word *)
(* memory[rs + offset] := rt (word-aligned access), then advance the pc. *)
let exec_sw (rt : reg) (rs : reg) (offset : int32) (machine_s : state) : state =
  let target_addr = (Int32.add (rf_lookup (reg2ind rs) machine_s.r) offset) in
  increment_pc { pc = machine_s.pc;
                 m = (word_mem_update (rf_lookup (reg2ind rt) machine_s.r)
                        (check_word_aligned target_addr)
                        machine_s.m);
                 r = machine_s.r }
(* rd := rs + rt (32-bit wrapping add), then advance the pc. *)
let exec_add (rd : reg) (rs : reg) (rt : reg) (machine_s : state) : state =
  increment_pc { pc = machine_s.pc;
                 m = machine_s.m;
                 r = (rf_update (reg2ind rd)
                       (Int32.add (rf_lookup (reg2ind rs) machine_s.r)
                          (rf_lookup (reg2ind rt) machine_s.r))
                       machine_s.r) }
(* Executes a Li on a given state, loading a 32 bit immediate *)
(* rs := imm, then advance the pc.
   NOTE(review): exec below never dispatches to this — Li is expanded to
   Lui/Ori at assembly time and disassem never produces Li, so this looks
   unreachable in the normal pipeline; confirm before removing. *)
let exec_li (rs : reg) (imm : int32) (machine_s : state) : state =
  increment_pc { pc = machine_s.pc;
                 m = machine_s.m;
                 r = (rf_update (reg2ind rs) imm (machine_s.r))}
(* Dispatch one decoded instruction against the machine state.
   Stripped comment marker before the Lui arm restored. *)
let exec (target : inst) (machine_s : state) : state =
  match target with
  | Beq(rs, rt, label) -> (exec_beq rs rt label machine_s)
  | Jr(rs) -> (exec_jr rs machine_s)
  | Jal(target) -> (exec_jal target machine_s)
  (* Load immediate into upper half of register *)
  | Lui(rt, imm) -> (exec_lui rt imm machine_s)
  | Ori(rt, rs, imm) -> (exec_ori rt rs imm machine_s)
  | Lw(rt, rs, offset) -> (exec_lw rt rs offset machine_s)
  | Sw(rt, rs, offset) -> (exec_sw rt rs offset machine_s)
  | Add(rd, rs, rt) -> (exec_add rd rs rt machine_s)
(* Given a starting state, simulate the Mips machine code to get a final state *)
(* Fetch/decode/execute loop over the machine state.
   NOTE(review): no halt condition is visible here — the loop runs until
   disassem raises (e.g. InvalidInstruction on a word that is not an
   encoded instruction); confirm that is the intended stop mechanism.
   The original's `match bin_inst with | _ ->` (a match with a single
   wildcard arm) was a no-op and has been removed; stripped comment
   markers are restored. *)
let rec interp (init_state : state) : state =
  (* Fetch *)
  let bin_inst = (word_mem_lookup init_state.pc init_state.m) in
  (* Disassemble *)
  let t_inst = (disassem bin_inst) in
  (* Exec *)
  let new_state = (exec t_inst init_state) in
  (* Handoff state *)
  (interp new_state)
|
1dfdeb329005fc45eef64c8e25db0faff3fe8c9dc9706df7cc49c5c9fc13f299 | technion/ct_advisor | ct_advisor_sup.erl | %%%-------------------------------------------------------------------
%% @doc ct_advisor top level supervisor.
%% @end
%%%-------------------------------------------------------------------
-module('ct_advisor_sup').
-behaviour(supervisor).
%% API
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
-define(SERVER, ?MODULE).
%%====================================================================
%% API functions
%%====================================================================
%% @doc Start the supervisor and register it locally as ?SERVER.
start_link() ->
    supervisor:start_link({local, ?SERVER}, ?MODULE, []).
%%====================================================================
%% Supervisor callbacks
%%====================================================================
%% Child :: {Id, StartFunc, Restart, Shutdown, Type, Modules}
%% @doc Supervisor callback: one_for_all strategy, at most 0 restarts
%% per 1 second, and no statically declared children.
init([]) ->
    {ok, { {one_for_all, 0, 1}, []} }.
%%====================================================================
%% Internal functions
%%====================================================================
| null | https://raw.githubusercontent.com/technion/ct_advisor/d5f3120c468e4203caefbe57f1c64fa4b7017613/apps/ct_advisor/src/ct_advisor_sup.erl | erlang | -------------------------------------------------------------------
@doc ct_advisor top level supervisor.
@end
-------------------------------------------------------------------
API
Supervisor callbacks
====================================================================
API functions
====================================================================
====================================================================
Supervisor callbacks
====================================================================
====================================================================
==================================================================== |
-module('ct_advisor_sup').
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(SERVER, ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
Child : : { Id , StartFunc , Restart , Shutdown , Type , Modules }
init([]) ->
{ok, { {one_for_all, 0, 1}, []} }.
Internal functions
|
a5618491e1b5c74d492ab164faa2615d7c8b2253805ca128c9d05eb5081e00fa | TerrorJack/yuuenchi | LazyRead.hs | # LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
module LazyRead
( ReadChunks(..)
, lazyRead
) where
import Control.Exception
import Data.Functor
import OneShotIO
import System.IO.Unsafe
-- | A strategy for reading a stream of chunks of type @a@ into an
-- accumulated structure of type @s@.
data ReadChunks s a = ReadChunks
  { emptyChunks :: s            -- ^ result for an exhausted stream
  , prependChunk :: a -> s -> s -- ^ attach one chunk to the front
  , isEmptyChunk :: a -> Bool   -- ^ does this chunk mark end-of-stream?
  , readChunk :: IO a           -- ^ fetch the next chunk
  , finalize :: IO ()           -- ^ release the underlying resource
  }
{-# INLINE lazyRead #-}
-- | Lazily read the whole stream described by a 'ReadChunks'.  Each
-- recursive step is deferred with 'unsafeInterleaveIO', so chunks are
-- only read as the result structure is forced.  'finalize' is wrapped
-- with 'oneshot' (from OneShotIO — presumably guaranteeing at-most-once
-- execution; confirm there) and is run either when the end-of-stream
-- chunk is seen or when a read throws ('onException').
lazyRead :: ReadChunks s a -> IO s
lazyRead ReadChunks {..} = do
  finalize_once <- oneshot finalize
  let lazy_go = unsafeInterleaveIO go
      go =
        flip onException finalize_once $ do
          c <- readChunk
          if isEmptyChunk c
            then finalize_once $> emptyChunks
            else prependChunk c <$> lazy_go
  lazy_go
| null | https://raw.githubusercontent.com/TerrorJack/yuuenchi/f2af8d8c2c71c93c1ddb61e87629ae53851dd2e6/src/LazyRead.hs | haskell | # LANGUAGE StrictData # | # LANGUAGE RecordWildCards #
module LazyRead
( ReadChunks(..)
, lazyRead
) where
import Control.Exception
import Data.Functor
import OneShotIO
import System.IO.Unsafe
data ReadChunks s a = ReadChunks
{ emptyChunks :: s
, prependChunk :: a -> s -> s
, isEmptyChunk :: a -> Bool
, readChunk :: IO a
, finalize :: IO ()
}
# INLINE lazyRead #
lazyRead :: ReadChunks s a -> IO s
lazyRead ReadChunks {..} = do
finalize_once <- oneshot finalize
let lazy_go = unsafeInterleaveIO go
go =
flip onException finalize_once $ do
c <- readChunk
if isEmptyChunk c
then finalize_once $> emptyChunks
else prependChunk c <$> lazy_go
lazy_go
|
52828858288c6e0300daf871496d791aab37a7fcb535046417b30d55768cbd14 | aviaviavi/legion | Server.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE GADTs #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
{-# LANGUAGE RankNTypes #-}
module Server where
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
import Control.Distributed.Process
import Control.Distributed.Process.Node
import Control.Monad (forever)
import Control.Monad.Trans
import Data.IORef
import Data.Maybe
import GHC.Generics
import Lib
import System.Log.Logger
import Text.PrettyPrint.GenericPretty
import Web.Spock
import Web.Spock.Config
import qualified Control.Distributed.Backend.P2P as P2P
import qualified Data.Binary as B
import qualified Data.Text as T
-- args for running the main application
-- | Command-line arguments for running the main application.
data MainArgs = MainArgs { httpPort :: String         -- ^ port for the HTTP API
                         , p2pPort :: String          -- ^ port for the P2P transport
                         , seedNode :: Maybe String   -- ^ existing node to join, if any
                         }
-- | Spock session type; no per-session data is stored.
data MySession = EmptySession
-- the state for our application, to be used as a spock state
-- | Application state shared by the HTTP handlers and the P2P process:
-- the mutable chain plus the local Cloud Haskell node and process id.
data BlockChainState = BlockChainState { blockChainState :: IORef [Block]
                                       , node :: LocalNode
                                       , pid :: ProcessId
                                       } deriving (Generic)
-- ADT for data that will be sent across the P2P network
-- | Messages exchanged between nodes: append one block, replace the
-- whole chain, or ask a peer to broadcast its chain.
data BlockUpdate = UpdateData Block | ReplaceData [Block] | RequestChain deriving (Generic)
instance B.Binary BlockUpdate
-- | Log a showable value at DEBUG level under the "legion" logger.
liftDebug :: (MonadIO m) => String -> m ()
liftDebug = liftIO . debugM "legion" . show
-- | Name under which each node's update-handling process registers.
p2pServiceName :: String
p2pServiceName = "updateservice"
-- explicit entry point to run the application, which is useful
-- for our tests
-- | Start a node: join (or bootstrap) the P2P network, start the HTTP
-- API, then loop handling 'BlockUpdate' messages from peers.  A node
-- with a seed starts with an empty chain and requests one; the initial
-- node starts with the genesis block.
-- NOTE(review): 'read' on httpPort is partial — a non-numeric port
-- crashes; consider readMaybe upstream.
runLegion :: MainArgs -> IO ()
runLegion args = do
  liftDebug "starting"
  (localNode, procId) <- runP2P (p2pPort args) (seedNode args) (return ())
  ref <- maybe (newIORef [initialBlock]) (const $ newIORef []) (seedNode args)
  spockCfg <- defaultSpockCfg EmptySession PCNoDatabase (BlockChainState ref localNode procId)
  _ <- async $ runSpock (read (httpPort args) :: Int) (spock spockCfg Server.app)
  -- wait for messages to come in from the p2p network and respond to them
  runProcess localNode $ do
    getSelfPid >>= register p2pServiceName
    liftIO $ threadDelay 1000000
    _ <- if isJust $ seedNode args
      then do
        liftDebug "this is not the initial node, requesting a chain"
        requestChain localNode
      else liftDebug "this is the initial node, not requesting a chain"
    forever $ do
      message <- expect :: Process BlockUpdate
      liftDebug "got a message..."
      case message of
        (ReplaceData chain) -> do
          liftDebug $ "got some stuff to replace: " ++ show chain
          replaceChain ref chain
        (UpdateData block) -> do
          liftDebug $ "got some stuff to add: " ++ show block
          addBlock ref block
        RequestChain -> do
          liftDebug "got chain request"
          sendChain localNode ref
-- Type alias
-- | A Spock action over state @stateType@ producing a @returnType@.
type Get stateType returnType
  = forall m.
  (SpockState m ~ stateType, MonadIO m, HasSpock m)
  => m returnType
-- retrieve the current block chain
-- | Read the current block chain out of the application state.
getBlockChain :: Get BlockChainState [Block]
getBlockChain = do
  (BlockChainState chain _ _) <- getState
  liftIO $ readIORef chain
-- retrieve the most recent block in the chain
-- | The most recent block in the chain.
-- NOTE(review): 'last' is partial — this crashes on an empty chain, and
-- a freshly-joined node's IORef starts empty until a chain arrives;
-- confirm callers only run after the chain is populated.
getLatestBlock :: Get BlockChainState Block
getLatestBlock = fmap last getBlockChain
-- add a block to our blockchain, if it's valid
-- | Append a block to the chain if it is a valid successor of the
-- current tip; otherwise log and skip.
--
-- NOTE(review): validation reads a snapshot and the append is a separate
-- atomic op, so a concurrent writer could interleave between the two;
-- confirm all writes funnel through the single P2P process loop.
addBlock :: MonadIO m => IORef [Block] -> Block -> m ()
addBlock ref block = do
  chain <- liftIO $ readIORef ref
  -- An empty chain has no tip to validate against (possible on a
  -- freshly-joined node before the chain arrives): skip instead of
  -- crashing in 'last'.
  if not (null chain) && isValidNewBlock (last chain) block
    then do
      liftDebug "adding new block"
      -- Build the extended chain once; the previous version constructed
      -- it twice and discarded the returned copy.
      liftIO $ atomicModifyIORef' ref $ \b -> (b ++ [block], ())
    else
      liftDebug "new block not valid. skipping"
-- given some data, create a valid block
-- | Mine a new block carrying the given payload on top of the current tip.
mineBlock :: (SpockState m ~ BlockChainState, MonadIO m, HasSpock m) => String -> m Block
mineBlock stringData = do
  lastBlock <- getLatestBlock
  mineBlockFrom lastBlock stringData
-- if this chain is valid and longer than what we have, update it.
-- | Adopt an incoming chain if it is valid and strictly longer than the
-- one we hold; otherwise log and keep the current chain.
replaceChain :: MonadIO m => IORef [Block] -> [Block] -> m ()
replaceChain chainRef newChain = do
  currentChain <- liftIO $ readIORef chainRef
  if (not . isValidChain) newChain || length currentChain >= length newChain
    then liftDebug $ "chain is not valid for updating!: " ++ show newChain
    else do
      setChain <- liftIO $ atomicModifyIORef' chainRef $ const (newChain, newChain)
      liftDebug ("updated chain: " ++ show setChain)
-- ask other nodes for their chains
-- | Broadcast a 'RequestChain' message to all peers.
requestChain :: MonadIO m => LocalNode -> m ()
requestChain localNode = liftIO $ runProcess localNode $ do
  liftDebug "requesting chain"
  P2P.nsendPeers p2pServiceName RequestChain
-- sends the entire chain to all nodes in the network.
-- receiving nodes should update if this chain is newer than what they have
-- | Broadcast our entire chain to all peers as a 'ReplaceData' message;
-- receivers adopt it only if it is longer than theirs (see replaceChain).
sendChain :: MonadIO m => LocalNode -> IORef [Block] -> m ()
sendChain localNode chainRef = liftIO $ runProcess localNode $ do
  liftDebug "emitting chain"
  chain <- liftIO $ readIORef chainRef
  P2P.nsendPeers p2pServiceName $ ReplaceData chain
-- | Bootstrap the P2P backend on localhost at the given port, joining
-- the given seed node when one is supplied (no type signature in the
-- original; inferred).
runP2P port bootstrapNode = P2P.bootstrapNonBlocking "127.0.0.1" port (maybeToList $ P2P.makeNodeId `fmap` bootstrapNode) initRemoteTable
-- spock http endpoint
-- | HTTP routes: GET / banner, POST /block to mine + broadcast a block,
-- GET /chain to render the current chain.
app :: SpockM () MySession BlockChainState ()
app = do
  get root $
    text "Legion Blockchain Node"
  post "block" $ do
    (BlockChainState ref localNode _) <- getState
    (blockString :: BlockArgs) <- jsonBody'
    liftDebug $ show blockString
    block <- mineBlock . blockBody $ blockString
    _ <- addBlock ref block
    chain <- getBlockChain
    liftDebug $ show chain
    -- tell peers about the freshly mined block
    liftIO $ runProcess localNode $ P2P.nsendPeers p2pServiceName $ UpdateData block
    text . T.pack . pretty $ chain
  get "chain" $ do
    chain <- getBlockChain
    text . T.pack . pretty $ chain
| null | https://raw.githubusercontent.com/aviaviavi/legion/32a8cacebd57ab97761907b23d1396b9a8700c2d/src/Server.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
args for running the main application
explicit entry point to run the application, which is useful
for our tests
wait for messages to come in from the p2p network and respond to them
Type alias
retrieve the current block chain
retrieve the most recent block in the chain
add a block to our blockchain, if it's valid
given some data, create a valid block
if this chain is valid and longer than what we have, update it.
ask other nodes for their chains
sends the entire chain to all nodes in the network.
receiving nodes should update if this chain is newer than what they have
spock http endpoint | # LANGUAGE DeriveGeneric #
# LANGUAGE GADTs #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Server where
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
import Control.Distributed.Process
import Control.Distributed.Process.Node
import Control.Monad (forever)
import Control.Monad.Trans
import Data.IORef
import Data.Maybe
import GHC.Generics
import Lib
import System.Log.Logger
import Text.PrettyPrint.GenericPretty
import Web.Spock
import Web.Spock.Config
import qualified Control.Distributed.Backend.P2P as P2P
import qualified Data.Binary as B
import qualified Data.Text as T
data MainArgs = MainArgs { httpPort :: String
, p2pPort :: String
, seedNode :: Maybe String
}
data MySession = EmptySession
the state for our application , to be used as a spock state
data BlockChainState = BlockChainState { blockChainState :: IORef [Block]
, node :: LocalNode
, pid :: ProcessId
} deriving (Generic)
ADT for data that will be sent across the P2P network
data BlockUpdate = UpdateData Block | ReplaceData [Block] | RequestChain deriving (Generic)
instance B.Binary BlockUpdate
liftDebug :: (MonadIO m) => String -> m ()
liftDebug str = liftIO $ debugM "legion" (show str)
p2pServiceName :: String
p2pServiceName = "updateservice"
runLegion :: MainArgs -> IO ()
runLegion args = do
liftDebug "starting"
(localNode, procId) <- runP2P (p2pPort args) (seedNode args) (return ())
ref <- maybe (newIORef [initialBlock]) (const $ newIORef []) (seedNode args)
spockCfg <- defaultSpockCfg EmptySession PCNoDatabase (BlockChainState ref localNode procId)
_ <- async $ runSpock (read (httpPort args) :: Int) (spock spockCfg Server.app)
runProcess localNode $ do
getSelfPid >>= register p2pServiceName
liftIO $ threadDelay 1000000
_ <- if isJust $ seedNode args
then do
liftDebug "this is not the initial node, requesting a chain"
requestChain localNode
else liftDebug "this is the initial node, not requesting a chain"
forever $ do
message <- expect :: Process BlockUpdate
liftDebug "got a message..."
case message of
(ReplaceData chain) -> do
liftDebug $ "got some stuff to replace: " ++ show chain
replaceChain ref chain
(UpdateData block) -> do
liftDebug $ "got some stuff to add: " ++ show block
addBlock ref block
RequestChain -> do
liftDebug "got chain request"
sendChain localNode ref
type Get stateType returnType
= forall m.
(SpockState m ~ stateType, MonadIO m, HasSpock m)
=> m returnType
getBlockChain :: Get BlockChainState [Block]
getBlockChain = do
(BlockChainState chain _ _) <- getState
liftIO $ readIORef chain
getLatestBlock :: Get BlockChainState Block
getLatestBlock = fmap last getBlockChain
addBlock :: MonadIO m => IORef [Block] -> Block -> m ()
addBlock ref block = do
chain <- liftIO $ readIORef ref
if isValidNewBlock (last chain) block
then do
liftDebug "adding new block"
_ <- liftIO $ atomicModifyIORef' ref $ \b -> (b ++ [block], b ++ [block])
return ()
else
liftDebug "new block not valid. skipping"
mineBlock :: (SpockState m ~ BlockChainState, MonadIO m, HasSpock m) => String -> m Block
mineBlock stringData = do
lastBlock <- getLatestBlock
mineBlockFrom lastBlock stringData
replaceChain :: MonadIO m => IORef [Block] -> [Block] -> m ()
replaceChain chainRef newChain = do
currentChain <- liftIO $ readIORef chainRef
if (not . isValidChain) newChain || length currentChain >= length newChain
then liftDebug $ "chain is not valid for updating!: " ++ show newChain
else do
setChain <- liftIO $ atomicModifyIORef' chainRef $ const (newChain, newChain)
liftDebug ("updated chain: " ++ show setChain)
requestChain :: MonadIO m => LocalNode -> m ()
requestChain localNode = liftIO $ runProcess localNode $ do
liftDebug "requesting chain"
P2P.nsendPeers p2pServiceName RequestChain
sendChain :: MonadIO m => LocalNode -> IORef [Block] -> m ()
sendChain localNode chainRef = liftIO $ runProcess localNode $ do
liftDebug "emitting chain"
chain <- liftIO $ readIORef chainRef
P2P.nsendPeers p2pServiceName $ ReplaceData chain
runP2P port bootstrapNode = P2P.bootstrapNonBlocking "127.0.0.1" port (maybeToList $ P2P.makeNodeId `fmap` bootstrapNode) initRemoteTable
app :: SpockM () MySession BlockChainState ()
app = do
get root $
text "Legion Blockchain Node"
post "block" $ do
(BlockChainState ref localNode _) <- getState
(blockString :: BlockArgs) <- jsonBody'
liftDebug $ show blockString
block <- mineBlock . blockBody $ blockString
_ <- addBlock ref block
chain <- getBlockChain
liftDebug $ show chain
liftIO $ runProcess localNode $ P2P.nsendPeers p2pServiceName $ UpdateData block
text . T.pack . pretty $ chain
get "chain" $ do
chain <- getBlockChain
text . T.pack . pretty $ chain
|
125d30883b03286fd02f0305c25dcb8c480a596093ad480377fb3d76f93b510e | cedlemo/OCaml-GI-ctypes-bindings-generator | Dialog.mli | open Ctypes
(* Ctypes binding interface for GtkDialog (auto-generated style;
   semantics presumably follow the GTK C API — confirm against GTK docs). *)
type t
val t_typ : t typ
val create :
  unit -> Widget.t ptr
val add_action_widget :
  t -> Widget.t ptr -> int32 -> unit
(* Returns the newly created button widget. *)
val add_button :
  t -> string -> int32 -> Widget.t ptr
val get_action_area :
  t -> Widget.t ptr
val get_content_area :
  t -> Box.t ptr
val get_header_bar :
  t -> Widget.t ptr
val get_response_for_widget :
  t -> Widget.t ptr -> int32
(* None when no widget is mapped to the given response id. *)
val get_widget_for_response :
  t -> int32 -> Widget.t ptr option
val response :
  t -> int32 -> unit
val run :
  t -> int32
(*Not implemented gtk_dialog_set_alternative_button_order_from_array type C Array type for Types.Array tag not implemented*)
val set_default_response :
  t -> int32 -> unit
val set_response_sensitive :
  t -> int32 -> bool -> unit
|
606f3e3c140601af54ad3cc2f07252e7317e54b517fa43c861479ba8b03e7529 | toddaaro/advanced-dan | miniKanren.scm | ;; In order to use "basic" miniKanren
(load "ck.scm")
(define-syntax run1 (syntax-rules () ((_ (x) g0 g ...) (run 1 (x) g0 g ...))))
(define-syntax run2 (syntax-rules () ((_ (x) g0 g ...) (run 2 (x) g0 g ...))))
(define-syntax run3 (syntax-rules () ((_ (x) g0 g ...) (run 3 (x) g0 g ...))))
(define-syntax run4 (syntax-rules () ((_ (x) g0 g ...) (run 4 (x) g0 g ...))))
(define-syntax run5 (syntax-rules () ((_ (x) g0 g ...) (run 5 (x) g0 g ...))))
(define-syntax run6 (syntax-rules () ((_ (x) g0 g ...) (run 6 (x) g0 g ...))))
(define-syntax run7 (syntax-rules () ((_ (x) g0 g ...) (run 7 (x) g0 g ...))))
(define-syntax run8 (syntax-rules () ((_ (x) g0 g ...) (run 8 (x) g0 g ...))))
(define-syntax run9 (syntax-rules () ((_ (x) g0 g ...) (run 9 (x) g0 g ...))))
(define-syntax run10 (syntax-rules () ((_ (x) g0 g ...) (run 10 (x) g0 g ...))))
(define-syntax run11 (syntax-rules () ((_ (x) g0 g ...) (run 11 (x) g0 g ...))))
(define-syntax run12 (syntax-rules () ((_ (x) g0 g ...) (run 12 (x) g0 g ...))))
(define-syntax run13 (syntax-rules () ((_ (x) g0 g ...) (run 13 (x) g0 g ...))))
(define-syntax run14 (syntax-rules () ((_ (x) g0 g ...) (run 14 (x) g0 g ...))))
(define-syntax run15 (syntax-rules () ((_ (x) g0 g ...) (run 15 (x) g0 g ...))))
(define-syntax run16 (syntax-rules () ((_ (x) g0 g ...) (run 16 (x) g0 g ...))))
(define-syntax run17 (syntax-rules () ((_ (x) g0 g ...) (run 17 (x) g0 g ...))))
(define-syntax run18 (syntax-rules () ((_ (x) g0 g ...) (run 18 (x) g0 g ...))))
(define-syntax run19 (syntax-rules () ((_ (x) g0 g ...) (run 19 (x) g0 g ...))))
(define-syntax run20 (syntax-rules () ((_ (x) g0 g ...) (run 20 (x) g0 g ...))))
(define-syntax run21 (syntax-rules () ((_ (x) g0 g ...) (run 21 (x) g0 g ...))))
(define-syntax run22 (syntax-rules () ((_ (x) g0 g ...) (run 22 (x) g0 g ...))))
(define-syntax run23 (syntax-rules () ((_ (x) g0 g ...) (run 23 (x) g0 g ...))))
(define-syntax run24 (syntax-rules () ((_ (x) g0 g ...) (run 24 (x) g0 g ...))))
(define-syntax run25 (syntax-rules () ((_ (x) g0 g ...) (run 25 (x) g0 g ...))))
(define-syntax run26 (syntax-rules () ((_ (x) g0 g ...) (run 26 (x) g0 g ...))))
(define-syntax run27 (syntax-rules () ((_ (x) g0 g ...) (run 27 (x) g0 g ...))))
(define-syntax run28 (syntax-rules () ((_ (x) g0 g ...) (run 28 (x) g0 g ...))))
(define-syntax run29 (syntax-rules () ((_ (x) g0 g ...) (run 29 (x) g0 g ...))))
(define-syntax run30 (syntax-rules () ((_ (x) g0 g ...) (run 30 (x) g0 g ...))))
(define-syntax run31 (syntax-rules () ((_ (x) g0 g ...) (run 31 (x) g0 g ...))))
(define-syntax run32 (syntax-rules () ((_ (x) g0 g ...) (run 32 (x) g0 g ...))))
(define-syntax run33 (syntax-rules () ((_ (x) g0 g ...) (run 33 (x) g0 g ...))))
(define-syntax run34 (syntax-rules () ((_ (x) g0 g ...) (run 34 (x) g0 g ...))))
(define-syntax run35 (syntax-rules () ((_ (x) g0 g ...) (run 35 (x) g0 g ...))))
(define-syntax run36 (syntax-rules () ((_ (x) g0 g ...) (run 36 (x) g0 g ...))))
(define-syntax run37 (syntax-rules () ((_ (x) g0 g ...) (run 37 (x) g0 g ...))))
(define-syntax run38 (syntax-rules () ((_ (x) g0 g ...) (run 38 (x) g0 g ...))))
(define-syntax run39 (syntax-rules () ((_ (x) g0 g ...) (run 39 (x) g0 g ...))))
(define-syntax run40 (syntax-rules () ((_ (x) g0 g ...) (run 40 (x) g0 g ...))))
(define caro
(lambda (p a)
(fresh (d)
(== (cons a d) p))))
(define cdro
(lambda (p d)
(fresh (a)
(== (cons a d) p))))
(define conso
(lambda (a d p)
(== (cons a d) p)))
(define nullo
(lambda (x)
(== '() x)))
(define eqo
(lambda (x y)
(== x y)))
(define pairo
(lambda (p)
(fresh (a d)
(conso a d p))))
(define membero
(lambda (x l)
(conde
((fresh (a)
(caro l a)
(== a x)))
((fresh (d)
(cdro l d)
(membero x d))))))
(define rembero
(lambda (x l out)
(conde
((nullo l) (== '() out))
((caro l x) (cdro l out))
((fresh (a d res)
(conso a d l)
(rembero x d res)
(conso a res out))))))
(define appendo
(lambda (l s out)
(conde
((nullo l) (== s out))
((fresh (a d res)
(conso a d l)
(conso a res out)
(appendo d s res))))))
;; flatteno: out is a flattening of the (possibly nested) structure s.
(define flatteno
  (lambda (s out)
    (conde
      ((nullo s) (== '() out))
      ((pairo s)
       (fresh (a d res-a res-d)
         (conso a d s)
         (flatteno a res-a)
         (flatteno d res-d)
         (appendo res-a res-d out)))
      ((conso s '() out)))))
;; anyo: succeed as many times as g succeeds (possibly forever).
(define anyo
  (lambda (g)
    (conde
      (g)
      ((anyo g)))))
;; nevero: a goal that never succeeds but never terminates.
(define nevero (anyo fail))
;; alwayso: a goal that succeeds an unbounded number of times.
(define alwayso (anyo succeed))
;; build-num: encode a non-negative integer as a little-endian list of
;; bits, e.g. 6 -> (0 1 1), 0 -> ().
(define build-num
  (lambda (n)
    (cond
      ((zero? n) '())
      ((even? n)
       (cons 0 (build-num (quotient n 2))))
      (else
       (cons 1 (build-num (quotient (- n 1) 2)))))))
;; poso: the bit-list numeral n is positive (non-empty).
(define poso
  (lambda (n)
    (fresh (a d)
      (== `(,a . ,d) n))))
;; >1o: the bit-list numeral n is greater than one (two or more bits).
(define >1o
  (lambda (n)
    (fresh (a ad dd)
      (== `(,a ,ad . ,dd) n))))
;; full-addero: one-bit full adder; b + x + y = r with carry-out c.
(define full-addero
  (lambda (b x y r c)
    (conde
      ((== 0 b) (== 0 x) (== 0 y) (== 0 r) (== 0 c))
      ((== 1 b) (== 0 x) (== 0 y) (== 1 r) (== 0 c))
      ((== 0 b) (== 1 x) (== 0 y) (== 1 r) (== 0 c))
      ((== 1 b) (== 1 x) (== 0 y) (== 0 r) (== 1 c))
      ((== 0 b) (== 0 x) (== 1 y) (== 1 r) (== 0 c))
      ((== 1 b) (== 0 x) (== 1 y) (== 0 r) (== 1 c))
      ((== 0 b) (== 1 x) (== 1 y) (== 0 r) (== 1 c))
      ((== 1 b) (== 1 x) (== 1 y) (== 1 r) (== 1 c)))))
;; addero: d + n + m = r over little-endian bit-list numerals
;; (d is a carry-in bit).
(define addero
  (lambda (d n m r)
    (conde
      ((== 0 d) (== '() m) (== n r))
      ((== 0 d) (== '() n) (== m r)
       (poso m))
      ((== 1 d) (== '() m)
       (addero 0 n '(1) r))
      ((== 1 d) (== '() n) (poso m)
       (addero 0 '(1) m r))
      ((== '(1) n) (== '(1) m)
       (fresh (a c)
         (== `(,a ,c) r)
         (full-addero d 1 1 a c)))
      ((== '(1) n) (gen-addero d n m r))
      ((== '(1) m) (>1o n) (>1o r)
       (addero d '(1) n r))
      ((>1o n) (gen-addero d n m r)))))
;; gen-addero: general case of addero where n and m both have bits.
(define gen-addero
  (lambda (d n m r)
    (fresh (a b c e x y z)
      (== `(,a . ,x) n)
      (== `(,b . ,y) m) (poso y)
      (== `(,c . ,z) r) (poso z)
      (full-addero d a b c e)
      (addero e x y z))))
;; pluso: n + m = k over bit-list numerals.
(define pluso
  (lambda (n m k)
    (addero 0 n m k)))
;; minuso: n - m = k, i.e. m + k = n.
(define minuso
  (lambda (n m k)
    (pluso m k n)))
;; *o: n * m = p over bit-list numerals.
(define *o
  (lambda (n m p)
    (conde
      ((== '() n) (== '() p))
      ((poso n) (== '() m) (== '() p))
      ((== '(1) n) (poso m) (== m p))
      ((>1o n) (== '(1) m) (== n p))
      ((fresh (x z)
         (== `(0 . ,x) n) (poso x)
         (== `(0 . ,z) p) (poso z)
         (>1o m)
         (*o x m z)))
      ((fresh (x y)
         (== `(1 . ,x) n) (poso x)
         (== `(0 . ,y) m) (poso y)
         (*o m n p)))
      ((fresh (x y)
         (== `(1 . ,x) n) (poso x)
         (== `(1 . ,y) m) (poso y)
         (odd-*o x n m p))))))
;; odd-*o: multiplication helper for odd n = 2x + 1: p = x*m*2 + m.
(define odd-*o
  (lambda (x n m p)
    (fresh (q)
      (bound-*o q p n m)
      (*o x m q)
      (pluso `(0 . ,q) m p))))
;; bound-*o: bounds the length of q so the search terminates.
(define bound-*o
  (lambda (q p n m)
    (conde
      ((nullo q) (pairo p))
      ((fresh (x y z)
         (cdro q x)
         (cdro p y)
         (conde
           ((nullo n)
            (cdro m z)
            (bound-*o x y z '()))
           ((cdro n z)
            (bound-*o x y z m))))))))
;; =lo: n and m have the same bit length.
(define =lo
  (lambda (n m)
    (conde
      ((== '() n) (== '() m))
      ((== '(1) n) (== '(1) m))
      ((fresh (a x b y)
         (== `(,a . ,x) n) (poso x)
         (== `(,b . ,y) m) (poso y)
         (=lo x y))))))
;; <lo: n has strictly fewer bits than m.
(define <lo
  (lambda (n m)
    (conde
      ((== '() n) (poso m))
      ((== '(1) n) (>1o m))
      ((fresh (a x b y)
         (== `(,a . ,x) n) (poso x)
         (== `(,b . ,y) m) (poso y)
         (<lo x y))))))
;; <=lo: n has no more bits than m.
(define <=lo
  (lambda (n m)
    (conde
      ((=lo n m))
      ((<lo n m)))))
;; <o: numeral n is strictly less than m.
(define <o
  (lambda (n m)
    (conde
      ((<lo n m))
      ((=lo n m)
       (fresh (x)
         (poso x)
         (pluso n x m))))))
;; <=o: numeral n is less than or equal to m.
(define <=o
  (lambda (n m)
    (conde
      ((== n m))
      ((<o n m)))))
;; /o: n = m*q + r with 0 <= r < m (division with remainder).
(define /o
  (lambda (n m q r)
    (conde
      ((== r n) (== '() q) (<o n m))
      ((== '(1) q) (=lo n m) (pluso r m n)
       (<o r m))
      ((<lo m n)
       (<o r m)
       (poso q)
       (fresh (nh nl qh ql qlm qlmr rr rh)
         (splito n r nl nh)
         (splito q r ql qh)
         (conde
           ((== '() nh)
            (== '() qh)
            (minuso nl r qlm)
            (*o ql m qlm))
           ((poso nh)
            (*o ql m qlm)
            (pluso qlm r qlmr)
            (minuso qlmr nl rr)
            (splito rr r '() rh)
            (/o nh m qh rh))))))))
;; splito: split numeral n (at a position determined by the length of r)
;; into a low part l and a high part h.
(define splito
  (lambda (n r l h)
    (conde
      ((== '() n) (== '() h) (== '() l))
      ((fresh (b n^)
         (== `(0 ,b . ,n^) n)
         (== '() r)
         (== `(,b . ,n^) h)
         (== '() l)))
      ((fresh (n^)
         (== `(1 . ,n^) n)
         (== '() r)
         (== n^ h)
         (== '(1) l)))
      ((fresh (b n^ a r^)
         (== `(0 ,b . ,n^) n)
         (== `(,a . ,r^) r)
         (== '() l)
         (splito `(,b . ,n^) r^ '() h)))
      ((fresh (n^ a r^)
         (== `(1 . ,n^) n)
         (== `(,a . ,r^) r)
         (== '(1) l)
         (splito n^ r^ '() h)))
      ((fresh (b n^ a r^ l^)
         (== `(,b . ,n^) n)
         (== `(,a . ,r^) r)
         (== `(,b . ,l^) l)
         (poso l^)
         (splito n^ r^ l^ h))))))
;; logo: n = b^q + r with 0 <= r and q maximal (discrete logarithm).
(define logo
  (lambda (n b q r)
    (conde
      ((== '(1) n) (poso b) (== '() q) (== '() r))
      ((== '() q) (<o n b) (pluso r '(1) n))
      ((== '(1) q) (>1o b) (=lo n b) (pluso r b n))
      ((== '(1) b) (poso q) (pluso r '(1) n))
      ((== '() b) (poso q) (== r n))
      ((== '(0 1) b)
       (fresh (a ad dd)
         (poso dd)
         (== `(,a ,ad . ,dd) n)
         (exp2 n '() q)
         (fresh (s)
           (splito n dd r s))))
      ((fresh (a ad add ddd)
         (conde
           ((== '(1 1) b))
           ((== `(,a ,ad ,add . ,ddd) b))))
       (<lo b n)
       (fresh (bw1 bw nw nw1 ql1 ql s)
         (exp2 b '() bw1)
         (pluso bw1 '(1) bw)
         (<lo q n)
         (fresh (q1 bwq1)
           (pluso q '(1) q1)
           (*o bw q1 bwq1)
           (<o nw1 bwq1))
         (exp2 n '() nw1)
         (pluso nw1 '(1) nw)
         (/o nw bw ql1 s)
         (pluso ql '(1) ql1)
         (<=lo ql q)
         (fresh (bql qh s qdh qd)
           (repeated-mul b ql bql)
           (/o nw bw1 qh s)
           (pluso ql qdh qh)
           (pluso ql qd q)
           (<=o qd qdh)
           (fresh (bqd bq1 bq)
             (repeated-mul b qd bqd)
             (*o bql bqd bq)
             (*o b bq bq1)
             (pluso bq r n)
             (<o n bq1))))))))
;; exp2: q = floor(log2 n), computed with accumulator b.
(define exp2
  (lambda (n b q)
    (conde
      ((== '(1) n) (== '() q))
      ((>1o n) (== '(1) q)
       (fresh (s)
         (splito n b s '(1))))
      ((fresh (q1 b2)
         (== `(0 . ,q1) q)
         (poso q1)
         (<lo b n)
         (appendo b `(1 . ,b) b2)
         (exp2 n b2 q1)))
      ((fresh (q1 nh b2 s)
         (== `(1 . ,q1) q)
         (poso q1)
         (poso nh)
         (splito n b s nh)
         (appendo b `(1 . ,b) b2)
         (exp2 nh b2 q1))))))
;; repeated-mul: nq = n^q by repeated multiplication.
(define repeated-mul
  (lambda (n q nq)
    (conde
      ((poso n) (== '() q) (== '(1) nq))
      ((== '(1) q) (== n nq))
      ((>1o q)
       (fresh (q1 nq1)
         (pluso q1 '(1) q)
         (repeated-mul n q1 nq1)
         (*o nq1 n nq))))))
(define expo
(lambda (b q n)
(logo n b q '())))
(define prnt
(lambda (vars)
(lambda (expr)
(lambda (s)
(begin
(write expr)
(newline)
(write (map (lambda (p)
`(,(car p) ,(walk* (cdr p) s)))
vars))
(newline)
(succeed s)))))) | null | https://raw.githubusercontent.com/toddaaro/advanced-dan/5d6c0762d998aa37774e0414a0f37404e804b536/valo/miniKanren.scm | scheme | In order to use "basic" miniKanren |
(load "ck.scm")
(define-syntax run1 (syntax-rules () ((_ (x) g0 g ...) (run 1 (x) g0 g ...))))
(define-syntax run2 (syntax-rules () ((_ (x) g0 g ...) (run 2 (x) g0 g ...))))
(define-syntax run3 (syntax-rules () ((_ (x) g0 g ...) (run 3 (x) g0 g ...))))
(define-syntax run4 (syntax-rules () ((_ (x) g0 g ...) (run 4 (x) g0 g ...))))
(define-syntax run5 (syntax-rules () ((_ (x) g0 g ...) (run 5 (x) g0 g ...))))
(define-syntax run6 (syntax-rules () ((_ (x) g0 g ...) (run 6 (x) g0 g ...))))
(define-syntax run7 (syntax-rules () ((_ (x) g0 g ...) (run 7 (x) g0 g ...))))
(define-syntax run8 (syntax-rules () ((_ (x) g0 g ...) (run 8 (x) g0 g ...))))
(define-syntax run9 (syntax-rules () ((_ (x) g0 g ...) (run 9 (x) g0 g ...))))
(define-syntax run10 (syntax-rules () ((_ (x) g0 g ...) (run 10 (x) g0 g ...))))
(define-syntax run11 (syntax-rules () ((_ (x) g0 g ...) (run 11 (x) g0 g ...))))
(define-syntax run12 (syntax-rules () ((_ (x) g0 g ...) (run 12 (x) g0 g ...))))
(define-syntax run13 (syntax-rules () ((_ (x) g0 g ...) (run 13 (x) g0 g ...))))
(define-syntax run14 (syntax-rules () ((_ (x) g0 g ...) (run 14 (x) g0 g ...))))
(define-syntax run15 (syntax-rules () ((_ (x) g0 g ...) (run 15 (x) g0 g ...))))
(define-syntax run16 (syntax-rules () ((_ (x) g0 g ...) (run 16 (x) g0 g ...))))
(define-syntax run17 (syntax-rules () ((_ (x) g0 g ...) (run 17 (x) g0 g ...))))
(define-syntax run18 (syntax-rules () ((_ (x) g0 g ...) (run 18 (x) g0 g ...))))
(define-syntax run19 (syntax-rules () ((_ (x) g0 g ...) (run 19 (x) g0 g ...))))
(define-syntax run20 (syntax-rules () ((_ (x) g0 g ...) (run 20 (x) g0 g ...))))
(define-syntax run21 (syntax-rules () ((_ (x) g0 g ...) (run 21 (x) g0 g ...))))
(define-syntax run22 (syntax-rules () ((_ (x) g0 g ...) (run 22 (x) g0 g ...))))
(define-syntax run23 (syntax-rules () ((_ (x) g0 g ...) (run 23 (x) g0 g ...))))
(define-syntax run24 (syntax-rules () ((_ (x) g0 g ...) (run 24 (x) g0 g ...))))
(define-syntax run25 (syntax-rules () ((_ (x) g0 g ...) (run 25 (x) g0 g ...))))
(define-syntax run26 (syntax-rules () ((_ (x) g0 g ...) (run 26 (x) g0 g ...))))
(define-syntax run27 (syntax-rules () ((_ (x) g0 g ...) (run 27 (x) g0 g ...))))
(define-syntax run28 (syntax-rules () ((_ (x) g0 g ...) (run 28 (x) g0 g ...))))
(define-syntax run29 (syntax-rules () ((_ (x) g0 g ...) (run 29 (x) g0 g ...))))
(define-syntax run30 (syntax-rules () ((_ (x) g0 g ...) (run 30 (x) g0 g ...))))
(define-syntax run31 (syntax-rules () ((_ (x) g0 g ...) (run 31 (x) g0 g ...))))
(define-syntax run32 (syntax-rules () ((_ (x) g0 g ...) (run 32 (x) g0 g ...))))
(define-syntax run33 (syntax-rules () ((_ (x) g0 g ...) (run 33 (x) g0 g ...))))
(define-syntax run34 (syntax-rules () ((_ (x) g0 g ...) (run 34 (x) g0 g ...))))
(define-syntax run35 (syntax-rules () ((_ (x) g0 g ...) (run 35 (x) g0 g ...))))
(define-syntax run36 (syntax-rules () ((_ (x) g0 g ...) (run 36 (x) g0 g ...))))
(define-syntax run37 (syntax-rules () ((_ (x) g0 g ...) (run 37 (x) g0 g ...))))
(define-syntax run38 (syntax-rules () ((_ (x) g0 g ...) (run 38 (x) g0 g ...))))
(define-syntax run39 (syntax-rules () ((_ (x) g0 g ...) (run 39 (x) g0 g ...))))
(define-syntax run40 (syntax-rules () ((_ (x) g0 g ...) (run 40 (x) g0 g ...))))
(define caro
(lambda (p a)
(fresh (d)
(== (cons a d) p))))
(define cdro
(lambda (p d)
(fresh (a)
(== (cons a d) p))))
(define conso
(lambda (a d p)
(== (cons a d) p)))
(define nullo
(lambda (x)
(== '() x)))
(define eqo
(lambda (x y)
(== x y)))
(define pairo
(lambda (p)
(fresh (a d)
(conso a d p))))
(define membero
(lambda (x l)
(conde
((fresh (a)
(caro l a)
(== a x)))
((fresh (d)
(cdro l d)
(membero x d))))))
(define rembero
(lambda (x l out)
(conde
((nullo l) (== '() out))
((caro l x) (cdro l out))
((fresh (a d res)
(conso a d l)
(rembero x d res)
(conso a res out))))))
(define appendo
(lambda (l s out)
(conde
((nullo l) (== s out))
((fresh (a d res)
(conso a d l)
(conso a res out)
(appendo d s res))))))
(define flatteno
(lambda (s out)
(conde
((nullo s) (== '() out))
((pairo s)
(fresh (a d res-a res-d)
(conso a d s)
(flatteno a res-a)
(flatteno d res-d)
(appendo res-a res-d out)))
((conso s '() out)))))
(define anyo
(lambda (g)
(conde
(g)
((anyo g)))))
(define nevero (anyo fail))
(define alwayso (anyo succeed))
(define build-num
(lambda (n)
(cond
((odd? n)
(cons 1
(build-num (quotient (- n 1) 2))))
((and (not (zero? n)) (even? n))
(cons 0
(build-num (quotient n 2))))
((zero? n) '()))))
(define poso
(lambda (n)
(fresh (a d)
(== `(,a . ,d) n))))
(define >1o
(lambda (n)
(fresh (a ad dd)
(== `(,a ,ad . ,dd) n))))
(define full-addero
(lambda (b x y r c)
(conde
((== 0 b) (== 0 x) (== 0 y) (== 0 r) (== 0 c))
((== 1 b) (== 0 x) (== 0 y) (== 1 r) (== 0 c))
((== 0 b) (== 1 x) (== 0 y) (== 1 r) (== 0 c))
((== 1 b) (== 1 x) (== 0 y) (== 0 r) (== 1 c))
((== 0 b) (== 0 x) (== 1 y) (== 1 r) (== 0 c))
((== 1 b) (== 0 x) (== 1 y) (== 0 r) (== 1 c))
((== 0 b) (== 1 x) (== 1 y) (== 0 r) (== 1 c))
((== 1 b) (== 1 x) (== 1 y) (== 1 r) (== 1 c)))))
(define addero
(lambda (d n m r)
(conde
((== 0 d) (== '() m) (== n r))
((== 0 d) (== '() n) (== m r)
(poso m))
((== 1 d) (== '() m)
(addero 0 n '(1) r))
((== 1 d) (== '() n) (poso m)
(addero 0 '(1) m r))
((== '(1) n) (== '(1) m)
(fresh (a c)
(== `(,a ,c) r)
(full-addero d 1 1 a c)))
((== '(1) n) (gen-addero d n m r))
((== '(1) m) (>1o n) (>1o r)
(addero d '(1) n r))
((>1o n) (gen-addero d n m r)))))
(define gen-addero
(lambda (d n m r)
(fresh (a b c e x y z)
(== `(,a . ,x) n)
(== `(,b . ,y) m) (poso y)
(== `(,c . ,z) r) (poso z)
(full-addero d a b c e)
(addero e x y z))))
(define pluso
(lambda (n m k)
(addero 0 n m k)))
(define minuso
(lambda (n m k)
(pluso m k n)))
(define *o
(lambda (n m p)
(conde
((== '() n) (== '() p))
((poso n) (== '() m) (== '() p))
((== '(1) n) (poso m) (== m p))
((>1o n) (== '(1) m) (== n p))
((fresh (x z)
(== `(0 . ,x) n) (poso x)
(== `(0 . ,z) p) (poso z)
(>1o m)
(*o x m z)))
((fresh (x y)
(== `(1 . ,x) n) (poso x)
(== `(0 . ,y) m) (poso y)
(*o m n p)))
((fresh (x y)
(== `(1 . ,x) n) (poso x)
(== `(1 . ,y) m) (poso y)
(odd-*o x n m p))))))
(define odd-*o
(lambda (x n m p)
(fresh (q)
(bound-*o q p n m)
(*o x m q)
(pluso `(0 . ,q) m p))))
(define bound-*o
(lambda (q p n m)
(conde
((nullo q) (pairo p))
((fresh (x y z)
(cdro q x)
(cdro p y)
(conde
((nullo n)
(cdro m z)
(bound-*o x y z '()))
((cdro n z)
(bound-*o x y z m))))))))
(define =lo
(lambda (n m)
(conde
((== '() n) (== '() m))
((== '(1) n) (== '(1) m))
((fresh (a x b y)
(== `(,a . ,x) n) (poso x)
(== `(,b . ,y) m) (poso y)
(=lo x y))))))
(define <lo
(lambda (n m)
(conde
((== '() n) (poso m))
((== '(1) n) (>1o m))
((fresh (a x b y)
(== `(,a . ,x) n) (poso x)
(== `(,b . ,y) m) (poso y)
(<lo x y))))))
(define <=lo
(lambda (n m)
(conde
((=lo n m))
((<lo n m)))))
(define <o
(lambda (n m)
(conde
((<lo n m))
((=lo n m)
(fresh (x)
(poso x)
(pluso n x m))))))
(define <=o
(lambda (n m)
(conde
((== n m))
((<o n m)))))
(define /o
(lambda (n m q r)
(conde
((== r n) (== '() q) (<o n m))
((== '(1) q) (=lo n m) (pluso r m n)
(<o r m))
((<lo m n)
(<o r m)
(poso q)
(fresh (nh nl qh ql qlm qlmr rr rh)
(splito n r nl nh)
(splito q r ql qh)
(conde
((== '() nh)
(== '() qh)
(minuso nl r qlm)
(*o ql m qlm))
((poso nh)
(*o ql m qlm)
(pluso qlm r qlmr)
(minuso qlmr nl rr)
(splito rr r '() rh)
(/o nh m qh rh))))))))
(define splito
(lambda (n r l h)
(conde
((== '() n) (== '() h) (== '() l))
((fresh (b n^)
(== `(0 ,b . ,n^) n)
(== '() r)
(== `(,b . ,n^) h)
(== '() l)))
((fresh (n^)
(== `(1 . ,n^) n)
(== '() r)
(== n^ h)
(== '(1) l)))
((fresh (b n^ a r^)
(== `(0 ,b . ,n^) n)
(== `(,a . ,r^) r)
(== '() l)
(splito `(,b . ,n^) r^ '() h)))
((fresh (n^ a r^)
(== `(1 . ,n^) n)
(== `(,a . ,r^) r)
(== '(1) l)
(splito n^ r^ '() h)))
((fresh (b n^ a r^ l^)
(== `(,b . ,n^) n)
(== `(,a . ,r^) r)
(== `(,b . ,l^) l)
(poso l^)
(splito n^ r^ l^ h))))))
(define logo
(lambda (n b q r)
(conde
((== '(1) n) (poso b) (== '() q) (== '() r))
((== '() q) (<o n b) (pluso r '(1) n))
((== '(1) q) (>1o b) (=lo n b) (pluso r b n))
((== '(1) b) (poso q) (pluso r '(1) n))
((== '() b) (poso q) (== r n))
((== '(0 1) b)
(fresh (a ad dd)
(poso dd)
(== `(,a ,ad . ,dd) n)
(exp2 n '() q)
(fresh (s)
(splito n dd r s))))
((fresh (a ad add ddd)
(conde
((== '(1 1) b))
((== `(,a ,ad ,add . ,ddd) b))))
(<lo b n)
(fresh (bw1 bw nw nw1 ql1 ql s)
(exp2 b '() bw1)
(pluso bw1 '(1) bw)
(<lo q n)
(fresh (q1 bwq1)
(pluso q '(1) q1)
(*o bw q1 bwq1)
(<o nw1 bwq1))
(exp2 n '() nw1)
(pluso nw1 '(1) nw)
(/o nw bw ql1 s)
(pluso ql '(1) ql1)
(<=lo ql q)
(fresh (bql qh s qdh qd)
(repeated-mul b ql bql)
(/o nw bw1 qh s)
(pluso ql qdh qh)
(pluso ql qd q)
(<=o qd qdh)
(fresh (bqd bq1 bq)
(repeated-mul b qd bqd)
(*o bql bqd bq)
(*o b bq bq1)
(pluso bq r n)
(<o n bq1))))))))
(define exp2
(lambda (n b q)
(conde
((== '(1) n) (== '() q))
((>1o n) (== '(1) q)
(fresh (s)
(splito n b s '(1))))
((fresh (q1 b2)
(== `(0 . ,q1) q)
(poso q1)
(<lo b n)
(appendo b `(1 . ,b) b2)
(exp2 n b2 q1)))
((fresh (q1 nh b2 s)
(== `(1 . ,q1) q)
(poso q1)
(poso nh)
(splito n b s nh)
(appendo b `(1 . ,b) b2)
(exp2 nh b2 q1))))))
(define repeated-mul
(lambda (n q nq)
(conde
((poso n) (== '() q) (== '(1) nq))
((== '(1) q) (== n nq))
((>1o q)
(fresh (q1 nq1)
(pluso q1 '(1) q)
(repeated-mul n q1 nq1)
(*o nq1 n nq))))))
(define expo
(lambda (b q n)
(logo n b q '())))
(define prnt
(lambda (vars)
(lambda (expr)
(lambda (s)
(begin
(write expr)
(newline)
(write (map (lambda (p)
`(,(car p) ,(walk* (cdr p) s)))
vars))
(newline)
(succeed s)))))) |
2ed38e41249f6765ef319c80e6eaca5fc7eaf5a5f7fd013eeae7ed494d276437 | yjqww6/drcomplete | walk-bound.rkt | #lang racket/base
(require racket/set racket/sequence syntax/kerncase "utils.rkt")
(cond-use-bound
(define (walk form phase mods)
(kernel-syntax-case/phase
form phase
[(module ?id ?path (_ ?form ...))
(walk* #'(?form ...) 0 (cons #'?path mods))]
[(module* ?id ?f (_ ?form ...))
(eq? (syntax-e #'?f) #f)
(walk* #'(?form ...) phase (cons #'?f mods))]
[(module* ?id ?path (_ ?form ...))
(walk* #'(?form ...) 0 (cons #'?path mods))]
[(begin ?form ...)
(walk* #'(?form ...) phase mods)]
[(begin-for-syntax ?form ...)
(walk* #'(?form ...) (add1 phase) mods)]
[_ mods]))
(define (walk* form* phase mods)
(for/fold ([mods mods])
([form (in-syntax form*)])
(walk form phase mods)))
(define (walk-module fpe)
(define mods
(kernel-syntax-case fpe #f
[(module ?id ?path (#%plain-module-begin ?form ...))
(walk* #'(?form ...) (namespace-base-phase) (list #'?path))]))
(define ids (mutable-set))
(for* ([mod (in-list mods)]
#:when (visible? mod)
[phase (in-list (syntax-bound-phases mod))]
[sym (in-list (syntax-bound-symbols mod phase))])
(set-add! ids sym))
ids)
(provide walk-module)) | null | https://raw.githubusercontent.com/yjqww6/drcomplete/9bb41e031280794d8e395d834ff49ebfc52569a5/drcomplete-required/private/walk-bound.rkt | racket | #lang racket/base
(require racket/set racket/sequence syntax/kerncase "utils.rkt")
(cond-use-bound
(define (walk form phase mods)
(kernel-syntax-case/phase
form phase
[(module ?id ?path (_ ?form ...))
(walk* #'(?form ...) 0 (cons #'?path mods))]
[(module* ?id ?f (_ ?form ...))
(eq? (syntax-e #'?f) #f)
(walk* #'(?form ...) phase (cons #'?f mods))]
[(module* ?id ?path (_ ?form ...))
(walk* #'(?form ...) 0 (cons #'?path mods))]
[(begin ?form ...)
(walk* #'(?form ...) phase mods)]
[(begin-for-syntax ?form ...)
(walk* #'(?form ...) (add1 phase) mods)]
[_ mods]))
(define (walk* form* phase mods)
(for/fold ([mods mods])
([form (in-syntax form*)])
(walk form phase mods)))
(define (walk-module fpe)
(define mods
(kernel-syntax-case fpe #f
[(module ?id ?path (#%plain-module-begin ?form ...))
(walk* #'(?form ...) (namespace-base-phase) (list #'?path))]))
(define ids (mutable-set))
(for* ([mod (in-list mods)]
#:when (visible? mod)
[phase (in-list (syntax-bound-phases mod))]
[sym (in-list (syntax-bound-symbols mod phase))])
(set-add! ids sym))
ids)
(provide walk-module)) | |
f71ccf531dd3007843d35c322c2414f9413d0b887ca3884dd875a3d44d68b2c7 | bortexz/graphcom | build.clj | (ns build
(:refer-clojure :exclude [test])
(:require [org.corfield.build :as bb]))
(def lib 'io.github.bortexz/graphcom)
(def version "0.2.0")
(defn- gha-output
[k v]
(println (str "::set-output name=" k "::" v)))
(defn test "Run the tests." [opts]
(bb/run-tests opts)
(bb/run-tests (assoc opts :aliases [:cljs-test])))
(defn ci "Run the CI pipeline of tests (and build the JAR)." [opts]
(-> opts
(assoc :lib lib :version version)
(bb/run-tests)
(bb/clean)
(assoc :src-pom "pom-template.xml")
(bb/jar)))
(defn install "Install the JAR locally." [opts]
(-> opts
(assoc :lib lib :version version)
(bb/install)))
(defn deploy "Deploy the JAR to Clojars." [opts]
(-> opts
(assoc :lib lib :version version)
(bb/deploy))
(gha-output "version" version))
| null | https://raw.githubusercontent.com/bortexz/graphcom/50439b9d40f89a9bf4ef14ddc449411ef2661e88/build.clj | clojure | (ns build
(:refer-clojure :exclude [test])
(:require [org.corfield.build :as bb]))
(def lib 'io.github.bortexz/graphcom)
(def version "0.2.0")
(defn- gha-output
[k v]
(println (str "::set-output name=" k "::" v)))
(defn test "Run the tests." [opts]
(bb/run-tests opts)
(bb/run-tests (assoc opts :aliases [:cljs-test])))
(defn ci "Run the CI pipeline of tests (and build the JAR)." [opts]
(-> opts
(assoc :lib lib :version version)
(bb/run-tests)
(bb/clean)
(assoc :src-pom "pom-template.xml")
(bb/jar)))
(defn install "Install the JAR locally." [opts]
(-> opts
(assoc :lib lib :version version)
(bb/install)))
(defn deploy "Deploy the JAR to Clojars." [opts]
(-> opts
(assoc :lib lib :version version)
(bb/deploy))
(gha-output "version" version))
| |
e35be495cd55dd202e9a2f30b5b881d42b8f07577a17af02b58fb8b99bc5639e | deadpendency/deadpendency | DependencyAssessmentFailure.hs | module Common.Model.Assessment.DependencyAssessmentFailure
( DependencyAssessmentFailure (..),
)
where
import Common.Aeson.Aeson
import Common.Model.Assessment.DependencyAssessmentViolation
import Data.Aeson
newtype DependencyAssessmentFailure = DependencyAssessmentFailure
{ _violation :: DependencyAssessmentViolation
}
deriving stock (Eq, Show, Generic)
instance ToJSON DependencyAssessmentFailure where
toJSON = genericToJSON cleanJSONOptions
toEncoding = genericToEncoding cleanJSONOptions
instance FromJSON DependencyAssessmentFailure where
parseJSON = genericParseJSON cleanJSONOptions
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/common/src/Common/Model/Assessment/DependencyAssessmentFailure.hs | haskell | module Common.Model.Assessment.DependencyAssessmentFailure
( DependencyAssessmentFailure (..),
)
where
import Common.Aeson.Aeson
import Common.Model.Assessment.DependencyAssessmentViolation
import Data.Aeson
newtype DependencyAssessmentFailure = DependencyAssessmentFailure
{ _violation :: DependencyAssessmentViolation
}
deriving stock (Eq, Show, Generic)
instance ToJSON DependencyAssessmentFailure where
toJSON = genericToJSON cleanJSONOptions
toEncoding = genericToEncoding cleanJSONOptions
instance FromJSON DependencyAssessmentFailure where
parseJSON = genericParseJSON cleanJSONOptions
| |
af4b9f8227c39814a6a90a429e48c498f3d14edf2145dd6a603084c1b02c6d01 | lexi-lambda/racket-commonmark | spec.rkt | #lang racket/base
(require json
net/uri-codec
racket/format
racket/list
racket/match
racket/runtime-path
rackunit
xml
commonmark)
(define-runtime-path spec.json "spec-0.30.json")
(define color:reset #"\e(B\e[m")
(define color:bold #"\e[1m")
(define color:red #"\e[31m")
(define color:green #"\e[32m")
(define color:yellow #"\e[33m")
(define color:gray #"\e[90m")
; The spec examples use very particular HTML formatting (the precise details of
; which are not actually mandated by the spec), and normalizing the spec’s
; expected HTML would cause problems for examples involving raw “HTML” that is
; not actually valid HTML, so this function manually converts an xexpr to HTML
; using the spec’s formatting rules.
(define (document->spec-html doc)
(define out (open-output-string))
(define last-out-newline? #t)
(define current-context (make-parameter 'block))
(define (newline-out)
(unless last-out-newline?
(newline out)
(set! last-out-newline? #t)))
(define (write-out str)
(write-string str out)
(set! last-out-newline? #f))
(define (oprintf . args)
(apply fprintf out args)
(set! last-out-newline? #f))
(define (do-xexpr xexpr)
(match xexpr
[(? string?) (write-out (xml-attribute-encode xexpr))]
[(? cdata?)
(when (eq? (current-context) 'block)
(newline-out))
(write-out (cdata-string xexpr))
(when (eq? (current-context) 'block)
(newline-out))]
[(list tag (list (list attr-names attr-vals) ...) xexprs ...)
(do-element tag (map cons attr-names attr-vals) xexprs)]
[(list tag xexprs ...)
(do-element tag '() xexprs)]))
(define (do-element tag attrs xexprs)
(define add-newlines? (memq tag '(blockquote h1 h2 h3 h4 h5 h6 hr li ol p pre ul)))
(when add-newlines? (newline-out))
(oprintf "<~a" tag)
(for ([attr (in-list attrs)])
(define attr-val (if (eq? (car attr) 'href)
(encode-url (cdr attr))
(cdr attr)))
(oprintf " ~a=\"~a\"" (car attr) (xml-attribute-encode attr-val)))
(cond
[(and (empty? xexprs) (memq tag html-empty-tags))
(write-out " />")]
[else
(write-out ">")
(when (eq? tag 'blockquote) (newline-out))
(if (memq tag '(h1 h2 h3 h4 h5 h6 p))
(parameterize ([current-context 'inline])
(for-each do-xexpr xexprs))
(for-each do-xexpr xexprs))
(when (eq? tag 'blockquote) (newline-out))
(oprintf "</~a>" tag)])
(when (or add-newlines? (eq? tag 'br))
(newline-out)))
(for-each do-xexpr (document->xexprs doc))
(get-output-string out))
(define (encode-url str)
(regexp-replace* #px"[^a-zA-Z0-9;/?:@&=+$,\\-_.!~*'()#%]" str uri-encode))
(define (run-spec-tests #:print-summary? [print-summary? #f])
(define all-specs (call-with-input-file* spec.json read-json #:mode 'text))
(define spec-sections '())
(define specs-per-section (make-hash))
(define successes-per-section (make-hash))
(for ([spec (in-list all-specs)])
(define section (hash-ref spec 'section))
(unless (hash-has-key? specs-per-section section)
(set! spec-sections (cons section spec-sections))
(hash-set! specs-per-section section 0)
(hash-set! successes-per-section section 0))
(hash-update! specs-per-section section add1)
(with-check-info (['section section]
['example (hash-ref spec 'example)]
['markdown (hash-ref spec 'markdown)])
(test-begin
(check-equal? (document->spec-html (string->document (hash-ref spec 'markdown)))
(hash-ref spec 'html))
(hash-update! successes-per-section section add1))))
(when print-summary?
(define section-title-width (apply max (map string-length spec-sections)))
(define total-specs (apply + (hash-values specs-per-section)))
(define totals-width (string-length (~a total-specs)))
(define (write-chars c len)
(for ([i (in-range len)])
(write-char c)))
(define (write-separator)
(write-chars #\─ (+ section-title-width (* totals-width 2) 58))
(newline))
(define (write-bar-line label successes total)
(write-string (~a label #:width section-title-width #:align 'right))
(write-string " ")
(define score (/ successes total))
(define score-color (cond
[(= score 1) color:green]
[(>= score 7/10) color:yellow]
[else color:red]))
(write-bytes score-color)
(define filled-chars (round (* score 50)))
(write-chars #\█ filled-chars)
(write-chars #\░ (- 50 filled-chars))
(write-string " ")
(write-string (~r (* score 100) #:precision 0 #:min-width 3))
(write-string "%")
(write-bytes color:gray)
(write-string (~a " " (~r successes #:min-width totals-width) "/" total))
(write-bytes color:reset)
(newline))
(newline)
(write-bytes color:bold)
(write-string "CommonMark conformance summary")
(write-bytes color:reset)
(newline)
(write-separator)
(for ([section (in-list (reverse spec-sections))])
(write-bar-line section
(hash-ref successes-per-section section)
(hash-ref specs-per-section section)))
(write-separator)
(write-bar-line "Total"
(apply + (hash-values successes-per-section))
total-specs)))
(module+ test (run-spec-tests))
(module+ main (run-spec-tests #:print-summary? #t))
| null | https://raw.githubusercontent.com/lexi-lambda/racket-commonmark/1d7f1d5fc70bedfbe201c2e794da69dc7afe6e63/commonmark-test/tests/commonmark/spec.rkt | racket | The spec examples use very particular HTML formatting (the precise details of
which are not actually mandated by the spec), and normalizing the spec’s
expected HTML would cause problems for examples involving raw “HTML” that is
not actually valid HTML, so this function manually converts an xexpr to HTML
using the spec’s formatting rules. | #lang racket/base
(require json
net/uri-codec
racket/format
racket/list
racket/match
racket/runtime-path
rackunit
xml
commonmark)
(define-runtime-path spec.json "spec-0.30.json")
(define color:reset #"\e(B\e[m")
(define color:bold #"\e[1m")
(define color:red #"\e[31m")
(define color:green #"\e[32m")
(define color:yellow #"\e[33m")
(define color:gray #"\e[90m")
(define (document->spec-html doc)
(define out (open-output-string))
(define last-out-newline? #t)
(define current-context (make-parameter 'block))
(define (newline-out)
(unless last-out-newline?
(newline out)
(set! last-out-newline? #t)))
(define (write-out str)
(write-string str out)
(set! last-out-newline? #f))
(define (oprintf . args)
(apply fprintf out args)
(set! last-out-newline? #f))
(define (do-xexpr xexpr)
(match xexpr
[(? string?) (write-out (xml-attribute-encode xexpr))]
[(? cdata?)
(when (eq? (current-context) 'block)
(newline-out))
(write-out (cdata-string xexpr))
(when (eq? (current-context) 'block)
(newline-out))]
[(list tag (list (list attr-names attr-vals) ...) xexprs ...)
(do-element tag (map cons attr-names attr-vals) xexprs)]
[(list tag xexprs ...)
(do-element tag '() xexprs)]))
(define (do-element tag attrs xexprs)
(define add-newlines? (memq tag '(blockquote h1 h2 h3 h4 h5 h6 hr li ol p pre ul)))
(when add-newlines? (newline-out))
(oprintf "<~a" tag)
(for ([attr (in-list attrs)])
(define attr-val (if (eq? (car attr) 'href)
(encode-url (cdr attr))
(cdr attr)))
(oprintf " ~a=\"~a\"" (car attr) (xml-attribute-encode attr-val)))
(cond
[(and (empty? xexprs) (memq tag html-empty-tags))
(write-out " />")]
[else
(write-out ">")
(when (eq? tag 'blockquote) (newline-out))
(if (memq tag '(h1 h2 h3 h4 h5 h6 p))
(parameterize ([current-context 'inline])
(for-each do-xexpr xexprs))
(for-each do-xexpr xexprs))
(when (eq? tag 'blockquote) (newline-out))
(oprintf "</~a>" tag)])
(when (or add-newlines? (eq? tag 'br))
(newline-out)))
(for-each do-xexpr (document->xexprs doc))
(get-output-string out))
(define (encode-url str)
(regexp-replace* #px"[^a-zA-Z0-9;/?:@&=+$,\\-_.!~*'()#%]" str uri-encode))
(define (run-spec-tests #:print-summary? [print-summary? #f])
(define all-specs (call-with-input-file* spec.json read-json #:mode 'text))
(define spec-sections '())
(define specs-per-section (make-hash))
(define successes-per-section (make-hash))
(for ([spec (in-list all-specs)])
(define section (hash-ref spec 'section))
(unless (hash-has-key? specs-per-section section)
(set! spec-sections (cons section spec-sections))
(hash-set! specs-per-section section 0)
(hash-set! successes-per-section section 0))
(hash-update! specs-per-section section add1)
(with-check-info (['section section]
['example (hash-ref spec 'example)]
['markdown (hash-ref spec 'markdown)])
(test-begin
(check-equal? (document->spec-html (string->document (hash-ref spec 'markdown)))
(hash-ref spec 'html))
(hash-update! successes-per-section section add1))))
(when print-summary?
(define section-title-width (apply max (map string-length spec-sections)))
(define total-specs (apply + (hash-values specs-per-section)))
(define totals-width (string-length (~a total-specs)))
(define (write-chars c len)
(for ([i (in-range len)])
(write-char c)))
(define (write-separator)
(write-chars #\─ (+ section-title-width (* totals-width 2) 58))
(newline))
(define (write-bar-line label successes total)
(write-string (~a label #:width section-title-width #:align 'right))
(write-string " ")
(define score (/ successes total))
(define score-color (cond
[(= score 1) color:green]
[(>= score 7/10) color:yellow]
[else color:red]))
(write-bytes score-color)
(define filled-chars (round (* score 50)))
(write-chars #\█ filled-chars)
(write-chars #\░ (- 50 filled-chars))
(write-string " ")
(write-string (~r (* score 100) #:precision 0 #:min-width 3))
(write-string "%")
(write-bytes color:gray)
(write-string (~a " " (~r successes #:min-width totals-width) "/" total))
(write-bytes color:reset)
(newline))
(newline)
(write-bytes color:bold)
(write-string "CommonMark conformance summary")
(write-bytes color:reset)
(newline)
(write-separator)
(for ([section (in-list (reverse spec-sections))])
(write-bar-line section
(hash-ref successes-per-section section)
(hash-ref specs-per-section section)))
(write-separator)
(write-bar-line "Total"
(apply + (hash-values successes-per-section))
total-specs)))
(module+ test (run-spec-tests))
(module+ main (run-spec-tests #:print-summary? #t))
|
28c3bb74197886bb3f771436590a5276c54df71ef2892c1694c202c637290eb7 | bootstrapworld/curr | Game.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname Game) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ())))
(require "Teachpacks/bootstrap-teachpack.rkt")
(require "Teachpacks/bootstrap-teachpack.rkt")
;; DATA:
The World is a :
; define-struct:
;; STARTING WORLD
;; GRAPHICS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; GRAPHICS FUNCTIONS:
;; draw-world: world -> Image
place DANGER , TARGET , CLOUD and PLAYER onto BACKGROUND at the right coordinates
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; UPDATING FUNCTIONS:
;; update-world: world -> world
;; What does your update-world function do?
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; KEY EVENTS:
;; keypress: world string -> world
;; What does your keypress function do?
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TESTS FOR COND:
;; off-left? : number -> boolean
;; Checks whether an object has gone off the left side of the screen
;; off-right? : number -> boolean
;; Checks whether an object has gone off the right side of the screen
;; line-length : number number -> number
;; Finds 1D distance
;; distance : number number number number -> number
Finds the 2D distance between two points
;; collide? : number number number number -> boolean
determines whether two objects are within 50 pixels of eachother
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
big - bang using the START world
;; on a tick-event, use update-world
;; on a draw-event, use draw-world
;; on a key-event, use keypress
;(big-bang START
; (on-tick update-world)
; (on-draw draw-world)
; ) | null | https://raw.githubusercontent.com/bootstrapworld/curr/443015255eacc1c902a29978df0e3e8e8f3b9430/courses/reactive/resources/source-files/Game.rkt | racket | about the language level of this file in a form that our tools can easily process.
DATA:
define-struct:
STARTING WORLD
GRAPHICS
GRAPHICS FUNCTIONS:
draw-world: world -> Image
UPDATING FUNCTIONS:
update-world: world -> world
What does your update-world function do?
KEY EVENTS:
keypress: world string -> world
What does your keypress function do?
TESTS FOR COND:
off-left? : number -> boolean
Checks whether an object has gone off the left side of the screen
off-right? : number -> boolean
Checks whether an object has gone off the right side of the screen
line-length : number number -> number
Finds 1D distance
distance : number number number number -> number
collide? : number number number number -> boolean
on a tick-event, use update-world
on a draw-event, use draw-world
on a key-event, use keypress
(big-bang START
(on-tick update-world)
(on-draw draw-world)
) | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname Game) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ())))
(require "Teachpacks/bootstrap-teachpack.rkt")
(require "Teachpacks/bootstrap-teachpack.rkt")
The World is a :
place DANGER , TARGET , CLOUD and PLAYER onto BACKGROUND at the right coordinates
Finds the 2D distance between two points
determines whether two objects are within 50 pixels of eachother
big - bang using the START world
|
2525d4d13093303bd2e4cfa8237148243c81543dcfe3275c92dd8fc075825cb4 | heraldry/heraldicon | config.cljs | (ns heraldicon.config
(:refer-clojure :exclude [get])
(:require
[clojure.string :as str]))
(defn- js->clj+
"For cases when built-in js->clj doesn't work. Source: "
[x]
(into {}
(map (fn [k]
[(keyword k) (aget x k)]))
(js-keys x)))
(def ^:private env
"Returns current env vars as a Clojure map."
(js->clj+ (.-env js/process)))
(goog-define stage "dev")
(goog-define commit "unknown")
(def ^:private base-font-dir
(if (= stage "dev")
"/Library/Fonts"
"/opt/fonts"))
(def ^:private config-data
(case stage
"dev" {:heraldicon-api-endpoint ":4000/api"
:heraldicon-url ":8081"
:heraldicon-site-url ":4000/dev"
:cognito-pool-config {:UserPoolId "eu-central-1_eHwF2byeJ"
:ClientId "2v90eij0l4aluf2amqumqh9gko"
:jwksUri "-idp.eu-central-1.amazonaws.com/eu-central-1_eHwF2byeJ/.well-known/jwks.json"}
:static-files-url ":8081"
:fleur-de-lis-charge-id "charge:ZfqrIl"
:torse-charge-id "charge:8vwlZ2"
:helmet-charge-id "charge:hlsnvP"
:compartment-charge-id "charge:SSLk9y"
:supporter-charge-id "charge:fxOk19"
:mantling-charge-id "charge:gTrIM7"}
"staging" {:heraldicon-api-endpoint "-api.eu-central-1.amazonaws.com/api"
:heraldicon-discord-api-endpoint "-api.eu-central-1.amazonaws.com/discord"
:heraldicon-url ""
:cognito-pool-config {:UserPoolId "eu-central-1_eHwF2byeJ"
:ClientId "2v90eij0l4aluf2amqumqh9gko"
:jwksUri "-idp.eu-central-1.amazonaws.com/eu-central-1_eHwF2byeJ/.well-known/jwks.json"}
:static-files-url ""
:fleur-de-lis-charge-id "charge:ZfqrIl"
:torse-charge-id "charge:8vwlZ2"
:helmet-charge-id "charge:hlsnvP"
:compartment-charge-id "charge:SSLk9y"
:supporter-charge-id "charge:fxOk19"
:mantling-charge-id "charge:gTrIM7"}
"prod" {:heraldicon-api-endpoint "-api.eu-central-1.amazonaws.com/api"
:heraldicon-discord-api-endpoint "-api.eu-central-1.amazonaws.com/discord"
:heraldicon-url ""
:cognito-pool-config {:UserPoolId "eu-central-1_WXqnJUEOT"
:ClientId "21pvp6cc4l3gptoj4bl3jc9s7r"
:jwksUri "-idp.eu-central-1.amazonaws.com/eu-central-1_WXqnJUEOT/.well-known/jwks.json"}
:static-files-url ""
:fleur-de-lis-charge-id "charge:ZfqrIl"
:torse-charge-id "charge:8vwlZ2"
:helmet-charge-id "charge:hlsnvP"
:compartment-charge-id "charge:SSLk9y"
:supporter-charge-id "charge:fxOk19"
:mantling-charge-id "charge:gTrIM7"}))
(defn get [setting]
(case setting
:stage stage
:commit commit
:region (or (:REGION env) "eu-central-1")
:admins #{"or"}
:maintenance-mode? false
:base-font-dir base-font-dir
:email-address ""
(or (some-> setting
name
str/upper-case
(str/replace "-" "_")
keyword
env)
(clojure.core/get config-data setting))))
| null | https://raw.githubusercontent.com/heraldry/heraldicon/54e003614cf2c14cda496ef36358059ba78275b0/src/heraldicon/config.cljs | clojure | (ns heraldicon.config
(:refer-clojure :exclude [get])
(:require
[clojure.string :as str]))
(defn- js->clj+
"For cases when built-in js->clj doesn't work. Source: "
[x]
(into {}
(map (fn [k]
[(keyword k) (aget x k)]))
(js-keys x)))
(def ^:private env
"Returns current env vars as a Clojure map."
(js->clj+ (.-env js/process)))
(goog-define stage "dev")
(goog-define commit "unknown")
(def ^:private base-font-dir
(if (= stage "dev")
"/Library/Fonts"
"/opt/fonts"))
(def ^:private config-data
(case stage
"dev" {:heraldicon-api-endpoint ":4000/api"
:heraldicon-url ":8081"
:heraldicon-site-url ":4000/dev"
:cognito-pool-config {:UserPoolId "eu-central-1_eHwF2byeJ"
:ClientId "2v90eij0l4aluf2amqumqh9gko"
:jwksUri "-idp.eu-central-1.amazonaws.com/eu-central-1_eHwF2byeJ/.well-known/jwks.json"}
:static-files-url ":8081"
:fleur-de-lis-charge-id "charge:ZfqrIl"
:torse-charge-id "charge:8vwlZ2"
:helmet-charge-id "charge:hlsnvP"
:compartment-charge-id "charge:SSLk9y"
:supporter-charge-id "charge:fxOk19"
:mantling-charge-id "charge:gTrIM7"}
"staging" {:heraldicon-api-endpoint "-api.eu-central-1.amazonaws.com/api"
:heraldicon-discord-api-endpoint "-api.eu-central-1.amazonaws.com/discord"
:heraldicon-url ""
:cognito-pool-config {:UserPoolId "eu-central-1_eHwF2byeJ"
:ClientId "2v90eij0l4aluf2amqumqh9gko"
:jwksUri "-idp.eu-central-1.amazonaws.com/eu-central-1_eHwF2byeJ/.well-known/jwks.json"}
:static-files-url ""
:fleur-de-lis-charge-id "charge:ZfqrIl"
:torse-charge-id "charge:8vwlZ2"
:helmet-charge-id "charge:hlsnvP"
:compartment-charge-id "charge:SSLk9y"
:supporter-charge-id "charge:fxOk19"
:mantling-charge-id "charge:gTrIM7"}
"prod" {:heraldicon-api-endpoint "-api.eu-central-1.amazonaws.com/api"
:heraldicon-discord-api-endpoint "-api.eu-central-1.amazonaws.com/discord"
:heraldicon-url ""
:cognito-pool-config {:UserPoolId "eu-central-1_WXqnJUEOT"
:ClientId "21pvp6cc4l3gptoj4bl3jc9s7r"
:jwksUri "-idp.eu-central-1.amazonaws.com/eu-central-1_WXqnJUEOT/.well-known/jwks.json"}
:static-files-url ""
:fleur-de-lis-charge-id "charge:ZfqrIl"
:torse-charge-id "charge:8vwlZ2"
:helmet-charge-id "charge:hlsnvP"
:compartment-charge-id "charge:SSLk9y"
:supporter-charge-id "charge:fxOk19"
:mantling-charge-id "charge:gTrIM7"}))
(defn get [setting]
(case setting
:stage stage
:commit commit
:region (or (:REGION env) "eu-central-1")
:admins #{"or"}
:maintenance-mode? false
:base-font-dir base-font-dir
:email-address ""
(or (some-> setting
name
str/upper-case
(str/replace "-" "_")
keyword
env)
(clojure.core/get config-data setting))))
| |
f5450c7747c0b24ab2104e17d7ab12b782809c1c9318610fb70d24ff8d63b1c3 | privet-kitty/cl-competitive | round-robin.lisp | (defpackage :cp/test/round-robin
(:use :cl :fiveam :cp/round-robin)
(:import-from :cp/test/base #:base-suite))
(in-package :cp/test/round-robin)
(in-suite base-suite)
(test round-robin
(let ((*test-dribble* nil))
(finishes (map-round-robin 0 (lambda (&rest _)
(declare (ignore _))
(error "Huh?"))))
(loop for n to 20 by 2
for round = 0
for marked = (make-array (list n n) :element-type 'bit :initial-element 0)
do (map-round-robin
n
(lambda (vector round*)
(is (= round round*))
(is (= n (length (remove-duplicates vector))))
(dotimes (i n)
(setf (aref marked i (aref vector i)) 1))
(incf round)))
(dotimes (i n)
(dotimes (j n)
(is (= (if (= i j) 0 1)
(aref marked i j))))))))
| null | https://raw.githubusercontent.com/privet-kitty/cl-competitive/4d1c601ff42b10773a5d0c5989b1234da5bb98b6/module/test/round-robin.lisp | lisp | (defpackage :cp/test/round-robin
(:use :cl :fiveam :cp/round-robin)
(:import-from :cp/test/base #:base-suite))
(in-package :cp/test/round-robin)
(in-suite base-suite)
(test round-robin
(let ((*test-dribble* nil))
(finishes (map-round-robin 0 (lambda (&rest _)
(declare (ignore _))
(error "Huh?"))))
(loop for n to 20 by 2
for round = 0
for marked = (make-array (list n n) :element-type 'bit :initial-element 0)
do (map-round-robin
n
(lambda (vector round*)
(is (= round round*))
(is (= n (length (remove-duplicates vector))))
(dotimes (i n)
(setf (aref marked i (aref vector i)) 1))
(incf round)))
(dotimes (i n)
(dotimes (j n)
(is (= (if (= i j) 0 1)
(aref marked i j))))))))
| |
d3ff71451c1364f2f71a4995babe35c339e5d5f1226729ada6e5fffe16c75741 | froggey/Mezzano | cold-start.lisp | ;;;; Cold initialization
;;;;
This code is run when an image is booted for the first time
(in-package :mezzano.internals)
(declaim (special *cold-toplevel-forms*
*package-system*
*additional-cold-toplevel-forms*
*initial-obarray*
*initial-keyword-obarray*
*initial-fref-obarray*
*initial-function-docstrings*)
(special *terminal-io*
*standard-output*
*standard-input*
*error-output*
*trace-output*
*debug-io*
*query-io*))
(declaim (special *features* *macroexpand-hook*))
;;; Stuff duplicated/reimplemented from stream.lisp.
stream.lisp builds on CLOS , which is not present in the cold image .
(defun write-char (character &optional stream)
(cold-write-char character stream))
(defun write-string (string &optional stream &key (start 0) end)
(unless end (setf end (length string)))
(dotimes (i (- end start))
(write-char (char string (+ start i)) stream))
string)
(defun terpri (&optional stream)
(write-char #\Newline stream)
nil)
(defun fresh-line (&optional stream)
(cond ((start-line-p stream)
nil)
(t (terpri stream)
t)))
(defun start-line-p (&optional stream)
(cold-start-line-p stream))
(defun read-char (&optional stream (eof-error-p t) eof-value recursive-p)
(declare (ignore eof-error-p eof-value recursive-p))
(cold-read-char stream))
(defun unread-char (character &optional stream)
(cold-unread-char character stream))
(defun peek-char (&optional peek-type s (eof-error-p t) eof-value recursive-p)
(declare (ignore eof-error-p eof-value recursive-p))
(cond ((eql peek-type nil)
(let ((ch (cold-read-char s)))
(cold-unread-char ch s)
ch))
((eql peek-type t)
(do ((ch (cold-read-char s)
(cold-read-char s)))
((not (whitespace[2]p ch))
(cold-unread-char ch s)
ch)))
((characterp peek-type)
(error "TODO: character peek."))
(t (error "Bad peek type ~S." peek-type))))
(defun read-line (&optional (input-stream *standard-input*) (eof-error-p t) eof-value recursive-p)
(do ((result (make-array 16 :element-type 'character :adjustable t :fill-pointer 0))
(c (read-char input-stream eof-error-p nil recursive-p)
(read-char input-stream eof-error-p nil recursive-p)))
((or (null c)
(eql c #\Newline))
(if (and (null c) (eql (length result) 0))
(values eof-value t)
(values result (null c))))
(vector-push-extend c result)))
(defun yes-or-no-p (&optional control &rest arguments)
(declare (dynamic-extent arguments))
(when control
(write-char #\Newline)
(apply 'format t control arguments)
(write-char #\Space))
(format t "(Yes or No) ")
(loop
(let ((line (read-line)))
(when (string-equal line "yes")
(return t))
(when (string-equal line "no")
(return nil)))
(write-char #\Newline)
(format t "Please respond with \"yes\" or \"no\". ")))
(defun streamp (object)
(eql object :cold-stream))
(defun %with-stream-editor (stream recursive-p function)
(declare (ignore stream recursive-p))
(funcall function))
(defun make-case-correcting-stream (stream case)
(declare (ignore case))
stream)
Initial PRINT - OBJECT , replaced when CLOS is loaded .
(defun print-object (object stream)
(print-unreadable-object (object stream :type t :identity t)))
;;; Pathname stuff before pathnames exist (file.lisp defines real pathnames).
(defun pathnamep (x)
(declare (ignore x))
nil)
(defun pathnames-equal (x y)
(declare (ignore x y))
nil)
(defun hash-pathname (pathname depth)
(declare (ignore pathname depth))
(error "Early call to hash-pathname"))
(declaim (special * ** ***))
(defun repl ()
(let ((* nil) (** nil) (*** nil))
(loop
(fresh-line)
(write-char #\>)
(let ((form (read)))
(fresh-line)
(let ((result (multiple-value-list (eval form))))
(setf *** **
** *
* (first result))
(when result
(dolist (v result)
(fresh-line)
(write v))))))))
;;; Fake streams & fake stream functions, used by the mini loader to load
multiboot / kboot modules .
(defstruct (mini-vector-stream
(:constructor mini-vector-stream (vector)))
vector
(offset 0))
(defun %read-byte (stream)
(if (mini-vector-stream-p stream)
(prog1 (aref (mini-vector-stream-vector stream) (mini-vector-stream-offset stream))
(incf (mini-vector-stream-offset stream)))
(read-byte stream)))
(defun %read-sequence (seq stream)
(cond ((mini-vector-stream-p stream)
(replace seq (mini-vector-stream-vector stream)
:start2 (mini-vector-stream-offset stream)
:end2 (+ (mini-vector-stream-offset stream) (length seq)))
(incf (mini-vector-stream-offset stream) (length seq)))
(t (read-sequence seq stream))))
Simple EVAL for use in cold images .
(defun eval-cons (form)
(case (first form)
((if) (if (eval (second form))
(eval (third form))
(eval (fourth form))))
((function) (if (and (consp (second form)) (eql (first (second form)) 'lambda))
(let ((lambda (second form)))
(when (second lambda)
(error "Not supported: Lambdas with arguments."))
(lambda ()
(eval `(progn ,@(cddr lambda)))))
(fdefinition (second form))))
((quote) (second form))
((progn) (do ((f (rest form) (cdr f)))
((null (cdr f))
(eval (car f)))
(eval (car f))))
((setq) (do ((f (rest form) (cddr f)))
((null (cddr f))
(setf (symbol-value (car f)) (eval (cadr f))))
(setf (symbol-value (car f)) (eval (cadr f)))))
(t (multiple-value-bind (expansion expanded-p)
(macroexpand form)
(if expanded-p
(eval expansion)
(apply (first form) (mapcar 'eval (rest form))))))))
(defun eval (form)
(typecase form
(cons (eval-cons form))
(symbol (symbol-value form))
(t form)))
;;; Used during cold-image bringup, various files will redefine packages before
;;; the package system is loaded.
(defvar *deferred-%defpackage-calls*)
(defun %defpackage (&rest arguments)
(push arguments *deferred-%defpackage-calls*))
(defun keywordp (object)
(and (symbolp object)
(eql (symbol-package object) :keyword)))
;;; Needed for IN-PACKAGE before the package system is bootstrapped.
(defun find-package-or-die (name)
(declare (ignore name))
t)
;;; Early FIND-CLASS, needed for typep.
(defun find-class (name &optional (errorp t))
(when errorp
(error "Early call to FIND-CLASS for ~S" name))
nil)
;;; Early handler bind
(defun %handler-bind (bindings thunk)
(declare (ignore bindings))
(funcall thunk))
(defvar *warm-llf-files*)
(defvar *cold-start-start-time*)
(defvar *cold-start-end-time*)
(defun initialize-lisp ()
"A grab-bag of things that must be done before Lisp will work properly.
Cold-generator sets up just enough stuff for functions to be called, for
structures to exist, and for memory to be allocated, but not much beyond that."
(setf *cold-start-start-time* (get-internal-run-time))
(cold-array-initialization)
(setf *package* nil
*terminal-io* :cold-stream
*standard-output* :cold-stream
*standard-input* :cold-stream
*debug-io* :cold-stream
* nil
** nil
*** nil
/// nil
// nil
/ nil
+++ nil
++ nil
+ nil)
(setf *print-base* 10.
*print-escape* t
*print-readably* nil
*print-safe* nil)
(setf *features* '(:short-float-is-ieee-half-float
:package-local-nicknames
:unicode
:little-endian
#+x86-64 :x86-64
#+arm64 :arm64
:mezzano
:ieee-floating-point
:ansi-cl
:common-lisp)
*macroexpand-hook* 'funcall
most-positive-fixnum #.(- (expt 2 (- 64 +n-fixnum-bits+ 1)) 1)
most-negative-fixnum #.(- (expt 2 (- 64 +n-fixnum-bits+ 1)))
*gc-epoch* 0
*hash-table-unbound-value* (list "unbound hash-table entry")
*hash-table-tombstone* (list "hash-table tombstone")
*deferred-%defpackage-calls* '())
;; System tables.
(setf *macros* (make-hash-table :test #'eq :synchronized t :weakness :key))
(setf *symbol-function-info* (make-hash-table :test #'eq :enforce-gc-invariant-keys t :weakness :key)
*setf-function-info* (make-hash-table :test #'eq :enforce-gc-invariant-keys t :weakness :key)
*cas-function-info* (make-hash-table :test #'eq :enforce-gc-invariant-keys t :weakness :key)
*function-info-lock* (mezzano.supervisor:make-rw-lock '*function-info-lock*))
(setf *setf-expanders* (make-hash-table :test #'eq :synchronized t :weakness :key))
(setf *type-info* (make-hash-table :test #'eq :enforce-gc-invariant-keys t :weakness :key)
*type-info-lock* (mezzano.supervisor:make-rw-lock '*type-info*))
;; Put initial classes into the class table.
(setf mezzano.clos::*class-reference-table* (make-hash-table :test #'eq :enforce-gc-invariant-keys t :weakness :key)
mezzano.clos::*class-reference-table-lock* (mezzano.supervisor:make-rw-lock 'mezzano.clos::*class-reference-table*))
(loop
for (name . class) across mezzano.clos::*initial-class-table*
do (setf (find-class name) class))
(write-line "Cold image coming up...")
;; Hook FREFs up where required.
(setf *setf-fref-table* (make-hash-table :synchronized t :weakness :key))
(setf *cas-fref-table* (make-hash-table :synchronized t :weakness :key))
(dotimes (i (length *initial-fref-obarray*))
(let* ((fref (svref *initial-fref-obarray* i))
(name (function-reference-name fref)))
(when (consp name)
(ecase (first name)
((setf)
(setf (gethash (second name) *setf-fref-table*) fref))
((cas)
(setf (gethash (second name) *cas-fref-table*) fref))))))
;; Create documentation hash tables.
FIXME : These should be weak but have structured keys . Need separate hash tables for setf / cas
(setf *function-documentation* (make-hash-table :test #'equal :synchronized t))
(setf *compiler-macro-documentation* (make-hash-table :test #'equal :synchronized t))
(setf *setf-documentation* (make-hash-table :synchronized t :weakness :key))
(setf *variable-documentation* (make-hash-table :synchronized t :weakness :key))
(setf *variable-source-locations* (make-hash-table :test 'eq :synchronized t :weakness :key))
;; Transfer the initial function documentation over.
(loop
for (name doc) in *initial-function-docstrings*
do (set-function-docstring name doc))
(makunbound '*initial-function-docstrings*)
;; Run toplevel forms.
(let ((*package* *package*))
(dotimes (i (length *cold-toplevel-forms*))
(eval (svref *cold-toplevel-forms* i))))
;; Constantify every keyword.
(dotimes (i (length *initial-obarray*))
(when (eql (symbol-package (aref *initial-obarray* i)) :keyword)
(setf (symbol-mode (aref *initial-obarray* i)) :constant)))
(dolist (sym '(nil t most-positive-fixnum most-negative-fixnum))
(setf (symbol-mode sym) :constant))
(mezzano.clos::initialize-clos)
;; Pull in the real package system.
;; If anything goes wrong before init-package-sys finishes then things
;; break in terrible ways.
(dotimes (i (length *package-system*))
(eval (svref *package-system* i)))
(initialize-package-system)
(dolist (args (reverse *deferred-%defpackage-calls*))
(apply #'%defpackage args))
(makunbound '*deferred-%defpackage-calls*)
(let ((*package* *package*))
(dotimes (i (length *additional-cold-toplevel-forms*))
(eval (svref *additional-cold-toplevel-forms* i))))
;; Flush the bootstrap stuff.
(makunbound '*initial-obarray*)
(makunbound '*package-system*)
(makunbound '*additional-cold-toplevel-forms*)
(makunbound '*cold-toplevel-forms*)
(makunbound '*initial-fref-obarray*)
(write-line "First GC.")
(room)
(gc :full t)
(room)
(write-line "Cold load complete.")
(mezzano.supervisor:snapshot)
(write-line "Loading warm modules.")
(let ((*terminal-io* *terminal-io*))
(dotimes (i (length *warm-llf-files*))
(write-string "Loading ")
(write-line (car (aref *warm-llf-files* i)))
(load-llf (mini-vector-stream (cdr (aref *warm-llf-files* i)))))
(makunbound '*warm-llf-files*)
(write-line "Post load GC.")
(room)
(gc)
(room)
(mezzano.supervisor:snapshot)
(setf *cold-start-end-time* (get-internal-run-time))
(format t "Hello, world.~%Cold start took ~:D seconds (~:D seconds of GC time).~%"
(float (/ (- *cold-start-end-time*
*cold-start-start-time*)
internal-time-units-per-second))
*gc-time*)))
| null | https://raw.githubusercontent.com/froggey/Mezzano/f0eeb2a3f032098b394e31e3dfd32800f8a51122/system/cold-start.lisp | lisp | Cold initialization
Stuff duplicated/reimplemented from stream.lisp.
Pathname stuff before pathnames exist (file.lisp defines real pathnames).
Fake streams & fake stream functions, used by the mini loader to load
Used during cold-image bringup, various files will redefine packages before
the package system is loaded.
Needed for IN-PACKAGE before the package system is bootstrapped.
Early FIND-CLASS, needed for typep.
Early handler bind
System tables.
Put initial classes into the class table.
Hook FREFs up where required.
Create documentation hash tables.
Transfer the initial function documentation over.
Run toplevel forms.
Constantify every keyword.
Pull in the real package system.
If anything goes wrong before init-package-sys finishes then things
break in terrible ways.
Flush the bootstrap stuff. | This code is run when an image is booted for the first time
(in-package :mezzano.internals)
(declaim (special *cold-toplevel-forms*
*package-system*
*additional-cold-toplevel-forms*
*initial-obarray*
*initial-keyword-obarray*
*initial-fref-obarray*
*initial-function-docstrings*)
(special *terminal-io*
*standard-output*
*standard-input*
*error-output*
*trace-output*
*debug-io*
*query-io*))
(declaim (special *features* *macroexpand-hook*))
stream.lisp builds on CLOS , which is not present in the cold image .
(defun write-char (character &optional stream)
(cold-write-char character stream))
(defun write-string (string &optional stream &key (start 0) end)
(unless end (setf end (length string)))
(dotimes (i (- end start))
(write-char (char string (+ start i)) stream))
string)
(defun terpri (&optional stream)
(write-char #\Newline stream)
nil)
(defun fresh-line (&optional stream)
(cond ((start-line-p stream)
nil)
(t (terpri stream)
t)))
(defun start-line-p (&optional stream)
(cold-start-line-p stream))
(defun read-char (&optional stream (eof-error-p t) eof-value recursive-p)
(declare (ignore eof-error-p eof-value recursive-p))
(cold-read-char stream))
(defun unread-char (character &optional stream)
(cold-unread-char character stream))
(defun peek-char (&optional peek-type s (eof-error-p t) eof-value recursive-p)
(declare (ignore eof-error-p eof-value recursive-p))
(cond ((eql peek-type nil)
(let ((ch (cold-read-char s)))
(cold-unread-char ch s)
ch))
((eql peek-type t)
(do ((ch (cold-read-char s)
(cold-read-char s)))
((not (whitespace[2]p ch))
(cold-unread-char ch s)
ch)))
((characterp peek-type)
(error "TODO: character peek."))
(t (error "Bad peek type ~S." peek-type))))
(defun read-line (&optional (input-stream *standard-input*) (eof-error-p t) eof-value recursive-p)
(do ((result (make-array 16 :element-type 'character :adjustable t :fill-pointer 0))
(c (read-char input-stream eof-error-p nil recursive-p)
(read-char input-stream eof-error-p nil recursive-p)))
((or (null c)
(eql c #\Newline))
(if (and (null c) (eql (length result) 0))
(values eof-value t)
(values result (null c))))
(vector-push-extend c result)))
(defun yes-or-no-p (&optional control &rest arguments)
(declare (dynamic-extent arguments))
(when control
(write-char #\Newline)
(apply 'format t control arguments)
(write-char #\Space))
(format t "(Yes or No) ")
(loop
(let ((line (read-line)))
(when (string-equal line "yes")
(return t))
(when (string-equal line "no")
(return nil)))
(write-char #\Newline)
(format t "Please respond with \"yes\" or \"no\". ")))
(defun streamp (object)
(eql object :cold-stream))
(defun %with-stream-editor (stream recursive-p function)
(declare (ignore stream recursive-p))
(funcall function))
(defun make-case-correcting-stream (stream case)
(declare (ignore case))
stream)
Initial PRINT - OBJECT , replaced when CLOS is loaded .
(defun print-object (object stream)
(print-unreadable-object (object stream :type t :identity t)))
(defun pathnamep (x)
(declare (ignore x))
nil)
(defun pathnames-equal (x y)
(declare (ignore x y))
nil)
(defun hash-pathname (pathname depth)
(declare (ignore pathname depth))
(error "Early call to hash-pathname"))
(declaim (special * ** ***))
(defun repl ()
(let ((* nil) (** nil) (*** nil))
(loop
(fresh-line)
(write-char #\>)
(let ((form (read)))
(fresh-line)
(let ((result (multiple-value-list (eval form))))
(setf *** **
** *
* (first result))
(when result
(dolist (v result)
(fresh-line)
(write v))))))))
multiboot / kboot modules .
(defstruct (mini-vector-stream
(:constructor mini-vector-stream (vector)))
vector
(offset 0))
(defun %read-byte (stream)
(if (mini-vector-stream-p stream)
(prog1 (aref (mini-vector-stream-vector stream) (mini-vector-stream-offset stream))
(incf (mini-vector-stream-offset stream)))
(read-byte stream)))
(defun %read-sequence (seq stream)
(cond ((mini-vector-stream-p stream)
(replace seq (mini-vector-stream-vector stream)
:start2 (mini-vector-stream-offset stream)
:end2 (+ (mini-vector-stream-offset stream) (length seq)))
(incf (mini-vector-stream-offset stream) (length seq)))
(t (read-sequence seq stream))))
Simple EVAL for use in cold images .
(defun eval-cons (form)
(case (first form)
((if) (if (eval (second form))
(eval (third form))
(eval (fourth form))))
((function) (if (and (consp (second form)) (eql (first (second form)) 'lambda))
(let ((lambda (second form)))
(when (second lambda)
(error "Not supported: Lambdas with arguments."))
(lambda ()
(eval `(progn ,@(cddr lambda)))))
(fdefinition (second form))))
((quote) (second form))
((progn) (do ((f (rest form) (cdr f)))
((null (cdr f))
(eval (car f)))
(eval (car f))))
((setq) (do ((f (rest form) (cddr f)))
((null (cddr f))
(setf (symbol-value (car f)) (eval (cadr f))))
(setf (symbol-value (car f)) (eval (cadr f)))))
(t (multiple-value-bind (expansion expanded-p)
(macroexpand form)
(if expanded-p
(eval expansion)
(apply (first form) (mapcar 'eval (rest form))))))))
(defun eval (form)
(typecase form
(cons (eval-cons form))
(symbol (symbol-value form))
(t form)))
(defvar *deferred-%defpackage-calls*)
(defun %defpackage (&rest arguments)
(push arguments *deferred-%defpackage-calls*))
(defun keywordp (object)
(and (symbolp object)
(eql (symbol-package object) :keyword)))
(defun find-package-or-die (name)
(declare (ignore name))
t)
(defun find-class (name &optional (errorp t))
(when errorp
(error "Early call to FIND-CLASS for ~S" name))
nil)
(defun %handler-bind (bindings thunk)
(declare (ignore bindings))
(funcall thunk))
(defvar *warm-llf-files*)
(defvar *cold-start-start-time*)
(defvar *cold-start-end-time*)
(defun initialize-lisp ()
"A grab-bag of things that must be done before Lisp will work properly.
Cold-generator sets up just enough stuff for functions to be called, for
structures to exist, and for memory to be allocated, but not much beyond that."
(setf *cold-start-start-time* (get-internal-run-time))
(cold-array-initialization)
(setf *package* nil
*terminal-io* :cold-stream
*standard-output* :cold-stream
*standard-input* :cold-stream
*debug-io* :cold-stream
* nil
** nil
*** nil
/// nil
// nil
/ nil
+++ nil
++ nil
+ nil)
(setf *print-base* 10.
*print-escape* t
*print-readably* nil
*print-safe* nil)
(setf *features* '(:short-float-is-ieee-half-float
:package-local-nicknames
:unicode
:little-endian
#+x86-64 :x86-64
#+arm64 :arm64
:mezzano
:ieee-floating-point
:ansi-cl
:common-lisp)
*macroexpand-hook* 'funcall
most-positive-fixnum #.(- (expt 2 (- 64 +n-fixnum-bits+ 1)) 1)
most-negative-fixnum #.(- (expt 2 (- 64 +n-fixnum-bits+ 1)))
*gc-epoch* 0
*hash-table-unbound-value* (list "unbound hash-table entry")
*hash-table-tombstone* (list "hash-table tombstone")
*deferred-%defpackage-calls* '())
(setf *macros* (make-hash-table :test #'eq :synchronized t :weakness :key))
(setf *symbol-function-info* (make-hash-table :test #'eq :enforce-gc-invariant-keys t :weakness :key)
*setf-function-info* (make-hash-table :test #'eq :enforce-gc-invariant-keys t :weakness :key)
*cas-function-info* (make-hash-table :test #'eq :enforce-gc-invariant-keys t :weakness :key)
*function-info-lock* (mezzano.supervisor:make-rw-lock '*function-info-lock*))
(setf *setf-expanders* (make-hash-table :test #'eq :synchronized t :weakness :key))
(setf *type-info* (make-hash-table :test #'eq :enforce-gc-invariant-keys t :weakness :key)
*type-info-lock* (mezzano.supervisor:make-rw-lock '*type-info*))
(setf mezzano.clos::*class-reference-table* (make-hash-table :test #'eq :enforce-gc-invariant-keys t :weakness :key)
mezzano.clos::*class-reference-table-lock* (mezzano.supervisor:make-rw-lock 'mezzano.clos::*class-reference-table*))
(loop
for (name . class) across mezzano.clos::*initial-class-table*
do (setf (find-class name) class))
(write-line "Cold image coming up...")
(setf *setf-fref-table* (make-hash-table :synchronized t :weakness :key))
(setf *cas-fref-table* (make-hash-table :synchronized t :weakness :key))
(dotimes (i (length *initial-fref-obarray*))
(let* ((fref (svref *initial-fref-obarray* i))
(name (function-reference-name fref)))
(when (consp name)
(ecase (first name)
((setf)
(setf (gethash (second name) *setf-fref-table*) fref))
((cas)
(setf (gethash (second name) *cas-fref-table*) fref))))))
FIXME : These should be weak but have structured keys . Need separate hash tables for setf / cas
(setf *function-documentation* (make-hash-table :test #'equal :synchronized t))
(setf *compiler-macro-documentation* (make-hash-table :test #'equal :synchronized t))
(setf *setf-documentation* (make-hash-table :synchronized t :weakness :key))
(setf *variable-documentation* (make-hash-table :synchronized t :weakness :key))
(setf *variable-source-locations* (make-hash-table :test 'eq :synchronized t :weakness :key))
(loop
for (name doc) in *initial-function-docstrings*
do (set-function-docstring name doc))
(makunbound '*initial-function-docstrings*)
(let ((*package* *package*))
(dotimes (i (length *cold-toplevel-forms*))
(eval (svref *cold-toplevel-forms* i))))
(dotimes (i (length *initial-obarray*))
(when (eql (symbol-package (aref *initial-obarray* i)) :keyword)
(setf (symbol-mode (aref *initial-obarray* i)) :constant)))
(dolist (sym '(nil t most-positive-fixnum most-negative-fixnum))
(setf (symbol-mode sym) :constant))
(mezzano.clos::initialize-clos)
(dotimes (i (length *package-system*))
(eval (svref *package-system* i)))
(initialize-package-system)
(dolist (args (reverse *deferred-%defpackage-calls*))
(apply #'%defpackage args))
(makunbound '*deferred-%defpackage-calls*)
(let ((*package* *package*))
(dotimes (i (length *additional-cold-toplevel-forms*))
(eval (svref *additional-cold-toplevel-forms* i))))
(makunbound '*initial-obarray*)
(makunbound '*package-system*)
(makunbound '*additional-cold-toplevel-forms*)
(makunbound '*cold-toplevel-forms*)
(makunbound '*initial-fref-obarray*)
(write-line "First GC.")
(room)
(gc :full t)
(room)
(write-line "Cold load complete.")
(mezzano.supervisor:snapshot)
(write-line "Loading warm modules.")
(let ((*terminal-io* *terminal-io*))
(dotimes (i (length *warm-llf-files*))
(write-string "Loading ")
(write-line (car (aref *warm-llf-files* i)))
(load-llf (mini-vector-stream (cdr (aref *warm-llf-files* i)))))
(makunbound '*warm-llf-files*)
(write-line "Post load GC.")
(room)
(gc)
(room)
(mezzano.supervisor:snapshot)
(setf *cold-start-end-time* (get-internal-run-time))
(format t "Hello, world.~%Cold start took ~:D seconds (~:D seconds of GC time).~%"
(float (/ (- *cold-start-end-time*
*cold-start-start-time*)
internal-time-units-per-second))
*gc-time*)))
|
865ab46d61e949edd7dfccdbab97545a7ea8046e74e0f63ab69c4d2533a401d3 | input-output-hk/cardano-wallet | Hash.hs |
# LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE QuantifiedConstraints #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
module Cardano.Wallet.Read.Tx.Hash
( byronTxHash
, alonzoTxHash
, shelleyTxHash
, fromShelleyTxId
, getEraTxHash
)
where
import Prelude
import Cardano.Binary
( ToCBOR (..) )
import Cardano.Chain.UTxO
( ATxAux, taTx )
import Cardano.Crypto
( serializeCborHash )
import Cardano.Ledger.Core
( AuxiliaryData )
import Cardano.Ledger.Era
( Era (..) )
import Cardano.Ledger.Shelley.TxBody
( EraIndependentTxBody )
import Cardano.Wallet.Read
( Tx )
import Cardano.Wallet.Read.Eras
( EraFun (..), K (..) )
import Cardano.Wallet.Read.Tx.Eras
( onTx )
import qualified Cardano.Crypto as CryptoC
import qualified Cardano.Crypto.Hash as Crypto
import qualified Cardano.Ledger.Alonzo.Tx as Alonzo
import qualified Cardano.Ledger.Babbage.Tx as Babbage hiding
( ScriptIntegrityHash, TxBody )
import qualified Cardano.Ledger.Core as SL.Core
import qualified Cardano.Ledger.Crypto as SL
import qualified Cardano.Ledger.SafeHash as SafeHash
import qualified Cardano.Ledger.Shelley.API as SL
import qualified Cardano.Ledger.ShelleyMA as MA
import qualified Cardano.Ledger.TxIn as TxIn
-- | Era-indexed function extracting a transaction's hash, as raw
-- bytes, for any supported era. Byron has its own hashing scheme;
-- the Shelley family (shelley, allegra, mary) shares one, and
-- Alonzo\/Babbage share another.
getEraTxHash :: EraFun Tx (K Crypto.ByteString)
getEraTxHash = EraFun
    { byronFun = onTx $ K . byronTxHash
    , shelleyFun = onTx $ K . shelleyTxHash
    , allegraFun = onTx $ K . shelleyTxHash
    , maryFun = onTx $ K . shelleyTxHash
    , alonzoFun = onTx $ K . alonzoTxHash
    , babbageFun = onTx $ K . alonzoTxHash
    }
-- | Hash of a Byron-era transaction: hash the CBOR serialisation of
-- the underlying transaction and return the digest bytes.
byronTxHash :: ATxAux a -> Crypto.ByteString
byronTxHash tx =
    CryptoC.hashToBytes (serializeCborHash (taTx tx))
-- | Hash of an Alonzo- or Babbage-era transaction: the
-- era-independent hash of its transaction body, as raw bytes.
alonzoTxHash
    :: ( Crypto.HashAlgorithm (SL.HASH crypto)
       , SafeHash.HashAnnotated
            (SL.Core.TxBody era)
            EraIndependentTxBody
            crypto)
    => Babbage.ValidatedTx era
    -> Crypto.ByteString
alonzoTxHash (Alonzo.ValidatedTx bod _ _ _) = fromShelleyTxId $ TxIn.txid bod
-- | Hash of a Shelley-family (shelley\/allegra\/mary) transaction:
-- the era-independent hash of its transaction body, as raw bytes.
shelleyTxHash
    :: ( Era x
       , ToCBOR (AuxiliaryData x)
       , ToCBOR (SL.Core.TxBody x)
       , ToCBOR (SL.Core.Witnesses x))
    => MA.Tx x
    -> Crypto.ByteString
shelleyTxHash
    (SL.Tx bod _ _) = fromShelleyTxId $ TxIn.txid bod
-- | Unwrap a ledger 'SL.TxId' and return the raw bytes of its
-- underlying hash.
fromShelleyTxId :: SL.TxId crypto -> Crypto.ByteString
fromShelleyTxId (SL.TxId h) =
    Crypto.hashToBytes (SafeHash.extractHash h)
| null | https://raw.githubusercontent.com/input-output-hk/cardano-wallet/97e183d3a1999f8e5658a3883785a293b3beda21/lib/wallet/src/Cardano/Wallet/Read/Tx/Hash.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes # |
# LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE QuantifiedConstraints #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
module Cardano.Wallet.Read.Tx.Hash
( byronTxHash
, alonzoTxHash
, shelleyTxHash
, fromShelleyTxId
, getEraTxHash
)
where
import Prelude
import Cardano.Binary
( ToCBOR (..) )
import Cardano.Chain.UTxO
( ATxAux, taTx )
import Cardano.Crypto
( serializeCborHash )
import Cardano.Ledger.Core
( AuxiliaryData )
import Cardano.Ledger.Era
( Era (..) )
import Cardano.Ledger.Shelley.TxBody
( EraIndependentTxBody )
import Cardano.Wallet.Read
( Tx )
import Cardano.Wallet.Read.Eras
( EraFun (..), K (..) )
import Cardano.Wallet.Read.Tx.Eras
( onTx )
import qualified Cardano.Crypto as CryptoC
import qualified Cardano.Crypto.Hash as Crypto
import qualified Cardano.Ledger.Alonzo.Tx as Alonzo
import qualified Cardano.Ledger.Babbage.Tx as Babbage hiding
( ScriptIntegrityHash, TxBody )
import qualified Cardano.Ledger.Core as SL.Core
import qualified Cardano.Ledger.Crypto as SL
import qualified Cardano.Ledger.SafeHash as SafeHash
import qualified Cardano.Ledger.Shelley.API as SL
import qualified Cardano.Ledger.ShelleyMA as MA
import qualified Cardano.Ledger.TxIn as TxIn
-- | Era-indexed function extracting a transaction's hash, as raw
-- bytes, for any supported era.
getEraTxHash :: EraFun Tx (K Crypto.ByteString)
getEraTxHash = EraFun
    { byronFun = onTx $ K . byronTxHash
    , shelleyFun = onTx $ K . shelleyTxHash
    , allegraFun = onTx $ K . shelleyTxHash
    , maryFun = onTx $ K . shelleyTxHash
    , alonzoFun = onTx $ K . alonzoTxHash
    , babbageFun = onTx $ K . alonzoTxHash
    }
-- | Hash of a Byron-era transaction (hash of the CBOR-serialised tx).
byronTxHash :: ATxAux a -> Crypto.ByteString
byronTxHash = CryptoC.hashToBytes . serializeCborHash . taTx
-- | Hash of an Alonzo\/Babbage-era transaction body.
alonzoTxHash
    :: ( Crypto.HashAlgorithm (SL.HASH crypto)
       , SafeHash.HashAnnotated
            (SL.Core.TxBody era)
            EraIndependentTxBody
            crypto)
    => Babbage.ValidatedTx era
    -> Crypto.ByteString
alonzoTxHash (Alonzo.ValidatedTx bod _ _ _) = fromShelleyTxId $ TxIn.txid bod
-- | Hash of a Shelley-family (shelley\/allegra\/mary) transaction body.
shelleyTxHash
    :: ( Era x
       , ToCBOR (AuxiliaryData x)
       , ToCBOR (SL.Core.TxBody x)
       , ToCBOR (SL.Core.Witnesses x))
    => MA.Tx x
    -> Crypto.ByteString
shelleyTxHash
    (SL.Tx bod _ _) = fromShelleyTxId $ TxIn.txid bod
-- | Raw bytes of the hash inside a ledger 'SL.TxId'.
fromShelleyTxId :: SL.TxId crypto -> Crypto.ByteString
fromShelleyTxId (SL.TxId h) =
    Crypto.hashToBytes $ SafeHash.extractHash h
|
086dd0659525921e22412710f7f9eb6de322aceac43ad41586abb13723003c72 | airbus-seclab/bincat | npkcontext.mli |
C2Newspeak : compiles C code into Newspeak . Newspeak is a minimal language
well - suited for static analysis .
Copyright ( C ) 2007 , ,
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA
EADS Innovation Works - SE / CS
12 , rue Pasteur - BP 76 - 92152 Suresnes Cedex - France
email :
email :
EADS Innovation Works - SE / IS
12 , rue Pasteur - BP 76 - 92152 Suresnes Cedex - France
email : ( dot ) zennou ( at ) eads ( dot ) net
C2Newspeak: compiles C code into Newspeak. Newspeak is a minimal language
well-suited for static analysis.
Copyright (C) 2007 Charles Hymans, Olivier Levillain, Sarah Zennou
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Charles Hymans
EADS Innovation Works - SE/CS
12, rue Pasteur - BP 76 - 92152 Suresnes Cedex - France
email:
Olivier Levillain
email:
Sarah Zennou
EADS Innovation Works - SE/IS
12, rue Pasteur - BP 76 - 92152 Suresnes Cedex - France
email: sarah (dot) zennou (at) eads (dot) net
*)
(** The module Npkcontext allows cil2newspeak to keep track of the
    current location in the C files and to report warnings and errors
    to the user. It also regroups every command line option of
    cil2newspeak *)
(** Categories of C constructs that produce a diagnostic; each warning
    is tagged with a category so it can be individually accepted or
    ignored (see [report_accept_warning]/[report_ignore_warning]). *)
type error =
    Asm
  | Pragma
  | Pack
  | Volatile
  | DirtyCast
  | DirtySyntax
  | PartialFunDecl
  | MissingFunDecl
  | ForwardGoto
  | BackwardGoto
  | StrictSyntax
  | ExternGlobal
  | FlexArray
  | MultipleDef
  | GnuC
  | DisableOpt
  | DisableCheckOpt
  | TransparentUnion
  | ExternFunDef
  | SignedIndex
(** {1 Command line options} *)
(** If set, GNU C extensions are accepted (see the [GnuC] category). *)
val accept_gnuc : bool ref
(** When remove_temp is set, only used variables are kept in newspeak
    code *)
val remove_temp : bool ref
(** If set, flexible array members are accepted ([FlexArray]). *)
val accept_flex_array: bool ref
(* Presumably selects typed Newspeak output -- confirm. *)
val typed_npk: bool ref
(** If no_opt is set, then no code simplification is performed *)
val no_opt : bool ref
(* Verbosity toggles for the successive intermediate representations. *)
val verb_ast : bool ref
val verb_cir : bool ref
val verb_npko : bool ref
val verb_newspeak : bool ref
val verb_lowspeak: bool ref
val opt_checks: bool ref
(** If true, then the goto-elimination transformation is performed *)
val accept_goto: bool ref
(** Names of the files that are to be compiled/linked. The first
    string is the name of the file that needs to be read, the second is
    the initial name of the .c file; they differ when the files are
    preprocessed. *)
val input_files : string list ref
(** Compiles the input files to .no, without linking them into a .npk.
corresponds to -c option
*)
val compile_only : bool ref
(** Name of the result file of the process *)
val output_file : string ref
(** TODO: document that *)
val handle_cmdline_options : string -> string -> unit
(** Name of the xml output file *)
val xml_output : string ref
(** Name of ABI description file *)
val abi_file : string ref
(** {1 Location handling} *)
(** [set_loc cil_loc] translates a Cil.location cil_loc into a
Newspeak.location and stores it to track the position in the file *)
val set_loc : Newspeak.location -> unit
val forget_loc : unit -> unit
val get_loc : unit -> Newspeak.location
val get_fname : unit -> string
(** {1 Warnings/errors generation and display } *)
(* rename to report_warning *)
(* TODO: unify these functions!!! into one, with a level!!! *)
(* TODO: remove this function?? or rename? *)
val report_warning : string -> string -> unit
(* TODO: remove this function *)
(* TODO: clean up/simplify npkcontext interface *)
val report_strict_warning: string -> string -> unit
val report_ignore_warning: string -> string -> error -> unit
(** [report_accept_warning file_function message error_type] *)
val report_accept_warning: string -> string -> error -> unit
(** Throws an Invalid_argument exception with a message *)
val report_error : string -> string -> 'a
(** Displays a message to the user *)
val print_debug : string -> unit
(** [print_size sz] displays [sz] as the current size of the representation
in debug mode. *)
val print_size: int -> unit
(** writes all warnings into the xml_output file *)
val dump_xml_warns: unit -> unit
| null | https://raw.githubusercontent.com/airbus-seclab/bincat/493a03890b3b472fd198ce58c7e9280abd0f9f93/ocaml/src/npk/newspeak/npkcontext.mli | ocaml | * When remove_temp is set, only used variables are kept in newspeak
code
* If no_opt is set, then no code simplification is performed
* Compiles the input files to .no, without linking them into a .npk.
corresponds to -c option
* Name of the result file of the process
* TODO: document that
* Name of the xml output file
* [set_loc cil_loc] translates a Cil.location cil_loc into a
Newspeak.location and stores it to track the position in the file
* {1 Warnings/errors generation and display }
rename to report_warning
TODO: unify these functions!!! into one, with a level!!!
TODO: remove this function?? or rename?
TODO: remove this function
* Throws an Invalid_argument exception with a message
* Displays a message to the user
* [print_size sz] displays [sz] as the current size of the representation
in debug mode.
* writes all warnings into the xml_ouput file |
C2Newspeak : compiles C code into Newspeak . Newspeak is a minimal language
well - suited for static analysis .
Copyright ( C ) 2007 , ,
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA
EADS Innovation Works - SE / CS
12 , rue Pasteur - BP 76 - 92152 Suresnes Cedex - France
email :
email :
EADS Innovation Works - SE / IS
12 , rue Pasteur - BP 76 - 92152 Suresnes Cedex - France
email : ( dot ) zennou ( at ) eads ( dot ) net
C2Newspeak: compiles C code into Newspeak. Newspeak is a minimal language
well-suited for static analysis.
Copyright (C) 2007 Charles Hymans, Olivier Levillain, Sarah Zennou
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Charles Hymans
EADS Innovation Works - SE/CS
12, rue Pasteur - BP 76 - 92152 Suresnes Cedex - France
email:
Olivier Levillain
email:
Sarah Zennou
EADS Innovation Works - SE/IS
12, rue Pasteur - BP 76 - 92152 Suresnes Cedex - France
email: sarah (dot) zennou (at) eads (dot) net
*)
* The module Npkcontext allows cil2newspeak to keep track of the
current location in the C files and to report warnings and errors
to the user . It also regroups every command line option of
cil2newspeak
current location in the C files and to report warnings and errors
to the user. It also regroups every command line option of
cil2newspeak *)
type error =
Asm
| Pragma
| Pack
| Volatile
| DirtyCast
| DirtySyntax
| PartialFunDecl
| MissingFunDecl
| ForwardGoto
| BackwardGoto
| StrictSyntax
| ExternGlobal
| FlexArray
| MultipleDef
| GnuC
| DisableOpt
| DisableCheckOpt
| TransparentUnion
| ExternFunDef
| SignedIndex
* { 1 Comand line options }
val accept_gnuc : bool ref
val remove_temp : bool ref
val accept_flex_array: bool ref
val typed_npk: bool ref
val no_opt : bool ref
val verb_ast : bool ref
val verb_cir : bool ref
val verb_npko : bool ref
val verb_newspeak : bool ref
val verb_lowspeak: bool ref
val opt_checks: bool ref
* If true , then the goto elimination transformation of is performed
val accept_goto: bool ref
* Names of the files that are to be compiled / link . The first
string is the name of the file that need to be read , the second is the
initial name of the .c file ; they differ when the files are
preprocessed .
string is the name of the file that need to be read, the second is the
initial name of the .c file; they differ when the files are
preprocessed. *)
val input_files : string list ref
val compile_only : bool ref
val output_file : string ref
val handle_cmdline_options : string -> string -> unit
val xml_output : string ref
* Name of ABI description file
val abi_file : string ref
* { 1 Location handling }
val set_loc : Newspeak.location -> unit
val forget_loc : unit -> unit
val get_loc : unit -> Newspeak.location
val get_fname : unit -> string
val report_warning : string -> string -> unit
TODO : clean up / simplify npkcontext interface
val report_strict_warning: string -> string -> unit
val report_ignore_warning: string -> string -> error -> unit
* [ report_accept_warning file_function message error_type ]
val report_accept_warning: string -> string -> error -> unit
val report_error : string -> string -> 'a
val print_debug : string -> unit
val print_size: int -> unit
val dump_xml_warns: unit -> unit
|
b8d241083d9d6d76b674facbdee07adb22e72a45e255a435072b4be4ea3c345b | haskell-mafia/mafia | Ghc.hs | # LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
module Mafia.Ghc (
GhcVersion(..)
, mkGhcVersion
, renderGhcVersion
, getGhcVersion
, GhcTarget(..)
, getGhcTarget
, GhcError(..)
, renderGhcError
) where
import Control.Monad.Trans.Either (EitherT, left, runEitherT)
import qualified Data.Text as T
import Mafia.P
import Mafia.Package
import Mafia.Process
import System.IO (IO)
-- | A GHC version (e.g. @8.10.7@), as reported by
-- @ghc --numeric-version@.
newtype GhcVersion =
  GhcVersion {
      unGhcVersion :: Version
    } deriving (Eq, Ord, Show)
-- | The platform GHC targets, as reported by
-- @ghc --print-target-platform@.
newtype GhcTarget =
  GhcTarget {
      unGhcTarget :: Text
    } deriving (Eq, Ord, Show)
-- | Ways in which interrogating the installed GHC can fail.
data GhcError =
    GhcProcessError !ProcessError
  | GhcCannotParseVersion !Text
  | GhcNotInstalled
    deriving (Show)
-- | Render a 'GhcError' as a human-readable message, including
-- installation instructions when ghc is missing.
renderGhcError :: GhcError -> Text
renderGhcError = \case
  GhcProcessError e ->
    renderProcessError e
  GhcCannotParseVersion v ->
    "ghc returned an invalid version: " <> v
  GhcNotInstalled ->
    mconcat
      [ "ghc is not installed."
      , "\nTo install:"
      , "\n - download from /" -- NOTE(review): URL looks truncated -- confirm against upstream
      , "\n - ./configure --prefix=$HOME/haskell/ghc-$VERSION # or wherever you like to keep ghc"
      , "\n - make install"
      , "\n - ln -s $HOME/haskell/ghc-$VERSION $HOME/haskell/ghc"
      , "\n - add $HOME/haskell/ghc/bin to your $PATH" ]
-- | Build a 'GhcVersion' from its numeric components.
mkGhcVersion :: [Int] -> GhcVersion
mkGhcVersion =
  GhcVersion . makeVersion
-- | Render a 'GhcVersion' back to its textual form.
renderGhcVersion :: GhcVersion -> Text
renderGhcVersion v =
  renderVersion (unGhcVersion v)
-- | Ask the installed ghc for its version.
--
-- Fails with 'GhcNotInstalled' when ghc cannot be run, or
-- 'GhcCannotParseVersion' when its output does not parse.
getGhcVersion :: EitherT GhcError IO GhcVersion
getGhcVersion = do
  v0 <- ghc "--numeric-version"
  case parseVersion v0 of
    Nothing ->
      left $ GhcCannotParseVersion v0
    Just v ->
      pure $ GhcVersion v
-- | Ask the installed ghc for its target platform string.
getGhcTarget :: EitherT GhcError IO GhcTarget
getGhcTarget = do
  target <- ghc "--print-target-platform"
  pure (GhcTarget target)
-- | Run @ghc@ with a single argument and return its whitespace-trimmed
-- stdout. Any process failure is mapped to 'GhcNotInstalled'.
ghc :: Text -> EitherT GhcError IO Text
ghc argument = do
  result <- runEitherT (call GhcProcessError "ghc" [argument])
  case result of
    Left _ ->
      left GhcNotInstalled
    Right (Out out) ->
      pure $ T.strip out
| null | https://raw.githubusercontent.com/haskell-mafia/mafia/529440246ee571bf1473615e6218f52cd1e990ae/src/Mafia/Ghc.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
module Mafia.Ghc (
GhcVersion(..)
, mkGhcVersion
, renderGhcVersion
, getGhcVersion
, GhcTarget(..)
, getGhcTarget
, GhcError(..)
, renderGhcError
) where
import Control.Monad.Trans.Either (EitherT, left, runEitherT)
import qualified Data.Text as T
import Mafia.P
import Mafia.Package
import Mafia.Process
import System.IO (IO)
newtype GhcVersion =
GhcVersion {
unGhcVersion :: Version
} deriving (Eq, Ord, Show)
newtype GhcTarget =
GhcTarget {
unGhcTarget :: Text
} deriving (Eq, Ord, Show)
data GhcError =
GhcProcessError !ProcessError
| GhcCannotParseVersion !Text
| GhcNotInstalled
deriving (Show)
renderGhcError :: GhcError -> Text
renderGhcError = \case
GhcProcessError e ->
renderProcessError e
GhcCannotParseVersion v ->
"ghc returned an invalid version: " <> v
GhcNotInstalled ->
mconcat
[ "ghc is not installed."
, "\nTo install:"
, "\n - download from /"
, "\n - ./configure --prefix=$HOME/haskell/ghc-$VERSION # or wherever you like to keep ghc"
, "\n - make install"
, "\n - ln -s $HOME/haskell/ghc-$VERSION $HOME/haskell/ghc"
, "\n - add $HOME/haskell/ghc/bin to your $PATH" ]
-- | Build a 'GhcVersion' from its numeric components.
mkGhcVersion :: [Int] -> GhcVersion
mkGhcVersion vs =
  GhcVersion $ makeVersion vs
-- | Render a 'GhcVersion' back to its textual form.
renderGhcVersion :: GhcVersion -> Text
renderGhcVersion =
  renderVersion . unGhcVersion
-- | Ask the installed ghc for its version; fails with
-- 'GhcNotInstalled' or 'GhcCannotParseVersion'.
getGhcVersion :: EitherT GhcError IO GhcVersion
getGhcVersion = do
  v0 <- ghc "--numeric-version"
  case parseVersion v0 of
    Nothing ->
      left $ GhcCannotParseVersion v0
    Just v ->
      pure $ GhcVersion v
-- | Ask the installed ghc for its target platform string.
getGhcTarget :: EitherT GhcError IO GhcTarget
getGhcTarget = do
  GhcTarget <$> ghc "--print-target-platform"
-- | Run @ghc@ with one argument and return its trimmed stdout;
-- process failure is mapped to 'GhcNotInstalled'.
ghc :: Text -> EitherT GhcError IO Text
ghc argument = do
  result <- runEitherT (call GhcProcessError "ghc" [argument])
  case result of
    Left _ ->
      left GhcNotInstalled
    Right (Out out) ->
      pure $ T.strip out
|
f0f8bad41853dd8e5283dcedd3d4ea181c8dd90c6fad41e3a64a9be035f511a1 | jwiegley/notes | main.hs | {-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Applicative
import Control.Exception
import Control.Monad
import qualified Data.ByteString as B
import Data.Conduit
import Data.Conduit.Filesystem
import qualified Data.Conduit.List as CL
import Data.List
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Filesystem.Path.CurrentOS
import qualified Prelude
import Prelude hiding (FilePath)
main :: IO ()
main = preludeStyle "cybersource.h"
conduitStyle :: FilePath -> IO ()
conduitStyle path = do
ls <- T.lines . T.decodeUtf8 . B.concat
<$> runResourceT (sourceFile path $$ CL.consume)
case find ("WS-Header" `T.isInfixOf`) ls of
Just _ -> return ()
Nothing ->
let Just idx = findIndex (" Import " `T.isInfixOf`) ls
in runResourceT $
(yield $ T.encodeUtf8 . T.unlines
$ take (idx + 3) ls
++ ["#import \"WS-Header.h\""]
++ drop (idx + 3) ls) $$ sinkFile path
preludeStyle :: Prelude.FilePath -> IO ()
preludeStyle path = do
ls <- lines <$> readFile path
void $ evaluate (length ls)
case find ("WS-Header" `isInfixOf`) ls of
Just _ -> return ()
Nothing ->
let Just idx = findIndex (" Import " `isInfixOf`) ls
in writeFile path
$ unlines
$ take (idx + 2) ls
++ ["#import \"WS-Header.h\""]
++ drop (idx + 2) ls
| null | https://raw.githubusercontent.com/jwiegley/notes/24574b02bfd869845faa1521854f90e4e8bf5e9a/gists/f719a3d41696d48f6005/gists/6084024/main.hs | haskell | # LANGUAGE OverloadedStrings # |
module Main where
import Control.Applicative
import Control.Exception
import Control.Monad
import qualified Data.ByteString as B
import Data.Conduit
import Data.Conduit.Filesystem
import qualified Data.Conduit.List as CL
import Data.List
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Filesystem.Path.CurrentOS
import qualified Prelude
import Prelude hiding (FilePath)
main :: IO ()
main = preludeStyle "cybersource.h"
conduitStyle :: FilePath -> IO ()
conduitStyle path = do
ls <- T.lines . T.decodeUtf8 . B.concat
<$> runResourceT (sourceFile path $$ CL.consume)
case find ("WS-Header" `T.isInfixOf`) ls of
Just _ -> return ()
Nothing ->
let Just idx = findIndex (" Import " `T.isInfixOf`) ls
in runResourceT $
(yield $ T.encodeUtf8 . T.unlines
$ take (idx + 3) ls
++ ["#import \"WS-Header.h\""]
++ drop (idx + 3) ls) $$ sinkFile path
preludeStyle :: Prelude.FilePath -> IO ()
preludeStyle path = do
ls <- lines <$> readFile path
void $ evaluate (length ls)
case find ("WS-Header" `isInfixOf`) ls of
Just _ -> return ()
Nothing ->
let Just idx = findIndex (" Import " `isInfixOf`) ls
in writeFile path
$ unlines
$ take (idx + 2) ls
++ ["#import \"WS-Header.h\""]
++ drop (idx + 2) ls
|
45c742357f01ee11f139bc27cc2a49e39b1c7742c490f68124b5edf8041194c6 | fp-works/2019-winter-Haskell-school | Exercise02Spec.hs | module CIS194.Homework01.Exercise02Spec where
import CIS194.Homework01.Exercise02
import Test.Tasty.Hspec
spec_doubleEveryOther :: Spec
spec_doubleEveryOther = do
it "returns an empty list for an empty string" $
doubleEveryOther [] `shouldBe` []
it "returns the same list itself for a single element list" $
doubleEveryOther [1] `shouldBe` [1]
it "returns the expected list for a multiple elements list" $ do
doubleEveryOther [1, 2, 3, 4] `shouldBe` [2, 2, 6, 4]
doubleEveryOther [1, 2, 3, 4, 5] `shouldBe` [1, 4, 3, 8, 5]
| null | https://raw.githubusercontent.com/fp-works/2019-winter-Haskell-school/823b67f019b9e7bc0d3be36711c0cc7da4eba7d2/cis194/week1/daniel-deng/test/CIS194/Homework01/Exercise02Spec.hs | haskell | module CIS194.Homework01.Exercise02Spec where
import CIS194.Homework01.Exercise02
import Test.Tasty.Hspec
spec_doubleEveryOther :: Spec
spec_doubleEveryOther = do
it "returns an empty list for an empty string" $
doubleEveryOther [] `shouldBe` []
it "returns the same list itself for a single element list" $
doubleEveryOther [1] `shouldBe` [1]
it "returns the expected list for a multiple elements list" $ do
doubleEveryOther [1, 2, 3, 4] `shouldBe` [2, 2, 6, 4]
doubleEveryOther [1, 2, 3, 4, 5] `shouldBe` [1, 4, 3, 8, 5]
| |
09884b381f24b42516fbdcdd6996708b4f5a1f75da8eb53abcfe474cbd33de6c | racket/pict | code.rkt | #lang racket/base
(require pict/private/pict
(prefix-in r: racket/base)
mzlib/class
mzlib/list
(only-in scheme/list last)
racket/draw
mzlib/unit
(for-syntax racket/base)
(only-in mzscheme make-namespace))
(provide define-code code^ code-params^ code@
(for-syntax prop:code-transformer
code-transformer?
make-code-transformer))
;; Record `extension` as the "last" sub-pict of `p`, so later code
;; appends continue from the end of the bottom line.
(define (to-code-pict p extension)
  (use-last* p extension))
;; A pict is a "code pict" when it records a last sub-pict.
(define (code-pict? p)
  (if (pict-last p) #t #f))
;; The pict forming the bottom line of a code pict.
(define (code-pict-bottom-line p)
  (single-pict (pict-last p)))
;; `pict-last` may report a chain of sub-picts as a list; the final
;; element is the actual pict.
(define (single-pict p)
  (cond
    [(list? p) (last p)]
    [else p]))
;; Build an append combinator over the given base appender (note: the
;; parameter shadows the imported `htl-append`). When the first pict
;; records a "last" line, the second pict is lined up after that
;; bottom line instead of after the whole pict.
(define (make-code-append htl-append)
  (case-lambda
   [(a b) (let ([a-last (pict-last a)])
            (if a-last
                ;; Append `b` to an invisible copy of a's last line,
                ;; then overlay that extension onto `a` at the last
                ;; line's position.
                (let* ([a-dup (launder (ghost (single-pict a-last)))]
                       [extension (htl-append a-dup b)])
                  (let ([p (let-values ([(x y) (lt-find a a-last)]
                                        [(dx dy) (lt-find extension a-dup)])
                             (let ([ex (- x dx)]
                                   [ey (- y dy)])
                               (if (negative? ey)
                                   (lt-superimpose
                                    (inset a 0 (- ey) 0 0)
                                    (inset extension ex 0 0 0))
                                   (lt-superimpose
                                    a
                                    (inset extension ex ey 0 0)))))])
                    (use-last* p b)))
                (htl-append a b)))]
   [(a) a]
   [(a . rest)
    ((make-code-append htl-append)
     a
     (apply (make-code-append htl-append) rest))]))
;; Horizontal appends that continue from the recorded bottom line.
(define code-htl-append (make-code-append htl-append))
(define code-hbl-append (make-code-append hbl-append))
;; Vertical append for code; the result's "last" line is the final pict.
(define code-vl-append
  (case-lambda
   [(sep a b) (to-code-pict (vl-append sep a b) b)]
   [(sep a) a]
   [(sep a . rest)
    (code-vl-append sep a (apply code-vl-append sep rest))]))
;; Compile-time support for `code` transformers: a structure-type
;; property that lets bindings rewrite how they are typeset.
(begin-for-syntax
 ;; Property value: a procedure (transformer stx) -> syntax-or-#f.
 ;; The guard requires an arity-2 procedure.
 (define-values (prop:code-transformer code-transformer? code-transformer-ref)
   (make-struct-type-property 'code-transformer
                              (lambda (proc info)
                                (unless (and (procedure? proc)
                                             (procedure-arity-includes? proc 2))
                                  (raise-argument-error 'guard-for-code-transformer
                                                        "(procedure-arity-includes/c 2)"
                                                        proc))
                                proc)))
 ;; Wrap either a replacement syntax object (used only for identifier
 ;; positions) or an arity-1 procedure as a code transformer.
 (define make-code-transformer
   (let ()
     (define-struct code-transformer (proc)
       #:property prop:code-transformer (lambda (r stx)
                                          (let ([proc (code-transformer-proc r)])
                                            (if (syntax? proc)
                                                (if (identifier? stx)
                                                    proc
                                                    #f) ; => render normally
                                                (proc stx)))))
     (lambda (proc)
       (unless (or (syntax? proc)
                   (and (procedure? proc)
                        (procedure-arity-includes? proc 1)))
         (raise-argument-error 'make-code-transformer
                               "(or/c syntax? (procedure-arity-includes/c 1))"
                               proc))
       (make-code-transformer proc))))
 ;; Apply `id`'s transformer to `stx`; if it produces a replacement,
 ;; re-run `recur` on it wrapped in the escape form, else `default`.
 (define (transform id stx uncode-stx recur default)
   (define r (syntax-local-value id (lambda () #f)))
   (define t ((code-transformer-ref r) r stx))
   (if t
       (recur (datum->syntax stx
                             (list uncode-stx t)
                             stx
                             stx))
       (default stx))))
;; (define-code code typeset-code [uncode]) defines a macro `code`
;; that converts its (quoted) argument into an expression rebuilding
;; the syntax objects -- preserving source locations and paren shapes
;; -- and passes the result to `typeset-code`. `uncode` (default
;; `unsyntax`) escapes back to ordinary expressions.
(define-syntax (define-code stx)
  (syntax-case stx ()
    [(_ code typeset-code uncode)
     (syntax/loc stx
       (define-syntax (code stx)
         ;; Turn a (possibly nested) value into an s-expression that,
         ;; when evaluated, reconstructs it with source locations.
         (define (stx->loc-s-expr v)
           (cond
            [(syntax? v)
             (define (default v)
               (let ([mk `(datum->syntax
                           #f
                           ,(syntax-case v (uncode)
                              [(uncode e) #'e]
                              [_ (stx->loc-s-expr (syntax-e v))])
                           (list 'code
                                 ,(syntax-line v)
                                 ,(syntax-column v)
                                 ,(syntax-position v)
                                 ,(syntax-span v)))])
                 (let ([prop (syntax-property v 'paren-shape)])
                   (if prop
                       `(syntax-property ,mk 'paren-shape ,prop)
                       mk))))
             ;; Give code transformers bound at the head (or as a lone
             ;; identifier) a chance to rewrite the typeset form.
             (syntax-case v ()
               [(id e (... ...))
                (and (identifier? #'id)
                     (code-transformer? (syntax-local-value #'id (lambda () #f))))
                (transform #'id v (quote-syntax uncode) stx->loc-s-expr default)]
               [id
                (and (identifier? #'id)
                     (code-transformer? (syntax-local-value #'id (lambda () #f))))
                (transform #'id v (quote-syntax uncode) stx->loc-s-expr default)]
               [_ (default v)])]
            [(pair? v) `(cons ,(stx->loc-s-expr (car v))
                              ,(stx->loc-s-expr (cdr v)))]
            [(vector? v) `(vector ,@(map
                                     stx->loc-s-expr
                                     (vector->list v)))]
            [(box? v) `(box ,(stx->loc-s-expr (unbox v)))]
            [(null? v) 'null]
            [else `(quote ,v)]))
         (define (cvt s)
           (datum->syntax #'here (stx->loc-s-expr s)))
         (syntax-case stx ()
           [(_ expr) #`(typeset-code #,(cvt #'expr))]
           [(_ expr (... ...))
            #`(typeset-code #,(cvt
                               ;; Avoid a syntax location for the synthesized `code:line` wrapper,
                               ;; otherwise the `expr`s will be arranged relative to it:
                               (datum->syntax #f (cons 'code:line (datum->syntax #f (syntax-e #'(expr (... ...))))))))])))]
    [(_ code typeset-code) #'(define-code code typeset-code unsyntax)]))
;; Exports of the code-typesetting unit below.
(define-signature code^
  (typeset-code code-pict-bottom-line-pict pict->code-pict
   comment-color keyword-color id-color const-color literal-color
   code-align current-code-tt current-code-font
   current-keyword-list current-const-list current-literal-list
   code-colorize-enabled code-colorize-quote-enabled
   code-italic-underscore-enabled code-scripts-enabled
   current-comment-color current-keyword-color
   current-base-color current-id-color current-literal-color current-const-color
   current-reader-forms
   mzscheme-const-list
   racket/base-const-list))
;; Parameters the unit imports: the code font size and the vertical
;; separation between code lines.
(define-signature code-params^
  (current-font-size
   current-code-line-sep))
;; (define-computed id expr): references to `id` re-evaluate `expr`
;; through a thunk, so picts track the current font parameters.
;; NOTE(review): the application-position template expands to
;; `,illegal-use-of-once`, apparently a deliberate error trap for
;; operator-position uses -- confirm.
(define-syntax (define-computed stx)
  (syntax-case stx ()
   [(_ id v)
    #'(begin
        (define (get-val) v)
        (define-syntax id
          (syntax-id-rules (set!)
           [(x (... ...)) ,illegal-use-of-once]
           [x (get-val)])))]))
;; Find which line `stx' ends on, #f for unknown
;; Line number on which `v` ends, or #f when unknown: recur into the
;; innermost/last element first, falling back to the object's own line.
(define (syntax-end-line v)
  (cond
    [(syntax? v) (or (syntax-end-line (syntax-e v))
                     (syntax-line v))]
    [(pair? v) (or (syntax-end-line (cdr v))
                   (syntax-end-line (car v)))]
    [(vector? v) (syntax-end-line (reverse (vector->list v)))]
    [else #f]))
;; Find which column `stx' ends on if it's not on `line'
;; Column just past the end of `stx`, but only when it ends on a line
;; other than `line`; #f otherwise. `delta` counts closing parens
;; contributed by enclosing pairs.
(define (syntax-end-column stx line delta)
  (cond
   [(syntax? stx) (or (syntax-end-column (syntax-e stx) line delta)
                      (let ([line2 (syntax-line stx)])
                        (and line line2
                             (not (= line line2))
                             (let ([span (syntax-span stx)]
                                   [col (syntax-column stx)])
                               (and span col (+ col span delta))))))]
   ;; NOTE(review): `(and (syntax? (cdr stx)) (null? (cdr stx)))` can
   ;; never be true -- was `(null? (syntax-e (cdr stx)))` intended?
   [(pair? stx) (or (syntax-end-column (cdr stx) line (+ delta 1))
                    (and (or (null? (cdr stx))
                             (and (syntax? (cdr stx)) (null? (cdr stx))))
                         (syntax-end-column (car stx) line (+ delta 1))))]
   [else #f]))
(define-unit code@
(import code-params^)
(export code^)
  ;; Font style used for code text (default: bold modern).
  (define current-code-font (make-parameter `(bold . modern)))
  ;; Render a string in the current code font and size.
  (define (default-tt s)
    (text s (current-code-font) (current-font-size)))
  ;; Hook: the function used to typeset literal code tokens.
  (define current-code-tt (make-parameter default-tt))
  (define (tt s)
    ((current-code-tt) s))
  ;; Re-baseline `p`: overlay it onto an empty pict of the same size
  ;; whose ascent is the full height (and descent 0), and refocus.
  (define (code-align p)
    (let ([b (dc void
                 (pict-width p)
                 (pict-height p)
                 (pict-height p)
                 0)])
      (refocus (cc-superimpose p b) b)))
(define (code-pict-bottom-line-pict p)
(if (code-pict? p)
(code-pict-bottom-line p)
#f))
(define (pict->code-pict p bottom-line)
(if bottom-line
(to-code-pict p bottom-line)
p))
(define (get-vars/bindings ns require-spec)
(define ns (let ([n (make-namespace)])
(parameterize ([current-namespace n])
(namespace-require/copy require-spec))
n))
(define bindings (namespace-mapped-symbols ns))
(define vars (filter (lambda (n)
(not (eq? 'nope
(namespace-variable-value n #f (lambda () 'nope) ns))))
bindings))
(values vars bindings))
  ;; Bindings exported by the two base languages; these seed the
  ;; keyword/const lists below.
  (define-values (mzscheme-vars mzscheme-bindings) (get-vars/bindings (make-namespace) 'mzscheme))
  (define-values (racket/base-vars racket/base-bindings) (get-vars/bindings (r:make-base-namespace) 'racket/base))
  ;; Identifiers (as strings) typeset in the keyword style: by default
  ;; every mzscheme/racket-base binding that is NOT a variable, i.e.
  ;; the syntactic forms.
  (define current-keyword-list
    (make-parameter
     (let ([ht (make-hasheq)])
       ;; mark variables so the filter below drops them
       (for-each (lambda (n) (hash-set! ht n #f))
                 mzscheme-vars)
       (for-each (lambda (n) (hash-set! ht n #f))
                 racket/base-vars)
       (map symbol->string
            (filter (lambda (n)
                      (hash-ref ht n #t))
                    (append mzscheme-bindings
                            racket/base-bindings))))))
  ;; Extra identifiers (as strings) typeset in the const style.
  (define current-const-list
    (make-parameter '()))
  ;; Extra identifiers (as strings) typeset in the literal style.
  (define current-literal-list
    (make-parameter '()))
  ;; All variable names of each base language, usable as const lists.
  (define mzscheme-const-list
    (map symbol->string mzscheme-vars))
  (define racket/base-const-list
    (map symbol->string racket/base-vars))
  ;; Master switch for colorization.
  (define code-colorize-enabled
    (make-parameter #t))
  ;; Colorization of quoted forms (presumably; confirm at use sites).
  (define code-colorize-quote-enabled
    (make-parameter #t))
  ;; When enabled, `_id` renders `id` in italics (see colorize-id).
  (define code-italic-underscore-enabled (make-parameter #t))
  ;; When enabled, `x_1` / `x^2` render as sub/superscripts.
  (define code-scripts-enabled (make-parameter #t))
(define (maybe-colorize p c)
(if (code-colorize-enabled)
(colorize p c)
p))
  ;; Default colors per token class, each overridable through its
  ;; current-* parameter.
  (define current-base-color (make-parameter "brown"))
  (define keyword-color "black")
  (define current-keyword-color (make-parameter keyword-color))
  (define id-color "navy")
  (define current-id-color (make-parameter id-color))
  (define literal-color (make-object color% 51 135 39))
  (define current-literal-color (make-parameter literal-color))
  (define const-color (make-object color% #x99 0 0))
  (define current-const-color (make-parameter const-color))
  ;; note: snapshots the base color's default at unit initialization
  (define comment-color (current-base-color))
  (define current-comment-color (make-parameter comment-color))
  ;; Reader-level forms given special treatment when typesetting.
  (define current-reader-forms (make-parameter '(quote
                                                 quasiquote
                                                 unquote unquote-splicing
                                                 syntax
                                                 quasisyntax
                                                 unsyntax unsyntax-splicing)))
  ;; Paren/quote glyph picts; recomputed at each use (via
  ;; define-computed) so they track the current font parameters.
  (define-computed open-paren-p (tt "("))
  (define-computed close-paren-p (tt ")"))
  (define-computed open-sq-p (tt "["))
  (define-computed close-sq-p (tt "]"))
  (define-computed open-curly-p (tt "{"))
  (define-computed close-curly-p (tt "}"))
  (define-computed quote-p (tt "'"))
  (define-computed unquote-p (tt ","))
  (define-computed unquote-splicing-p (tt ",@"))
  (define-computed quasiquote-p (tt "`"))
  (define-computed syntax-p (tt "#'"))
  (define-computed unsyntax-p (tt "#,"))
  (define-computed unsyntax-splicing-p (tt "#,@"))
  (define-computed quasisyntax-p (tt "#`"))
  (define-computed semi-p (tt "; "))
(define (comment-mode? mode)
(eq? mode 'comment))
(define-computed dot-p (tt "."))
  ;; Choose a color from the rendering mode and token type, then apply
  ;; it (subject to code-colorize-enabled). Numeric modes use the
  ;; literal color.
  (define (mode-colorize mode type p)
    (maybe-colorize
     p
     (case mode
       [(literal) (current-literal-color)]
       [(comment) (current-comment-color)]
       [else (cond
              [(number? mode) (current-literal-color)]
              [(eq? type 'keyword) (current-keyword-color)]
              [(eq? type 'literal) (current-literal-color)]
              [(eq? type 'const) (current-const-color)]
              [(eq? type 'id) (current-id-color)]
              [else (current-base-color)])])))
  ;; Opening bracket pict for `stx`, honoring its 'paren-shape
  ;; property; contract/line modes render no bracket at all.
  (define (get-open mode stx)
    (if (memq mode '(contract line))
        (blank)
        (mode-colorize
         mode #f
         (case (syntax-property stx 'paren-shape)
           [(#\[) open-sq-p]
           [(#\{) open-curly-p]
           [else open-paren-p]))))
  ;; Closing bracket pict for `stx`; mirror image of get-open.
  (define (get-close mode stx)
    (if (memq mode '(contract line))
        (blank)
        (mode-colorize
         mode #f
         (case (syntax-property stx 'paren-shape)
           [(#\[) close-sq-p]
           [(#\{) close-curly-p]
           [else close-paren-p]))))
  ;; Append the pending closing brackets in `closes` (a list of
  ;; (mode . stx) pairs) after `p`; contract/line entries contribute
  ;; nothing. `force-line` pushes the first real close onto a new line.
  (define (add-close p closes [force-line #f])
    (cond
     [(null? closes) p]
     [(memq (caar closes) '(contract line))
      (add-close p (cdr closes) force-line)]
     [else
      (let ([p (if force-line
                   (vl-append p (tt ""))
                   p)])
        (add-close (code-hbl-append p (get-close (caar closes) (cdar closes)))
                   (cdr closes)
                   #f))]))
(define (pad-left space p)
(if (= 0 space)
p
(code-htl-append (tt (make-string space #\space)) p)))
(define (pad-bottom space p)
(if (= 0 space)
p
(code-vl-append (current-code-line-sep) (tt " ") (pad-bottom (sub1 space) p))))
(define (colorize-id str mode)
(cond
[(and (code-italic-underscore-enabled)
((string-length str) . > . 1)
(char=? #\_ (string-ref str 0))
(not (char=? #\_ (string-ref str 1))))
(mode-colorize
mode 'id
(text (substring str 1) `(italic . ,(current-code-font)) (current-font-size)))]
[(and (code-scripts-enabled)
(regexp-match #rx"^(.+)_([0-9a-z()+-]+)\\^([0-9a-z()+-]+)$" str))
=> (lambda (m)
(hbl-append (colorize-id (cadr m) mode)
(cc-superimpose
(text (caddr m) `(subscript . ,(current-code-font)) (current-font-size))
(text (cadddr m) `(superscript . ,(current-code-font)) (current-font-size)))))]
[(and (code-scripts-enabled)
(regexp-match #rx"^(.+)\\^([0-9a-z()+-]+)_([0-9a-z()+-]+)$" str))
=> (lambda (m)
(hbl-append (colorize-id (cadr m) mode)
(cc-superimpose
(text (cadddr m) `(subscript . ,(current-code-font)) (current-font-size))
(text (caddr m) `(superscript . ,(current-code-font)) (current-font-size)))))]
[(and (code-scripts-enabled)
(regexp-match #rx"^(.+)\\^([0-9a-z()+-]+)$" str))
=> (lambda (m)
(hbl-append (colorize-id (cadr m) mode)
(text (caddr m) `(superscript . ,(current-code-font)) (current-font-size))))]
[(and (code-scripts-enabled)
(regexp-match #rx"^(.+)_([0-9a-z()+-]+)$" str))
=> (lambda (m)
(hbl-append (colorize-id (cadr m) mode)
(text (caddr m) `(subscript . ,(current-code-font)) (current-font-size))))]
[else
(mode-colorize
mode
(cond
[(member str (current-keyword-list)) 'keyword]
[(member str (current-const-list)) 'const]
[(member str (current-literal-list)) 'literal]
[else 'id])
(tt str))]))
(define (sub-mode mode)
(case mode
[(line cond local) #f]
[(template-cond) 'template]
[(contract) 'comment]
[else mode]))
(define (cond? s)
(memq (syntax-e s) '(cond))) ; syntax-rules syntax-case)))
(define (local? s)
(memq (syntax-e s) '(local)))
(define (get-span stx)
(syntax-case stx (code:blank)
[code:blank 1]
[_ (or (syntax-span stx) 1)]))
(define (color-semi-p)
(mode-colorize 'comment #f semi-p))
(define (add-semis p)
(let loop ([p p] [semis (color-semi-p)])
(if ((pict-height p) . > . (+ (pict-height semis) 1))
(loop p (vl-append (current-code-line-sep) (color-semi-p) semis))
(htl-append semis p))))
(define (add-unquote unquote-p loop x closes mode)
(let ([mode (cond
[(number? mode) (if (zero? mode)
#f
(sub1 mode))]
[else mode])])
(code-htl-append (mode-colorize mode 'keyword unquote-p)
(loop x closes mode))))
(define (typeset-code stx)
(let loop ([stx stx][closes null][mode #f])
(syntax-case* stx (quote unquote unquote-splicing quasiquote
syntax-unquote syntax unsyntax
unsyntax-splicing quasisyntax
code:contract code:comment code:line
code:template code:blank $)
(lambda (a b) (eq? (syntax-e a) (syntax-e b)))
[() (add-close (htl-append (get-open mode stx) (get-close mode stx))
closes)]
[code:blank (add-close (tt " ")
closes)]
[$ (colorize-id "|" closes)]
[(quote x)
(memq 'quote (current-reader-forms))
(code-htl-append (mode-colorize mode 'literal quote-p)
(loop #'x closes (if (or (not (code-colorize-quote-enabled))
(comment-mode? mode))
mode
'literal)))]
[(unquote x)
(memq 'unquote (current-reader-forms))
(add-unquote unquote-p loop #'x closes mode)]
[(unquote-splicing x)
(memq 'unquote-splicing (current-reader-forms))
(add-unquote unquote-splicing-p loop #'x closes mode)]
[(quasiquote x)
(memq 'quasiquote (current-reader-forms))
(code-htl-append (mode-colorize mode 'keyword quasiquote-p)
(loop #'x closes (cond
[(not (code-colorize-quote-enabled)) mode]
[(comment-mode? mode) mode]
[(number? mode) (add1 mode)]
[else 0])))]
[(syntax x)
(memq 'syntax (current-reader-forms))
(code-htl-append (mode-colorize mode 'literal syntax-p)
(loop #'x closes mode))]
[(unsyntax x)
(memq 'unsyntax (current-reader-forms))
(code-htl-append (mode-colorize mode 'literal unsyntax-p)
(loop #'x closes mode))]
[(unsyntax-splicing x)
(memq 'unsyntax-splicing (current-reader-forms))
(code-htl-append (mode-colorize mode 'literal unsyntax-splicing-p)
(loop #'x closes mode))]
[(quasisyntax x)
(memq 'unsyntax-splicing (current-reader-forms))
(code-htl-append (mode-colorize mode 'literal quasisyntax-p)
(loop #'x closes mode))]
[(code:contract i ...)
(add-semis (loop (datum->syntax #f (syntax->list #'(i ...)))
closes 'contract))]
[(code:line i ...)
(loop (datum->syntax #f (syntax->list #'(i ...))
(syntax-case stx ()
[(_ a . b)
(let ([src (syntax-source stx)]
[line (syntax-line stx)]
[col (syntax-column stx)]
[pos (syntax-position stx)]
[span (syntax-span stx)]
[a-pos (syntax-position #'a)])
(if (and pos a-pos (a-pos . > . pos))
(vector src
line
(and col (+ col (- a-pos pos)))
a-pos
(and span (max 0 (- span (- a-pos pos)))))
stx))]
[else stx]))
closes 'line)]
[(code:comment s ...)
(let ([p
(apply htl-append
(color-semi-p)
(map (lambda (s)
(define datum (syntax-e s))
(if (pict-convertible? datum)
datum
(if (string? datum)
(maybe-colorize (tt datum) (current-comment-color))
(raise-argument-error 'code:comment "string?" datum))))
(syntax->list #'(s ...))))])
;; Ungraceful handling of ungraceful closes by adding a line
;; --- better than sticking them to the right of the comment, at least
(add-close p closes 'force-line))]
[(code:template i ...)
(add-semis (loop #'(code:line i ...) closes 'template))]
[(a b i ... c)
(let ([pos (for/fold ([pos (syntax-position #'b)]) ([i (in-list (syntax->list #'(i ... c)))])
(and pos
(syntax-position i)
((syntax-position i) . > . pos)
(syntax-position i)))])
(and pos
(syntax-position #'a)
((syntax-position #'a) . > . (syntax-position #'b))
((syntax-position #'a) . < . (syntax-position #'c))))
;; position of `a' is after `b', while everything else is in
;; order, so print as infix-dot notation
(loop
(datum->syntax
stx
(cons #'b
(let loop ([l (syntax->list #'(i ... c))])
(cond
[((syntax-position #'a) . < . (syntax-position (car l)))
(let ([src (syntax-source #'a)]
[pos (syntax-position #'a)]
[line (syntax-line #'a)]
[col (syntax-column #'a)]
[span (syntax-span #'a)])
(list* (datum->syntax #f '|.|
(vector src line
(and col (max 0 (- col 2)))
(max 1 (- pos 2))
1))
#'a
(datum->syntax #f '|.|
(vector src line
(and col (+ col 1 span))
(+ pos 1 span)
1))
l))]
[else (cons (car l) (loop (cdr l)))])))
stx)
closes
mode)]
[(i ...)
(let ([is (syntax->list #'(i ...))])
;; Convert each i to a picture, include close paren in last item:
(let ([ips (let iloop ([is is][sub-mode (sub-mode mode)])
(cond
[(null? (cdr is)) (list (loop (car is) (cons (cons mode stx) closes) sub-mode))]
[else (cons (loop (car is) null sub-mode)
(iloop (cdr is) (cond
[(cond? (car is))
(if (eq? mode 'template)
'template-cond
'cond)]
[(local? (car is))
'local]
[(eq? sub-mode 'local)
#f]
[else
sub-mode])))]))])
;; Combine the parts:
(let ([left (or (syntax-column stx) +inf.0)])
(let loop ([stxs is]
[ps ips]
[line-so-far (get-open mode stx)]
[col (+ left 1)]
[line (syntax-line stx)]
[always-space? #f]
[col->width (make-hash)])
(cond
[(null? ps) (blank)]
[(or (not line)
(= line (or (syntax-line (car stxs)) line)))
(let* ([space (if (syntax-column (car stxs))
(inexact->exact
(max (if always-space? 1 0) (- (syntax-column (car stxs)) col)))
(if always-space? 1 0))]
[p (code-htl-append
line-so-far
(pad-left space (car ps)))])
(unless (equal? +inf.0 (+ space col))
(hash-set! col->width
(+ space col)
(pict-width (code-htl-append line-so-far (pad-left space (blank))))))
(if (null? (cdr stxs))
p
(loop (cdr stxs)
(cdr ps)
p
(or (syntax-end-column (car stxs) line 0)
(if (not (syntax-column (car stxs)))
+inf.0
(+ col space (get-span (car stxs)))))
(or (syntax-end-line (car stxs))
line
(syntax-line (car stxs)))
#t
col->width)))]
[else
;; Start on next line:
(code-vl-append
(current-code-line-sep)
line-so-far
(let* ([space (max 0 (- (or (syntax-column (car stxs)) 0) left))]
[p
(let/ec k
(code-htl-append
(blank (hash-ref col->width
(+ space left)
(lambda ()
(k (pad-left space (car ps)))))
0)
(car ps)))])
(if (null? (cdr stxs))
p
(loop (cdr stxs)
(cdr ps)
p
(+ left space (get-span (car stxs)))
(or (syntax-line (car stxs)) (add1 line))
#t
(let ([ht (make-hash)]
[v (hash-ref col->width (+ space left) #f)])
(when v (hash-set! ht (+ space left) v))
ht)))))])))))]
[id
(identifier? stx)
(add-close (colorize-id (symbol->string (syntax-e stx)) mode) closes)]
[kw
(keyword? (syntax-e #'kw))
(add-close (mode-colorize mode #f (tt (format "~s" (syntax-e stx)))) closes)]
[(a . b)
;; Build a list that makes the "." explicit.
(let ([p (let loop ([a (syntax-e stx)])
(cond
[(pair? a) (cons (car a) (loop (cdr a)))]
[else (list (datum->syntax #f
(mode-colorize mode #f dot-p)
(list (syntax-source a)
(syntax-line a)
(- (syntax-column a) 2)
(- (syntax-position a) 2)
1))
a)]))])
(loop (datum->syntax stx
p
stx)
closes
mode))]
[#(e ...)
(code-htl-append (mode-colorize mode 'literal (tt "#"))
(loop (datum->syntax stx
(syntax->list #'(e ...))
(list (syntax-source stx)
(syntax-line stx)
(and (syntax-column stx)
(+ (syntax-column stx) 1))
(and (syntax-position stx)
(+ (syntax-position stx) 1))
(and (syntax-span stx)
(max 0 (- (syntax-span stx) 1)))))
closes
mode))]
[else
(add-close (if (pict-convertible? (syntax-e stx))
(syntax-e stx)
(mode-colorize mode 'literal
(tt (format "~s" (syntax-e stx)))))
closes)])))
)
| null | https://raw.githubusercontent.com/racket/pict/add4f1deba60fe284016ad889a49941a0ff1c3df/pict-lib/texpict/code.rkt | racket | => render normally
Avoid a syntax location for the synthesized `code:line` wrapper,
syntax-rules syntax-case)))
Ungraceful handling of ungraceful closes by adding a line
--- better than sticking them to the right of the comment, at least
position of `a' is after `b', while everything else is in
order, so print as infix-dot notation
Convert each i to a picture, include close paren in last item:
Combine the parts:
Start on next line:
Build a list that makes the "." explicit. | #lang racket/base
(require pict/private/pict
(prefix-in r: racket/base)
mzlib/class
mzlib/list
(only-in scheme/list last)
racket/draw
mzlib/unit
(for-syntax racket/base)
(only-in mzscheme make-namespace))
(provide define-code code^ code-params^ code@
(for-syntax prop:code-transformer
code-transformer?
make-code-transformer))
(define (to-code-pict p extension)
(use-last* p extension))
(define (code-pict? p)
(and (pict-last p) #t))
(define (code-pict-bottom-line p)
(single-pict (pict-last p)))
(define (single-pict p)
(if (list? p)
(last p)
p))
(define (make-code-append htl-append)
(case-lambda
[(a b) (let ([a-last (pict-last a)])
(if a-last
(let* ([a-dup (launder (ghost (single-pict a-last)))]
[extension (htl-append a-dup b)])
(let ([p (let-values ([(x y) (lt-find a a-last)]
[(dx dy) (lt-find extension a-dup)])
(let ([ex (- x dx)]
[ey (- y dy)])
(if (negative? ey)
(lt-superimpose
(inset a 0 (- ey) 0 0)
(inset extension ex 0 0 0))
(lt-superimpose
a
(inset extension ex ey 0 0)))))])
(use-last* p b)))
(htl-append a b)))]
[(a) a]
[(a . rest)
((make-code-append htl-append)
a
(apply (make-code-append htl-append) rest))]))
(define code-htl-append (make-code-append htl-append))
(define code-hbl-append (make-code-append hbl-append))
(define code-vl-append
(case-lambda
[(sep a b) (to-code-pict (vl-append sep a b) b)]
[(sep a) a]
[(sep a . rest)
(code-vl-append sep a (apply code-vl-append sep rest))]))
(begin-for-syntax
(define-values (prop:code-transformer code-transformer? code-transformer-ref)
(make-struct-type-property 'code-transformer
(lambda (proc info)
(unless (and (procedure? proc)
(procedure-arity-includes? proc 2))
(raise-argument-error 'guard-for-code-transformer
"(procedure-arity-includes/c 2)"
proc))
proc)))
(define make-code-transformer
(let ()
(define-struct code-transformer (proc)
#:property prop:code-transformer (lambda (r stx)
(let ([proc (code-transformer-proc r)])
(if (syntax? proc)
(if (identifier? stx)
proc
(proc stx)))))
(lambda (proc)
(unless (or (syntax? proc)
(and (procedure? proc)
(procedure-arity-includes? proc 1)))
(raise-argument-error 'make-code-transformer
"(or/c syntax? (procedure-arity-includes/c 1))"
proc))
(make-code-transformer proc))))
(define (transform id stx uncode-stx recur default)
(define r (syntax-local-value id (lambda () #f)))
(define t ((code-transformer-ref r) r stx))
(if t
(recur (datum->syntax stx
(list uncode-stx t)
stx
stx))
(default stx))))
(define-syntax (define-code stx)
(syntax-case stx ()
[(_ code typeset-code uncode)
(syntax/loc stx
(define-syntax (code stx)
(define (stx->loc-s-expr v)
(cond
[(syntax? v)
(define (default v)
(let ([mk `(datum->syntax
#f
,(syntax-case v (uncode)
[(uncode e) #'e]
[_ (stx->loc-s-expr (syntax-e v))])
(list 'code
,(syntax-line v)
,(syntax-column v)
,(syntax-position v)
,(syntax-span v)))])
(let ([prop (syntax-property v 'paren-shape)])
(if prop
`(syntax-property ,mk 'paren-shape ,prop)
mk))))
(syntax-case v ()
[(id e (... ...))
(and (identifier? #'id)
(code-transformer? (syntax-local-value #'id (lambda () #f))))
(transform #'id v (quote-syntax uncode) stx->loc-s-expr default)]
[id
(and (identifier? #'id)
(code-transformer? (syntax-local-value #'id (lambda () #f))))
(transform #'id v (quote-syntax uncode) stx->loc-s-expr default)]
[_ (default v)])]
[(pair? v) `(cons ,(stx->loc-s-expr (car v))
,(stx->loc-s-expr (cdr v)))]
[(vector? v) `(vector ,@(map
stx->loc-s-expr
(vector->list v)))]
[(box? v) `(box ,(stx->loc-s-expr (unbox v)))]
[(null? v) 'null]
[else `(quote ,v)]))
(define (cvt s)
(datum->syntax #'here (stx->loc-s-expr s)))
(syntax-case stx ()
[(_ expr) #`(typeset-code #,(cvt #'expr))]
[(_ expr (... ...))
#`(typeset-code #,(cvt
otherwise the ` expr`s will be arranged relative to it :
(datum->syntax #f (cons 'code:line (datum->syntax #f (syntax-e #'(expr (... ...))))))))])))]
[(_ code typeset-code) #'(define-code code typeset-code unsyntax)]))
(define-signature code^
(typeset-code code-pict-bottom-line-pict pict->code-pict
comment-color keyword-color id-color const-color literal-color
code-align current-code-tt current-code-font
current-keyword-list current-const-list current-literal-list
code-colorize-enabled code-colorize-quote-enabled
code-italic-underscore-enabled code-scripts-enabled
current-comment-color current-keyword-color
current-base-color current-id-color current-literal-color current-const-color
current-reader-forms
mzscheme-const-list
racket/base-const-list))
(define-signature code-params^
(current-font-size
current-code-line-sep))
(define-syntax (define-computed stx)
(syntax-case stx ()
[(_ id v)
#'(begin
(define (get-val) v)
(define-syntax id
(syntax-id-rules (set!)
[(x (... ...)) ,illegal-use-of-once]
[x (get-val)])))]))
Find which line ` stx ' ends on , # f for unknown
(define (syntax-end-line stx)
(cond
[(syntax? stx) (or (syntax-end-line (syntax-e stx))
(syntax-line stx))]
[(pair? stx) (or (syntax-end-line (cdr stx))
(syntax-end-line (car stx)))]
[(vector? stx) (syntax-end-line (reverse (vector->list stx)))]
[else #f]))
Find which column ` stx ' ends on if it 's not on ` line '
(define (syntax-end-column stx line delta)
(cond
[(syntax? stx) (or (syntax-end-column (syntax-e stx) line delta)
(let ([line2 (syntax-line stx)])
(and line line2
(not (= line line2))
(let ([span (syntax-span stx)]
[col (syntax-column stx)])
(and span col (+ col span delta))))))]
[(pair? stx) (or (syntax-end-column (cdr stx) line (+ delta 1))
(and (or (null? (cdr stx))
(and (syntax? (cdr stx)) (null? (cdr stx))))
(syntax-end-column (car stx) line (+ delta 1))))]
[else #f]))
(define-unit code@
(import code-params^)
(export code^)
(define current-code-font (make-parameter `(bold . modern)))
(define (default-tt s)
(text s (current-code-font) (current-font-size)))
(define current-code-tt (make-parameter default-tt))
(define (tt s)
((current-code-tt) s))
(define (code-align p)
(let ([b (dc void
(pict-width p)
(pict-height p)
(pict-height p)
0)])
(refocus (cc-superimpose p b) b)))
(define (code-pict-bottom-line-pict p)
(if (code-pict? p)
(code-pict-bottom-line p)
#f))
(define (pict->code-pict p bottom-line)
(if bottom-line
(to-code-pict p bottom-line)
p))
(define (get-vars/bindings ns require-spec)
(define ns (let ([n (make-namespace)])
(parameterize ([current-namespace n])
(namespace-require/copy require-spec))
n))
(define bindings (namespace-mapped-symbols ns))
(define vars (filter (lambda (n)
(not (eq? 'nope
(namespace-variable-value n #f (lambda () 'nope) ns))))
bindings))
(values vars bindings))
(define-values (mzscheme-vars mzscheme-bindings) (get-vars/bindings (make-namespace) 'mzscheme))
(define-values (racket/base-vars racket/base-bindings) (get-vars/bindings (r:make-base-namespace) 'racket/base))
(define current-keyword-list
(make-parameter
(let ([ht (make-hasheq)])
(for-each (lambda (n) (hash-set! ht n #f))
mzscheme-vars)
(for-each (lambda (n) (hash-set! ht n #f))
racket/base-vars)
(map symbol->string
(filter (lambda (n)
(hash-ref ht n #t))
(append mzscheme-bindings
racket/base-bindings))))))
(define current-const-list
(make-parameter '()))
(define current-literal-list
(make-parameter '()))
(define mzscheme-const-list
(map symbol->string mzscheme-vars))
(define racket/base-const-list
(map symbol->string racket/base-vars))
(define code-colorize-enabled
(make-parameter #t))
(define code-colorize-quote-enabled
(make-parameter #t))
(define code-italic-underscore-enabled (make-parameter #t))
(define code-scripts-enabled (make-parameter #t))
(define (maybe-colorize p c)
(if (code-colorize-enabled)
(colorize p c)
p))
(define current-base-color (make-parameter "brown"))
(define keyword-color "black")
(define current-keyword-color (make-parameter keyword-color))
(define id-color "navy")
(define current-id-color (make-parameter id-color))
(define literal-color (make-object color% 51 135 39))
(define current-literal-color (make-parameter literal-color))
(define const-color (make-object color% #x99 0 0))
(define current-const-color (make-parameter const-color))
(define comment-color (current-base-color))
(define current-comment-color (make-parameter comment-color))
(define current-reader-forms (make-parameter '(quote
quasiquote
unquote unquote-splicing
syntax
quasisyntax
unsyntax unsyntax-splicing)))
(define-computed open-paren-p (tt "("))
(define-computed close-paren-p (tt ")"))
(define-computed open-sq-p (tt "["))
(define-computed close-sq-p (tt "]"))
(define-computed open-curly-p (tt "{"))
(define-computed close-curly-p (tt "}"))
(define-computed quote-p (tt "'"))
(define-computed unquote-p (tt ","))
(define-computed unquote-splicing-p (tt ",@"))
(define-computed quasiquote-p (tt "`"))
(define-computed syntax-p (tt "#'"))
(define-computed unsyntax-p (tt "#,"))
(define-computed unsyntax-splicing-p (tt "#,@"))
(define-computed quasisyntax-p (tt "#`"))
(define-computed semi-p (tt "; "))
(define (comment-mode? mode)
(eq? mode 'comment))
(define-computed dot-p (tt "."))
(define (mode-colorize mode type p)
(maybe-colorize
p
(case mode
[(literal) (current-literal-color)]
[(comment) (current-comment-color)]
[else (cond
[(number? mode) (current-literal-color)]
[(eq? type 'keyword) (current-keyword-color)]
[(eq? type 'literal) (current-literal-color)]
[(eq? type 'const) (current-const-color)]
[(eq? type 'id) (current-id-color)]
[else (current-base-color)])])))
(define (get-open mode stx)
(if (memq mode '(contract line))
(blank)
(mode-colorize
mode #f
(case (syntax-property stx 'paren-shape)
[(#\[) open-sq-p]
[(#\{) open-curly-p]
[else open-paren-p]))))
(define (get-close mode stx)
(if (memq mode '(contract line))
(blank)
(mode-colorize
mode #f
(case (syntax-property stx 'paren-shape)
[(#\[) close-sq-p]
[(#\{) close-curly-p]
[else close-paren-p]))))
(define (add-close p closes [force-line #f])
(cond
[(null? closes) p]
[(memq (caar closes) '(contract line))
(add-close p (cdr closes) force-line)]
[else
(let ([p (if force-line
(vl-append p (tt ""))
p)])
(add-close (code-hbl-append p (get-close (caar closes) (cdar closes)))
(cdr closes)
#f))]))
(define (pad-left space p)
(if (= 0 space)
p
(code-htl-append (tt (make-string space #\space)) p)))
(define (pad-bottom space p)
(if (= 0 space)
p
(code-vl-append (current-code-line-sep) (tt " ") (pad-bottom (sub1 space) p))))
(define (colorize-id str mode)
(cond
[(and (code-italic-underscore-enabled)
((string-length str) . > . 1)
(char=? #\_ (string-ref str 0))
(not (char=? #\_ (string-ref str 1))))
(mode-colorize
mode 'id
(text (substring str 1) `(italic . ,(current-code-font)) (current-font-size)))]
[(and (code-scripts-enabled)
(regexp-match #rx"^(.+)_([0-9a-z()+-]+)\\^([0-9a-z()+-]+)$" str))
=> (lambda (m)
(hbl-append (colorize-id (cadr m) mode)
(cc-superimpose
(text (caddr m) `(subscript . ,(current-code-font)) (current-font-size))
(text (cadddr m) `(superscript . ,(current-code-font)) (current-font-size)))))]
[(and (code-scripts-enabled)
(regexp-match #rx"^(.+)\\^([0-9a-z()+-]+)_([0-9a-z()+-]+)$" str))
=> (lambda (m)
(hbl-append (colorize-id (cadr m) mode)
(cc-superimpose
(text (cadddr m) `(subscript . ,(current-code-font)) (current-font-size))
(text (caddr m) `(superscript . ,(current-code-font)) (current-font-size)))))]
[(and (code-scripts-enabled)
(regexp-match #rx"^(.+)\\^([0-9a-z()+-]+)$" str))
=> (lambda (m)
(hbl-append (colorize-id (cadr m) mode)
(text (caddr m) `(superscript . ,(current-code-font)) (current-font-size))))]
[(and (code-scripts-enabled)
(regexp-match #rx"^(.+)_([0-9a-z()+-]+)$" str))
=> (lambda (m)
(hbl-append (colorize-id (cadr m) mode)
(text (caddr m) `(subscript . ,(current-code-font)) (current-font-size))))]
[else
(mode-colorize
mode
(cond
[(member str (current-keyword-list)) 'keyword]
[(member str (current-const-list)) 'const]
[(member str (current-literal-list)) 'literal]
[else 'id])
(tt str))]))
(define (sub-mode mode)
(case mode
[(line cond local) #f]
[(template-cond) 'template]
[(contract) 'comment]
[else mode]))
(define (cond? s)
(define (local? s)
(memq (syntax-e s) '(local)))
(define (get-span stx)
(syntax-case stx (code:blank)
[code:blank 1]
[_ (or (syntax-span stx) 1)]))
(define (color-semi-p)
(mode-colorize 'comment #f semi-p))
(define (add-semis p)
(let loop ([p p] [semis (color-semi-p)])
(if ((pict-height p) . > . (+ (pict-height semis) 1))
(loop p (vl-append (current-code-line-sep) (color-semi-p) semis))
(htl-append semis p))))
(define (add-unquote unquote-p loop x closes mode)
(let ([mode (cond
[(number? mode) (if (zero? mode)
#f
(sub1 mode))]
[else mode])])
(code-htl-append (mode-colorize mode 'keyword unquote-p)
(loop x closes mode))))
(define (typeset-code stx)
(let loop ([stx stx][closes null][mode #f])
(syntax-case* stx (quote unquote unquote-splicing quasiquote
syntax-unquote syntax unsyntax
unsyntax-splicing quasisyntax
code:contract code:comment code:line
code:template code:blank $)
(lambda (a b) (eq? (syntax-e a) (syntax-e b)))
[() (add-close (htl-append (get-open mode stx) (get-close mode stx))
closes)]
[code:blank (add-close (tt " ")
closes)]
[$ (colorize-id "|" closes)]
[(quote x)
(memq 'quote (current-reader-forms))
(code-htl-append (mode-colorize mode 'literal quote-p)
(loop #'x closes (if (or (not (code-colorize-quote-enabled))
(comment-mode? mode))
mode
'literal)))]
[(unquote x)
(memq 'unquote (current-reader-forms))
(add-unquote unquote-p loop #'x closes mode)]
[(unquote-splicing x)
(memq 'unquote-splicing (current-reader-forms))
(add-unquote unquote-splicing-p loop #'x closes mode)]
[(quasiquote x)
(memq 'quasiquote (current-reader-forms))
(code-htl-append (mode-colorize mode 'keyword quasiquote-p)
(loop #'x closes (cond
[(not (code-colorize-quote-enabled)) mode]
[(comment-mode? mode) mode]
[(number? mode) (add1 mode)]
[else 0])))]
[(syntax x)
(memq 'syntax (current-reader-forms))
(code-htl-append (mode-colorize mode 'literal syntax-p)
(loop #'x closes mode))]
[(unsyntax x)
(memq 'unsyntax (current-reader-forms))
(code-htl-append (mode-colorize mode 'literal unsyntax-p)
(loop #'x closes mode))]
[(unsyntax-splicing x)
(memq 'unsyntax-splicing (current-reader-forms))
(code-htl-append (mode-colorize mode 'literal unsyntax-splicing-p)
(loop #'x closes mode))]
[(quasisyntax x)
(memq 'unsyntax-splicing (current-reader-forms))
(code-htl-append (mode-colorize mode 'literal quasisyntax-p)
(loop #'x closes mode))]
[(code:contract i ...)
(add-semis (loop (datum->syntax #f (syntax->list #'(i ...)))
closes 'contract))]
[(code:line i ...)
(loop (datum->syntax #f (syntax->list #'(i ...))
(syntax-case stx ()
[(_ a . b)
(let ([src (syntax-source stx)]
[line (syntax-line stx)]
[col (syntax-column stx)]
[pos (syntax-position stx)]
[span (syntax-span stx)]
[a-pos (syntax-position #'a)])
(if (and pos a-pos (a-pos . > . pos))
(vector src
line
(and col (+ col (- a-pos pos)))
a-pos
(and span (max 0 (- span (- a-pos pos)))))
stx))]
[else stx]))
closes 'line)]
[(code:comment s ...)
(let ([p
(apply htl-append
(color-semi-p)
(map (lambda (s)
(define datum (syntax-e s))
(if (pict-convertible? datum)
datum
(if (string? datum)
(maybe-colorize (tt datum) (current-comment-color))
(raise-argument-error 'code:comment "string?" datum))))
(syntax->list #'(s ...))))])
(add-close p closes 'force-line))]
[(code:template i ...)
(add-semis (loop #'(code:line i ...) closes 'template))]
[(a b i ... c)
(let ([pos (for/fold ([pos (syntax-position #'b)]) ([i (in-list (syntax->list #'(i ... c)))])
(and pos
(syntax-position i)
((syntax-position i) . > . pos)
(syntax-position i)))])
(and pos
(syntax-position #'a)
((syntax-position #'a) . > . (syntax-position #'b))
((syntax-position #'a) . < . (syntax-position #'c))))
(loop
(datum->syntax
stx
(cons #'b
(let loop ([l (syntax->list #'(i ... c))])
(cond
[((syntax-position #'a) . < . (syntax-position (car l)))
(let ([src (syntax-source #'a)]
[pos (syntax-position #'a)]
[line (syntax-line #'a)]
[col (syntax-column #'a)]
[span (syntax-span #'a)])
(list* (datum->syntax #f '|.|
(vector src line
(and col (max 0 (- col 2)))
(max 1 (- pos 2))
1))
#'a
(datum->syntax #f '|.|
(vector src line
(and col (+ col 1 span))
(+ pos 1 span)
1))
l))]
[else (cons (car l) (loop (cdr l)))])))
stx)
closes
mode)]
[(i ...)
(let ([is (syntax->list #'(i ...))])
(let ([ips (let iloop ([is is][sub-mode (sub-mode mode)])
(cond
[(null? (cdr is)) (list (loop (car is) (cons (cons mode stx) closes) sub-mode))]
[else (cons (loop (car is) null sub-mode)
(iloop (cdr is) (cond
[(cond? (car is))
(if (eq? mode 'template)
'template-cond
'cond)]
[(local? (car is))
'local]
[(eq? sub-mode 'local)
#f]
[else
sub-mode])))]))])
(let ([left (or (syntax-column stx) +inf.0)])
(let loop ([stxs is]
[ps ips]
[line-so-far (get-open mode stx)]
[col (+ left 1)]
[line (syntax-line stx)]
[always-space? #f]
[col->width (make-hash)])
(cond
[(null? ps) (blank)]
[(or (not line)
(= line (or (syntax-line (car stxs)) line)))
(let* ([space (if (syntax-column (car stxs))
(inexact->exact
(max (if always-space? 1 0) (- (syntax-column (car stxs)) col)))
(if always-space? 1 0))]
[p (code-htl-append
line-so-far
(pad-left space (car ps)))])
(unless (equal? +inf.0 (+ space col))
(hash-set! col->width
(+ space col)
(pict-width (code-htl-append line-so-far (pad-left space (blank))))))
(if (null? (cdr stxs))
p
(loop (cdr stxs)
(cdr ps)
p
(or (syntax-end-column (car stxs) line 0)
(if (not (syntax-column (car stxs)))
+inf.0
(+ col space (get-span (car stxs)))))
(or (syntax-end-line (car stxs))
line
(syntax-line (car stxs)))
#t
col->width)))]
[else
(code-vl-append
(current-code-line-sep)
line-so-far
(let* ([space (max 0 (- (or (syntax-column (car stxs)) 0) left))]
[p
(let/ec k
(code-htl-append
(blank (hash-ref col->width
(+ space left)
(lambda ()
(k (pad-left space (car ps)))))
0)
(car ps)))])
(if (null? (cdr stxs))
p
(loop (cdr stxs)
(cdr ps)
p
(+ left space (get-span (car stxs)))
(or (syntax-line (car stxs)) (add1 line))
#t
(let ([ht (make-hash)]
[v (hash-ref col->width (+ space left) #f)])
(when v (hash-set! ht (+ space left) v))
ht)))))])))))]
[id
(identifier? stx)
(add-close (colorize-id (symbol->string (syntax-e stx)) mode) closes)]
[kw
(keyword? (syntax-e #'kw))
(add-close (mode-colorize mode #f (tt (format "~s" (syntax-e stx)))) closes)]
[(a . b)
(let ([p (let loop ([a (syntax-e stx)])
(cond
[(pair? a) (cons (car a) (loop (cdr a)))]
[else (list (datum->syntax #f
(mode-colorize mode #f dot-p)
(list (syntax-source a)
(syntax-line a)
(- (syntax-column a) 2)
(- (syntax-position a) 2)
1))
a)]))])
(loop (datum->syntax stx
p
stx)
closes
mode))]
[#(e ...)
(code-htl-append (mode-colorize mode 'literal (tt "#"))
(loop (datum->syntax stx
(syntax->list #'(e ...))
(list (syntax-source stx)
(syntax-line stx)
(and (syntax-column stx)
(+ (syntax-column stx) 1))
(and (syntax-position stx)
(+ (syntax-position stx) 1))
(and (syntax-span stx)
(max 0 (- (syntax-span stx) 1)))))
closes
mode))]
[else
(add-close (if (pict-convertible? (syntax-e stx))
(syntax-e stx)
(mode-colorize mode 'literal
(tt (format "~s" (syntax-e stx)))))
closes)])))
)
|
bf1d9f2591ddc16a2f6e4e45c2491b2ea88c4bfc8cfc952131b80cd211070c6f | inria-parkas/sundialsml | idaFoodWeb_kry_bbd_p.ml |
* -----------------------------------------------------------------
* $ Revision : 1.4 $
* $ Date : 2010/12/01 23:03:29 $
* -----------------------------------------------------------------
* Programmer(s ): , and
* @ LLNL
* -----------------------------------------------------------------
* OCaml port : , , Aug 2014 .
* -----------------------------------------------------------------
* Example program for IDA : Food web , parallel , GMRES , IDABBD
* preconditioner .
*
* This example program for IDA uses IDASPGMR as the linear solver .
* It is written for a parallel computer system and uses the
* IDABBDPRE band - block - diagonal preconditioner module for the
* IDASPGMR package . It was originally run on a Sun SPARC cluster
* and used MPICH .
*
* The mathematical problem solved in this example is a DAE system
* that arises from a system of partial differential equations after
* spatial discretization . The PDE system is a food web population
* model , with predator - prey interaction and diffusion on the unit
* square in two dimensions . The dependent variable vector is :
*
* 1 2 ns
* c = ( c , c , ... , c ) , ns = 2 * np
*
* and the PDE 's are as follows :
*
* i i i
* dc /dt = d(i)*(c + c ) + R ( x , y , c ) ( i = 1, ... ,np )
* xx yy i
*
* i i
* 0 = d(i)*(c + c ) + R ( x , y , c ) ( i = ... ,ns )
* xx yy i
*
* where the reaction terms R are :
*
* i ns j
* R ( x , y , c ) = c * ( b(i ) + sum a(i , j)*c )
* i j=1
*
* The number of species is ns = 2 * np , with the first np being
* prey and the last np being predators . The coefficients a(i , j ) ,
* b(i ) , d(i ) are :
*
* a(i , i ) = -AA ( all i )
* a(i , j ) = -GG ( i < = np , j > np )
* a(i , j ) = EE ( i > np , j < = np )
* all other a(i , j ) = 0
* b(i ) = BB*(1 + alpha * x*y + beta*sin(4 pi x)*sin(4 pi y ) ) ( i < = np )
* b(i ) = -BB*(1 + alpha * x*y + beta*sin(4 pi x)*sin(4 pi y ) ) ( i > np )
* ) = DPREY ( i < = np )
* ) = DPRED ( i > np )
*
* Note : The above equations are written in 1 - based indices ,
* whereas the code has 0 - based indices , being written in C.
*
* The various scalar parameters required are set using ' # define '
* statements or directly in routine InitUserData . In this program ,
* np = 1 , ns = 2 . The boundary conditions are homogeneous :
* normal derivative = 0 .
*
* A polynomial in x and y is used to set the initial values of the
* first np variables ( the prey variables ) at each x , y location ,
* while initial values for the remaining ( predator ) variables are
* set to a flat value , which is corrected by IDACalcIC .
*
* The PDEs are discretized by central differencing on a MX by MY
* mesh , and so the system size Neq is the product
* MX * MY * NUM_SPECIES . The system is actually implemented on
* submeshes , processor by processor , with an MXSUB by MYSUB mesh
* on each of NPEX * NPEY processors .
*
* The DAE system is solved by IDA using the IDASPGMR linear solver ,
* in conjunction with the preconditioner module IDABBDPRE . The
* preconditioner uses a 5 - diagonal band - block - diagonal
* approximation ( half - bandwidths = 2 ) . Output is printed at
* t = 0 , .001 , .01 , .1 , .4 , .7 , 1 .
* -----------------------------------------------------------------
* References :
 * [1] Peter N. Brown and Alan C. Hindmarsh,
* Reduced Storage Matrix Methods in Stiff ODE systems ,
* Journal of Applied Mathematics and Computation , Vol . 31
* ( May 1989 ) , pp . 40 - 91 .
*
* [ 2 ] , , and ,
* Using Krylov Methods in the Solution of Large - Scale
* Differential - Algebraic Systems , SIAM . Comput . , 15
* ( 1994 ) , pp . 1467 - 1488 .
*
* [ 3 ] , , and ,
* Consistent Initial Condition Calculation for Differential-
* Algebraic Systems , SIAM . Comput . , 19 ( 1998 ) ,
* pp . 1495 - 1512 .
* -----------------------------------------------------------------
* -----------------------------------------------------------------
* $Revision: 1.4 $
* $Date: 2010/12/01 23:03:29 $
* -----------------------------------------------------------------
* Programmer(s): Allan Taylor, Alan Hindmarsh and
* Radu Serban @ LLNL
* -----------------------------------------------------------------
* OCaml port: Jun Inoue, Inria, Aug 2014.
* -----------------------------------------------------------------
* Example program for IDA: Food web, parallel, GMRES, IDABBD
* preconditioner.
*
* This example program for IDA uses IDASPGMR as the linear solver.
* It is written for a parallel computer system and uses the
* IDABBDPRE band-block-diagonal preconditioner module for the
* IDASPGMR package. It was originally run on a Sun SPARC cluster
* and used MPICH.
*
* The mathematical problem solved in this example is a DAE system
* that arises from a system of partial differential equations after
* spatial discretization. The PDE system is a food web population
* model, with predator-prey interaction and diffusion on the unit
* square in two dimensions. The dependent variable vector is:
*
* 1 2 ns
* c = (c , c , ..., c ) , ns = 2 * np
*
* and the PDE's are as follows:
*
* i i i
* dc /dt = d(i)*(c + c ) + R (x,y,c) (i = 1,...,np)
* xx yy i
*
* i i
* 0 = d(i)*(c + c ) + R (x,y,c) (i = np+1,...,ns)
* xx yy i
*
* where the reaction terms R are:
*
* i ns j
* R (x,y,c) = c * (b(i) + sum a(i,j)*c )
* i j=1
*
* The number of species is ns = 2 * np, with the first np being
* prey and the last np being predators. The coefficients a(i,j),
* b(i), d(i) are:
*
* a(i,i) = -AA (all i)
* a(i,j) = -GG (i <= np , j > np)
* a(i,j) = EE (i > np, j <= np)
* all other a(i,j) = 0
* b(i) = BB*(1+ alpha * x*y + beta*sin(4 pi x)*sin(4 pi y)) (i <= np)
* b(i) =-BB*(1+ alpha * x*y + beta*sin(4 pi x)*sin(4 pi y)) (i > np)
* d(i) = DPREY (i <= np)
* d(i) = DPRED (i > np)
*
* Note: The above equations are written in 1-based indices,
* whereas the code has 0-based indices, being written in C.
*
* The various scalar parameters required are set using '#define'
* statements or directly in routine InitUserData. In this program,
* np = 1, ns = 2. The boundary conditions are homogeneous Neumann:
* normal derivative = 0.
*
* A polynomial in x and y is used to set the initial values of the
* first np variables (the prey variables) at each x,y location,
* while initial values for the remaining (predator) variables are
* set to a flat value, which is corrected by IDACalcIC.
*
* The PDEs are discretized by central differencing on a MX by MY
* mesh, and so the system size Neq is the product
* MX * MY * NUM_SPECIES. The system is actually implemented on
* submeshes, processor by processor, with an MXSUB by MYSUB mesh
* on each of NPEX * NPEY processors.
*
* The DAE system is solved by IDA using the IDASPGMR linear solver,
* in conjunction with the preconditioner module IDABBDPRE. The
* preconditioner uses a 5-diagonal band-block-diagonal
* approximation (half-bandwidths = 2). Output is printed at
* t = 0, .001, .01, .1, .4, .7, 1.
* -----------------------------------------------------------------
* References:
* [1] Peter N. Brown and Alan C. Hindmarsh,
* Reduced Storage Matrix Methods in Stiff ODE systems,
* Journal of Applied Mathematics and Computation, Vol. 31
* (May 1989), pp. 40-91.
*
* [2] Peter N. Brown, Alan C. Hindmarsh, and Linda R. Petzold,
* Using Krylov Methods in the Solution of Large-Scale
* Differential-Algebraic Systems, SIAM J. Sci. Comput., 15
* (1994), pp. 1467-1488.
*
* [3] Peter N. Brown, Alan C. Hindmarsh, and Linda R. Petzold,
* Consistent Initial Condition Calculation for Differential-
* Algebraic Systems, SIAM J. Sci. Comput., 19 (1998),
* pp. 1495-1512.
* -----------------------------------------------------------------
*)
(* Module aliases and printing shortcuts. *)
open Sundials
let fprintf = Printf.fprintf
let printf = Printf.printf

(* Operations on the local data of parallel nvectors. *)
let vconst = Nvector_parallel.DataOps.const
let vscale = Nvector_parallel.DataOps.scale

(* Sub-array view of a bigarray; used to send segments of a vector. *)
let slice = Bigarray.Array1.sub

(* Marshalled size (in bytes) of an empty RealArray: header overhead only. *)
let header_and_empty_array_size =
  Marshal.total_size (Marshal.to_bytes (RealArray.create 0) []) 0
(* Marshalled size of each additional float cell beyond the header. *)
let float_cell_size =
  Marshal.total_size (Marshal.to_bytes (RealArray.create 1) []) 0
  - header_and_empty_array_size

(* Number of bytes occupied by a marshalled RealArray of length x;
   used below to size the MPI receive buffers (see brecvpost). *)
let bytes x = header_and_empty_array_size + x * float_cell_size
(* Problem Constants.
   Note: three definitions (fourpi, rtol, one) were missing although they
   are referenced later in the file (web_rates, Ida.init tolerances, and
   set_initial_profiles); they are restored here. *)

let nprey = 1          (* Number of prey (= number of predators). *)
let num_species = 2*nprey
let pi = 3.1415926535898      (* pi *)
let fourpi = 4.0 *. pi        (* 4 pi; used in the b(i) forcing term *)
let mxsub = 10         (* Number of x mesh points per processor subgrid *)
let mysub = 10         (* Number of y mesh points per processor subgrid *)
let npex = 2           (* Number of subgrids in the x direction *)
let npey = 2           (* Number of subgrids in the y direction *)
let mx = (mxsub*npex)  (* mx = number of x mesh points *)
let my = (mysub*npey)  (* my = number of y mesh points *)
let nsmxsub = (num_species * mxsub)
let neq = (num_species*mx*my) (* Number of equations in system *)
let aa = 1.0           (* Coefficient in above eqns. for a *)
let ee = 10000.        (* Coefficient in above eqns. for a *)
let gg = 0.5e-6        (* Coefficient in above eqns. for a *)
let bb = 1.0           (* Coefficient in above eqns. for b *)
let dprey = 1.0        (* Coefficient in above eqns. for d *)
let dpred = 0.05       (* Coefficient in above eqns. for d *)
let alpha = 50.        (* Coefficient alpha in above eqns. *)
let beta = 1000.       (* Coefficient beta in above eqns. *)
let ax = 1.0           (* Total range of x variable *)
let ay = 1.0           (* Total range of y variable *)
let rtol = 1.e-5       (* rtol tolerance *)
let atol = 1.e-5       (* atol tolerance *)
let zero = 0.          (* 0. *)
let one = 1.           (* 1. *)
let nout = 6
let tmult = 10.0       (* Multiplier for tout values *)
let tadd = 0.3         (* Increment for tout values *)
(*
 * User-defined vector accessor: ij_index.
 * The dependent-variable vector has a 3-d structure (species, x, y)
 * stored in flat 1-d form.  ij_index ix jy is the offset of the first
 * species component (is = 0) at local grid point (ix, jy); add the
 * species index to reach the other components at that point.
 *)
let ij_index ix jy = jy * nsmxsub + ix * num_species
(* Type: user_data.  Contains problem constants, preconditioner data, etc. *)
type user_data =
  {
    ns : int;        (* number of species (= 2 * np) *)
    np : int;        (* number of prey (= number of predators) *)
    thispe : int;    (* rank of this PE *)
    npes : int;      (* total number of PEs *)
    ixsub : int;     (* x-index of this PE's subgrid *)
    jysub : int;     (* y-index of this PE's subgrid *)
    npex : int;      (* number of subgrids in the x direction *)
    npey : int;      (* number of subgrids in the y direction *)
    mxsub : int;     (* x mesh points per subgrid *)
    mysub : int;     (* y mesh points per subgrid *)
    nsmxsub : int;   (* ns * mxsub: width of one local mesh row *)
    nsmxsub2 : int;  (* ns * (mxsub+2): width of one cext row *)
    dx : float;      (* x mesh spacing (= ax / (mx-1)) *)
    dy : float;      (* y mesh spacing (= ay / (my-1)) *)
    acoef : RealArray2.t;  (* species interaction coefficients a(i,j) *)
    cox : RealArray.t;     (* size = num_species *)
    coy : RealArray.t;     (* size = num_species *)
    bcoef : RealArray.t;   (* size = num_species *)
    rhs : RealArray.t;     (* size = num_species *)
    cext : RealArray.t;    (* size = (mxsub+2)*(mysub+2)*num_species *)
    comm : Mpi.communicator;        (* MPI communicator *)
    rates : Nvector_parallel.data;  (* local reaction-rate work vector *)
    n_local : int;   (* local vector length (= mxsub*mysub*num_species) *)
  }
(*
*--------------------------------------------------------------------
* FUNCTIONS CALLED BY IDA & SUPPORTING FUNCTIONS
*--------------------------------------------------------------------
*)
(*
 * BSend: Send boundary data to neighboring PEs.
 * This routine sends components of cc from internal subgrid boundaries
 * to the appropriate neighbor PEs.
 *)
let bsend comm my_pe isubx isuby dsizex dsizey udata =
  let bufleft = RealArray.create (num_species*mysub)
  and bufright = RealArray.create (num_species*mysub)
  in
  (* If isuby > 0, send data from bottom x-line of u. *)
  if isuby <> 0 then Mpi.send (slice udata 0 dsizex) (my_pe-npex) 0 comm;
  (* If isuby < npey-1, send data from top x-line of u. *)
  if isuby <> npey-1 then begin
    let offsetu = (mysub-1)*dsizex in
    Mpi.send (slice udata offsetu dsizex) (my_pe+npex) 0 comm
  end;
  (* If isubx > 0, send data from left y-line of u (via bufleft, since
     the leftmost point of each row is not contiguous in udata). *)
  if isubx <> 0 then begin
    for ly = 0 to mysub-1 do
      RealArray.blitn ~src:udata ~spos:(ly*dsizex)
                      ~dst:bufleft ~dpos:(ly*num_species)
                      num_species
    done;
    Mpi.send (slice bufleft 0 dsizey) (my_pe-1) 0 comm
  end;
  (* If isubx < npex-1, send data from right y-line of u (via bufright). *)
  if isubx <> npex-1 then begin
    for ly = 0 to mysub-1 do
      let offsetbuf = ly*num_species in
      let offsetu = offsetbuf*mxsub + (mxsub-1)*num_species in
      RealArray.blitn ~src:udata ~spos:offsetu
                      ~dst:bufright ~dpos:offsetbuf
                      num_species
    done;
    Mpi.send (slice bufright 0 dsizey) (my_pe+1) 0 comm
  end
(*
 * BRecvPost: Start receiving boundary data from neighboring PEs.
 * (1) buffer should be able to hold 2*NUM_SPECIES*MYSUB realtype entries,
 *     should be passed to both the BRecvPost and BRecvWait functions, and
 *     should not be manipulated between the two calls.
 * (2) request should have 4 entries, and is also passed in both calls.
 * Each non-posted direction gets Mpi.null_request so that brecvwait can
 * index the array uniformly.
 *)
let brecvpost comm my_pe isubx isuby dsizex dsizey =
  (* If isuby > 0, receive data for bottom x-line of uext. *)
  let r0 = if isuby <> 0
           then Mpi.ireceive (bytes dsizex) (my_pe-npex) 0 comm
           else Mpi.null_request
  in
  (* If isuby < npey-1, receive data for top x-line of uext. *)
  let r1 = if isuby <> npey-1
           then Mpi.ireceive (bytes dsizex) (my_pe+npex) 0 comm
           else Mpi.null_request
  in
  (* If isubx > 0, receive data for left y-line of uext (via bufleft). *)
  let r2 = if isubx <> 0
           then Mpi.ireceive (bytes dsizey) (my_pe-1) 0 comm
           else Mpi.null_request
  in
  (* If isubx < npex-1, receive data for right y-line of uext (via bufright). *)
  let r3 = if isubx <> npex-1
           then Mpi.ireceive (bytes dsizey) (my_pe+1) 0 comm
           else Mpi.null_request
  in
  [|r0; r1; r2; r3|]
(*
 * BRecvWait: Finish receiving boundary data from neighboring PEs.
 * (1) buffer should be able to hold 2*NUM_SPECIES*MYSUB realtype entries,
 *     should be passed to both the BRecvPost and BRecvWait functions, and
 *     should not be manipulated between the two calls.
 * (2) request should have 4 entries, and is also passed in both calls.
 * Received rows/columns are unpacked into the ghost-cell border of cext.
 *)
let brecvwait request isubx isuby dsizex cext =
  (* Width of one cext row: a data row plus one ghost point on each side. *)
  let dsizex2 = dsizex + 2*num_species in
  (* If isuby > 0, receive data for bottom x-line of cext. *)
  if isuby <> 0 then begin
    let buf = (Mpi.wait_receive request.(0) : RealArray.t) in
    RealArray.blitn ~src:buf ~dst:cext ~dpos:num_species dsizex
  end;
  (* If isuby < npey-1, receive data for top x-line of cext. *)
  if isuby <> npey-1 then begin
    let buf = (Mpi.wait_receive request.(1) : RealArray.t) in
    RealArray.blitn ~src:buf
                    ~dst:cext ~dpos:(num_species*(1 + (mysub+1)*(mxsub+2)))
                    dsizex
  end;
  (* If isubx > 0, receive data for left y-line of cext (via bufleft). *)
  if isubx <> 0 then begin
    let bufleft = (Mpi.wait_receive request.(2) : RealArray.t) in
    (* Copy the buffer into the left ghost column of cext, row by row. *)
    for ly = 0 to mysub - 1 do
      let offsetbuf = ly*num_species in
      let offsetue = (ly+1)*dsizex2 in
      RealArray.blitn ~src:bufleft ~spos:offsetbuf
                      ~dst:cext ~dpos:offsetue
                      num_species
    done
  end;
  (* If isubx < npex-1, receive data for right y-line of cext (via bufright). *)
  if isubx <> npex-1 then begin
    let bufright = (Mpi.wait_receive request.(3) : RealArray.t) in
    (* Copy the buffer into the right ghost column of cext, row by row. *)
    for ly = 0 to mysub-1 do
      let offsetbuf = ly*num_species in
      let offsetue = (ly+2)*dsizex2 - num_species in
      RealArray.blitn ~src:bufright ~spos:offsetbuf
                      ~dst:cext ~dpos:offsetue
                      num_species
    done
  end
(*
 * rescomm: Communication routine in support of resweb.
 * This routine performs all inter-processor communication of components
 * of the cc vector needed to calculate F, namely the components at all
 * interior subgrid boundaries (ghost cell data).  It loads this data
 * into a work array cext (the local portion of c, extended).
 * The message-passing uses blocking sends, non-blocking receives,
 * and receive-waiting, in routines BRecvPost, BSend, BRecvWait.
 *)
let rescomm webdata _ cc _ =
  let (cdata,_,_) = cc in
  (* Get comm, thispe, subgrid indices, data sizes, extended array cext. *)
  let comm = webdata.comm in
  let thispe = webdata.thispe in
  let ixsub = webdata.ixsub in
  let jysub = webdata.jysub in
  let cext = webdata.cext in
  let nsmxsub = webdata.nsmxsub in
  let nsmysub = (webdata.ns)*(webdata.mysub) in
  (* Start receiving boundary data from neighboring PEs. *)
  let requests = brecvpost comm thispe ixsub jysub nsmxsub nsmysub in
  (* Send data from boundary of local grid to neighboring PEs. *)
  bsend comm thispe ixsub jysub nsmxsub nsmysub cdata;
  (* Finish receiving boundary data from neighboring PEs. *)
  brecvwait requests ixsub jysub nsmxsub cext
(*
 * WebRates: Evaluate reaction rates at a given spatial point.
 * At a given (x,y), evaluate the array of ns reaction terms R, reading
 * the concentrations from cxy (at offset cxy_off) and writing the rates
 * into ratesxy (at offset ratesxy_off).  The two arrays are assumed not
 * to alias each other, as in the single call site in reslocal.
 *)
let web_rates webdata x y ((cxy : RealArray.t), cxy_off)
                          ((ratesxy : RealArray.t), ratesxy_off) =
  let acoef = RealArray2.unwrap webdata.acoef
  and bcoef = webdata.bcoef in
  (* ratesxy[is] := sum over j of acoef[is,j] * cxy[j]. *)
  for is = 0 to num_species-1 do
    let sum = ref 0. in
    for j = 0 to num_species-1 do
      sum := !sum +. cxy.{cxy_off + j} *. acoef.{is, j}
    done;
    ratesxy.{ratesxy_off + is} <- !sum
  done;
  (* Apply the spatially varying factor in b(i) and scale by c(is). *)
  let fac = 1. +. alpha*.x*.y +. beta*.sin(fourpi*.x)*.sin(fourpi*.y) in
  for is = 0 to num_species-1 do
    ratesxy.{ratesxy_off + is} <-
      cxy.{cxy_off + is} *. (bcoef.{is}*.fac +. ratesxy.{ratesxy_off + is})
  done
(*
 * reslocal: Compute res = F(t,cc,cp).
 * This routine assumes that all inter-processor communication of data
 * needed to calculate F has already been done.  Components at interior
 * subgrid boundaries are assumed to be in the work array cext.
 * The local portion of the cc vector is first copied into cext.
 * The exterior Neumann boundary conditions are explicitly handled here
 * by copying data from the first interior mesh line to the ghost cell
 * locations in cext.  Then the reaction and diffusion terms are
 * evaluated in terms of the cext array, and the residuals are formed.
 * The reaction terms are saved separately in the vector webdata.rates
 * for use by the preconditioner setup routine.
 *)
let reslocal webdata _ cc ((cp : RealArray.t), _, _)
                          ((rr : RealArray.t), _, _) =
  let mxsub = webdata.mxsub in
  let mysub = webdata.mysub in
  let npex = webdata.npex in
  let npey = webdata.npey in
  let ixsub = webdata.ixsub in
  let jysub = webdata.jysub in
  let nsmxsub = webdata.nsmxsub in
  let nsmxsub2 = webdata.nsmxsub2 in
  let np = webdata.np in
  let dx = webdata.dx in
  let dy = webdata.dy in
  let cox = webdata.cox in
  let coy = webdata.coy in
  let rhs = webdata.rhs in
  let cext = webdata.cext in
  let rates, _, _ = webdata.rates in
  (* Get data pointers, subgrid data, array sizes, work array cext. *)
  let (cdata : RealArray.t),_,_ = cc in
  (* Copy local segment of the cc vector into the working extended array
     cext, skipping the one-mesh-line ghost border. *)
  let locc = ref 0 in
  let locce = ref (nsmxsub2 + num_species) in
  for _ = 0 to mysub-1 do
    for i = 0 to nsmxsub-1 do
      cext.{!locce+i} <- cdata.{!locc+i}
    done;
    locc := !locc + nsmxsub;
    locce := !locce + nsmxsub2;
  done;
  (* To facilitate homogeneous Neumann boundary conditions, when this is
     a boundary PE, copy data from the first interior mesh line of cc to
     cext. *)
  (* If jysub = 0, copy x-line 2 of cc to cext. *)
  if jysub = 0 then begin
    for i = 0 to nsmxsub-1 do
      cext.{num_species+i} <- cdata.{nsmxsub+i}
    done
  end;
  (* If jysub = npey-1, copy x-line mysub-1 of cc to cext. *)
  if jysub = npey-1 then begin
    let locc = (mysub-2)*nsmxsub in
    let locce = (mysub+1)*nsmxsub2 + num_species in
    for i = 0 to nsmxsub-1 do
      cext.{locce+i} <- cdata.{locc+i}
    done
  end;
  (* If ixsub = 0, copy y-line 2 of cc to cext. *)
  if ixsub = 0 then begin
    for jy = 0 to mysub-1 do
      let locc = jy*nsmxsub + num_species in
      let locce = (jy+1)*nsmxsub2 in
      for i = 0 to num_species-1 do
        cext.{locce+i} <- cdata.{locc+i}
      done
    done
  end;
  (* If ixsub = npex-1, copy y-line mxsub-1 of cc to cext. *)
  if ixsub = npex-1 then begin
    for jy = 0 to mysub-1 do
      let locc = (jy+1)*nsmxsub - 2*num_species in
      let locce = (jy+2)*nsmxsub2 - num_species in
      for i = 0 to num_species-1 do
        cext.{locce+i} <- cdata.{locc+i}
      done
    done
  end;
  (* Loop over all grid points, setting local array rates to right-hand
     sides.  Then set rr values appropriately for prey/predator
     components of F. *)
  for jy = 0 to mysub-1 do
    let ylocce = (jy+1)*nsmxsub2 in
    let yy = float_of_int(jy+jysub*mysub)*.dy in
    for ix = 0 to mxsub-1 do
      let locce = ylocce + (ix+1)*num_species in
      let xx = float_of_int(ix + ixsub*mxsub)*.dx in
      let rates_off = ij_index ix jy in
      web_rates webdata xx yy (cext, locce) (rates, ij_index ix jy);
      let rr_off = ij_index ix jy in
      let cp_off = ij_index ix jy in
      for is = 0 to num_species-1 do
        (* Central differences in y and x; ghost cells supply neighbor
           values at subgrid edges. *)
        let dcyli = cext.{locce+is} -. cext.{locce+is-nsmxsub2} in
        let dcyui = cext.{locce+is+nsmxsub2} -. cext.{locce+is} in
        let dcxli = cext.{locce+is} -. cext.{locce+is-num_species} in
        let dcxui = cext.{locce+is+num_species} -. cext.{locce+is} in
        rhs.{is} <- cox.{is}*.(dcxui-.dcxli)
                    +. coy.{is}*.(dcyui-.dcyli)
                    +. rates.{rates_off + is};
        (* Prey components (is < np) are differential; the remaining
           predator components are algebraic. *)
        if is < np then rr.{rr_off + is} <- cp.{cp_off + is} -. rhs.{is}
        else rr.{rr_off + is} <- -. rhs.{is}
      done
    done
  done
(*
 * resweb: System residual function for predator-prey system.
 * To compute the residual function F, this routine calls:
 *   rescomm, for needed communication, and then
 *   reslocal, for computation of the residuals on this processor.
 * (An unused binding of webdata.n_local was removed.)
 *)
let resweb webdata tt cc cp rr =
  (* Call rescomm to do inter-processor communication. *)
  rescomm webdata tt cc cp;
  (* Call reslocal to calculate the local portion of residual vector. *)
  reslocal webdata tt cc cp rr
(*
*--------------------------------------------------------------------
* PRIVATE FUNCTIONS
*--------------------------------------------------------------------
*)
(*
 * InitUserData: Load problem constants in webdata (of type user_data).
 *)
let init_user_data local_N system_size thispe npes comm =
  (* Subgrid indices of this PE within the npex x npey processor array. *)
  let jysub = thispe / npex in
  let ixsub = thispe mod npex in
  let ns = num_species
  and np = nprey in
  (* Mesh spacings over the unit square. *)
  let dx = ax /. float_of_int (mx - 1)
  and dy = ay /. float_of_int (my - 1) in
  let dx2 = dx *. dx
  and dy2 = dy *. dy in
  (* Work storage: rate vector plus per-species coefficient arrays and
     the extended (ghost-bordered) local concentration array. *)
  let rates = Nvector_parallel.make local_N system_size comm 0. in
  let acoef = RealArray2.create num_species num_species in
  let bcoef = RealArray.create num_species
  and cox   = RealArray.create num_species
  and coy   = RealArray.create num_species
  and rhs   = RealArray.create num_species
  and cext  = RealArray.create ((mxsub+2)*(mysub+2)*num_species) in
  (* Fill the four quadrants of acoef, then b and the diffusion factors. *)
  for i = 0 to np-1 do
    for j = 0 to np-1 do
      RealArray2.set acoef (np+j) i (-.gg);
      RealArray2.set acoef j (i+np) ee;
      RealArray2.set acoef j i zero;
      RealArray2.set acoef (np+j) (i+np) zero
    done;
    (* Diagonal entries of acoef are -aa in both quadrants. *)
    RealArray2.set acoef i i (-.aa);
    RealArray2.set acoef (i+np) (i+np) (-.aa);
    (* Coefficients for b and the diffusion terms. *)
    bcoef.{i} <- bb;            bcoef.{i+np} <- -.bb;
    cox.{i}   <- dprey/.dx2;    cox.{i+np}   <- dpred/.dx2;
    coy.{i}   <- dprey/.dy2;    coy.{i+np}   <- dpred/.dy2
  done;
  { ns; np; thispe; npes; ixsub; jysub;
    npex; npey; mxsub; mysub;
    nsmxsub  = mxsub * num_species;
    nsmxsub2 = (mxsub + 2) * num_species;
    dx; dy; acoef; cox; coy; bcoef; rhs; cext; comm;
    rates   = Nvector.unwrap rates;
    n_local = mxsub * mysub * num_species;
  }
(*
 * SetInitialProfiles: Set initial conditions in cc, cp, and id.
 * A polynomial profile is used for the prey cc values, and a constant
 * (1.0e5) is loaded as the initial guess for the predator cc values.
 * The id values are set to 1 for the prey and 0 for the predators.
 * The prey cp values are set according to the given system, and
 * the predator cp values are set to zero.
 *)
let set_initial_profiles webdata (((ccdata : RealArray.t), _, _) as cc)
                                 (((cpdata : RealArray.t), _, _) as cp)
                                 ((iddata : RealArray.t), _, _) res =
  let ixsub = webdata.ixsub in
  let jysub = webdata.jysub in
  let mxsub = webdata.mxsub in
  (* Bug fix: this previously read webdata.mxsub, which was only harmless
     because the subgrids happen to be square (mxsub = mysub = 10). *)
  let mysub = webdata.mysub in
  let dx = webdata.dx in
  let dy = webdata.dy in
  let np = webdata.np in
  (* Loop over grid, load cc values and id values. *)
  for jy = 0 to mysub-1 do
    let yy = float_of_int(jy + jysub*mysub) *. dy in
    for ix = 0 to mxsub-1 do
      let xx = float_of_int(ix + ixsub*mxsub) *. dx in
      let xyfactor = 16.*.xx*.(1. -. xx)*.yy*.(1. -. yy) in
      let xyfactor = xyfactor *. xyfactor in
      let cc_off = ij_index ix jy in
      let id_off = ij_index ix jy in
      for is = 0 to num_species-1 do
        if is < np
        then (ccdata.{cc_off + is} <- 10.0+.float_of_int(is+1)*.xyfactor;
              iddata.{id_off + is} <- one)
        else (ccdata.{cc_off + is} <- 1.0e5;
              iddata.{id_off + is} <- zero)
      done
    done
  done;
  (* Set c' for the prey by calling the residual function with cp = 0. *)
  vconst zero cp;
  resweb webdata zero cc cp res;
  vscale (-.one) res cp;
  (* Set c' for predators to 0. *)
  for jy = 0 to mysub-1 do
    for ix = 0 to mxsub-1 do
      let cp_off = ij_index ix jy in
      for is = np to num_species-1 do
        cpdata.{cp_off + is} <- zero
      done
    done
  done
(* Name of the SPGMR linear solver module as it appears in each major
   Sundials release (queried from Config.sundials_version). *)
let idaspgmr =
  let major, _, _ = Config.sundials_version in
  match major with
  | 2 -> "IDASPGMR"
  | 3 -> "SUNSPGMR"
  | _ -> "SUNLinSol_SPGMR"
(*
 * Print first lines of output (problem description) and table header.
 *)
let print_header system_size maxl mudq mldq mukeep mlkeep rtol atol =
  printf "\nidaFoodWeb_kry_bbd_p: Predator-prey DAE parallel example problem for IDA \n\n";
  printf "Number of species ns: %d" num_species;
  printf " Mesh dimensions: %d x %d" mx my;
  printf " Total system size: %d\n"system_size;
  printf "Subgrid dimensions: %d x %d" mxsub mysub;
  printf " Processor array: %d x %d\n" npex npey;
  printf "Tolerance parameters: rtol = %g atol = %g\n" rtol atol;
  printf "Linear solver: %s Max. Krylov dimension maxl: %d\n" idaspgmr maxl;
  printf "Preconditioner: band-block-diagonal (IDABBDPRE), with parameters\n";
  printf " mudq = %d, mldq = %d, mukeep = %d, mlkeep = %d\n"
         mudq mldq mukeep mlkeep;
  printf "CalcIC called to correct initial predator concentrations \n\n";
  printf "-----------------------------------------------------------\n";
  printf " t bottom-left top-right";
  printf " | nst k h\n";
  printf "-----------------------------------------------------------\n\n"
(*
 * PrintOutput: Print output values at output time t = tt.
 * Selected run statistics are printed.  Then values of c1 and c2
 * are printed for the bottom left and top right grid points only.
 *)
let print_output webdata mem cc tt comm =
  let clast = RealArray.create 2 in
  let thispe = webdata.thispe in
  let npelast = webdata.npes - 1 in
  let cdata,_,_ = cc in
  (* Send conc. at top right mesh point from PE npes-1 to PE 0. *)
  if thispe = npelast then begin
    let ilast = num_species*mxsub*mysub - 2 in
    if npelast <> 0
    then Mpi.send (slice cdata ilast 2) 0 0 comm
    else (clast.{0} <- cdata.{ilast}; clast.{1} <- cdata.{ilast+1})
  end;
  (* On PE 0, receive conc. at top right from PE npes-1.
     Then print performance data and sampled solution values. *)
  if thispe = 0 then begin
    if npelast <> 0 then
      let buf = (Mpi.receive npelast 0 comm : RealArray.t) in
      RealArray.blitn ~src:buf ~dst:clast 2
    ;
    let kused = Ida.get_last_order mem in
    let nst = Ida.get_num_steps mem in
    let hused = Ida.get_last_step mem in
    printf "%8.2e %12.4e %12.4e | %3d %1d %12.4e\n"
           tt cdata.{0} clast.{0} nst kused hused;
    for i = 1 to num_species-1 do
      printf " %12.4e %12.4e |\n" cdata.{i} clast.{i}
    done;
    printf "\n"
  end
(*
 * PrintFinalStats: Print final run data contained in iopt.
 *)
let print_final_stats mem =
  let open Ida in
  let nst = get_num_steps mem in
  let nre = get_num_res_evals mem in
  let netf = get_num_err_test_fails mem in
  let ncfn = get_num_nonlin_solv_conv_fails mem in
  let nni = get_num_nonlin_solv_iters mem in
  let ncfl = Spils.get_num_lin_conv_fails mem in
  let nli = Spils.get_num_lin_iters mem in
  let npe = Spils.get_num_prec_evals mem in
  let nps = Spils.get_num_prec_solves mem in
  let nreLS = Spils.get_num_lin_res_evals mem in
  let nge = Ida_bbd.get_num_gfn_evals mem in
  printf "-----------------------------------------------------------\n";
  printf "\nFinal statistics: \n\n";
  printf "Number of steps = %d\n" nst;
  printf "Number of residual evaluations = %d\n" (nre+nreLS);
  printf "Number of nonlinear iterations = %d\n" nni;
  printf "Number of error test failures = %d\n" netf;
  printf "Number of nonlinear conv. failures = %d\n\n" ncfn;
  printf "Number of linear iterations = %d\n" nli;
  printf "Number of linear conv. failures = %d\n\n" ncfl;
  printf "Number of preconditioner setups = %d\n" npe;
  printf "Number of preconditioner solves = %d\n" nps;
  printf "Number of local residual evals. = %d\n" nge
(*
*--------------------------------------------------------------------
* MAIN PROGRAM
*--------------------------------------------------------------------
*)
(* Set up the problem on each PE, integrate, and report results. *)
let main () =
  (* Set communicator, and get processor number and total number of PE's. *)
  let comm = Mpi.comm_world in
  let thispe = Mpi.comm_rank comm in
  let npes = Mpi.comm_size comm in
  if npes <> npex*npey then begin
    if thispe = 0 then
      fprintf stderr
              "\nMPI_ERROR(0): npes = %d not equal to NPEX*NPEY = %d\n"
              npes (npex*npey)
    ;
    exit 1
  end;
  (* Set local length (local_N) and global length (system_size). *)
  let local_N = mxsub*mysub*num_species in
  let system_size = neq in
  (* Set up user data block webdata. *)
  let webdata = init_user_data local_N system_size thispe npes comm in
  (* Create needed vectors, and load initial values.
     The vector res is used temporarily only. *)
  let cc = Nvector_parallel.make local_N system_size comm 0. in
  let cp = Nvector_parallel.make local_N system_size comm 0. in
  let res = Nvector_parallel.make local_N system_size comm 0. in
  let id = Nvector_parallel.make local_N system_size comm 0. in
  set_initial_profiles webdata (Nvector.unwrap cc) (Nvector.unwrap cp)
                       (Nvector.unwrap id) (Nvector.unwrap res);
  (* Set remaining inputs to the integrator. *)
  let t0 = zero in
  (* Initialize IDA with the SPGMR linear solver and the band-block-
     diagonal preconditioner (IDABBDPRE).  The half-bandwidths for the
     difference quotient evaluation are exact for the system Jacobian,
     but only a 5-diagonal band matrix is retained. *)
  let maxl = 16 in
  let mudq = nsmxsub and mldq = nsmxsub
  and mukeep = 2 and mlkeep = 2 in
  let mem =
    Ida.(init
           (SStolerances (rtol,atol))
           ~lsolver:Spils.(solver (spgmr ~maxl cc)
                             (Ida_bbd.prec_left ~dqrely:zero
                                Ida_bbd.({ mudq; mldq; mukeep; mlkeep })
                                (reslocal webdata)))
           (resweb webdata) t0 cc cp)
  in
  (* Call IDACalcIC (with default options) to correct the initial values. *)
  let tout = ref 0.001 in
  Ida.calc_ic_ya_yd' mem ~varid:id !tout;
  (* On PE 0, print heading, basic parameters, initial values. *)
  if thispe = 0 then
    print_header system_size maxl mudq mldq mukeep mlkeep rtol atol
  ;
  print_output webdata mem (Nvector.unwrap cc) t0 comm;
  (* Call IDA in tout loop, normal mode, and print selected output. *)
  for iout = 1 to nout do
    let (tret, _) = Ida.solve_normal mem !tout cc cp in
    print_output webdata mem (Nvector.unwrap cc) tret comm;
    if iout < 3 then tout := !tout *. tmult
    else tout := !tout +. tadd
  done;
  (* On PE 0, print final set of statistics. *)
  if thispe = 0 then print_final_stats mem
(* Check environment variables for extra arguments. *)

(* Read an integer environment variable, falling back to [default] when
   the variable is unset or does not parse as an integer. *)
let lookup_int_env name default =
  match Unix.getenv name with
  | s -> (try int_of_string s with Failure _ -> default)
  | exception Not_found -> default

(* Number of times to repeat the whole computation. *)
let reps = lookup_int_env "NUM_REPS" 1
(* Run a GC compaction once after all repetitions? *)
let gc_at_end = lookup_int_env "GC_AT_END" 0 <> 0
(* Run a GC compaction after each repetition? *)
let gc_each_rep = lookup_int_env "GC_EACH_REP" 0 <> 0
(* Entry point *)
let _ =
  (* Run the simulation reps times, optionally compacting the heap after
     each repetition and/or once at the end. *)
  for _ = 1 to reps do
    main ();
    if gc_each_rep then Gc.compact ()
  done;
  if gc_at_end then Gc.compact ()
Number of prey (= number of predators).
pi
Number of x mesh points per processor subgrid
Number of y mesh points per processor subgrid
Number of subgrids in the x direction
Number of subgrids in the y direction
mx = number of x mesh points
my = number of y mesh points
Number of equations in system
Coefficient in above eqns. for a
Coefficient in above eqns. for a
Coefficient in above eqns. for a
Coefficient in above eqns. for b
Coefficient in above eqns. for d
Coefficient in above eqns. for d
Coefficient alpha in above eqns.
Coefficient beta in above eqns.
Total range of x variable
Total range of y variable
atol tolerance
0.
Multiplier for tout values
Increment for tout values
size = num_species
size = num_species
size = num_species
size = num_species
size = (mxsub+2)*(mysub+2)*num_species
*--------------------------------------------------------------------
* FUNCTIONS CALLED BY IDA & SUPPORTING FUNCTIONS
*--------------------------------------------------------------------
Get comm, thispe, subgrid indices, data sizes, extended array cext.
Start receiving boundary data from neighboring PEs.
Send data from boundary of local grid to neighboring PEs.
Finish receiving boundary data from neighboring PEs.
* WebRates: Evaluate reaction rates at a given spatial point.
* At a given (x,y), evaluate the array of ns reaction terms R.
Get data pointers, subgrid data, array sizes, work array cext.
Loop over all grid points, setting local array rates to right-hand sides.
Then set rr values appropriately for prey/predator components of F.
* resweb: System residual function for predator-prey system.
* To compute the residual function F, this routine calls:
* rescomm, for needed communication, and then
* reslocal, for computation of the residuals on this processor.
Call rescomm to do inter-processor communication.
Call reslocal to calculate the local portion of residual vector.
*--------------------------------------------------------------------
* PRIVATE FUNCTIONS
*--------------------------------------------------------------------
extracted from main() in original C code
Set up the coefficients a and b plus others found in the equations.
Reset the diagonal elements of acoef to -aa.
Set coefficients for b and diffusion terms.
Set c' for the prey by calling the residual function with cp = 0.
Set c' for predators to 0.
* PrintOutput: Print output values at output time t = tt.
* Selected run statistics are printed. Then values of c1 and c2
* are printed for the bottom left and top right grid points only.
Send conc. at top right mesh point from PE npes-1 to PE 0.
*--------------------------------------------------------------------
* MAIN PROGRAM
*--------------------------------------------------------------------
Set communicator, and get processor number and total number of PE's.
Create needed vectors, and load initial values.
The vector res is used temporarily only.
Call IDACalcIC (with default options) to correct the initial values.
On PE 0, print heading, basic parameters, initial values.
Check environment variables for extra arguments.
Entry point |
(* -----------------------------------------------------------------
* $Revision: 1.4 $
* $Date: 2010/12/01 23:03:29 $
* -----------------------------------------------------------------
* Programmer(s): Allan Taylor, Alan Hindmarsh and
* Radu Serban @ LLNL
* -----------------------------------------------------------------
* OCaml port: Jun Inoue, Inria, Aug 2014.
* -----------------------------------------------------------------
* Example program for IDA: Food web, parallel, GMRES, IDABBD
* preconditioner.
*
* This example program for IDA uses IDASPGMR as the linear solver.
* It is written for a parallel computer system and uses the
* IDABBDPRE band-block-diagonal preconditioner module for the
* IDASPGMR package. It was originally run on a Sun SPARC cluster
* and used MPICH.
*
* The mathematical problem solved in this example is a DAE system
* that arises from a system of partial differential equations after
* spatial discretization. The PDE system is a food web population
* model, with predator-prey interaction and diffusion on the unit
* square in two dimensions. The dependent variable vector is:
*
* 1 2 ns
* c = (c , c , ..., c ) , ns = 2 * np
*
* and the PDE's are as follows:
*
* i i i
* dc /dt = d(i)*(c + c ) + R (x,y,c) (i = 1,...,np)
* xx yy i
*
* i i
* 0 = d(i)*(c + c ) + R (x,y,c) (i = np+1,...,ns)
* xx yy i
*
* where the reaction terms R are:
*
* i ns j
* R (x,y,c) = c * (b(i) + sum a(i,j)*c )
* i j=1
*
* The number of species is ns = 2 * np, with the first np being
* prey and the last np being predators. The coefficients a(i,j),
* b(i), d(i) are:
*
* a(i,i) = -AA (all i)
* a(i,j) = -GG (i <= np , j > np)
* a(i,j) = EE (i > np, j <= np)
* all other a(i,j) = 0
* b(i) = BB*(1+ alpha * x*y + beta*sin(4 pi x)*sin(4 pi y)) (i <= np)
* b(i) =-BB*(1+ alpha * x*y + beta*sin(4 pi x)*sin(4 pi y)) (i > np)
* d(i) = DPREY (i <= np)
* d(i) = DPRED (i > np)
*
* Note: The above equations are written in 1-based indices,
* whereas the code has 0-based indices, being written in C.
*
* The various scalar parameters required are set using '#define'
* statements or directly in routine InitUserData. In this program,
* np = 1, ns = 2. The boundary conditions are homogeneous Neumann:
* normal derivative = 0.
*
* A polynomial in x and y is used to set the initial values of the
* first np variables (the prey variables) at each x,y location,
* while initial values for the remaining (predator) variables are
* set to a flat value, which is corrected by IDACalcIC.
*
* The PDEs are discretized by central differencing on a MX by MY
* mesh, and so the system size Neq is the product
* MX * MY * NUM_SPECIES. The system is actually implemented on
* submeshes, processor by processor, with an MXSUB by MYSUB mesh
* on each of NPEX * NPEY processors.
*
* The DAE system is solved by IDA using the IDASPGMR linear solver,
* in conjunction with the preconditioner module IDABBDPRE. The
* preconditioner uses a 5-diagonal band-block-diagonal
* approximation (half-bandwidths = 2). Output is printed at
* t = 0, .001, .01, .1, .4, .7, 1.
* -----------------------------------------------------------------
* References:
* [1] Peter N. Brown and Alan C. Hindmarsh,
* Reduced Storage Matrix Methods in Stiff ODE systems,
* Journal of Applied Mathematics and Computation, Vol. 31
* (May 1989), pp. 40-91.
*
* [2] Peter N. Brown, Alan C. Hindmarsh, and Linda R. Petzold,
* Using Krylov Methods in the Solution of Large-Scale
* Differential-Algebraic Systems, SIAM J. Sci. Comput., 15
* (1994), pp. 1467-1488.
*
* [3] Peter N. Brown, Alan C. Hindmarsh, and Linda R. Petzold,
* Consistent Initial Condition Calculation for Differential-
* Algebraic Systems, SIAM J. Sci. Comput., 19 (1998),
* pp. 1495-1512.
* -----------------------------------------------------------------
*)
open Sundials
(* Shorthand aliases used throughout this example. *)
let fprintf = Printf.fprintf
let printf = Printf.printf

(* Parallel-nvector data operations (operate on the local data triple). *)
let vconst = Nvector_parallel.DataOps.const
let vscale = Nvector_parallel.DataOps.scale

(* Sub-array view of a Bigarray (shares storage with the original). *)
let slice = Bigarray.Array1.sub

(* Sizes, in bytes, of marshalled RealArray values.  Marshalling an empty
   array gives the fixed header size; the difference with a one-element
   array gives the per-cell size.  Used below to size MPI receive buffers
   (see the Mpi.ireceive calls). *)
let header_and_empty_array_size =
  Marshal.total_size (Marshal.to_bytes (RealArray.create 0) []) 0
let float_cell_size =
  Marshal.total_size (Marshal.to_bytes (RealArray.create 1) []) 0
  - header_and_empty_array_size

(* Number of bytes needed to marshal a RealArray of length x. *)
let bytes x = header_and_empty_array_size + x * float_cell_size
let num_species = 2*nprey
(* 4 pi *)
let nsmxsub = (num_species * mxsub)
(* rtol tolerance *)
(* 1. *)
let nout = 6
(* User-defined vector accessor macro IJ_Vptr.
 * IJ_Vptr is defined in order to express the underlying 3-d structure of the
 * dependent variable vector from its underlying 1-d storage (an N_Vector).
 * IJ_Vptr(vv,i,j) returns a pointer to the location in vv corresponding to
 * species index is = 0, x-index ix = i, and y-index jy = j.
 *)

(* Offset of species 0 at local mesh point (i, j) in the flat storage;
   add the species index to reach an individual component. *)
let ij_index i j = i*num_species + j*nsmxsub
(* Type: UserData. Contains problem constants, preconditioner data, etc. *)
type user_data =
  {
    ns : int;                      (* number of species *)
    np : int;                      (* number of prey (= number of predators) *)
    thispe : int;                  (* rank of this PE *)
    npes : int;                    (* total number of PEs *)
    ixsub : int;                   (* x-index of this PE's subgrid *)
    jysub : int;                   (* y-index of this PE's subgrid *)
    npex : int;                    (* number of subgrids in the x direction *)
    npey : int;                    (* number of subgrids in the y direction *)
    mxsub : int;                   (* x mesh points per subgrid *)
    mysub : int;                   (* y mesh points per subgrid *)
    nsmxsub : int;                 (* ns * mxsub *)
    nsmxsub2 : int;                (* ns * (mxsub + 2) *)
    dx : float;                    (* x mesh spacing *)
    dy : float;                    (* y mesh spacing *)
    acoef : RealArray2.t;          (* interaction coefficients a(i,j) *)
    (* NOTE(review): the following five fields are constructed in
       init_user_data and read throughout (webdata.cox, .coy, .bcoef,
       .rhs, .cext) but were missing from this extract of the type;
       restored here — confirm against the original source. *)
    cox : RealArray.t;             (* x diffusion coefficients / dx^2 *)
    coy : RealArray.t;             (* y diffusion coefficients / dy^2 *)
    bcoef : RealArray.t;           (* b(i) coefficients *)
    rhs : RealArray.t;             (* per-point work array *)
    cext : RealArray.t;            (* extended local grid (with ghost cells) *)
    comm : Mpi.communicator;
    rates : Nvector_parallel.data; (* work space for reaction rates *)
    n_local : int;                 (* local system size *)
  }
(* BSend: Send boundary data to neighboring PEs.
 * This routine sends components of cc from internal subgrid boundaries
 * to the appropriate neighbor PEs.
 *)
let bsend comm my_pe isubx isuby dsizex dsizey udata =
  let bufleft = RealArray.create (num_species*mysub)
  and bufright = RealArray.create (num_species*mysub)
  in
  (* If isuby > 0, send data from bottom x-line of u *)
  if isuby <> 0 then Mpi.send (slice udata 0 dsizex) (my_pe-npex) 0 comm;
  (* If isuby < NPEY-1, send data from top x-line of u *)
  if isuby <> npey-1 then begin
    let offsetu = (mysub-1)*dsizex in
    Mpi.send (slice udata offsetu dsizex) (my_pe+npex) 0 comm
  end;
  (* If isubx > 0, send data from left y-line of u (via bufleft) *)
  if isubx <> 0 then begin
    for ly = 0 to mysub-1 do
      RealArray.blitn ~src:udata ~spos:(ly*dsizex)
                      ~dst:bufleft ~dpos:(ly*num_species)
                      num_species
    done;
    Mpi.send (slice bufleft 0 dsizey) (my_pe-1) 0 comm
  end;
  (* If isubx < NPEX-1, send data from right y-line of u (via bufright) *)
  if isubx <> npex-1 then begin
    for ly = 0 to mysub-1 do
      let offsetbuf = ly*num_species in
      let offsetu = offsetbuf*mxsub + (mxsub-1)*num_species in
      RealArray.blitn ~src:udata ~spos:offsetu
                      ~dst:bufright ~dpos:offsetbuf
                      num_species
    done;
    Mpi.send (slice bufright 0 dsizey) (my_pe+1) 0 comm
  end
(* BRecvPost: Start receiving boundary data from neighboring PEs.
 * (1) buffer should be able to hold 2*NUM_SPECIES*MYSUB realtype entries,
 *     should be passed to both the BRecvPost and BRecvWait functions, and
 *     should not be manipulated between the two calls.
 * (2) request should have 4 entries, and is also passed in both calls.
 *)
let brecvpost comm my_pe isubx isuby dsizex dsizey =
  (* If isuby > 0, receive data for bottom x-line of uext *)
  let r0 = if isuby <> 0
           then Mpi.ireceive (bytes dsizex) (my_pe-npex) 0 comm
           else Mpi.null_request
  in
  (* If isuby < NPEY-1, receive data for top x-line of uext *)
  let r1 = if isuby <> npey-1
           then Mpi.ireceive (bytes dsizex) (my_pe+npex) 0 comm
           else Mpi.null_request
  in
  (* If isubx > 0, receive data for left y-line of uext (via bufleft) *)
  let r2 = if isubx <> 0
           then Mpi.ireceive (bytes dsizey) (my_pe-1) 0 comm
           else Mpi.null_request
  in
  (* If isubx < NPEX-1, receive data for right y-line of uext (via bufright) *)
  let r3 = if isubx <> npex-1
           then Mpi.ireceive (bytes dsizey) (my_pe+1) 0 comm
           else Mpi.null_request
  in
  [|r0; r1; r2; r3|]
(* BRecvWait: Finish receiving boundary data from neighboring PEs.
 * (1) buffer should be able to hold 2*NUM_SPECIES*MYSUB realtype entries,
 *     should be passed to both the BRecvPost and BRecvWait functions, and
 *     should not be manipulated between the two calls.
 * (2) request should have 4 entries, and is also passed in both calls.
 *)
let brecvwait request isubx isuby dsizex cext =
  (* Row stride of the extended array (two ghost columns per row). *)
  let dsizex2 = dsizex + 2*num_species in
  (* If isuby > 0, receive data for bottom x-line of cext *)
  if isuby <> 0 then begin
    let buf = (Mpi.wait_receive request.(0) : RealArray.t) in
    RealArray.blitn ~src:buf ~dst:cext ~dpos:num_species dsizex
  end;
  (* If isuby < NPEY-1, receive data for top x-line of cext *)
  if isuby <> npey-1 then begin
    let buf = (Mpi.wait_receive request.(1) : RealArray.t) in
    RealArray.blitn ~src:buf
                    ~dst:cext ~dpos:(num_species*(1 + (mysub+1)*(mxsub+2)))
                    dsizex
  end;
  (* If isubx > 0, receive data for left y-line of cext (via bufleft) *)
  if isubx <> 0 then begin
    let bufleft = (Mpi.wait_receive request.(2) : RealArray.t) in
    (* Copy the buffer to cext *)
    for ly = 0 to mysub - 1 do
      let offsetbuf = ly*num_species in
      let offsetue = (ly+1)*dsizex2 in
      RealArray.blitn ~src:bufleft ~spos:offsetbuf
                      ~dst:cext ~dpos:offsetue
                      num_species
    done
  end;
  (* If isubx < NPEX-1, receive data for right y-line of cext (via bufright) *)
  if isubx <> npex-1 then begin
    let bufright = (Mpi.wait_receive request.(3) : RealArray.t) in
    (* Copy the buffer to cext *)
    for ly = 0 to mysub-1 do
      let offsetbuf = ly*num_species in
      let offsetue = (ly+2)*dsizex2 - num_species in
      RealArray.blitn ~src:bufright ~spos:offsetbuf
                      ~dst:cext ~dpos:offsetue
                      num_species
    done
  end
(* rescomm: Communication routine in support of resweb.
 * This routine performs all inter-processor communication of components
 * of the cc vector needed to calculate F, namely the components at all
 * interior subgrid boundaries (ghost cell data). It loads this data
 * into a work array cext (the local portion of c, extended).
 * The message-passing uses blocking sends, non-blocking receives,
 * and receive-waiting, in routines BRecvPost, BSend, BRecvWait.
 *)
let rescomm webdata _ cc _ =
  let (cdata,_,_) = cc in
  let comm = webdata.comm in
  let thispe = webdata.thispe in
  let ixsub = webdata.ixsub in
  let jysub = webdata.jysub in
  let cext = webdata.cext in
  let nsmxsub = webdata.nsmxsub in
  let nsmysub = (webdata.ns)*(webdata.mysub) in
  (* Post non-blocking receives, send our boundary data, then wait for
     the neighbours' data to land in cext. *)
  let requests = brecvpost comm thispe ixsub jysub nsmxsub nsmysub in
  bsend comm thispe ixsub jysub nsmxsub nsmysub cdata;
  brecvwait requests ixsub jysub nsmxsub cext
(* WebRates: Evaluate reaction rates at a given spatial point.
 * At (x, y), computes the ns reaction terms
 *   R_i = c_i * (b_i * fac + sum_j a(i,j) * c_j)
 * reading the concentrations at offset cxy_off of cxy and writing the
 * results at offset ratesxy_off of ratesxy. *)
let web_rates webdata x y ((cxy : RealArray.t), cxy_off)
              ((ratesxy : RealArray.t), ratesxy_off) =
  let acoef = RealArray2.unwrap webdata.acoef
  and bcoef = webdata.bcoef in
  for is = 0 to num_species-1 do
    (* ratesxy.{is} <- dotprod cxy (Directdensematrix.column is) *)
    ratesxy.{ratesxy_off + is} <- 0.;
    for j = 0 to num_species-1 do
      ratesxy.{ratesxy_off + is} <- ratesxy.{ratesxy_off + is}
                                    +. cxy.{cxy_off + j} *. acoef.{is, j}
    done
  done;
  (* Spatial modulation of the b coefficients. *)
  let fac = 1. +. alpha*.x*.y +. beta*.sin(fourpi*.x)*.sin(fourpi*.y) in
  for is = 0 to num_species-1 do
    ratesxy.{ratesxy_off + is} <- cxy.{cxy_off + is}*.( bcoef.{is}*.fac
                                                        +. ratesxy.{ratesxy_off + is} )
  done
(* reslocal: Compute res = F(t,cc,cp).
 * This routine assumes that all inter-processor communication of data
 * needed to calculate F has already been done. Components at interior
 * subgrid boundaries are assumed to be in the work array cext.
 * The local portion of the cc vector is first copied into cext.
 * The exterior Neumann boundary conditions are explicitly handled here
 * by copying data from the first interior mesh line to the ghost cell
 * locations in cext. Then the reaction and diffusion terms are
 * evaluated in terms of the cext array, and the residuals are formed.
 * The reaction terms are saved separately in the vector webdata.rates
 * for use by the preconditioner setup routine.
 *)
let reslocal webdata _ cc ((cp : RealArray.t), _, _)
((rr : RealArray.t), _, _) =
let mxsub = webdata.mxsub in
let mysub = webdata.mysub in
let npex = webdata.npex in
let npey = webdata.npey in
let ixsub = webdata.ixsub in
let jysub = webdata.jysub in
let nsmxsub = webdata.nsmxsub in
let nsmxsub2 = webdata.nsmxsub2 in
let np = webdata.np in
let dx = webdata.dx in
let dy = webdata.dy in
let cox = webdata.cox in
let coy = webdata.coy in
let rhs = webdata.rhs in
let cext = webdata.cext in
let rates, _, _ = webdata.rates in
let (cdata : RealArray.t),_,_ = cc in
  (* Copy local segment of cc vector into the working extended array cext. *)
let locc = ref 0 in
let locce = ref (nsmxsub2 + num_species) in
for _ = 0 to mysub-1 do
for i = 0 to nsmxsub-1 do
cext.{!locce+i} <- cdata.{!locc+i}
done;
locc := !locc + nsmxsub;
locce := !locce + nsmxsub2;
done;
  (* To facilitate homogeneous Neumann boundary conditions, when this is
     a boundary PE, copy data from the first interior mesh line of cc to cext. *)
  (* If jysub = 0, copy x-line 2 of cc to cext. *)
if jysub = 0 then begin
for i = 0 to nsmxsub-1 do
cext.{num_species+i} <- cdata.{nsmxsub+i}
done
end;
  (* If jysub = npey-1, copy x-line mysub-1 of cc to cext. *)
if jysub = npey-1 then begin
let locc = (mysub-2)*nsmxsub in
let locce = (mysub+1)*nsmxsub2 + num_species in
for i = 0 to nsmxsub-1 do
cext.{locce+i} <- cdata.{locc+i}
done
end;
  (* If ixsub = 0, copy y-line 2 of cc to cext. *)
if ixsub = 0 then begin
for jy = 0 to mysub-1 do
let locc = jy*nsmxsub + num_species in
let locce = (jy+1)*nsmxsub2 in
for i = 0 to num_species-1 do
cext.{locce+i} <- cdata.{locc+i}
done
done
end;
  (* If ixsub = npex-1, copy y-line mxsub-1 of cc to cext. *)
if ixsub = npex-1 then begin
for jy = 0 to mysub-1 do
let locc = (jy+1)*nsmxsub - 2*num_species in
let locce = (jy+2)*nsmxsub2 - num_species in
for i = 0 to num_species-1 do
cext.{locce+i} <- cdata.{locc+i}
done
done
end;
for jy = 0 to mysub-1 do
let ylocce = (jy+1)*nsmxsub2 in
let yy = float_of_int(jy+jysub*mysub)*.dy in
for ix = 0 to mxsub-1 do
let locce = ylocce + (ix+1)*num_species in
let xx = float_of_int(ix + ixsub*mxsub)*.dx in
let rates_off = ij_index ix jy in
web_rates webdata xx yy (cext, locce) (rates, ij_index ix jy);
      (* web_rates ( cext , ) ( rates , ij_index ix jy ) ; *)
let rr_off = ij_index ix jy in
let cp_off = ij_index ix jy in
for is = 0 to num_species-1 do
let dcyli = cext.{locce+is} -. cext.{locce+is-nsmxsub2} in
let dcyui = cext.{locce+is+nsmxsub2} -. cext.{locce+is} in
let dcxli = cext.{locce+is} -. cext.{locce+is-num_species} in
let dcxui = cext.{locce+is+num_species} -. cext.{locce+is} in
rhs.{is} <- cox.{is}*.(dcxui-.dcxli)
+. coy.{is}*.(dcyui-.dcyli)
+. rates.{rates_off + is};
if is < np then rr.{rr_off + is} <- cp.{cp_off + is} -. rhs.{is}
else rr.{rr_off + is} <- -. rhs.{is}
done
done
done
(* resweb: System residual function for the predator-prey system.
 * To compute the residual function F, this routine calls:
 * rescomm, for needed communication, and then
 * reslocal, for computation of the residuals on this processor. *)
let resweb webdata tt cc cp rr =
  (* Unused; the leading underscore suppresses the warning. *)
  let _Nlocal = webdata.n_local in
  rescomm webdata tt cc cp;
  reslocal webdata tt cc cp rr
(* InitUserData: Load problem constants in webdata (of type UserData).
 *)
let init_user_data local_N system_size thispe npes comm =
let rates = Nvector_parallel.make local_N system_size comm 0. in
let acoef = RealArray2.create num_species num_species in
let jysub = thispe / npex in
let ixsub = thispe - (jysub)*npex in
let ns = num_species in
let np = nprey in
let dx = ax/.float_of_int(mx-1) in
let dy = ay/.float_of_int(my-1) in
let nsmxsub = mxsub * num_species in
let nsmxsub2 = (mxsub+2)*num_species in
let n_local = mxsub*mysub*num_species in
let dx2 = dx*.dx in
let dy2 = dy*.dy in
let bcoef = RealArray.create num_species in
let cox = RealArray.create num_species in
let coy = RealArray.create num_species in
let rhs = RealArray.create num_species in
let cext = RealArray.create ((mxsub+2)*(mysub+2)*num_species) in
for i = 0 to np-1 do
    (* Fill in the portion of acoef in the four quadrants, row by row. *)
for j = 0 to np-1 do
RealArray2.set acoef (np+j) i (-.gg);
RealArray2.set acoef j (i+np) ee;
RealArray2.set acoef j i zero;
RealArray2.set acoef (np+j) (i+np) zero
done;
RealArray2.set acoef i i (-.aa); RealArray2.set acoef (i+np) (i+np) (-.aa);
bcoef.{i} <- bb; bcoef.{i+np} <- -.bb;
cox.{i} <- dprey/.dx2; cox.{i+np} <- dpred/.dx2;
coy.{i} <- dprey/.dy2; coy.{i+np} <- dpred/.dy2
done;
{
ns = ns;
np = np;
thispe = thispe;
npes = npes;
ixsub = ixsub;
jysub = jysub;
npex = npex;
npey = npey;
mxsub = mxsub;
mysub = mysub;
nsmxsub = nsmxsub;
nsmxsub2 = nsmxsub2;
dx = dx;
dy = dy;
acoef = acoef;
cox = cox;
coy = coy;
bcoef = bcoef;
rhs = rhs;
cext = cext;
comm = comm;
rates = Nvector.unwrap rates;
n_local = n_local;
}
(* SetInitialProfiles: Set initial conditions in cc, cp, and id.
 * A polynomial profile is used for the prey cc values, and a constant
 * (1.0e5) is loaded as the initial guess for the predator cc values.
 * The id values are set to 1 for the prey and 0 for the predators.
 * The prey cp values are set according to the given system, and
 * the predator cp values are set to zero.
 *)
let set_initial_profiles webdata (((ccdata : RealArray.t), _, _) as cc)
                                 (((cpdata : RealArray.t), _, _) as cp)
                                 ((iddata : RealArray.t), _, _) res =
  let ixsub = webdata.ixsub in
  let jysub = webdata.jysub in
  let mxsub = webdata.mxsub in
  (* FIX: this read webdata.mxsub; it was harmless only because the
     subgrids are square (mxsub = mysub).  Read the intended field. *)
  let mysub = webdata.mysub in
  let dx = webdata.dx in
  let dy = webdata.dy in
  let np = webdata.np in
  (* Loop over grid, load cc values and id values. *)
  for jy = 0 to mysub-1 do
    let yy = float_of_int(jy + jysub*mysub) *. dy in
    for ix = 0 to mxsub-1 do
      let xx = float_of_int(ix + ixsub*mxsub) *. dx in
      let xyfactor = 16.*.xx*.(1. -. xx)*.yy*.(1. -. yy) in
      let xyfactor = xyfactor *. xyfactor in
      let cc_off = ij_index ix jy in
      let id_off = ij_index ix jy in
      for is = 0 to num_species-1 do
        if is < np
        then (ccdata.{cc_off + is} <- 10.0+.float_of_int(is+1)*.xyfactor;
              iddata.{id_off + is} <- one)
        else (ccdata.{cc_off + is} <- 1.0e5;
              iddata.{id_off + is} <- zero)
      done
    done
  done;
  (* Set c' for the prey by calling the residual function with cp = 0. *)
  vconst zero cp;
  resweb webdata zero cc cp res;
  vscale (-.one) res cp;
  (* Set c' for predators to 0. *)
  for jy = 0 to mysub-1 do
    for ix = 0 to mxsub-1 do
      let cp_off = ij_index ix jy in
      for is = np to num_species-1 do
        cpdata.{cp_off + is} <- zero
      done
    done
  done
(* Print first lines of output (problem description)
 * and table header
 *)
(* Name of the SPGMR linear solver as printed for each Sundials major
   version (the solver was renamed across releases). *)
let idaspgmr =
  match Config.sundials_version with
  | 2,_,_ -> "IDASPGMR"
  | 3,_,_ -> "SUNSPGMR"
  | _,_,_ -> "SUNLinSol_SPGMR"
(* Print first lines of output: problem description, solver parameters,
   and the table header for the per-step output that follows. *)
let print_header system_size maxl mudq mldq mukeep mlkeep rtol atol =
  printf "\nidaFoodWeb_kry_bbd_p: Predator-prey DAE parallel example problem for IDA \n\n";
  printf "Number of species ns: %d" num_species;
  printf " Mesh dimensions: %d x %d" mx my;
  printf " Total system size: %d\n"system_size;
  printf "Subgrid dimensions: %d x %d" mxsub mysub;
  printf " Processor array: %d x %d\n" npex npey;
  printf "Tolerance parameters: rtol = %g atol = %g\n" rtol atol;
  printf "Linear solver: %s Max. Krylov dimension maxl: %d\n" idaspgmr maxl;
  printf "Preconditioner: band-block-diagonal (IDABBDPRE), with parameters\n";
  printf " mudq = %d, mldq = %d, mukeep = %d, mlkeep = %d\n"
    mudq mldq mukeep mlkeep;
  printf "CalcIC called to correct initial predator concentrations \n\n";
  printf "-----------------------------------------------------------\n";
  printf " t bottom-left top-right";
  printf " | nst k h\n";
  printf "-----------------------------------------------------------\n\n"
(* PrintOutput: Print output values at output time t = tt.
 * Selected run statistics are printed.  Then concentrations are printed
 * for the bottom-left and top-right grid points only. *)
let print_output webdata mem cc tt comm =
  let clast = RealArray.create 2 in
  let thispe = webdata.thispe in
  let npelast = webdata.npes - 1 in
  let cdata,_,_ = cc in
  (* Send conc. at the top-right mesh point from PE npes-1 to PE 0. *)
  if thispe = npelast then begin
    let ilast = num_species*mxsub*mysub - 2 in
    if npelast <> 0
    then Mpi.send (slice cdata ilast 2) 0 0 comm
    else (clast.{0} <- cdata.{ilast}; clast.{1} <- cdata.{ilast+1})
  end;
  (* On PE 0, receive conc. at top right from PE npes-1.
     Then print performance data and sampled solution values. *)
  if thispe = 0 then begin
    if npelast <> 0 then
      let buf = (Mpi.receive npelast 0 comm : RealArray.t) in
      RealArray.blitn ~src:buf ~dst:clast 2
    ;
    let kused = Ida.get_last_order mem in
    let nst = Ida.get_num_steps mem in
    let hused = Ida.get_last_step mem in
    printf "%8.2e %12.4e %12.4e | %3d %1d %12.4e\n"
           tt cdata.{0} clast.{0} nst kused hused;
    for i = 1 to num_species-1 do
      printf " %12.4e %12.4e |\n" cdata.{i} clast.{i}
    done;
    printf "\n"
  end
(* PrintFinalStats: Print final run data contained in iopt.
 *)
(* Print the final set of solver statistics gathered from IDA. *)
let print_final_stats mem =
  let open Ida in
  (* Integrator statistics. *)
  let nst = get_num_steps mem in
  let nre = get_num_res_evals mem in
  let netf = get_num_err_test_fails mem in
  let ncfn = get_num_nonlin_solv_conv_fails mem in
  let nni = get_num_nonlin_solv_iters mem in
  (* Linear-solver (SPGMR) statistics. *)
  let ncfl = Spils.get_num_lin_conv_fails mem in
  let nli = Spils.get_num_lin_iters mem in
  let npe = Spils.get_num_prec_evals mem in
  let nps = Spils.get_num_prec_solves mem in
  let nreLS = Spils.get_num_lin_res_evals mem in
  (* BBD preconditioner: local residual (gfn) evaluations. *)
  let nge = Ida_bbd.get_num_gfn_evals mem in
  printf "-----------------------------------------------------------\n";
  printf "\nFinal statistics: \n\n";
  printf "Number of steps = %d\n" nst;
  (* Direct residual evaluations plus those made by the linear solver. *)
  printf "Number of residual evaluations = %d\n" (nre+nreLS);
  printf "Number of nonlinear iterations = %d\n" nni;
  printf "Number of error test failures = %d\n" netf;
  printf "Number of nonlinear conv. failures = %d\n\n" ncfn;
  printf "Number of linear iterations = %d\n" nli;
  printf "Number of linear conv. failures = %d\n\n" ncfl;
  printf "Number of preconditioner setups = %d\n" npe;
  printf "Number of preconditioner solves = %d\n" nps;
  printf "Number of local residual evals. = %d\n" nge
(* Set up the problem, initialize IDA with the SPGMR linear solver and the
   IDABBDPRE preconditioner, correct the initial values with IDACalcIC,
   and run the output loop. *)
let main () =
  (* Set communicator, and get processor number and total number of PEs. *)
  let comm = Mpi.comm_world in
  let thispe = Mpi.comm_rank comm in
  let npes = Mpi.comm_size comm in
  if npes <> npex*npey then begin
    if thispe = 0 then
      fprintf stderr
              "\nMPI_ERROR(0): npes = %d not equal to NPEX*NPEY = %d\n"
              npes (npex*npey)
    ;
    exit 1
  end;
  (* Set local length (local_N) and global length (system_size). *)
  let local_N = mxsub*mysub*num_species in
  let system_size = neq in
  (* Set up user data block webdata. *)
  let webdata = init_user_data local_N system_size thispe npes comm in
  (* Create needed vectors, and load initial values.
     The vector res is used temporarily only. *)
  let cc = Nvector_parallel.make local_N system_size comm 0. in
  let cp = Nvector_parallel.make local_N system_size comm 0. in
  let res = Nvector_parallel.make local_N system_size comm 0. in
  let id = Nvector_parallel.make local_N system_size comm 0. in
  set_initial_profiles webdata (Nvector.unwrap cc) (Nvector.unwrap cp)
                       (Nvector.unwrap id) (Nvector.unwrap res);
  (* Set remaining inputs to IDAMalloc. *)
  let t0 = zero in
  (* Call IDACreate and IDAMalloc to initialize solution.
     Call IDASpgmr to specify the IDA linear solver IDASPGMR.
     Call IDABBDPrecInit to initialize the band-block-diagonal
     preconditioner.  The half-bandwidths for the difference quotient
     evaluation are exact for the system Jacobian, but only a 5-diagonal
     band matrix is retained. *)
  let maxl = 16 in
  let mudq = nsmxsub and mldq = nsmxsub
  and mukeep = 2 and mlkeep = 2 in
  let mem =
    Ida.(init
           (SStolerances (rtol,atol))
           ~lsolver:Spils.(solver (spgmr ~maxl cc)
                                  (Ida_bbd.prec_left ~dqrely:zero
                                     Ida_bbd.({ mudq; mldq; mukeep; mlkeep })
                                     (reslocal webdata)))
           (resweb webdata) t0 cc cp)
  in
  (* Call IDACalcIC (with default options) to correct the initial values. *)
  let tout = ref 0.001 in
  Ida.calc_ic_ya_yd' mem ~varid:id !tout;
  (* On PE 0, print heading, basic parameters, initial values. *)
  if thispe = 0 then
    print_header system_size maxl mudq mldq mukeep mlkeep rtol atol
  ;
  print_output webdata mem (Nvector.unwrap cc) t0 comm;
  (* Call IDA in tout loop, normal mode, and print selected output. *)
  for iout = 1 to nout do
    let (tret, _) = Ida.solve_normal mem !tout cc cp in
    print_output webdata mem (Nvector.unwrap cc) tret comm;
    if iout < 3 then tout := !tout *. tmult
    else tout := !tout +. tadd
  done;
  (* On PE 0, print final set of statistics. *)
  if thispe = 0 then print_final_stats mem
let reps =
try int_of_string (Unix.getenv "NUM_REPS")
with Not_found | Failure _ -> 1
let gc_at_end =
try int_of_string (Unix.getenv "GC_AT_END") <> 0
with Not_found | Failure _ -> false
let gc_each_rep =
try int_of_string (Unix.getenv "GC_EACH_REP") <> 0
with Not_found | Failure _ -> false
let _ =
for _ = 1 to reps do
main ();
if gc_each_rep then Gc.compact ()
done;
if gc_at_end then Gc.compact ()
|
de0629780c3a00c7c0e79c128f4f7f5597626bf87e98580076c806295bb5e0be | kupl/LearnML | original.ml | type lambda = V of var | P of (var * lambda) | C of (lambda * lambda)
and var = string
let rec makePList (lambda : lambda) (pl : string list) : string list =
match lambda with
| V v -> pl
| P (v, e) -> pl @ [ v ] @ makePList e pl
| C (e1, e2) -> makePList e1 pl @ makePList e2 pl
let rec makeVList (lambda : lambda) (vl : string list) : string list =
match lambda with
| V v -> vl @ [ v ]
| P (v, e) -> vl @ makeVList e vl
| C (e1, e2) -> makeVList e1 vl @ makeVList e2 vl
let rec compareToPlist (pl : 'a list) e : bool =
match pl with [] -> false | hd :: tl -> e = hd || compareToPlist tl e
let rec compareList (pl : 'b list) (vl : 'b list) : bool =
match vl with
| [] -> true
| hd :: tl -> compareList pl tl && compareToPlist pl hd
let rec check (lambda : lambda) : bool =
match lambda with
| V v -> false
| P (v, e) ->
let pl : string list = makePList lambda [] in
let vl : string list = makeVList lambda [] in
compareList pl vl
| C (e1, e2) -> check e1 && check e2
| null | https://raw.githubusercontent.com/kupl/LearnML/c98ef2b95ef67e657b8158a2c504330e9cfb7700/result/cafe2/lambda/sub91/original.ml | ocaml | type lambda = V of var | P of (var * lambda) | C of (lambda * lambda)
and var = string
let rec makePList (lambda : lambda) (pl : string list) : string list =
match lambda with
| V v -> pl
| P (v, e) -> pl @ [ v ] @ makePList e pl
| C (e1, e2) -> makePList e1 pl @ makePList e2 pl
let rec makeVList (lambda : lambda) (vl : string list) : string list =
match lambda with
| V v -> vl @ [ v ]
| P (v, e) -> vl @ makeVList e vl
| C (e1, e2) -> makeVList e1 vl @ makeVList e2 vl
let rec compareToPlist (pl : 'a list) e : bool =
match pl with [] -> false | hd :: tl -> e = hd || compareToPlist tl e
let rec compareList (pl : 'b list) (vl : 'b list) : bool =
match vl with
| [] -> true
| hd :: tl -> compareList pl tl && compareToPlist pl hd
let rec check (lambda : lambda) : bool =
match lambda with
| V v -> false
| P (v, e) ->
let pl : string list = makePList lambda [] in
let vl : string list = makeVList lambda [] in
compareList pl vl
| C (e1, e2) -> check e1 && check e2
| |
a36d0dfbb00c6f5e45e20a506511148e2688d14742fe2373c5fd2ecef4451d91 | tweag/pirouette | QuasiQuoter.hs | # LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE DeriveLift #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE UndecidableInstances #
-- | Provides the quasiquoters to be able to write @lang@ programs
directly into Haskell source files . Using the functions
-- exported by this module requires the @-XQuasiQuotes@ extension.
Check " Language . . Example . " for an example instantiation .
module Language.Pirouette.QuasiQuoter (QuasiQuoter, prog, progNoTC, term, ty, funDecl) where
import Language.Haskell.TH.Quote
import Language.Pirouette.QuasiQuoter.Internal
import Language.Pirouette.QuasiQuoter.Syntax
import Language.Pirouette.QuasiQuoter.ToTerm
import Pirouette.Term.Syntax.Base
import Pirouette.Term.TypeChecker (typeCheckDecls)
import Text.Megaparsec
prog :: forall lang. (LanguageParser lang, LanguageBuiltinTypes lang, LanguagePretty lang) => QuasiQuoter
prog = quoter $ \str -> do
p0 <- parseQ (spaceConsumer *> lexeme (parseProgram @lang) <* eof) str
decls <- trQ (trProgram p0)
_ <- maybeQ (typeCheckDecls decls)
[e|(PrtUnorderedDefs decls)|]
progNoTC :: forall lang. (LanguageParser lang, LanguagePretty lang) => QuasiQuoter
progNoTC = quoter $ \str -> do
p0 <- parseQ (spaceConsumer *> lexeme (parseProgram @lang) <* eof) str
decls <- trQ (trProgram p0)
[e|(PrtUnorderedDefs decls)|]
term :: forall lang. (LanguageParser lang, LanguagePretty lang) => QuasiQuoter
term = quoter $ \str -> do
p0 <- parseQ (spaceConsumer *> lexeme (parseTerm @lang) <* eof) str
p1 <- trQ (trTerm [] [] p0)
[e|p1|]
ty :: forall lang. (LanguageParser lang, LanguagePretty lang) => QuasiQuoter
ty = quoter $ \str -> do
p0 <- parseQ (spaceConsumer *> lexeme (parseType @lang) <* eof) str
p1 <- trQ (trType [] p0)
[e|p1|]
funDecl :: forall lang. (LanguageParser lang, Language lang) => QuasiQuoter
funDecl = quoter $ \str -> do
(_, p0) <- parseQ (spaceConsumer *> lexeme (parseFunDecl @lang) <* eof) str
p1 <- trQ (trFunDecl p0)
[e|p1|]
| null | https://raw.githubusercontent.com/tweag/pirouette/1bfaa872dc04654ad32e9f8e7e2420ec4fa6de3b/src/Language/Pirouette/QuasiQuoter.hs | haskell | # LANGUAGE DeriveLift #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
| Provides the quasiquoters to be able to write @lang@ programs
exported by this module requires the @-XQuasiQuotes@ extension. | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE UndecidableInstances #
directly into Haskell source files . Using the functions
Check " Language . . Example . " for an example instantiation .
module Language.Pirouette.QuasiQuoter (QuasiQuoter, prog, progNoTC, term, ty, funDecl) where
import Language.Haskell.TH.Quote
import Language.Pirouette.QuasiQuoter.Internal
import Language.Pirouette.QuasiQuoter.Syntax
import Language.Pirouette.QuasiQuoter.ToTerm
import Pirouette.Term.Syntax.Base
import Pirouette.Term.TypeChecker (typeCheckDecls)
import Text.Megaparsec
prog :: forall lang. (LanguageParser lang, LanguageBuiltinTypes lang, LanguagePretty lang) => QuasiQuoter
prog = quoter $ \str -> do
p0 <- parseQ (spaceConsumer *> lexeme (parseProgram @lang) <* eof) str
decls <- trQ (trProgram p0)
_ <- maybeQ (typeCheckDecls decls)
[e|(PrtUnorderedDefs decls)|]
progNoTC :: forall lang. (LanguageParser lang, LanguagePretty lang) => QuasiQuoter
progNoTC = quoter $ \str -> do
p0 <- parseQ (spaceConsumer *> lexeme (parseProgram @lang) <* eof) str
decls <- trQ (trProgram p0)
[e|(PrtUnorderedDefs decls)|]
term :: forall lang. (LanguageParser lang, LanguagePretty lang) => QuasiQuoter
term = quoter $ \str -> do
p0 <- parseQ (spaceConsumer *> lexeme (parseTerm @lang) <* eof) str
p1 <- trQ (trTerm [] [] p0)
[e|p1|]
ty :: forall lang. (LanguageParser lang, LanguagePretty lang) => QuasiQuoter
ty = quoter $ \str -> do
p0 <- parseQ (spaceConsumer *> lexeme (parseType @lang) <* eof) str
p1 <- trQ (trType [] p0)
[e|p1|]
funDecl :: forall lang. (LanguageParser lang, Language lang) => QuasiQuoter
funDecl = quoter $ \str -> do
(_, p0) <- parseQ (spaceConsumer *> lexeme (parseFunDecl @lang) <* eof) str
p1 <- trQ (trFunDecl p0)
[e|p1|]
|
687e414498460dae2d9a27453c854492971351eaee968b5343502630c7913ec6 | conreality/conreality | cccp.ml | (* This is free and unencumbered software released into the public domain. *)
open Prelude
open Lwt.Infix
open Networking
open Syntax
module Protocol = struct
let port = 1984
end
module Client = struct
type t = Lwt_unix.sockaddr
let any =
Unix.(ADDR_INET (Unix.inet_addr_any, 0))
let compare (a : t) (b : t) =
Pervasives.compare a b
let to_string = function
| Unix.ADDR_INET (addr, port) ->
Printf.sprintf "%s:%d" (Unix.string_of_inet_addr addr) port
| Unix.ADDR_UNIX _ -> assert false
end
module Callback = struct
type t = Client.t -> string -> unit
end
module Server = struct
type t = {
socket: UDP.Socket.t;
buffer: Lwt_bytes.t;
}
let create socket =
{ socket; buffer = UDP.Packet.make_buffer (); }
let socket { socket; _ } = socket
let buffer { buffer; _ } = buffer
let rec loop server (callback : Callback.t) =
let { socket; buffer } = server in
UDP.Socket.recvfrom socket buffer >>= fun (length, client) ->
let command = String.sub (Lwt_bytes.to_string buffer) 0 length in
Lwt_log.ign_notice_f "Received %d bytes from %s: %s" length (Client.to_string client) command;
callback client (if (String.length command) > 1 then command else "") |> ignore; (* for `nc` probe packets *)
loop server callback
end
| null | https://raw.githubusercontent.com/conreality/conreality/e03328ef1f0056b58e4ffe181a279a1dc776e094/src/consensus/messaging/cccp.ml | ocaml | This is free and unencumbered software released into the public domain.
for `nc` probe packets |
open Prelude
open Lwt.Infix
open Networking
open Syntax
module Protocol = struct
let port = 1984
end
module Client = struct
type t = Lwt_unix.sockaddr
let any =
Unix.(ADDR_INET (Unix.inet_addr_any, 0))
let compare (a : t) (b : t) =
Pervasives.compare a b
let to_string = function
| Unix.ADDR_INET (addr, port) ->
Printf.sprintf "%s:%d" (Unix.string_of_inet_addr addr) port
| Unix.ADDR_UNIX _ -> assert false
end
module Callback = struct
type t = Client.t -> string -> unit
end
module Server = struct
type t = {
socket: UDP.Socket.t;
buffer: Lwt_bytes.t;
}
let create socket =
{ socket; buffer = UDP.Packet.make_buffer (); }
let socket { socket; _ } = socket
let buffer { buffer; _ } = buffer
let rec loop server (callback : Callback.t) =
let { socket; buffer } = server in
UDP.Socket.recvfrom socket buffer >>= fun (length, client) ->
let command = String.sub (Lwt_bytes.to_string buffer) 0 length in
Lwt_log.ign_notice_f "Received %d bytes from %s: %s" length (Client.to_string client) command;
loop server callback
end
|
64cf277c912f9258061e0c0a5e254c8eddebf03ad45f270412edbe6c055ce671 | disconcision/fructure | layout-demos.rkt | #lang racket
(require 2htdp/image
"../shared/slash-patterns/slash-patterns.rkt"
; only for div in add-dynamic. TODO: refactor
"../src/common.rkt"
"../src/layout/layout.rkt"
"layout-demos-data.rkt")
; TODO: rework as default settings?
(define test-settings-init
(hash 'text-size 34
'typeface "Iosevka, Light"
'max-menu-length 3
'max-menu-length-chars 1
'transform-template-only #f
'popout-transform? #t
'popout-menu? #t
'custom-menu-selector? #t
'simple-menu? #f
'force-horizontal-layout? #f
'length-conditional-layout? #t
'length-conditional-cutoff 14
'dodge-enabled? #t
'implicit-forms '(ref num app)
1
5
'show-parens? #f
'hole-bottom-color (color 252 225 62)
'hole-side-color (color 193 115 23)
'background-block-color (color 25 80 84)
'transform-tint-color (color 160 0 0) ; selected-color
'selected-atom-color (color 255 255 255)
'menu-bkg-color (color 112 112 112)
'form-color (color 0 130 214)
'literal-color (color 228 150 34)
'grey-one (color 230 230 230)
'grey-two (color 215 215 215)
'identifier-color (color 0 0 0)
'selected-color (color 230 0 0)
'+hole-color (color 25 80 84)
'transform-arrow-color (color 255 255 255)
'bkg-color (color 0 47 54)
'pattern-bkg-color (color 230 230 230)
( color 76 76 76 )
'pattern-grey-two (color 110 110 110)
'menu-secondary-color (color 40 40 40)
))
COPIED from fructure.rkt TODO REFACTOR
(define (add-dynamic-settings layout)
(define-from layout
text-size typeface
char-padding-vertical)
(define (div-integer x y)
(inexact->exact (round (div x y))))
(define space-image
(text/font " " text-size "black"
typeface 'modern 'normal 'normal #f))
(hash-set* layout
'radius (sub1 (div-integer text-size 2))
'margin (div-integer text-size 5)
'unit-width (image-width space-image)
'unit-height (+ char-padding-vertical (image-height space-image))))
; add dynamic settings to layout
(define test-settings (add-dynamic-settings test-settings-init))
( second
(render
data-0
test-settings))
( second
(render
data-1
test-settings))
( second
(render
data-2
test-settings))
THIS IS THE ONE WE WERE TESTING UNITL 2019.jan31
( second
(render
data-3
test-settings))
( second
(render
data-4
test-settings))
( second
(render
data-5
test-settings))
( second
(render
(match data-6
[(/ a/ a)
(/ [display-box `(800 240)]
[display-offset `(0 0)]
a/ a)])
test-settings))
#;(draw-outlines-abs
(augment-absolute-offsets
(first (render (match data-6
[(/ a/ a)
(/ [display-box `(800 240)]
[display-offset `(0 0)]
a/ a)])
test-settings))))
( second
(fructure-layout data-7 test-settings))
(draw-outlines-abs (first (fructure-layout data-8 test-settings))
(second (fructure-layout data-8 test-settings)))
(second
(fructure-layout data-8 test-settings))
(second
(fructure-layout data-9 test-settings))
(second
(fructure-layout data-10 test-settings))
(second
(render data-11 test-settings))
(second
(render data-12 test-settings))
(second
(fructure-layout data-13 test-settings))
(second
(fructure-layout data-14 test-settings))
(second
(fructure-layout data-15 test-settings))
(second
(fructure-layout data-16 test-settings))
(second
(fructure-layout transforming-λ-dog-dog-dog-to-hole
test-settings))
(second
(fructure-layout transforming-three-liner-to-hole
test-settings))
(second
(fructure-layout transforming-3-line-to-4-line
test-settings))
(second
(fructure-layout transforming-hole-to-2-line
test-settings))
| null | https://raw.githubusercontent.com/disconcision/fructure/d434086052eab3c450f631b7b14dcbf9358f45b7/tests/layout-demos.rkt | racket | only for div in add-dynamic. TODO: refactor
TODO: rework as default settings?
selected-color
add dynamic settings to layout
(draw-outlines-abs | #lang racket
(require 2htdp/image
"../shared/slash-patterns/slash-patterns.rkt"
"../src/common.rkt"
"../src/layout/layout.rkt"
"layout-demos-data.rkt")
(define test-settings-init
(hash 'text-size 34
'typeface "Iosevka, Light"
'max-menu-length 3
'max-menu-length-chars 1
'transform-template-only #f
'popout-transform? #t
'popout-menu? #t
'custom-menu-selector? #t
'simple-menu? #f
'force-horizontal-layout? #f
'length-conditional-layout? #t
'length-conditional-cutoff 14
'dodge-enabled? #t
'implicit-forms '(ref num app)
1
5
'show-parens? #f
'hole-bottom-color (color 252 225 62)
'hole-side-color (color 193 115 23)
'background-block-color (color 25 80 84)
'selected-atom-color (color 255 255 255)
'menu-bkg-color (color 112 112 112)
'form-color (color 0 130 214)
'literal-color (color 228 150 34)
'grey-one (color 230 230 230)
'grey-two (color 215 215 215)
'identifier-color (color 0 0 0)
'selected-color (color 230 0 0)
'+hole-color (color 25 80 84)
'transform-arrow-color (color 255 255 255)
'bkg-color (color 0 47 54)
'pattern-bkg-color (color 230 230 230)
( color 76 76 76 )
'pattern-grey-two (color 110 110 110)
'menu-secondary-color (color 40 40 40)
))
COPIED from fructure.rkt TODO REFACTOR
(define (add-dynamic-settings layout)
(define-from layout
text-size typeface
char-padding-vertical)
(define (div-integer x y)
(inexact->exact (round (div x y))))
(define space-image
(text/font " " text-size "black"
typeface 'modern 'normal 'normal #f))
(hash-set* layout
'radius (sub1 (div-integer text-size 2))
'margin (div-integer text-size 5)
'unit-width (image-width space-image)
'unit-height (+ char-padding-vertical (image-height space-image))))
(define test-settings (add-dynamic-settings test-settings-init))
( second
(render
data-0
test-settings))
( second
(render
data-1
test-settings))
( second
(render
data-2
test-settings))
THIS IS THE ONE WE WERE TESTING UNITL 2019.jan31
( second
(render
data-3
test-settings))
( second
(render
data-4
test-settings))
( second
(render
data-5
test-settings))
( second
(render
(match data-6
[(/ a/ a)
(/ [display-box `(800 240)]
[display-offset `(0 0)]
a/ a)])
test-settings))
(augment-absolute-offsets
(first (render (match data-6
[(/ a/ a)
(/ [display-box `(800 240)]
[display-offset `(0 0)]
a/ a)])
test-settings))))
( second
(fructure-layout data-7 test-settings))
(draw-outlines-abs (first (fructure-layout data-8 test-settings))
(second (fructure-layout data-8 test-settings)))
(second
(fructure-layout data-8 test-settings))
(second
(fructure-layout data-9 test-settings))
(second
(fructure-layout data-10 test-settings))
(second
(render data-11 test-settings))
(second
(render data-12 test-settings))
(second
(fructure-layout data-13 test-settings))
(second
(fructure-layout data-14 test-settings))
(second
(fructure-layout data-15 test-settings))
(second
(fructure-layout data-16 test-settings))
(second
(fructure-layout transforming-λ-dog-dog-dog-to-hole
test-settings))
(second
(fructure-layout transforming-three-liner-to-hole
test-settings))
(second
(fructure-layout transforming-3-line-to-4-line
test-settings))
(second
(fructure-layout transforming-hole-to-2-line
test-settings))
|
673b4b37c46c8408055935ee5cf736f050387c62661b2787984a8b13a27e3887 | bobbae/gosling-emacs | spell.ml | (defun
(correct-spelling-mistakes word action continue
(setq continue 1)
(progn;error-occured
(while continue
(save-excursion
(temp-use-buffer "Error log")
(beginning-of-file)
(set-mark)
(end-of-line)
(setq word (region-to-string))
(forward-character)
(delete-to-killbuffer)
)
(beginning-of-file)
(error-occured (re-search-forward (concat "\\b"
(quote word)
"\\b")))
(message (concat word " ? "))
(setq action (get-tty-character))
(beginning-of-line)
(if
(| (= action '^G')
(= action 'e')) (setq continue 0)
(= action 'r') (error-occured
(re-query-replace-string ""
(get-tty-string
(concat word " => "))))
)
)
)
(novalue)
)
)
(defun
(spell
(message (concat "Looking for errors in " (current-file-name)
", please wait..."))
(sit-for 0)
(save-excursion
(compile-it (concat "spell " (current-file-name))))
(error-occured (correct-spelling-mistakes))
(message "Done!")
(novalue)
)
)
| null | https://raw.githubusercontent.com/bobbae/gosling-emacs/8fdda532abbffb0c952251a0b5a4857e0f27495a/maclib/spell.ml | ocaml | (defun
(correct-spelling-mistakes word action continue
(setq continue 1)
(progn;error-occured
(while continue
(save-excursion
(temp-use-buffer "Error log")
(beginning-of-file)
(set-mark)
(end-of-line)
(setq word (region-to-string))
(forward-character)
(delete-to-killbuffer)
)
(beginning-of-file)
(error-occured (re-search-forward (concat "\\b"
(quote word)
"\\b")))
(message (concat word " ? "))
(setq action (get-tty-character))
(beginning-of-line)
(if
(| (= action '^G')
(= action 'e')) (setq continue 0)
(= action 'r') (error-occured
(re-query-replace-string ""
(get-tty-string
(concat word " => "))))
)
)
)
(novalue)
)
)
(defun
(spell
(message (concat "Looking for errors in " (current-file-name)
", please wait..."))
(sit-for 0)
(save-excursion
(compile-it (concat "spell " (current-file-name))))
(error-occured (correct-spelling-mistakes))
(message "Done!")
(novalue)
)
)
| |
1600458f731492146bfbf066ab9063c6740da05a5c7c96efdd566dd170c5f529 | camlp5/camlp5 | pa_lex.ml | (* camlp5r *)
(* pa_lex.ml,v *)
Copyright ( c ) INRIA 2007 - 2017
#load "pa_extend.cmo";
#load "q_MLast.cmo";
(* Simplified syntax of parsers of characters streams *)
open Pcaml;
open Exparser;
(**)
value var () = "buf";
value empty loc = <:expr< B.empty >>;
value add_char loc c cl = <:expr< B.add $c$ $cl$ >>;
value get_buf loc cl = <:expr< B.get $cl$ >>;
value var ( ) = " buf " ;
value empty loc = < : expr < [ ] > > ;
value cl = < : expr < [ $ c$ : : $ cl$ ] > > ;
value get_buf loc cl = < : expr < List.rev $ cl$ > > ;
value var () = "buf";
value empty loc = <:expr< [] >>;
value add_char loc c cl = <:expr< [$c$ :: $cl$] >>;
value get_buf loc cl = <:expr< List.rev $cl$ >>;
*)
value fresh_c cl =
let n =
List.fold_left
(fun n c ->
match c with
[ <:expr< $lid:_$ >> -> n + 1
| _ -> n ])
0 cl
in
if n = 0 then "c" else "c" ^ string_of_int n
;
value accum_chars loc cl =
List.fold_right (add_char loc) cl <:expr< $lid:var ()$ >>
;
value conv_rules loc rl =
List.map
(fun (sl, cl, a) ->
let a =
let b = accum_chars loc cl in
match a with
[ Some e -> e
| None -> b ]
in
(List.rev sl, None, a))
rl
;
value mk_lexer loc rl =
cparser loc None (conv_rules loc rl)
;
value mk_lexer_match loc e rl =
cparser_match loc e None (conv_rules loc rl)
;
group together consecutive rules just containing one character
value isolate_char_patt_list =
loop [] where rec loop pl =
fun
[ [([(SpTrm _ p <:vala< None >>, SpoNoth)], [_], None) :: rl] ->
let p =
match p with
[ <:patt< $chr:_$ >> -> p
| <:patt< ($p$ as $lid:_$) >> -> p
| p -> p ]
in
loop [p :: pl] rl
| rl -> (List.rev pl, rl) ]
;
value or_patt_of_patt_list loc =
fun
[ [p :: pl] -> List.fold_left (fun p1 p2 -> <:patt< $p1$ | $p2$ >>) p pl
| [] -> invalid_arg "or_patt_of_patt_list" ]
;
value isolate_char_patt loc rl =
match isolate_char_patt_list rl with
[ ([] | [_], _) -> (None, rl)
| (pl, rl) -> (Some (or_patt_of_patt_list loc pl), rl) ]
;
value make_rules loc rl sl cl errk =
match isolate_char_patt loc rl with
[ (Some p, []) ->
let c = fresh_c cl in
let s =
let p = <:patt< ($p$ as $lid:c$) >> in
(SpTrm loc p <:vala< None >>, errk)
in
([s :: sl], [<:expr< $lid:c$ >> :: cl])
| x ->
let rl =
match x with
[ (Some p, rl) ->
let r =
let p = <:patt< ($p$ as c) >> in
let e = <:expr< c >> in
([(SpTrm loc p <:vala< None >>, SpoNoth)], [e], None)
in
[r :: rl]
| (None, rl) -> rl ]
in
let errk =
match List.rev rl with
[ [([], _, _) :: _] -> SpoBang
| _ -> errk ]
in
let sl =
if cl = [] then sl
else
let s =
let b = accum_chars loc cl in
let e = cparser loc None [([], None, b)] in
(SpNtr loc <:patt< $lid:var ()$ >> e, SpoBang)
in
[s :: sl]
in
let s =
let e = mk_lexer loc rl in
(SpNtr loc <:patt< $lid:var ()$ >> e, errk)
in
([s :: sl], []) ]
;
value make_any loc norec sl cl errk =
let (p, cl) =
if norec then (<:patt< _ >>, cl)
else
let c = fresh_c cl in
(<:patt< $lid:c$ >>, [<:expr< $lid:c$ >> :: cl])
in
let s = (SpTrm loc p <:vala< None >>, errk) in
([s :: sl], cl)
;
value next_char s i =
if i = String.length s then invalid_arg "next_char"
else if s.[i] = '\\' then
if i + 1 = String.length s then ("\\", i + 1)
else
match s.[i+1] with
[ '0'..'9' ->
if i + 3 < String.length s then
(Printf.sprintf "\\%c%c%c" s.[i+1] s.[i+2] s.[i+3], i + 4)
else ("\\", i + 1)
| c -> ("\\" ^ String.make 1 c, i + 2) ]
else (String.make 1 s.[i], i + 1)
;
value fold_string_chars f s a =
loop 0 a where rec loop i a =
if i = String.length s then a
else
let (c, i) = next_char s i in
loop i (f c a)
;
value make_or_chars loc s norec sl cl errk =
let pl =
loop 0 where rec loop i =
if i = String.length s then []
else
let (c, i) = next_char s i in
let p = <:patt< $chr:c$ >> in
let (p, i) =
if i < String.length s - 2 && s.[i] = '.' && s.[i+1] = '.' then
let (c, i) = next_char s (i + 2) in
(<:patt< $p$ .. $chr:c$ >>, i)
else
(p, i)
in
[p :: loop i]
in
match pl with
[ [] -> (sl, cl)
| [<:patt< $chr:c$ >>] ->
let s = (SpTrm loc <:patt< $chr:c$ >> <:vala< None >>, errk) in
let cl = if norec then cl else [<:expr< $chr:c$ >> :: cl] in
([s :: sl], cl)
| pl ->
let c = fresh_c cl in
let s =
let p =
let p = or_patt_of_patt_list loc pl in
if norec then p else <:patt< ($p$ as $lid:c$) >>
in
(SpTrm loc p <:vala< None >>, errk)
in
let cl = if norec then cl else [<:expr< $lid:c$ >> :: cl] in
([s :: sl], cl) ]
;
value make_sub_lexer loc f sl cl errk =
let s =
let buf = accum_chars loc cl in
let e = <:expr< $f$ $buf$ >> in
let p = <:patt< $lid:var ()$ >> in
(SpNtr loc p e, errk)
in
([s :: sl], [])
;
value make_lookahd loc pll sl cl errk =
let s = (SpLhd loc pll, errk) in
([s :: sl], cl)
;
value gcl = ref [];
EXTEND
GLOBAL: expr ext_attributes;
expr: LIKE "match"
[ [ "lexer"; rl = rules ->
let rl =
match isolate_char_patt loc rl with
[ (Some p, rl) ->
let p = <:patt< ($p$ as c) >> in
let e = <:expr< c >> in
[([(SpTrm loc p <:vala< None >>, SpoNoth)], [e], None) :: rl]
| (None, rl) -> rl ]
in
<:expr< fun $lid:var ()$ -> $mk_lexer loc rl$ >>
| "match"; (ext,attrs) = ext_attributes; e = SELF; "with"; "lexer"; rl = rules ->
Pa_r.expr_to_inline loc (mk_lexer_match loc e rl) ext attrs ] ]
;
expr: LEVEL "simple"
[ [ "$"; LIDENT "add"; s = STRING ->
loop (accum_chars loc gcl.val) 0 where rec loop v i =
if i = String.length s then v
else
let (c, i) = next_char s i in
loop (add_char loc <:expr< $chr:c$ >> v) i
| "$"; LIDENT "add"; e = simple_expr ->
add_char loc e (accum_chars loc gcl.val)
| "$"; LIDENT "buf" ->
get_buf loc (accum_chars loc gcl.val)
| "$"; LIDENT "empty" ->
empty loc
| "$"; LIDENT "pos" ->
<:expr< Stream.count $lid:strm_n$ >> ] ]
;
rules:
[ [ "["; rl = LIST0 rule SEP "|"; "]" -> rl ] ]
;
rule:
[ [ (sl, cl) = symb_list; a = act -> (sl, cl, a) ] ]
;
symb_list:
[ [ (sl, cl) = symbs -> do { gcl.val := cl; (sl, cl) } ] ]
;
symbs:
[ [ (sl, cl) = SELF; f = symb; kont = err_kont -> f sl cl kont
| -> ([], []) ] ]
;
symb:
[ [ "_"; norec = no_rec -> make_any loc norec
| s = STRING; norec = no_rec -> make_or_chars loc s norec
| f = simple_expr -> make_sub_lexer loc f
| "?="; "["; pll = LIST1 lookahead SEP "|"; "]" -> make_lookahd loc pll
| rl = rules -> make_rules loc rl ] ]
;
simple_expr:
[ [ i = LIDENT -> <:expr< $lid:i$ >>
| c = CHAR -> <:expr< $chr:c$ >>
| "("; e = expr; ")" -> e ] ]
;
lookahead:
[ [ pl = LIST1 lookahead_char -> pl
| s = STRING ->
List.rev
(fold_string_chars (fun c pl -> [<:patt< $chr:c$ >> :: pl]) s
[]) ] ]
;
lookahead_char:
[ [ c = CHAR -> <:patt< $chr:c$ >>
| "_" -> <:patt< _ >> ] ]
;
no_rec:
[ [ "/" -> True
| -> False ] ]
;
err_kont:
[ [ "!" -> SpoBang
| "?"; s = STRING -> SpoQues <:expr< $str:s$ >>
| "?"; e = simple_expr -> SpoQues e
| -> SpoNoth ] ]
;
act:
[ [ "->"; e = expr -> Some e
| -> None ] ]
;
END;
| null | https://raw.githubusercontent.com/camlp5/camlp5/15e03f56f55b2856dafe7dd3ca232799069f5dda/etc/pa_lex.ml | ocaml | camlp5r
pa_lex.ml,v
Simplified syntax of parsers of characters streams
| Copyright ( c ) INRIA 2007 - 2017
#load "pa_extend.cmo";
#load "q_MLast.cmo";
open Pcaml;
open Exparser;
value var () = "buf";
value empty loc = <:expr< B.empty >>;
value add_char loc c cl = <:expr< B.add $c$ $cl$ >>;
value get_buf loc cl = <:expr< B.get $cl$ >>;
value var ( ) = " buf " ;
value empty loc = < : expr < [ ] > > ;
value cl = < : expr < [ $ c$ : : $ cl$ ] > > ;
value get_buf loc cl = < : expr < List.rev $ cl$ > > ;
value var () = "buf";
value empty loc = <:expr< [] >>;
value add_char loc c cl = <:expr< [$c$ :: $cl$] >>;
value get_buf loc cl = <:expr< List.rev $cl$ >>;
*)
value fresh_c cl =
let n =
List.fold_left
(fun n c ->
match c with
[ <:expr< $lid:_$ >> -> n + 1
| _ -> n ])
0 cl
in
if n = 0 then "c" else "c" ^ string_of_int n
;
value accum_chars loc cl =
List.fold_right (add_char loc) cl <:expr< $lid:var ()$ >>
;
value conv_rules loc rl =
List.map
(fun (sl, cl, a) ->
let a =
let b = accum_chars loc cl in
match a with
[ Some e -> e
| None -> b ]
in
(List.rev sl, None, a))
rl
;
value mk_lexer loc rl =
cparser loc None (conv_rules loc rl)
;
value mk_lexer_match loc e rl =
cparser_match loc e None (conv_rules loc rl)
;
group together consecutive rules just containing one character
value isolate_char_patt_list =
loop [] where rec loop pl =
fun
[ [([(SpTrm _ p <:vala< None >>, SpoNoth)], [_], None) :: rl] ->
let p =
match p with
[ <:patt< $chr:_$ >> -> p
| <:patt< ($p$ as $lid:_$) >> -> p
| p -> p ]
in
loop [p :: pl] rl
| rl -> (List.rev pl, rl) ]
;
value or_patt_of_patt_list loc =
fun
[ [p :: pl] -> List.fold_left (fun p1 p2 -> <:patt< $p1$ | $p2$ >>) p pl
| [] -> invalid_arg "or_patt_of_patt_list" ]
;
value isolate_char_patt loc rl =
match isolate_char_patt_list rl with
[ ([] | [_], _) -> (None, rl)
| (pl, rl) -> (Some (or_patt_of_patt_list loc pl), rl) ]
;
value make_rules loc rl sl cl errk =
match isolate_char_patt loc rl with
[ (Some p, []) ->
let c = fresh_c cl in
let s =
let p = <:patt< ($p$ as $lid:c$) >> in
(SpTrm loc p <:vala< None >>, errk)
in
([s :: sl], [<:expr< $lid:c$ >> :: cl])
| x ->
let rl =
match x with
[ (Some p, rl) ->
let r =
let p = <:patt< ($p$ as c) >> in
let e = <:expr< c >> in
([(SpTrm loc p <:vala< None >>, SpoNoth)], [e], None)
in
[r :: rl]
| (None, rl) -> rl ]
in
let errk =
match List.rev rl with
[ [([], _, _) :: _] -> SpoBang
| _ -> errk ]
in
let sl =
if cl = [] then sl
else
let s =
let b = accum_chars loc cl in
let e = cparser loc None [([], None, b)] in
(SpNtr loc <:patt< $lid:var ()$ >> e, SpoBang)
in
[s :: sl]
in
let s =
let e = mk_lexer loc rl in
(SpNtr loc <:patt< $lid:var ()$ >> e, errk)
in
([s :: sl], []) ]
;
value make_any loc norec sl cl errk =
let (p, cl) =
if norec then (<:patt< _ >>, cl)
else
let c = fresh_c cl in
(<:patt< $lid:c$ >>, [<:expr< $lid:c$ >> :: cl])
in
let s = (SpTrm loc p <:vala< None >>, errk) in
([s :: sl], cl)
;
value next_char s i =
if i = String.length s then invalid_arg "next_char"
else if s.[i] = '\\' then
if i + 1 = String.length s then ("\\", i + 1)
else
match s.[i+1] with
[ '0'..'9' ->
if i + 3 < String.length s then
(Printf.sprintf "\\%c%c%c" s.[i+1] s.[i+2] s.[i+3], i + 4)
else ("\\", i + 1)
| c -> ("\\" ^ String.make 1 c, i + 2) ]
else (String.make 1 s.[i], i + 1)
;
value fold_string_chars f s a =
loop 0 a where rec loop i a =
if i = String.length s then a
else
let (c, i) = next_char s i in
loop i (f c a)
;
value make_or_chars loc s norec sl cl errk =
let pl =
loop 0 where rec loop i =
if i = String.length s then []
else
let (c, i) = next_char s i in
let p = <:patt< $chr:c$ >> in
let (p, i) =
if i < String.length s - 2 && s.[i] = '.' && s.[i+1] = '.' then
let (c, i) = next_char s (i + 2) in
(<:patt< $p$ .. $chr:c$ >>, i)
else
(p, i)
in
[p :: loop i]
in
match pl with
[ [] -> (sl, cl)
| [<:patt< $chr:c$ >>] ->
let s = (SpTrm loc <:patt< $chr:c$ >> <:vala< None >>, errk) in
let cl = if norec then cl else [<:expr< $chr:c$ >> :: cl] in
([s :: sl], cl)
| pl ->
let c = fresh_c cl in
let s =
let p =
let p = or_patt_of_patt_list loc pl in
if norec then p else <:patt< ($p$ as $lid:c$) >>
in
(SpTrm loc p <:vala< None >>, errk)
in
let cl = if norec then cl else [<:expr< $lid:c$ >> :: cl] in
([s :: sl], cl) ]
;
value make_sub_lexer loc f sl cl errk =
let s =
let buf = accum_chars loc cl in
let e = <:expr< $f$ $buf$ >> in
let p = <:patt< $lid:var ()$ >> in
(SpNtr loc p e, errk)
in
([s :: sl], [])
;
value make_lookahd loc pll sl cl errk =
let s = (SpLhd loc pll, errk) in
([s :: sl], cl)
;
value gcl = ref [];
(* Grammar extension hooking the "lexer" DSL into the host expression
   grammar.  The first two entries extend [expr]; the rest are local
   nonterminals describing the DSL itself. *)
EXTEND
  GLOBAL: expr ext_attributes;
  (* "lexer [ rules ]" compiles to a function over the char stream; a
     leading single-character rule, if any, is first isolated into one
     char pattern (see [isolate_char_patt], defined elsewhere in this
     file).  "match e with lexer [ rules ]" inlines a lexer match on
     the expression [e]. *)
  expr: LIKE "match"
    [ [ "lexer"; rl = rules ->
          let rl =
            match isolate_char_patt loc rl with
            [ (Some p, rl) ->
                let p = <:patt< ($p$ as c) >> in
                let e = <:expr< c >> in
                [([(SpTrm loc p <:vala< None >>, SpoNoth)], [e], None) :: rl]
            | (None, rl) -> rl ]
          in
          <:expr< fun $lid:var ()$ -> $mk_lexer loc rl$ >>
      | "match"; (ext,attrs) = ext_attributes; e = SELF; "with"; "lexer"; rl = rules ->
          Pa_r.expr_to_inline loc (mk_lexer_match loc e rl) ext attrs ] ]
  ;
  (* "$"-escapes usable inside rule actions: "$add" appends a string
     literal (char by char) or a char expression to the buffer,
     "$buf" is the buffer contents, "$empty" a fresh buffer, "$pos"
     the current stream count.  They reach the current rule's
     accumulated characters through the global [gcl]. *)
  expr: LEVEL "simple"
    [ [ "$"; LIDENT "add"; s = STRING ->
          loop (accum_chars loc gcl.val) 0 where rec loop v i =
            if i = String.length s then v
            else
              let (c, i) = next_char s i in
              loop (add_char loc <:expr< $chr:c$ >> v) i
      | "$"; LIDENT "add"; e = simple_expr ->
          add_char loc e (accum_chars loc gcl.val)
      | "$"; LIDENT "buf" ->
          get_buf loc (accum_chars loc gcl.val)
      | "$"; LIDENT "empty" ->
          empty loc
      | "$"; LIDENT "pos" ->
          <:expr< Stream.count $lid:strm_n$ >> ] ]
  ;
  (* A rule set: "[ r1 | r2 | ... ]". *)
  rules:
    [ [ "["; rl = LIST0 rule SEP "|"; "]" -> rl ] ]
  ;
  (* One rule: a symbol list followed by an optional "-> action". *)
  rule:
    [ [ (sl, cl) = symb_list; a = act -> (sl, cl, a) ] ]
  ;
  (* Publishes the accumulated characters in [gcl] so that the action,
     which is parsed next, can use the "$"-escapes above. *)
  symb_list:
    [ [ (sl, cl) = symbs -> do { gcl.val := cl; (sl, cl) } ] ]
  ;
  symbs:
    [ [ (sl, cl) = SELF; f = symb; kont = err_kont -> f sl cl kont
      | -> ([], []) ] ]
  ;
  (* One lexer symbol: wildcard, character alternatives from a string
     literal, a sub-lexer call, a lookahead test, or a nested rule
     set.  Each [make_*] builder is applied to (sl, cl, kont). *)
  symb:
    [ [ "_"; norec = no_rec -> make_any loc norec
      | s = STRING; norec = no_rec -> make_or_chars loc s norec
      | f = simple_expr -> make_sub_lexer loc f
      | "?="; "["; pll = LIST1 lookahead SEP "|"; "]" -> make_lookahd loc pll
      | rl = rules -> make_rules loc rl ] ]
  ;
  simple_expr:
    [ [ i = LIDENT -> <:expr< $lid:i$ >>
      | c = CHAR -> <:expr< $chr:c$ >>
      | "("; e = expr; ")" -> e ] ]
  ;
  (* A lookahead alternative: explicit char/wildcard patterns, or a
     string literal expanded into one pattern per character. *)
  lookahead:
    [ [ pl = LIST1 lookahead_char -> pl
      | s = STRING ->
          List.rev
            (fold_string_chars (fun c pl -> [<:patt< $chr:c$ >> :: pl]) s
               []) ] ]
  ;
  lookahead_char:
    [ [ c = CHAR -> <:patt< $chr:c$ >>
      | "_" -> <:patt< _ >> ] ]
  ;
  (* "/" after a symbol suppresses recording of the matched chars. *)
  no_rec:
    [ [ "/" -> True
      | -> False ] ]
  ;
  (* Error continuation attached to a symbol.  NOTE(review): Spo*
     constructors come from Camlp5's stream-parser support; "!" and
     "? msg" presumably select the error behaviour on match failure —
     confirm against Exparser. *)
  err_kont:
    [ [ "!" -> SpoBang
      | "?"; s = STRING -> SpoQues <:expr< $str:s$ >>
      | "?"; e = simple_expr -> SpoQues e
      | -> SpoNoth ] ]
  ;
  act:
    [ [ "->"; e = expr -> Some e
      | -> None ] ]
  ;
END;
|
eefeae7cc2d5d6cac1455e8ab4d7cf076dc8f011e2e78669b90be6187b2e97c6 | faylang/fay | patternGuards.hs | # LANGUAGE FlexibleInstances #
module PatternGuards where
import FFI
-- | True iff the argument is strictly positive.
--
-- Fixed: the original guard pair (@x > 0@ / @x <= 0@) was
-- non-exhaustive — NaN satisfies neither comparison, so calling the
-- function on NaN crashed with a runtime guard failure.  'otherwise'
-- closes the hole (NaN now yields False); every ordinary number is
-- mapped exactly as before.
isPositive :: Double -> Bool
isPositive x
  | x > 0     = True
  | otherwise = False
-- | Collapse the input into three bands: 2 above 1, 1 at exactly 1,
-- and 0 below 1.
--
-- Fixed: the final guard (@x < 1@) left NaN unmatched, so the
-- original crashed at runtime on NaN.  Replacing it with 'otherwise'
-- makes the guards exhaustive (NaN maps to 0) without changing the
-- result for any ordinary number.
threeConds :: Double -> Double
threeConds x
  | x > 1     = 2
  | x == 1    = 1
  | otherwise = 0
-- | True exactly when the argument exceeds 1.  Total by construction:
-- any argument that is not greater than 1 (including NaN) gives False,
-- matching the original guard-with-otherwise formulation.
withOtherwise :: Double -> Bool
withOtherwise = (> 1)
-- Not called, throws "non-exhaustive guard"
-- Deliberately partial: the single guard only covers @x > 1@, so any
-- other argument triggers a runtime guard failure.  Kept as-is — this
-- fixture exists to exercise the compiler's handling of incomplete
-- guards, not to be executed.
nonExhaustive :: Double -> Bool
nonExhaustive x | x > 1 = True
-- | Exercise each guard variant and print the JSON-encoded results
-- (three lines: booleans, doubles, booleans).
main :: Fay ()
main = do
  putStrLn (showListB [isPositive 1, isPositive 0])
  putStrLn (showListD [threeConds 3, threeConds 1, threeConds 0])
  putStrLn (showListB [withOtherwise 2, withOtherwise 0])
-- | JSON-encode a list of booleans via the JavaScript runtime
-- (Fay FFI — only meaningful in the compiled JS target).
showListB :: [Bool] -> String
showListB = ffi "JSON.stringify(%1)"

-- | JSON-encode a list of doubles via the JavaScript runtime.
showListD :: [Double] -> String
showListD = ffi "JSON.stringify(%1)"
| null | https://raw.githubusercontent.com/faylang/fay/8455d975f9f0db2ecc922410e43e484fbd134699/tests/patternGuards.hs | haskell | Not called, throws "non-exhaustive guard" | # LANGUAGE FlexibleInstances #
module PatternGuards where
import FFI
isPositive :: Double -> Bool
isPositive x | x > 0 = True
| x <= 0 = False
threeConds :: Double -> Double
threeConds x | x > 1 = 2
| x == 1 = 1
| x < 1 = 0
withOtherwise :: Double -> Bool
withOtherwise x | x > 1 = True
| otherwise = False
nonExhaustive :: Double -> Bool
nonExhaustive x | x > 1 = True
main :: Fay ()
main = do
putStrLn $ showListB [isPositive 1, isPositive 0]
putStrLn $ showListD [threeConds 3, threeConds 1, threeConds 0]
putStrLn $ showListB [withOtherwise 2, withOtherwise 0]
showListB :: [Bool] -> String
showListB = ffi "JSON.stringify(%1)"
showListD :: [Double] -> String
showListD = ffi "JSON.stringify(%1)"
|
f398ac1f6885464f36f647cb9c3aa6eaaa93601ca13f312957cc9c69b939c044 | spechub/Hets | Lib.hs | |
Module      : ./atermlib/src/ATerm/Lib.hs
Description : reexports modules needed for ATC generation
Copyright   : (c) Klaus Luettich, Uni Bremen 2002-2004
License     : GPLv2 or higher, see LICENSE.txt
Maintainer  :
Stability   : provisional
Portability : non-portable (via imports)

reexports the names needed for many 'ShATermConvertible'
instances. For converting 'ShATerm's to and from 'String's you'll need
the module "ATerm.ReadWrite".

For more information on ATerms look under
<>, <-Environment/ATerms>.
Module : ./atermlib/src/ATerm/Lib.hs
Description : reexports modules needed for ATC generation
Copyright : (c) Klaus Luettich, Uni Bremen 2002-2004
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : provisional
Portability : non-portable (via imports)
reexports the names needed for many 'ShATermConvertible'
instances. For converting 'ShATerm's to and from 'String's you'll need
the module "ATerm.ReadWrite".
For more information on ATerms look under
<>, <-Environment/ATerms>.
-}
module ATerm.Lib
( ShATerm (..)
, ATermTable
, addATerm
, getShATerm
, ShATermConvertible (toShATermAux, fromShATermAux)
, toShATerm'
, fromShATerm'
, fromShATermError
) where
import ATerm.AbstractSyntax
import ATerm.Conversion
| null | https://raw.githubusercontent.com/spechub/Hets/af7b628a75aab0d510b8ae7f067a5c9bc48d0f9e/atermlib/src/ATerm/Lib.hs | haskell | |
Module : ./atermlib / src / ATerm / Lib.hs
Description : reexports modules needed for ATC generation
Copyright : ( c ) , Uni Bremen 2002 - 2004
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : provisional
Portability : non - portable ( via imports )
reexports the names needed for many ' ShATermConvertible '
instances . For converting ' ShATerm 's to and from ' 's you 'll need
the module " ATerm . ReadWrite " .
For more information on ATerms look under
< > , < -Environment/ATerms > .
Module : ./atermlib/src/ATerm/Lib.hs
Description : reexports modules needed for ATC generation
Copyright : (c) Klaus Luettich, Uni Bremen 2002-2004
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : provisional
Portability : non-portable (via imports)
reexports the names needed for many 'ShATermConvertible'
instances. For converting 'ShATerm's to and from 'String's you'll need
the module "ATerm.ReadWrite".
For more information on ATerms look under
<>, <-Environment/ATerms>.
-}
module ATerm.Lib
( ShATerm (..)
, ATermTable
, addATerm
, getShATerm
, ShATermConvertible (toShATermAux, fromShATermAux)
, toShATerm'
, fromShATerm'
, fromShATermError
) where
import ATerm.AbstractSyntax
import ATerm.Conversion
| |
cb0cecc04475d9b2406f4e40827806442b77844a5564110bee38fc369b156c92 | GillianPlatform/Gillian | Verifier.ml | open Containers
module DL = Debugger_log
(* Public interface of a verifier instance: the symbolic state,
   interpreter and unifier modules it is built from, plus the entry
   points used by the driver and the debugger. *)
module type S = sig
  type st
  type heap_t
  type state
  type m_err
  type annot
  (* Symbolic program state used during verification. *)
  module SPState :
    PState.S
      with type t = state
       and type vt = SVal.M.t
       and type st = st
       and type store_t = SStore.t
       and type heap_t = heap_t
       and type m_err_t = m_err
       and type preds_t = Preds.SPreds.t
  (* Symbolic interpreter over that state. *)
  module SAInterpreter :
    GInterpreter.S
      with type vt = SVal.M.t
       and type st = st
       and type store_t = SStore.t
       and type state_t = state
       and type heap_t = heap_t
       and type state_err_t = SPState.err_t
       and type annot = annot
  module SUnifier : Unifier.S with type st = SVal.SESubst.t
  (* A single symbolic test (one spec or lemma case). *)
  type t
  type prog_t = (annot, int) Prog.t
  type proc_tests = (string * t) list [@@deriving to_yojson]
  (* Timing reference compared against [Sys.time ()] in reports.
     NOTE(review): presumably set by the caller before verification
     starts — confirm against the driver. *)
  val start_time : float ref
  (* Clear accumulated verification results and interpreter state. *)
  val reset : unit -> unit
  val verify_prog :
    init_data:SPState.init_data ->
    prog_t ->
    bool ->
    SourceFiles.t option ->
    unit
  (* Set verification up and return a continuation before running the
     procedure bodies — used by the debugger. *)
  val verify_up_to_procs :
    ?proc_name:string ->
    init_data:SPState.init_data ->
    prog_t ->
    SAInterpreter.result_t SAInterpreter.cont_func
  val postprocess_files : SourceFiles.t option -> unit
  module Debug : sig
    val get_tests_for_prog : init_data:SPState.init_data -> prog_t -> proc_tests
    val analyse_result :
      t -> Logging.Report_id.t -> SAInterpreter.result_t -> bool
  end
end
module Make
(SState : SState.S
with type vt = SVal.M.t
and type st = SVal.SESubst.t
and type store_t = SStore.t)
(SPState : PState.S
with type vt = SState.vt
and type st = SState.st
and type state_t = SState.t
and type store_t = SState.store_t
and type preds_t = Preds.SPreds.t
and type init_data = SState.init_data)
(PC : ParserAndCompiler.S)
(External : External.T(PC.Annot).S) =
struct
module L = Logging
module SSubst = SVal.SESubst
module SPState = SPState
module SAInterpreter =
GInterpreter.Make (SVal.M) (SVal.SESubst) (SStore) (SPState) (PC) (External)
module Normaliser = Normaliser.Make (SPState)
type st = SPState.st
type state = SPState.t
type heap_t = SPState.heap_t
type m_err = SPState.m_err_t
type annot = PC.Annot.t
module SUnifier =
Unifier.Make (SVal.M) (SVal.SESubst) (SStore) (SState) (Preds.SPreds)
let print_success_or_failure success =
if success then Fmt.pr "%a" (Fmt.styled `Green Fmt.string) "Success\n"
else Fmt.pr "%a" (Fmt.styled `Red Fmt.string) "Failure\n";
Format.print_flush ()
let yojson_of_expr_set set =
`List (List.map Expr.to_yojson (Expr.Set.elements set))
  (* One symbolic test: everything needed to run a single spec or
     lemma case and check its postcondition. *)
  type t = {
    name : string;  (* procedure or lemma name *)
    id : int * int;  (* (spec-case index, normalised-precondition index) *)
    params : string list;
    pre_state : SPState.t;  (* state normalised from the precondition *)
    post_up : UP.t;  (* unification plan for the postconditions *)
    flag : Flag.t option;  (* expected exit flag; None for lemmas *)
    spec_vars : Expr.Set.t; [@to_yojson yojson_of_expr_set]
  }
  [@@deriving to_yojson]
  type prog_t = (annot, int) Prog.t
  type proc_tests = (string * t) list
let proc_tests_to_yojson tests =
`List
(tests
|> List.map (fun (name, test) -> `List [ `String name; to_yojson test ]))
  (* Verification verdicts accumulated across all tests of this run. *)
  let global_results = VerificationResults.make ()

  (* Timing reference compared against [Sys.time ()] in reports.
     NOTE(review): presumably set by the caller before verification
     starts — confirm against the driver. *)
  let start_time = ref 0.

  (* Clear the accumulated results and the interpreter's state. *)
  let reset () =
    VerificationResults.reset global_results;
    SAInterpreter.reset ()
module Hides_derivations = struct
(** For a given definition of a predicate, this function derives the
corresponding logical variables it hides. *)
let add_hides
~prog
~init_data
~pred_ins
~preds
~pred_name
~subst_params
~known_params
orig_def =
let orig_info, orig_def, orig_hides = orig_def in
let subst_def =
List.fold_left
(fun def (pv, lv) ->
Asrt.subst_expr_for_expr ~to_subst:pv ~subst_with:lv def)
orig_def subst_params
in
let subst_params = List.map snd subst_params in
let ( let* ) = Result.bind in
let def =
let info =
Option.map (fun (s, vars) -> (s, SS.of_list vars)) orig_info
in
(subst_def, (info, None, orig_hides))
in
L.verbose (fun fmt -> fmt "Examining definition: %a" Asrt.pp (fst def));
let* def_up =
UP.init known_params UP.KB.empty pred_ins [ def ]
|> Result.map_error (fun _ ->
"Creation of unification plans for predicates failed.")
in
let a, _ = def in
let* state =
match Normaliser.normalise_assertion ~init_data ~pred_defs:preds a with
| Ok [ (state, _) ] -> Ok state
| Ok _ ->
Error
"Creation of unification plans for predicates failed: \
normalisation resulted in more than one state"
| Error msg ->
Fmt.error "Creation of unification plans for predicates failed: %s"
msg
in
FOLD / UNFOLD / UNIFY
let () =
L.verbose (fun fmt -> fmt "EXACT: hiding fold:\n%a" SPState.pp state)
in
let fold_predicate = SLCmd.Fold (pred_name, subst_params, None) in
let* fstate =
match SPState.evaluate_slcmd prog fold_predicate state with
| Ok [ fstate ] -> Ok fstate
| Ok _ -> Error "EXACT: ERROR: fold resulting in multiple states"
| Error _ -> Error "EXACT: ERROR: Impossible fold"
in
let () =
L.verbose (fun fmt -> fmt "EXACT: hiding unfold:\n%a" SPState.pp fstate)
in
let unfold_predicate =
SLCmd.Unfold (pred_name, subst_params, None, false)
in
let* fustate =
match SPState.evaluate_slcmd prog unfold_predicate fstate with
| Ok [ fustate ] -> Ok fustate
| Ok _ -> Error "EXACT: ERROR: unfold resulting in multiple states"
| Error _ -> Error "EXACT: ERROR: Impossible unfold"
in
L.verbose (fun fmt -> fmt "EXACT: Hiding: Before:\n%a" SPState.pp state);
L.verbose (fun fmt -> fmt "EXACT: Hiding: After:\n%a" SPState.pp fustate);
let state, predicates, _, variants = SPState.expose fustate in
let subst = SVal.SESubst.init (List.map (fun x -> (x, x)) subst_params) in
let unification_result =
SUnifier.unify
(state, predicates, preds, variants)
subst def_up LogicCommand
in
let* subst =
match unification_result with
| UPUSucc [ (_, subst, _) ] -> Ok subst
| UPUSucc _ ->
Error
"EXACT: ERROR: initial definition unified against in multiple \
ways"
| UPUFail _ ->
Error "EXACT: ERROR: cannot unify against initial definition"
in
L.verbose (fun fmt -> fmt "EXACT: Obtained subst: %a" SSubst.pp subst);
let def_lvars =
Expr.Set.of_list
(List.map (fun x -> Expr.LVar x) (SS.elements (Asrt.lvars a)))
in
SSubst.filter_in_place subst (fun k v ->
match (Expr.equal k v, Expr.Set.mem k def_lvars) with
| _, false -> None
| true, _ -> None
| _ -> (
match k with
| Expr.LVar x when not (Names.is_spec_var_name x) -> None
| _ -> Some v));
L.verbose (fun fmt -> fmt "EXACT: Filtered subst: %a" SSubst.pp subst);
let subst = SSubst.to_list subst in
let hidden =
List.map
(fun (before, after) ->
let we_good_bro =
Expr.UnOp (UNot, Expr.BinOp (before, Equal, after))
in
(before, SPState.sat_check fustate we_good_bro))
subst
in
let hidden =
List.filter_map
(fun (before, b) ->
match (b, before) with
| false, _ -> None
| true, Expr.LVar x -> Some x
| true, _ -> Fmt.failwith "EXACT: Error: non-LVar in ESubst")
hidden
in
L.verbose (fun fmt ->
fmt "EXACT: Hidden variables: %a" Fmt.(list ~sep:comma string) hidden);
Ok (orig_info, orig_def, hidden)
(** Same as add_hides, but fails in case of error *)
let add_hides_exn
~prog
~init_data
~pred_ins
~preds
~pred_name
~subst_params
~known_params
pred_def =
match
add_hides ~prog ~init_data ~pred_ins ~preds ~pred_name ~subst_params
~known_params pred_def
with
| Ok x -> x
| Error msg -> failwith msg
(** For a given predicate, returns a new predicate where the hides have been derived *)
let derive_predicate_hiding
~preds
~prog
~init_data
~pred_ins
(pred : Pred.t) =
let module KB = UP.KB in
L.verbose (fun fmt -> fmt "Examinining predicate: %s" pred.pred_name);
let pred_params = pred.pred_params in
let defs = pred.pred_definitions in
let subst_params =
List.map
(fun (pv, _) -> (Expr.PVar pv, Expr.LVar ("#_" ^ pv)))
pred_params
in
let known_params =
KB.of_list
(List.map (fun i -> snd (List.nth subst_params i)) pred.pred_ins)
in
let new_defs =
List.map
(add_hides_exn ~prog ~init_data ~pred_ins ~preds
~pred_name:pred.pred_name ~subst_params ~known_params)
defs
in
{ pred with pred_definitions = new_defs }
(** Given a program and its unification plans, modifies the program in place
to add the hides to every predicate definition. *)
let derive_predicates_hiding
~(prog : prog_t)
~(init_data : SPState.init_data)
(preds : (string, UP.pred) Hashtbl.t) : unit =
if not !Config.Verification.exact then ()
else
let () =
L.verbose (fun fmt -> fmt "EXACT: Examining hiding in predicates")
in
let prog : annot UP.prog =
{
preds;
specs = Hashtbl.create 1;
lemmas = Hashtbl.create 1;
coverage = Hashtbl.create 1;
prog;
}
in
let module KB = UP.KB in
let pred_ins =
Hashtbl.fold
(fun name (pred_with_up : UP.pred) pred_ins ->
Hashtbl.add pred_ins name pred_with_up.pred.pred_ins;
pred_ins)
preds
(Hashtbl.create Config.medium_tbl_size)
in
Hashtbl.filter_map_inplace
(fun _pred_name up_pred ->
Some
UP.
{
up_pred with
pred =
derive_predicate_hiding ~preds ~prog ~init_data ~pred_ins
up_pred.pred;
})
preds
end
let testify
~(init_data : SPState.init_data)
(func_or_lemma_name : string)
(preds : (string, UP.pred) Hashtbl.t)
(pred_ins : (string, int list) Hashtbl.t)
(name : string)
(params : string list)
(id : int)
(pre : Asrt.t)
(posts : Asrt.t list)
(variant : Expr.t option)
(hides : string list option)
(flag : Flag.t option)
(label : (string * SS.t) option)
(to_verify : bool) : (t option * (Asrt.t * Asrt.t list) option) list =
let test_of_normalised_state id' (ss_pre, subst) =
Step 2 - spec_vars = ) -U- alocs(range(subst ) )
let lvars =
SS.fold
(fun x acc ->
if Names.is_spec_var_name x then Expr.Set.add (Expr.LVar x) acc
else acc)
(Asrt.lvars pre) Expr.Set.empty
in
let subst_dom = SSubst.domain subst None in
let alocs =
SSubst.fold subst
(fun _ v_val acc ->
match v_val with
| ALoc _ -> Expr.Set.add v_val acc
| _ -> acc)
Expr.Set.empty
in
let spec_vars = Expr.Set.union (Expr.Set.diff lvars subst_dom) alocs in
Step 3 - postconditions to symbolic states
L.verbose (fun m ->
m
"Processing one postcondition of %s with label %a and spec_vars: \
@[<h>%a@].@\n\
Original Pre:@\n\
%a\n\
Symb State Pre:@\n\
%a@\n\
Subst:@\n\
%a@\n\
Posts (%d):@\n\
%a"
name
Fmt.(
option ~none:(any "None") (fun ft (s, e) ->
Fmt.pf ft "[ %s; %a ]" s (iter ~sep:comma SS.iter string) e))
label
Fmt.(iter ~sep:comma Expr.Set.iter Expr.pp)
spec_vars Asrt.pp pre SPState.pp ss_pre SSubst.pp subst
(List.length posts)
Fmt.(list ~sep:(any "@\n") Asrt.pp)
posts);
let subst =
SSubst.filter subst (fun e _ ->
match e with
| PVar _ -> false
| _ -> true)
in
let posts =
List.filter_map
(fun p ->
let substituted = SSubst.substitute_asrt subst ~partial:true p in
let reduced = Reduction.reduce_assertion substituted in
if Simplifications.admissible_assertion reduced then Some reduced
else None)
posts
in
if not to_verify then
let pre' = Asrt.star (SPState.to_assertions ss_pre) in
(None, Some (pre', posts))
else
Step 4 - create a unification plan for the postconditions and s_test
let () =
L.verbose (fun fmt -> fmt "Creating UPs for posts of %s" name)
in
let pvar_params =
List.fold_left
(fun acc x -> Expr.Set.add (Expr.PVar x) acc)
Expr.Set.empty params
in
let known_unifiables =
Expr.Set.add (PVar Names.return_variable)
(Expr.Set.union pvar_params spec_vars)
in
let existentials =
Option.fold ~none:Expr.Set.empty
~some:(fun (_, exs) ->
SS.fold
(fun x acc -> Expr.Set.add (LVar x) acc)
exs Expr.Set.empty)
label
in
let known_unifiables = Expr.Set.union known_unifiables existentials in
let hides =
match (flag, hides) with
| None, Some hides -> hides
| None, None when !Config.Verification.exact ->
failwith "Lemma must declare hides logicals in exact verification"
| _, _ -> []
in
let simple_posts =
List.map
(fun post ->
let post_lvars = Asrt.lvars post in
let lstr_pp = Fmt.(list ~sep:comma string) in
let () =
L.verbose (fun fmt ->
fmt "OX hiding: %a\nPost lvars: %a" lstr_pp hides lstr_pp
(SS.elements post_lvars))
in
let inter = SS.inter post_lvars (SS.of_list hides) in
match SS.is_empty inter with
| true -> (post, (label, None, hides))
| false ->
failwith
("Error: Exact lemma with impossible hiding: "
^ SS.min_elt inter))
posts
in
let post_up =
UP.init known_unifiables Expr.Set.empty pred_ins simple_posts
in
L.verbose (fun m -> m "END of STEP 4@\n");
match post_up with
| Error _ ->
let msg =
Printf.sprintf "Warning: testify failed for %s. Cause: post_up \n"
name
in
Printf.printf "%s" msg;
L.verbose (fun m -> m "%s" msg);
(None, None)
| Ok post_up ->
let pre' = Asrt.star (SPState.to_assertions ss_pre) in
let ss_pre =
match flag with
Lemmas should not have stores when being proven
| None ->
let empty_store = SStore.init [] in
SPState.set_store ss_pre empty_store
| Some _ -> ss_pre
in
let test =
{
name;
id = (id, id');
params;
pre_state = ss_pre;
post_up;
flag;
spec_vars;
}
in
(Some test, Some (pre', posts))
in
try
Step 1 - normalise the precondition
match
Normaliser.normalise_assertion ~init_data ~pred_defs:preds
~pvars:(SS.of_list params) pre
with
| Error _ -> [ (None, None) ]
| Ok normalised_assertions ->
let variants = Hashtbl.create 1 in
let () = Hashtbl.add variants func_or_lemma_name variant in
let normalised_assertions =
List.map
(fun (state, subst) ->
(SPState.set_variants state (Hashtbl.copy variants), subst))
normalised_assertions
in
let result =
List.mapi test_of_normalised_state normalised_assertions
in
result
with Failure msg ->
let new_msg =
Printf.sprintf
"WARNING: testify failed for %s. Cause: normalisation with msg: %s.\n"
name msg
in
Printf.printf "%s" new_msg;
L.normal (fun m -> m "%s" new_msg);
[ (None, None) ]
let testify_sspec
~init_data
(spec_name : string)
(preds : UP.preds_tbl_t)
(pred_ins : (string, int list) Hashtbl.t)
(name : string)
(params : string list)
(id : int)
(sspec : Spec.st) : (t option * Spec.st option) list =
let ( let+ ) x f = List.map f x in
let+ stest, sspec' =
testify ~init_data spec_name preds pred_ins name params id sspec.ss_pre
sspec.ss_posts sspec.ss_variant None (Some sspec.ss_flag)
(Spec.label_vars_to_set sspec.ss_label)
sspec.ss_to_verify
in
let sspec' =
Option.map
(fun (pre, posts) -> { sspec with ss_pre = pre; ss_posts = posts })
sspec'
in
(stest, sspec')
let testify_spec
~init_data
(spec_name : string)
(preds : UP.preds_tbl_t)
(pred_ins : (string, int list) Hashtbl.t)
(spec : Spec.t) : t list * Spec.t =
if not spec.spec_to_verify then ([], spec)
else
let () =
List.iter
(fun (sspec : Spec.st) ->
if sspec.ss_posts = [] then
failwith
("Specification without post-condition for function "
^ spec.spec_name))
spec.spec_sspecs
in
L.verbose (fun m ->
m
("-------------------------------------------------------------------------@\n"
^^ "Creating symbolic tests for procedure %s: %d cases\n"
^^ "-------------------------------------------------------------------------"
)
spec.spec_name
(List.length spec.spec_sspecs));
let _, tests, spec_sspecs =
List.fold_left
(fun (id, tests, sspecs) sspec ->
let tests_and_specs =
testify_sspec ~init_data spec_name preds pred_ins spec.spec_name
spec.spec_params id sspec
in
let new_tests, new_specs =
List.fold_left
(fun (nt, ns) (t, s) ->
let nt =
match t with
| Some test -> test :: nt
| None -> nt
in
let ns =
match s with
| Some spec -> spec :: ns
| None -> ns
in
(nt, ns))
([], []) tests_and_specs
in
(id + 1, new_tests @ tests, new_specs @ sspecs))
(0, [], []) spec.spec_sspecs
in
let new_spec = { spec with spec_sspecs } in
L.verbose (fun m -> m "Simplified SPECS:@\n@[%a@]@\n" Spec.pp new_spec);
(tests, new_spec)
let testify_lemma
~init_data
(preds : UP.preds_tbl_t)
(pred_ins : (string, int list) Hashtbl.t)
(lemma : Lemma.t) : t list * Lemma.t =
let tests_and_specs =
List.concat_map
(fun Lemma.
{ lemma_hyp; lemma_concs; lemma_spec_variant; lemma_spec_hides } ->
List.map
(fun t -> (t, lemma_spec_hides))
(testify ~init_data lemma.lemma_name preds pred_ins lemma.lemma_name
lemma.lemma_params 0 lemma_hyp lemma_concs lemma_spec_variant
lemma_spec_hides None None true))
lemma.lemma_specs
in
let tests, specs =
List.fold_left
(fun (test_acc, spec_acc) ((test_opt, spec_opt), lemma_spec_hides) ->
let test_acc =
match test_opt with
| Some t -> t :: test_acc
| None -> test_acc
in
let spec_acc =
match spec_opt with
| Some (lemma_hyp, lemma_concs) ->
Lemma.
{
lemma_hyp;
lemma_concs;
lemma_spec_variant = lemma.lemma_variant;
lemma_spec_hides;
}
:: spec_acc
| None -> spec_acc
in
(test_acc, spec_acc))
([], []) tests_and_specs
in
let () =
match specs with
| [] ->
raise
(Failure
(Printf.sprintf "Could not testify lemma %s" lemma.lemma_name))
| _ -> ()
in
(tests, { lemma with lemma_specs = specs })
let analyse_result (subst : SSubst.t) (test : t) (state : SPState.t) : bool =
TODO : ASSUMING SIMPLIFICATION DOES NOT BRANCH HERE
let _, states = SPState.simplify state in
assert (List.length states = 1);
let state = List.hd states in
let subst = SSubst.copy subst in
(* Adding spec vars in the post to the subst - these are effectively the existentials of the post *)
List.iter
(fun x ->
if not (SSubst.mem subst (LVar x)) then
SSubst.add subst (LVar x) (LVar x))
(SS.elements (SPState.get_spec_vars state));
L.verbose (fun m ->
m "Analyse result: About to unify one postcondition of %s. post: %a"
test.name UP.pp test.post_up);
match SPState.unify state subst test.post_up Unifier.Postcondition with
| true ->
L.verbose (fun m ->
m "Analyse result: Postcondition unified successfully");
VerificationResults.set_result global_results test.name test.id true;
true
| false ->
L.normal (fun m -> m "Analyse result: Postcondition not unifiable.");
VerificationResults.set_result global_results test.name test.id false;
false
let make_post_subst (test : t) (post_state : SPState.t) : SSubst.t =
let subst_lst =
List.map (fun e -> (e, e)) (Expr.Set.elements test.spec_vars)
in
let params_subst_lst = SStore.bindings (SPState.get_store post_state) in
let params_subst_lst =
List.map (fun (x, v) -> (Expr.PVar x, v)) params_subst_lst
in
let subst = SSubst.init (subst_lst @ params_subst_lst) in
subst
let analyse_proc_result test flag ?parent_id result =
match (result : SAInterpreter.result_t) with
| ExecRes.RFail { proc; proc_idx; error_state; errors } ->
L.verbose (fun m ->
m
"VERIFICATION FAILURE: Procedure %s, Command %d\n\
Spec %s %a\n\
@[<v 2>State:@\n\
%a@]@\n\
@[<v 2>Errors:@\n\
%a@]@\n"
proc proc_idx test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id SPState.pp error_state
Fmt.(list ~sep:(any "@\n") SAInterpreter.Logging.pp_err)
errors);
if not !Config.debug then Fmt.pr "f @?";
false
| ExecRes.RSucc { flag = fl; final_state; last_report; _ } ->
if Some fl <> test.flag then (
L.normal (fun m ->
m
"VERIFICATION FAILURE: Spec %s %a terminated with flag %s \
instead of %s\n"
test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id (Flag.str fl) (Flag.str flag));
if not !Config.debug then Fmt.pr "f @?";
false)
else
let parent_id =
match parent_id with
| None -> last_report
| id -> id
in
DL.log (fun m ->
m "Unify: setting parent to %a"
(Fmt.option L.Report_id.pp)
parent_id);
L.Parent.with_id parent_id (fun () ->
let store = SPState.get_store final_state in
let () =
SStore.filter store (fun x v ->
if x = Names.return_variable then Some v else None)
in
let subst = make_post_subst test final_state in
if analyse_result subst test final_state then (
L.normal (fun m ->
m
"VERIFICATION SUCCESS: Spec %s %a terminated successfully\n"
test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id);
if not !Config.debug then Fmt.pr "s @?";
true)
else (
L.normal (fun m ->
m
"VERIFICATION FAILURE: Spec %s %a - post condition not \
unifiable\n"
test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id);
if not !Config.debug then Fmt.pr "f @?";
false))
let analyse_proc_results
(test : t)
(flag : Flag.t)
(rets : SAInterpreter.result_t list) : bool =
if rets = [] then (
L.(
normal (fun m ->
m "ERROR: Function %s evaluates to 0 results." test.name));
exit 1);
let success = List.for_all (analyse_proc_result test flag) rets in
print_success_or_failure success;
success
let analyse_lemma_results (test : t) (rets : SPState.t list) : bool =
let success : bool =
rets <> []
&& List.fold_left
(fun ac final_state ->
let empty_store = SStore.init [] in
let final_state = SPState.set_store final_state empty_store in
let subst = make_post_subst test final_state in
if analyse_result subst test final_state then (
L.normal (fun m ->
m
"VERIFICATION SUCCESS: Spec %s %a terminated successfully\n"
test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id);
ac)
else (
L.normal (fun m ->
m
"VERIFICATION FAILURE: Spec %s %a - post condition not \
unifiable\n"
test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id);
false))
true rets
in
if rets = [] then (
L.(
normal (fun m ->
m "ERROR: Function %s evaluates to 0 results." test.name));
exit 1);
print_success_or_failure success;
success
(* FIXME: This function name is very bad! *)
let verify_up_to_procs (prog : annot UP.prog) (test : t) : annot UP.prog =
(* Printf.printf "Inside verify with a test for %s\n" test.name; *)
match test.flag with
| Some _ ->
let msg = "Verifying one spec of procedure " ^ test.name ^ "... " in
L.tmi (fun fmt -> fmt "%s" msg);
Fmt.pr "%s@?" msg;
(* Reset coverage for every procedure in verification *)
{ prog with coverage = Hashtbl.create 1 }
| None -> raise (Failure "Debugging lemmas unsupported!")
let verify (prog : annot UP.prog) (test : t) : bool =
let state = test.pre_state in
(* Printf.printf "Inside verify with a test for %s\n" test.name; *)
match test.flag with
| Some flag ->
let prog = verify_up_to_procs prog test in
let rets =
SAInterpreter.evaluate_proc
(fun x -> x)
prog test.name test.params state
in
L.verbose (fun m ->
m "Verification: Concluded evaluation: %d obtained results.%a@\n"
(List.length rets) SAInterpreter.Logging.pp_result rets);
analyse_proc_results test flag rets
| None -> (
let lemma = Prog.get_lemma_exn prog.prog test.name in
match lemma.lemma_proof with
| None ->
if !Config.lemma_proof then
raise
(Failure (Printf.sprintf "Lemma %s WITHOUT proof" test.name))
else true (* It's already correct *)
| Some proof -> (
let msg = "Verifying lemma " ^ test.name ^ "... " in
L.tmi (fun fmt -> fmt "%s" msg);
Fmt.pr "%s@?" msg;
match SAInterpreter.evaluate_lcmds prog proof state with
| Ok rets -> analyse_lemma_results test rets
| Error _ -> false))
let pred_extracting_visitor =
object
inherit [_] Visitors.reduce
inherit Visitors.Utils.ss_monoid
method! visit_Pred _ pred_name _ = SS.singleton pred_name
method! visit_Fold _ pred_name _ _ = SS.singleton pred_name
method! visit_Unfold _ pred_name _ _ _ = SS.singleton pred_name
method! visit_GUnfold _ pred_name = SS.singleton pred_name
end
let filter_internal_preds (prog : annot UP.prog) (pred_names : SS.t) =
SS.filter
(fun pred_name ->
let pred = Prog.get_pred_exn prog.prog pred_name in
not pred.pred_internal)
pred_names
let lemma_extracting_visitor =
object
inherit [_] Visitors.reduce
inherit Visitors.Utils.ss_monoid
method! visit_ApplyLem _ lemma_name _ _ = SS.singleton lemma_name
end
let filter_internal_lemmas (prog : annot UP.prog) (lemma_names : SS.t) =
SS.filter
(fun lemma_name ->
let lemma = Prog.get_lemma_exn prog.prog lemma_name in
not lemma.lemma_internal)
lemma_names
let record_proc_dependencies proc_name (prog : annot UP.prog) =
let proc = Prog.get_proc_exn prog.prog proc_name in
let preds_used =
filter_internal_preds prog (pred_extracting_visitor#visit_proc () proc)
in
let lemmas_used =
filter_internal_lemmas prog (lemma_extracting_visitor#visit_proc () proc)
in
SS.iter
(CallGraph.add_proc_pred_use SAInterpreter.call_graph proc_name)
preds_used;
SS.iter
(CallGraph.add_proc_lemma_use SAInterpreter.call_graph proc_name)
lemmas_used
let record_lemma_dependencies lemma_name (prog : annot UP.prog) =
let lemma = Prog.get_lemma_exn prog.prog lemma_name in
let preds_used =
filter_internal_preds prog (pred_extracting_visitor#visit_lemma () lemma)
in
let lemmas_used =
filter_internal_lemmas prog
(lemma_extracting_visitor#visit_lemma () lemma)
in
SS.iter
(CallGraph.add_lemma_pred_use SAInterpreter.call_graph lemma_name)
preds_used;
SS.iter
(CallGraph.add_lemma_call SAInterpreter.call_graph lemma_name)
lemmas_used
let record_preds_used_by_pred pred_name (prog : annot UP.prog) =
let pred = Prog.get_pred_exn prog.prog pred_name in
let preds_used =
filter_internal_preds prog (pred_extracting_visitor#visit_pred () pred)
in
SS.iter
(CallGraph.add_pred_call SAInterpreter.call_graph pred_name)
preds_used
let check_previously_verified prev_results cur_verified =
Option.fold ~none:true
~some:(fun res ->
VerificationResults.check_previously_verified
~printer:print_success_or_failure res cur_verified)
prev_results
let get_tests_to_verify
~init_data
(prog : prog_t)
(pnames_to_verify : SS.t)
(lnames_to_verify : SS.t) : annot UP.prog * t list * t list =
let ipreds = UP.init_preds prog.preds in
match ipreds with
| Error e ->
Fmt.pr
"Creation of unification plans for predicates failed with:\n%a\n@?"
UP.pp_up_err_t e;
Fmt.failwith "Creation of unification plans for predicates failed."
| Ok preds -> (
let () =
Hides_derivations.derive_predicates_hiding ~init_data ~prog preds
in
let preds_with_hiding = Hashtbl.create 1 in
let () =
Hashtbl.iter
(fun name (up_pred : UP.pred) ->
Hashtbl.replace preds_with_hiding name up_pred.pred)
preds
in
let ipreds = UP.init_preds preds_with_hiding in
let preds = Result.get_ok ipreds in
let pred_ins =
Hashtbl.fold
(fun name (pred : UP.pred) pred_ins ->
Hashtbl.add pred_ins name pred.pred.pred_ins;
pred_ins)
preds
(Hashtbl.create Config.medium_tbl_size)
in
STEP 1 : Get the specs to verify
Fmt.pr "Obtaining specs to verify...\n@?";
let specs_to_verify =
List.filter
(fun (spec : Spec.t) -> SS.mem spec.spec_name pnames_to_verify)
(Prog.get_specs prog)
in
STEP 2 : Convert specs to symbolic tests
(* Printf.printf "Converting symbolic tests from specs: %f\n" (cur_time -. start_time); *)
let tests : t list =
List.concat_map
(fun (spec : Spec.t) ->
let tests, new_spec =
testify_spec ~init_data spec.spec_name preds pred_ins spec
in
let proc = Prog.get_proc_exn prog spec.spec_name in
Hashtbl.replace prog.procs proc.proc_name
{ proc with proc_spec = Some new_spec };
tests)
specs_to_verify
in
STEP 3 : Get the lemmas to verify
Fmt.pr "Obtaining lemmas to verify...\n@?";
let lemmas_to_verify =
List.filter
(fun (lemma : Lemma.t) -> SS.mem lemma.lemma_name lnames_to_verify)
(Prog.get_lemmas prog)
in
STEP 4 : Convert lemmas to symbolic tests
(* Printf.printf "Converting symbolic tests from lemmas: %f\n" (cur_time -. start_time); *)
let lemmas_to_verify =
List.sort
(fun (l1 : Lemma.t) l2 ->
Stdlib.compare l1.lemma_name l2.lemma_name)
lemmas_to_verify
in
let tests' : t list =
List.concat_map
(fun lemma ->
let tests, new_lemma =
testify_lemma ~init_data preds pred_ins lemma
in
Hashtbl.replace prog.lemmas lemma.lemma_name new_lemma;
tests)
lemmas_to_verify
in
Fmt.pr "Obtained %d symbolic tests in total\n@?"
(List.length tests + List.length tests');
L.verbose (fun m ->
m
("@[-------------------------------------------------------------------------@\n"
^^ "UNFOLDED and SIMPLIFIED SPECS and LEMMAS@\n%a@\n%a"
^^ "@\n\
-------------------------------------------------------------------------@]"
)
Fmt.(list ~sep:(any "@\n") Spec.pp)
(Prog.get_specs prog)
Fmt.(list ~sep:(any "@\n") Lemma.pp)
(Prog.get_lemmas prog));
      (* STEP 4: Create unification plans for specs and predicates *)
(* Printf.printf "Creating unification plans: %f\n" (cur_time -. start_time); *)
match UP.init_prog ~preds_tbl:preds prog with
| Error _ -> failwith "Creation of unification plans failed."
| Ok prog' ->
          (* STEP 5: Determine static dependencies and add to call graph *)
List.iter
(fun test -> record_proc_dependencies test.name prog')
tests;
List.iter
(fun test -> record_lemma_dependencies test.name prog')
tests';
Hashtbl.iter
(fun pred_name _ -> record_preds_used_by_pred pred_name prog')
prog'.preds;
(prog', tests', tests))
(* Verifies the selected procedures and lemmas of [prog], printing and
   logging a one-line summary. In incremental mode, [prev_results] lets
   previously verified, unchanged items count towards overall success. *)
let verify_procs
    ~(init_data : SPState.init_data)
    ?(prev_results : VerificationResults.t option)
    (prog : prog_t)
    (pnames_to_verify : SS.t)
    (lnames_to_verify : SS.t) : unit =
  let prog', lemma_tests, proc_tests =
    get_tests_to_verify ~init_data prog pnames_to_verify lnames_to_verify
  in
  (* STEP 6: Run the symbolic tests *)
  Printf.printf "Running symbolic tests: %f\n" (Sys.time () -. !start_time);
  (* Deliberately run every test, even after a failure, so that all
     failures get reported in a single run. *)
  let all_passed : bool =
    List.fold_left
      (fun acc test ->
        let ok = verify prog' test in
        ok && acc)
      true
      (lemma_tests @ proc_tests)
  in
  let end_time = Sys.time () in
  let cur_verified = SS.union pnames_to_verify lnames_to_verify in
  (* Results carried over from a previous run must also still hold
     (only checked when the current run succeeded, as in the original). *)
  let all_passed =
    all_passed && check_previously_verified prev_results cur_verified
  in
  let outcome =
    if all_passed then "All specs succeeded:" else "There were failures:"
  in
  let msg = Printf.sprintf "%s %f%!" outcome (end_time -. !start_time) in
  Printf.printf "%s\n" msg;
  L.normal (fun m -> m "%s" msg)
(* Sets up verification and returns a continuation function that symbolically
   executes a single procedure test: the test for [proc_name] when given,
   otherwise the first test produced. Used for step-wise execution (e.g. by
   the debugger) rather than batch verification. *)
let verify_up_to_procs
    ?(proc_name : string option)
    ~(init_data : SPState.init_data)
    (prog : prog_t) : SAInterpreter.result_t SAInterpreter.cont_func =
  L.Phase.with_normal ~title:"Program verification" (fun () ->
      (* Analyse all procedures and lemmas *)
      let procs_to_verify =
        SS.of_list (Prog.get_noninternal_proc_names prog)
      in
      let lemmas_to_verify =
        SS.of_list (Prog.get_noninternal_lemma_names prog)
      in
      (* Restrict to the user-selected procs/lemmas when requested via
         the configuration flags. *)
      let procs_to_verify, lemmas_to_verify =
        if !Config.Verification.verify_only_some_of_the_things then
          ( SS.inter procs_to_verify
              (SS.of_list !Config.Verification.procs_to_verify),
            SS.inter lemmas_to_verify
              (SS.of_list !Config.Verification.lemmas_to_verify) )
        else (procs_to_verify, lemmas_to_verify)
      in
      let prog, _, proc_tests =
        get_tests_to_verify ~init_data prog procs_to_verify lemmas_to_verify
      in
      (* TODO: Verify all procedures. Currently we only verify the first
         procedure (unless one is specified).
         Assumes there is at least one procedure. *)
      let test =
        match proc_name with
        | Some proc_name -> (
            match
              proc_tests |> List.find_opt (fun test -> test.name = proc_name)
            with
            | Some test -> test
            | None ->
                Fmt.failwith "Couldn't find test for proc '%s'!" proc_name)
        | None -> (
            match proc_tests with
            | test :: _ -> test
            | _ -> failwith "No tests found!")
      in
      SAInterpreter.init_evaluate_proc
        (fun x -> x)
        prog test.name test.params test.pre_state)
(* Persists the verification results gathered so far, together with the
   given source files (an empty set when none were provided) and the
   interpreter's call graph. The diff is empty because this path is for a
   full, non-incremental run. *)
let postprocess_files source_files =
  let files = Option.value ~default:(SourceFiles.make ()) source_files in
  ResultsDir.write_verif_results files SAInterpreter.call_graph ~diff:""
    global_results
(* Entry point for whole-program verification. In incremental mode (when
   previous results exist on disk), only changed/new/dependent procedures and
   lemmas are re-verified and the fresh results are merged with the previous
   ones; otherwise every non-internal procedure and lemma is verified
   (possibly restricted by the --procs/--lemma configuration). In both cases
   the call graph and results are written back to the results directory. *)
let verify_prog
    ~(init_data : SPState.init_data)
    (prog : prog_t)
    (incremental : bool)
    (source_files : SourceFiles.t option) : unit =
  let f prog incremental source_files =
    let open ResultsDir in
    let open ChangeTracker in
    if incremental && prev_results_exist () then (
      (* Only verify changed procedures and lemmas *)
      let cur_source_files =
        match source_files with
        | Some files -> files
        | None -> failwith "Cannot use -a in incremental mode"
      in
      let prev_source_files, prev_call_graph, results =
        read_verif_results ()
      in
      (* Diff the current sources against the previous run to find what
         changed and what depends on those changes. *)
      let proc_changes, lemma_changes =
        get_verif_changes prog ~prev_source_files ~prev_call_graph
          ~cur_source_files
      in
      (* Stale entries (changed/deleted/dependent) are pruned from the
         previous call graph and results before re-verification. *)
      let procs_to_prune =
        proc_changes.changed_procs @ proc_changes.deleted_procs
        @ proc_changes.dependent_procs
      in
      let lemmas_to_prune =
        lemma_changes.changed_lemmas @ lemma_changes.deleted_lemmas
        @ lemma_changes.dependent_lemmas
      in
      let () = CallGraph.prune_procs prev_call_graph procs_to_prune in
      let () = CallGraph.prune_lemmas prev_call_graph lemmas_to_prune in
      let () =
        VerificationResults.prune results (procs_to_prune @ lemmas_to_prune)
      in
      (* What must actually be (re-)verified in this run. *)
      let procs_to_verify =
        SS.of_list
          (proc_changes.changed_procs @ proc_changes.new_procs
         @ proc_changes.dependent_procs)
      in
      let lemmas_to_verify =
        SS.of_list
          (lemma_changes.changed_lemmas @ lemma_changes.new_lemmas
         @ lemma_changes.dependent_lemmas)
      in
      if !Config.Verification.verify_only_some_of_the_things then
        failwith "Cannot use --incremental and --procs or --lemma together";
      let () =
        verify_procs ~init_data ~prev_results:results prog procs_to_verify
          lemmas_to_verify
      in
      (* Merge the fresh run's call graph and results into the surviving
         previous data, and persist together with a textual change diff. *)
      let cur_call_graph = SAInterpreter.call_graph in
      let cur_results = global_results in
      let call_graph = CallGraph.merge prev_call_graph cur_call_graph in
      let results = VerificationResults.merge results cur_results in
      let diff = Fmt.str "%a" ChangeTracker.pp_proc_changes proc_changes in
      write_verif_results cur_source_files call_graph ~diff results)
    else
      (* Analyse all procedures and lemmas *)
      let cur_source_files =
        Option.value ~default:(SourceFiles.make ()) source_files
      in
      let procs_to_verify =
        SS.of_list (Prog.get_noninternal_proc_names prog)
      in
      let lemmas_to_verify =
        SS.of_list (Prog.get_noninternal_lemma_names prog)
      in
      (* Restrict to the user-selected procs/lemmas when requested. *)
      let procs_to_verify, lemmas_to_verify =
        if !Config.Verification.verify_only_some_of_the_things then
          ( SS.inter procs_to_verify
              (SS.of_list !Config.Verification.procs_to_verify),
            SS.inter lemmas_to_verify
              (SS.of_list !Config.Verification.lemmas_to_verify) )
        else (procs_to_verify, lemmas_to_verify)
      in
      let () =
        verify_procs ~init_data prog procs_to_verify lemmas_to_verify
      in
      let call_graph = SAInterpreter.call_graph in
      write_verif_results cur_source_files call_graph ~diff:"" global_results
  in
  L.Phase.with_normal ~title:"Program verification" (fun () ->
      f prog incremental source_files)
(* Debugger-facing helpers. *)
module Debug = struct
  (* Builds (proc_name, test) pairs for every spec in [prog] without running
     them. Each spec is turned into symbolic tests via [testify_spec]; only
     the first resulting test per spec is kept (multiple tests for one spec
     are logged as unexpected), and each proc's spec is replaced by its
     simplified version as a side effect. *)
  let get_tests_for_prog ~init_data (prog : prog_t) =
    let open Syntaxes.Option in
    let ipreds = UP.init_preds prog.preds in
    (* NOTE(review): assumes predicate UP creation cannot fail here —
       [Result.get_ok] raises otherwise. *)
    let preds = Result.get_ok ipreds in
    (* Map each predicate name to the indices of its in-parameters. *)
    let pred_ins =
      Hashtbl.fold
        (fun name (pred : UP.pred) pred_ins ->
          Hashtbl.add pred_ins name pred.pred.pred_ins;
          pred_ins)
        preds
        (Hashtbl.create Config.medium_tbl_size)
    in
    let specs = Prog.get_specs prog in
    let tests =
      specs
      |> List.filter_map (fun (spec : Spec.t) ->
             let tests, new_spec =
               testify_spec ~init_data spec.spec_name preds pred_ins spec
             in
             if List.length tests > 1 then
               DL.log (fun m ->
                   let tests_json =
                     ("tests", `List (List.map to_yojson tests))
                   in
                   let spec_json = ("spec", Spec.to_yojson spec) in
                   m ~json:[ tests_json; spec_json ]
                     "Spec for %s gave multiple tests???" spec.spec_name);
             (* Keep only the first test; specs yielding no test are
                dropped from the result list. *)
             let+ test = List_utils.hd_opt tests in
             let proc = Prog.get_proc_exn prog spec.spec_name in
             Hashtbl.replace prog.procs proc.proc_name
               { proc with proc_spec = Some new_spec };
             (spec.spec_name, test))
    in
    DL.log (fun m ->
        m
          ~json:[ ("tests", proc_tests_to_yojson tests) ]
          "Verifier.Debug.get_tests_for_prog: Got tests");
    tests

  (* Re-checks a single symbolic-execution result against [test]'s
     postcondition, attaching log output to [parent_id]. *)
  let analyse_result test parent_id result =
    analyse_proc_result test Normal ~parent_id result
end
end
(* Instantiates the verifier from a symbolic memory model alone, building the
   symbolic state ([SState.Make]) and the program state ([PState.Make]) on
   top of [SMemory] before applying the main [Make] functor. *)
module From_scratch
    (SMemory : SMemory.S)
    (PC : ParserAndCompiler.S)
    (External : External.T(PC.Annot).S) =
struct
  (* Internal wiring only; exposed by the struct but not meant for use. *)
  module INTERNAL__ = struct
    module SState = SState.Make (SMemory)
  end

  include
    Make
      (INTERNAL__.SState)
      (PState.Make (SVal.M) (SVal.SESubst) (SStore) (INTERNAL__.SState)
         (Preds.SPreds))
      (PC)
      (External)
end
| null | https://raw.githubusercontent.com/GillianPlatform/Gillian/c794de7417f29e3c23146f9958ead7e4ad0216ce/GillianCore/engine/Abstraction/Verifier.ml | ocaml | * For a given definition of a predicate, this function derives the
corresponding logical variables it hides.
* Same as add_hides, but fails in case of error
* For a given predicate, returns a new predicate where the hides have been derived
* Given a program and its unification plans, modifies the program in place
to add the hides to every predicate definition.
Adding spec vars in the post to the subst - these are effectively the existentials of the post
FIXME: This function name is very bad!
Printf.printf "Inside verify with a test for %s\n" test.name;
Reset coverage for every procedure in verification
Printf.printf "Inside verify with a test for %s\n" test.name;
It's already correct
Printf.printf "Converting symbolic tests from specs: %f\n" (cur_time -. start_time);
Printf.printf "Converting symbolic tests from lemmas: %f\n" (cur_time -. start_time);
Printf.printf "Creating unification plans: %f\n" (cur_time -. start_time);
Analyse all procedures and lemmas
Only verify changed procedures and lemmas
Analyse all procedures and lemmas | open Containers
module DL = Debugger_log
module type S = sig
type st
type heap_t
type state
type m_err
type annot
module SPState :
PState.S
with type t = state
and type vt = SVal.M.t
and type st = st
and type store_t = SStore.t
and type heap_t = heap_t
and type m_err_t = m_err
and type preds_t = Preds.SPreds.t
module SAInterpreter :
GInterpreter.S
with type vt = SVal.M.t
and type st = st
and type store_t = SStore.t
and type state_t = state
and type heap_t = heap_t
and type state_err_t = SPState.err_t
and type annot = annot
module SUnifier : Unifier.S with type st = SVal.SESubst.t
type t
type prog_t = (annot, int) Prog.t
type proc_tests = (string * t) list [@@deriving to_yojson]
val start_time : float ref
val reset : unit -> unit
val verify_prog :
init_data:SPState.init_data ->
prog_t ->
bool ->
SourceFiles.t option ->
unit
val verify_up_to_procs :
?proc_name:string ->
init_data:SPState.init_data ->
prog_t ->
SAInterpreter.result_t SAInterpreter.cont_func
val postprocess_files : SourceFiles.t option -> unit
module Debug : sig
val get_tests_for_prog : init_data:SPState.init_data -> prog_t -> proc_tests
val analyse_result :
t -> Logging.Report_id.t -> SAInterpreter.result_t -> bool
end
end
module Make
(SState : SState.S
with type vt = SVal.M.t
and type st = SVal.SESubst.t
and type store_t = SStore.t)
(SPState : PState.S
with type vt = SState.vt
and type st = SState.st
and type state_t = SState.t
and type store_t = SState.store_t
and type preds_t = Preds.SPreds.t
and type init_data = SState.init_data)
(PC : ParserAndCompiler.S)
(External : External.T(PC.Annot).S) =
struct
module L = Logging
module SSubst = SVal.SESubst
module SPState = SPState
module SAInterpreter =
GInterpreter.Make (SVal.M) (SVal.SESubst) (SStore) (SPState) (PC) (External)
module Normaliser = Normaliser.Make (SPState)
type st = SPState.st
type state = SPState.t
type heap_t = SPState.heap_t
type m_err = SPState.m_err_t
type annot = PC.Annot.t
module SUnifier =
Unifier.Make (SVal.M) (SVal.SESubst) (SStore) (SState) (Preds.SPreds)
let print_success_or_failure success =
if success then Fmt.pr "%a" (Fmt.styled `Green Fmt.string) "Success\n"
else Fmt.pr "%a" (Fmt.styled `Red Fmt.string) "Failure\n";
Format.print_flush ()
let yojson_of_expr_set set =
`List (List.map Expr.to_yojson (Expr.Set.elements set))
type t = {
name : string;
id : int * int;
params : string list;
pre_state : SPState.t;
post_up : UP.t;
flag : Flag.t option;
spec_vars : Expr.Set.t; [@to_yojson yojson_of_expr_set]
}
[@@deriving to_yojson]
type prog_t = (annot, int) Prog.t
type proc_tests = (string * t) list
let proc_tests_to_yojson tests =
`List
(tests
|> List.map (fun (name, test) -> `List [ `String name; to_yojson test ]))
let global_results = VerificationResults.make ()
let start_time = ref 0.
let reset () =
VerificationResults.reset global_results;
SAInterpreter.reset ()
module Hides_derivations = struct
let add_hides
~prog
~init_data
~pred_ins
~preds
~pred_name
~subst_params
~known_params
orig_def =
let orig_info, orig_def, orig_hides = orig_def in
let subst_def =
List.fold_left
(fun def (pv, lv) ->
Asrt.subst_expr_for_expr ~to_subst:pv ~subst_with:lv def)
orig_def subst_params
in
let subst_params = List.map snd subst_params in
let ( let* ) = Result.bind in
let def =
let info =
Option.map (fun (s, vars) -> (s, SS.of_list vars)) orig_info
in
(subst_def, (info, None, orig_hides))
in
L.verbose (fun fmt -> fmt "Examining definition: %a" Asrt.pp (fst def));
let* def_up =
UP.init known_params UP.KB.empty pred_ins [ def ]
|> Result.map_error (fun _ ->
"Creation of unification plans for predicates failed.")
in
let a, _ = def in
let* state =
match Normaliser.normalise_assertion ~init_data ~pred_defs:preds a with
| Ok [ (state, _) ] -> Ok state
| Ok _ ->
Error
"Creation of unification plans for predicates failed: \
normalisation resulted in more than one state"
| Error msg ->
Fmt.error "Creation of unification plans for predicates failed: %s"
msg
in
FOLD / UNFOLD / UNIFY
let () =
L.verbose (fun fmt -> fmt "EXACT: hiding fold:\n%a" SPState.pp state)
in
let fold_predicate = SLCmd.Fold (pred_name, subst_params, None) in
let* fstate =
match SPState.evaluate_slcmd prog fold_predicate state with
| Ok [ fstate ] -> Ok fstate
| Ok _ -> Error "EXACT: ERROR: fold resulting in multiple states"
| Error _ -> Error "EXACT: ERROR: Impossible fold"
in
let () =
L.verbose (fun fmt -> fmt "EXACT: hiding unfold:\n%a" SPState.pp fstate)
in
let unfold_predicate =
SLCmd.Unfold (pred_name, subst_params, None, false)
in
let* fustate =
match SPState.evaluate_slcmd prog unfold_predicate fstate with
| Ok [ fustate ] -> Ok fustate
| Ok _ -> Error "EXACT: ERROR: unfold resulting in multiple states"
| Error _ -> Error "EXACT: ERROR: Impossible unfold"
in
L.verbose (fun fmt -> fmt "EXACT: Hiding: Before:\n%a" SPState.pp state);
L.verbose (fun fmt -> fmt "EXACT: Hiding: After:\n%a" SPState.pp fustate);
let state, predicates, _, variants = SPState.expose fustate in
let subst = SVal.SESubst.init (List.map (fun x -> (x, x)) subst_params) in
let unification_result =
SUnifier.unify
(state, predicates, preds, variants)
subst def_up LogicCommand
in
let* subst =
match unification_result with
| UPUSucc [ (_, subst, _) ] -> Ok subst
| UPUSucc _ ->
Error
"EXACT: ERROR: initial definition unified against in multiple \
ways"
| UPUFail _ ->
Error "EXACT: ERROR: cannot unify against initial definition"
in
L.verbose (fun fmt -> fmt "EXACT: Obtained subst: %a" SSubst.pp subst);
let def_lvars =
Expr.Set.of_list
(List.map (fun x -> Expr.LVar x) (SS.elements (Asrt.lvars a)))
in
SSubst.filter_in_place subst (fun k v ->
match (Expr.equal k v, Expr.Set.mem k def_lvars) with
| _, false -> None
| true, _ -> None
| _ -> (
match k with
| Expr.LVar x when not (Names.is_spec_var_name x) -> None
| _ -> Some v));
L.verbose (fun fmt -> fmt "EXACT: Filtered subst: %a" SSubst.pp subst);
let subst = SSubst.to_list subst in
let hidden =
List.map
(fun (before, after) ->
let we_good_bro =
Expr.UnOp (UNot, Expr.BinOp (before, Equal, after))
in
(before, SPState.sat_check fustate we_good_bro))
subst
in
let hidden =
List.filter_map
(fun (before, b) ->
match (b, before) with
| false, _ -> None
| true, Expr.LVar x -> Some x
| true, _ -> Fmt.failwith "EXACT: Error: non-LVar in ESubst")
hidden
in
L.verbose (fun fmt ->
fmt "EXACT: Hidden variables: %a" Fmt.(list ~sep:comma string) hidden);
Ok (orig_info, orig_def, hidden)
let add_hides_exn
~prog
~init_data
~pred_ins
~preds
~pred_name
~subst_params
~known_params
pred_def =
match
add_hides ~prog ~init_data ~pred_ins ~preds ~pred_name ~subst_params
~known_params pred_def
with
| Ok x -> x
| Error msg -> failwith msg
let derive_predicate_hiding
~preds
~prog
~init_data
~pred_ins
(pred : Pred.t) =
let module KB = UP.KB in
L.verbose (fun fmt -> fmt "Examinining predicate: %s" pred.pred_name);
let pred_params = pred.pred_params in
let defs = pred.pred_definitions in
let subst_params =
List.map
(fun (pv, _) -> (Expr.PVar pv, Expr.LVar ("#_" ^ pv)))
pred_params
in
let known_params =
KB.of_list
(List.map (fun i -> snd (List.nth subst_params i)) pred.pred_ins)
in
let new_defs =
List.map
(add_hides_exn ~prog ~init_data ~pred_ins ~preds
~pred_name:pred.pred_name ~subst_params ~known_params)
defs
in
{ pred with pred_definitions = new_defs }
let derive_predicates_hiding
~(prog : prog_t)
~(init_data : SPState.init_data)
(preds : (string, UP.pred) Hashtbl.t) : unit =
if not !Config.Verification.exact then ()
else
let () =
L.verbose (fun fmt -> fmt "EXACT: Examining hiding in predicates")
in
let prog : annot UP.prog =
{
preds;
specs = Hashtbl.create 1;
lemmas = Hashtbl.create 1;
coverage = Hashtbl.create 1;
prog;
}
in
let module KB = UP.KB in
let pred_ins =
Hashtbl.fold
(fun name (pred_with_up : UP.pred) pred_ins ->
Hashtbl.add pred_ins name pred_with_up.pred.pred_ins;
pred_ins)
preds
(Hashtbl.create Config.medium_tbl_size)
in
Hashtbl.filter_map_inplace
(fun _pred_name up_pred ->
Some
UP.
{
up_pred with
pred =
derive_predicate_hiding ~preds ~prog ~init_data ~pred_ins
up_pred.pred;
})
preds
end
let testify
~(init_data : SPState.init_data)
(func_or_lemma_name : string)
(preds : (string, UP.pred) Hashtbl.t)
(pred_ins : (string, int list) Hashtbl.t)
(name : string)
(params : string list)
(id : int)
(pre : Asrt.t)
(posts : Asrt.t list)
(variant : Expr.t option)
(hides : string list option)
(flag : Flag.t option)
(label : (string * SS.t) option)
(to_verify : bool) : (t option * (Asrt.t * Asrt.t list) option) list =
let test_of_normalised_state id' (ss_pre, subst) =
Step 2 - spec_vars = ) -U- alocs(range(subst ) )
let lvars =
SS.fold
(fun x acc ->
if Names.is_spec_var_name x then Expr.Set.add (Expr.LVar x) acc
else acc)
(Asrt.lvars pre) Expr.Set.empty
in
let subst_dom = SSubst.domain subst None in
let alocs =
SSubst.fold subst
(fun _ v_val acc ->
match v_val with
| ALoc _ -> Expr.Set.add v_val acc
| _ -> acc)
Expr.Set.empty
in
let spec_vars = Expr.Set.union (Expr.Set.diff lvars subst_dom) alocs in
Step 3 - postconditions to symbolic states
L.verbose (fun m ->
m
"Processing one postcondition of %s with label %a and spec_vars: \
@[<h>%a@].@\n\
Original Pre:@\n\
%a\n\
Symb State Pre:@\n\
%a@\n\
Subst:@\n\
%a@\n\
Posts (%d):@\n\
%a"
name
Fmt.(
option ~none:(any "None") (fun ft (s, e) ->
Fmt.pf ft "[ %s; %a ]" s (iter ~sep:comma SS.iter string) e))
label
Fmt.(iter ~sep:comma Expr.Set.iter Expr.pp)
spec_vars Asrt.pp pre SPState.pp ss_pre SSubst.pp subst
(List.length posts)
Fmt.(list ~sep:(any "@\n") Asrt.pp)
posts);
let subst =
SSubst.filter subst (fun e _ ->
match e with
| PVar _ -> false
| _ -> true)
in
let posts =
List.filter_map
(fun p ->
let substituted = SSubst.substitute_asrt subst ~partial:true p in
let reduced = Reduction.reduce_assertion substituted in
if Simplifications.admissible_assertion reduced then Some reduced
else None)
posts
in
if not to_verify then
let pre' = Asrt.star (SPState.to_assertions ss_pre) in
(None, Some (pre', posts))
else
Step 4 - create a unification plan for the postconditions and s_test
let () =
L.verbose (fun fmt -> fmt "Creating UPs for posts of %s" name)
in
let pvar_params =
List.fold_left
(fun acc x -> Expr.Set.add (Expr.PVar x) acc)
Expr.Set.empty params
in
let known_unifiables =
Expr.Set.add (PVar Names.return_variable)
(Expr.Set.union pvar_params spec_vars)
in
let existentials =
Option.fold ~none:Expr.Set.empty
~some:(fun (_, exs) ->
SS.fold
(fun x acc -> Expr.Set.add (LVar x) acc)
exs Expr.Set.empty)
label
in
let known_unifiables = Expr.Set.union known_unifiables existentials in
let hides =
match (flag, hides) with
| None, Some hides -> hides
| None, None when !Config.Verification.exact ->
failwith "Lemma must declare hides logicals in exact verification"
| _, _ -> []
in
let simple_posts =
List.map
(fun post ->
let post_lvars = Asrt.lvars post in
let lstr_pp = Fmt.(list ~sep:comma string) in
let () =
L.verbose (fun fmt ->
fmt "OX hiding: %a\nPost lvars: %a" lstr_pp hides lstr_pp
(SS.elements post_lvars))
in
let inter = SS.inter post_lvars (SS.of_list hides) in
match SS.is_empty inter with
| true -> (post, (label, None, hides))
| false ->
failwith
("Error: Exact lemma with impossible hiding: "
^ SS.min_elt inter))
posts
in
let post_up =
UP.init known_unifiables Expr.Set.empty pred_ins simple_posts
in
L.verbose (fun m -> m "END of STEP 4@\n");
match post_up with
| Error _ ->
let msg =
Printf.sprintf "Warning: testify failed for %s. Cause: post_up \n"
name
in
Printf.printf "%s" msg;
L.verbose (fun m -> m "%s" msg);
(None, None)
| Ok post_up ->
let pre' = Asrt.star (SPState.to_assertions ss_pre) in
let ss_pre =
match flag with
Lemmas should not have stores when being proven
| None ->
let empty_store = SStore.init [] in
SPState.set_store ss_pre empty_store
| Some _ -> ss_pre
in
let test =
{
name;
id = (id, id');
params;
pre_state = ss_pre;
post_up;
flag;
spec_vars;
}
in
(Some test, Some (pre', posts))
in
try
Step 1 - normalise the precondition
match
Normaliser.normalise_assertion ~init_data ~pred_defs:preds
~pvars:(SS.of_list params) pre
with
| Error _ -> [ (None, None) ]
| Ok normalised_assertions ->
let variants = Hashtbl.create 1 in
let () = Hashtbl.add variants func_or_lemma_name variant in
let normalised_assertions =
List.map
(fun (state, subst) ->
(SPState.set_variants state (Hashtbl.copy variants), subst))
normalised_assertions
in
let result =
List.mapi test_of_normalised_state normalised_assertions
in
result
with Failure msg ->
let new_msg =
Printf.sprintf
"WARNING: testify failed for %s. Cause: normalisation with msg: %s.\n"
name msg
in
Printf.printf "%s" new_msg;
L.normal (fun m -> m "%s" new_msg);
[ (None, None) ]
let testify_sspec
~init_data
(spec_name : string)
(preds : UP.preds_tbl_t)
(pred_ins : (string, int list) Hashtbl.t)
(name : string)
(params : string list)
(id : int)
(sspec : Spec.st) : (t option * Spec.st option) list =
let ( let+ ) x f = List.map f x in
let+ stest, sspec' =
testify ~init_data spec_name preds pred_ins name params id sspec.ss_pre
sspec.ss_posts sspec.ss_variant None (Some sspec.ss_flag)
(Spec.label_vars_to_set sspec.ss_label)
sspec.ss_to_verify
in
let sspec' =
Option.map
(fun (pre, posts) -> { sspec with ss_pre = pre; ss_posts = posts })
sspec'
in
(stest, sspec')
let testify_spec
~init_data
(spec_name : string)
(preds : UP.preds_tbl_t)
(pred_ins : (string, int list) Hashtbl.t)
(spec : Spec.t) : t list * Spec.t =
if not spec.spec_to_verify then ([], spec)
else
let () =
List.iter
(fun (sspec : Spec.st) ->
if sspec.ss_posts = [] then
failwith
("Specification without post-condition for function "
^ spec.spec_name))
spec.spec_sspecs
in
L.verbose (fun m ->
m
("-------------------------------------------------------------------------@\n"
^^ "Creating symbolic tests for procedure %s: %d cases\n"
^^ "-------------------------------------------------------------------------"
)
spec.spec_name
(List.length spec.spec_sspecs));
let _, tests, spec_sspecs =
List.fold_left
(fun (id, tests, sspecs) sspec ->
let tests_and_specs =
testify_sspec ~init_data spec_name preds pred_ins spec.spec_name
spec.spec_params id sspec
in
let new_tests, new_specs =
List.fold_left
(fun (nt, ns) (t, s) ->
let nt =
match t with
| Some test -> test :: nt
| None -> nt
in
let ns =
match s with
| Some spec -> spec :: ns
| None -> ns
in
(nt, ns))
([], []) tests_and_specs
in
(id + 1, new_tests @ tests, new_specs @ sspecs))
(0, [], []) spec.spec_sspecs
in
let new_spec = { spec with spec_sspecs } in
L.verbose (fun m -> m "Simplified SPECS:@\n@[%a@]@\n" Spec.pp new_spec);
(tests, new_spec)
let testify_lemma
~init_data
(preds : UP.preds_tbl_t)
(pred_ins : (string, int list) Hashtbl.t)
(lemma : Lemma.t) : t list * Lemma.t =
let tests_and_specs =
List.concat_map
(fun Lemma.
{ lemma_hyp; lemma_concs; lemma_spec_variant; lemma_spec_hides } ->
List.map
(fun t -> (t, lemma_spec_hides))
(testify ~init_data lemma.lemma_name preds pred_ins lemma.lemma_name
lemma.lemma_params 0 lemma_hyp lemma_concs lemma_spec_variant
lemma_spec_hides None None true))
lemma.lemma_specs
in
let tests, specs =
List.fold_left
(fun (test_acc, spec_acc) ((test_opt, spec_opt), lemma_spec_hides) ->
let test_acc =
match test_opt with
| Some t -> t :: test_acc
| None -> test_acc
in
let spec_acc =
match spec_opt with
| Some (lemma_hyp, lemma_concs) ->
Lemma.
{
lemma_hyp;
lemma_concs;
lemma_spec_variant = lemma.lemma_variant;
lemma_spec_hides;
}
:: spec_acc
| None -> spec_acc
in
(test_acc, spec_acc))
([], []) tests_and_specs
in
let () =
match specs with
| [] ->
raise
(Failure
(Printf.sprintf "Could not testify lemma %s" lemma.lemma_name))
| _ -> ()
in
(tests, { lemma with lemma_specs = specs })
let analyse_result (subst : SSubst.t) (test : t) (state : SPState.t) : bool =
TODO : ASSUMING SIMPLIFICATION DOES NOT BRANCH HERE
let _, states = SPState.simplify state in
assert (List.length states = 1);
let state = List.hd states in
let subst = SSubst.copy subst in
List.iter
(fun x ->
if not (SSubst.mem subst (LVar x)) then
SSubst.add subst (LVar x) (LVar x))
(SS.elements (SPState.get_spec_vars state));
L.verbose (fun m ->
m "Analyse result: About to unify one postcondition of %s. post: %a"
test.name UP.pp test.post_up);
match SPState.unify state subst test.post_up Unifier.Postcondition with
| true ->
L.verbose (fun m ->
m "Analyse result: Postcondition unified successfully");
VerificationResults.set_result global_results test.name test.id true;
true
| false ->
L.normal (fun m -> m "Analyse result: Postcondition not unifiable.");
VerificationResults.set_result global_results test.name test.id false;
false
let make_post_subst (test : t) (post_state : SPState.t) : SSubst.t =
let subst_lst =
List.map (fun e -> (e, e)) (Expr.Set.elements test.spec_vars)
in
let params_subst_lst = SStore.bindings (SPState.get_store post_state) in
let params_subst_lst =
List.map (fun (x, v) -> (Expr.PVar x, v)) params_subst_lst
in
let subst = SSubst.init (subst_lst @ params_subst_lst) in
subst
let analyse_proc_result test flag ?parent_id result =
match (result : SAInterpreter.result_t) with
| ExecRes.RFail { proc; proc_idx; error_state; errors } ->
L.verbose (fun m ->
m
"VERIFICATION FAILURE: Procedure %s, Command %d\n\
Spec %s %a\n\
@[<v 2>State:@\n\
%a@]@\n\
@[<v 2>Errors:@\n\
%a@]@\n"
proc proc_idx test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id SPState.pp error_state
Fmt.(list ~sep:(any "@\n") SAInterpreter.Logging.pp_err)
errors);
if not !Config.debug then Fmt.pr "f @?";
false
| ExecRes.RSucc { flag = fl; final_state; last_report; _ } ->
if Some fl <> test.flag then (
L.normal (fun m ->
m
"VERIFICATION FAILURE: Spec %s %a terminated with flag %s \
instead of %s\n"
test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id (Flag.str fl) (Flag.str flag));
if not !Config.debug then Fmt.pr "f @?";
false)
else
let parent_id =
match parent_id with
| None -> last_report
| id -> id
in
DL.log (fun m ->
m "Unify: setting parent to %a"
(Fmt.option L.Report_id.pp)
parent_id);
L.Parent.with_id parent_id (fun () ->
let store = SPState.get_store final_state in
let () =
SStore.filter store (fun x v ->
if x = Names.return_variable then Some v else None)
in
let subst = make_post_subst test final_state in
if analyse_result subst test final_state then (
L.normal (fun m ->
m
"VERIFICATION SUCCESS: Spec %s %a terminated successfully\n"
test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id);
if not !Config.debug then Fmt.pr "s @?";
true)
else (
L.normal (fun m ->
m
"VERIFICATION FAILURE: Spec %s %a - post condition not \
unifiable\n"
test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id);
if not !Config.debug then Fmt.pr "f @?";
false))
let analyse_proc_results
(test : t)
(flag : Flag.t)
(rets : SAInterpreter.result_t list) : bool =
if rets = [] then (
L.(
normal (fun m ->
m "ERROR: Function %s evaluates to 0 results." test.name));
exit 1);
let success = List.for_all (analyse_proc_result test flag) rets in
print_success_or_failure success;
success
let analyse_lemma_results (test : t) (rets : SPState.t list) : bool =
let success : bool =
rets <> []
&& List.fold_left
(fun ac final_state ->
let empty_store = SStore.init [] in
let final_state = SPState.set_store final_state empty_store in
let subst = make_post_subst test final_state in
if analyse_result subst test final_state then (
L.normal (fun m ->
m
"VERIFICATION SUCCESS: Spec %s %a terminated successfully\n"
test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id);
ac)
else (
L.normal (fun m ->
m
"VERIFICATION FAILURE: Spec %s %a - post condition not \
unifiable\n"
test.name
(Fmt.Dump.pair Fmt.int Fmt.int)
test.id);
false))
true rets
in
if rets = [] then (
L.(
normal (fun m ->
m "ERROR: Function %s evaluates to 0 results." test.name));
exit 1);
print_success_or_failure success;
success
let verify_up_to_procs (prog : annot UP.prog) (test : t) : annot UP.prog =
match test.flag with
| Some _ ->
let msg = "Verifying one spec of procedure " ^ test.name ^ "... " in
L.tmi (fun fmt -> fmt "%s" msg);
Fmt.pr "%s@?" msg;
{ prog with coverage = Hashtbl.create 1 }
| None -> raise (Failure "Debugging lemmas unsupported!")
let verify (prog : annot UP.prog) (test : t) : bool =
let state = test.pre_state in
match test.flag with
| Some flag ->
let prog = verify_up_to_procs prog test in
let rets =
SAInterpreter.evaluate_proc
(fun x -> x)
prog test.name test.params state
in
L.verbose (fun m ->
m "Verification: Concluded evaluation: %d obtained results.%a@\n"
(List.length rets) SAInterpreter.Logging.pp_result rets);
analyse_proc_results test flag rets
| None -> (
let lemma = Prog.get_lemma_exn prog.prog test.name in
match lemma.lemma_proof with
| None ->
if !Config.lemma_proof then
raise
(Failure (Printf.sprintf "Lemma %s WITHOUT proof" test.name))
| Some proof -> (
let msg = "Verifying lemma " ^ test.name ^ "... " in
L.tmi (fun fmt -> fmt "%s" msg);
Fmt.pr "%s@?" msg;
match SAInterpreter.evaluate_lcmds prog proof state with
| Ok rets -> analyse_lemma_results test rets
| Error _ -> false))
let pred_extracting_visitor =
object
inherit [_] Visitors.reduce
inherit Visitors.Utils.ss_monoid
method! visit_Pred _ pred_name _ = SS.singleton pred_name
method! visit_Fold _ pred_name _ _ = SS.singleton pred_name
method! visit_Unfold _ pred_name _ _ _ = SS.singleton pred_name
method! visit_GUnfold _ pred_name = SS.singleton pred_name
end
let filter_internal_preds (prog : annot UP.prog) (pred_names : SS.t) =
SS.filter
(fun pred_name ->
let pred = Prog.get_pred_exn prog.prog pred_name in
not pred.pred_internal)
pred_names
let lemma_extracting_visitor =
object
inherit [_] Visitors.reduce
inherit Visitors.Utils.ss_monoid
method! visit_ApplyLem _ lemma_name _ _ = SS.singleton lemma_name
end
let filter_internal_lemmas (prog : annot UP.prog) (lemma_names : SS.t) =
SS.filter
(fun lemma_name ->
let lemma = Prog.get_lemma_exn prog.prog lemma_name in
not lemma.lemma_internal)
lemma_names
let record_proc_dependencies proc_name (prog : annot UP.prog) =
let proc = Prog.get_proc_exn prog.prog proc_name in
let preds_used =
filter_internal_preds prog (pred_extracting_visitor#visit_proc () proc)
in
let lemmas_used =
filter_internal_lemmas prog (lemma_extracting_visitor#visit_proc () proc)
in
SS.iter
(CallGraph.add_proc_pred_use SAInterpreter.call_graph proc_name)
preds_used;
SS.iter
(CallGraph.add_proc_lemma_use SAInterpreter.call_graph proc_name)
lemmas_used
let record_lemma_dependencies lemma_name (prog : annot UP.prog) =
let lemma = Prog.get_lemma_exn prog.prog lemma_name in
let preds_used =
filter_internal_preds prog (pred_extracting_visitor#visit_lemma () lemma)
in
let lemmas_used =
filter_internal_lemmas prog
(lemma_extracting_visitor#visit_lemma () lemma)
in
SS.iter
(CallGraph.add_lemma_pred_use SAInterpreter.call_graph lemma_name)
preds_used;
SS.iter
(CallGraph.add_lemma_call SAInterpreter.call_graph lemma_name)
lemmas_used
  (* Records in the global call graph which non-internal predicates are
     referenced by the definition of predicate [pred_name]. *)
  let record_preds_used_by_pred pred_name (prog : annot UP.prog) =
    let pred = Prog.get_pred_exn prog.prog pred_name in
    let preds_used =
      filter_internal_preds prog (pred_extracting_visitor#visit_pred () pred)
    in
    SS.iter
      (CallGraph.add_pred_call SAInterpreter.call_graph pred_name)
      preds_used
let check_previously_verified prev_results cur_verified =
Option.fold ~none:true
~some:(fun res ->
VerificationResults.check_previously_verified
~printer:print_success_or_failure res cur_verified)
prev_results
let get_tests_to_verify
~init_data
(prog : prog_t)
(pnames_to_verify : SS.t)
(lnames_to_verify : SS.t) : annot UP.prog * t list * t list =
let ipreds = UP.init_preds prog.preds in
match ipreds with
| Error e ->
Fmt.pr
"Creation of unification plans for predicates failed with:\n%a\n@?"
UP.pp_up_err_t e;
Fmt.failwith "Creation of unification plans for predicates failed."
| Ok preds -> (
let () =
Hides_derivations.derive_predicates_hiding ~init_data ~prog preds
in
let preds_with_hiding = Hashtbl.create 1 in
let () =
Hashtbl.iter
(fun name (up_pred : UP.pred) ->
Hashtbl.replace preds_with_hiding name up_pred.pred)
preds
in
let ipreds = UP.init_preds preds_with_hiding in
let preds = Result.get_ok ipreds in
let pred_ins =
Hashtbl.fold
(fun name (pred : UP.pred) pred_ins ->
Hashtbl.add pred_ins name pred.pred.pred_ins;
pred_ins)
preds
(Hashtbl.create Config.medium_tbl_size)
in
STEP 1 : Get the specs to verify
Fmt.pr "Obtaining specs to verify...\n@?";
let specs_to_verify =
List.filter
(fun (spec : Spec.t) -> SS.mem spec.spec_name pnames_to_verify)
(Prog.get_specs prog)
in
STEP 2 : Convert specs to symbolic tests
let tests : t list =
List.concat_map
(fun (spec : Spec.t) ->
let tests, new_spec =
testify_spec ~init_data spec.spec_name preds pred_ins spec
in
let proc = Prog.get_proc_exn prog spec.spec_name in
Hashtbl.replace prog.procs proc.proc_name
{ proc with proc_spec = Some new_spec };
tests)
specs_to_verify
in
STEP 3 : Get the lemmas to verify
Fmt.pr "Obtaining lemmas to verify...\n@?";
let lemmas_to_verify =
List.filter
(fun (lemma : Lemma.t) -> SS.mem lemma.lemma_name lnames_to_verify)
(Prog.get_lemmas prog)
in
STEP 4 : Convert lemmas to symbolic tests
let lemmas_to_verify =
List.sort
(fun (l1 : Lemma.t) l2 ->
Stdlib.compare l1.lemma_name l2.lemma_name)
lemmas_to_verify
in
let tests' : t list =
List.concat_map
(fun lemma ->
let tests, new_lemma =
testify_lemma ~init_data preds pred_ins lemma
in
Hashtbl.replace prog.lemmas lemma.lemma_name new_lemma;
tests)
lemmas_to_verify
in
Fmt.pr "Obtained %d symbolic tests in total\n@?"
(List.length tests + List.length tests');
L.verbose (fun m ->
m
("@[-------------------------------------------------------------------------@\n"
^^ "UNFOLDED and SIMPLIFIED SPECS and LEMMAS@\n%a@\n%a"
^^ "@\n\
-------------------------------------------------------------------------@]"
)
Fmt.(list ~sep:(any "@\n") Spec.pp)
(Prog.get_specs prog)
Fmt.(list ~sep:(any "@\n") Lemma.pp)
(Prog.get_lemmas prog));
STEP 4 : Create unification plans for specs and predicates
match UP.init_prog ~preds_tbl:preds prog with
| Error _ -> failwith "Creation of unification plans failed."
| Ok prog' ->
STEP 5 : Determine static dependencies and add to call graph
List.iter
(fun test -> record_proc_dependencies test.name prog')
tests;
List.iter
(fun test -> record_lemma_dependencies test.name prog')
tests';
Hashtbl.iter
(fun pred_name _ -> record_preds_used_by_pred pred_name prog')
prog'.preds;
(prog', tests', tests))
let verify_procs
~(init_data : SPState.init_data)
?(prev_results : VerificationResults.t option)
(prog : prog_t)
(pnames_to_verify : SS.t)
(lnames_to_verify : SS.t) : unit =
let prog', tests', tests =
get_tests_to_verify ~init_data prog pnames_to_verify lnames_to_verify
in
STEP 6 : Run the symbolic tests
let cur_time = Sys.time () in
Printf.printf "Running symbolic tests: %f\n" (cur_time -. !start_time);
let success : bool =
List.fold_left
(fun ac test -> if verify prog' test then ac else false)
true (tests' @ tests)
in
let end_time = Sys.time () in
let cur_verified = SS.union pnames_to_verify lnames_to_verify in
let success =
success && check_previously_verified prev_results cur_verified
in
let msg : string =
if success then "All specs succeeded:" else "There were failures:"
in
let msg : string = Printf.sprintf "%s %f%!" msg (end_time -. !start_time) in
Printf.printf "%s\n" msg;
L.normal (fun m -> m "%s" msg)
let verify_up_to_procs
?(proc_name : string option)
~(init_data : SPState.init_data)
(prog : prog_t) : SAInterpreter.result_t SAInterpreter.cont_func =
L.Phase.with_normal ~title:"Program verification" (fun () ->
let procs_to_verify =
SS.of_list (Prog.get_noninternal_proc_names prog)
in
let lemmas_to_verify =
SS.of_list (Prog.get_noninternal_lemma_names prog)
in
let procs_to_verify, lemmas_to_verify =
if !Config.Verification.verify_only_some_of_the_things then
( SS.inter procs_to_verify
(SS.of_list !Config.Verification.procs_to_verify),
SS.inter lemmas_to_verify
(SS.of_list !Config.Verification.lemmas_to_verify) )
else (procs_to_verify, lemmas_to_verify)
in
let prog, _, proc_tests =
get_tests_to_verify ~init_data prog procs_to_verify lemmas_to_verify
in
TODO : Verify All procedures . Currently we only verify the first
procedure ( unless specified ) .
Assume there is at least one procedure
procedure (unless specified).
Assume there is at least one procedure*)
let test =
match proc_name with
| Some proc_name -> (
match
proc_tests |> List.find_opt (fun test -> test.name = proc_name)
with
| Some test -> test
| None ->
Fmt.failwith "Couldn't find test for proc '%s'!" proc_name)
| None -> (
match proc_tests with
| test :: _ -> test
| _ -> failwith "No tests found!")
in
SAInterpreter.init_evaluate_proc
(fun x -> x)
prog test.name test.params test.pre_state)
let postprocess_files source_files =
let cur_source_files =
Option.value ~default:(SourceFiles.make ()) source_files
in
let call_graph = SAInterpreter.call_graph in
ResultsDir.write_verif_results cur_source_files call_graph ~diff:""
global_results
  (* Top-level verification entry point.
     In incremental mode (and when previous results exist on disk): reads
     the stored source files, call graph and results; computes which procs
     and lemmas changed; prunes stale entries; re-verifies only the
     changed/new/dependent ones; then merges and writes back the combined
     results. Otherwise: verifies everything non-internal (possibly
     restricted by --procs/--lemma) from scratch and writes fresh results.
     Incompatible flag combinations fail fast with [failwith]. *)
  let verify_prog
      ~(init_data : SPState.init_data)
      (prog : prog_t)
      (incremental : bool)
      (source_files : SourceFiles.t option) : unit =
    let f prog incremental source_files =
      let open ResultsDir in
      let open ChangeTracker in
      if incremental && prev_results_exist () then (
        (* Incremental mode requires explicit source files. *)
        let cur_source_files =
          match source_files with
          | Some files -> files
          | None -> failwith "Cannot use -a in incremental mode"
        in
        let prev_source_files, prev_call_graph, results =
          read_verif_results ()
        in
        let proc_changes, lemma_changes =
          get_verif_changes prog ~prev_source_files ~prev_call_graph
            ~cur_source_files
        in
        (* Everything changed, deleted or dependent is pruned from the
           previous call graph and results before merging. *)
        let procs_to_prune =
          proc_changes.changed_procs @ proc_changes.deleted_procs
          @ proc_changes.dependent_procs
        in
        let lemmas_to_prune =
          lemma_changes.changed_lemmas @ lemma_changes.deleted_lemmas
          @ lemma_changes.dependent_lemmas
        in
        let () = CallGraph.prune_procs prev_call_graph procs_to_prune in
        let () = CallGraph.prune_lemmas prev_call_graph lemmas_to_prune in
        let () =
          VerificationResults.prune results (procs_to_prune @ lemmas_to_prune)
        in
        (* Only changed, new or dependent items need re-verification. *)
        let procs_to_verify =
          SS.of_list
            (proc_changes.changed_procs @ proc_changes.new_procs
           @ proc_changes.dependent_procs)
        in
        let lemmas_to_verify =
          SS.of_list
            (lemma_changes.changed_lemmas @ lemma_changes.new_lemmas
           @ lemma_changes.dependent_lemmas)
        in
        if !Config.Verification.verify_only_some_of_the_things then
          failwith "Cannot use --incremental and --procs or --lemma together";
        let () =
          verify_procs ~init_data ~prev_results:results prog procs_to_verify
            lemmas_to_verify
        in
        (* Merge the fresh call graph/results into the pruned previous ones
           and persist the combination. *)
        let cur_call_graph = SAInterpreter.call_graph in
        let cur_results = global_results in
        let call_graph = CallGraph.merge prev_call_graph cur_call_graph in
        let results = VerificationResults.merge results cur_results in
        let diff = Fmt.str "%a" ChangeTracker.pp_proc_changes proc_changes in
        write_verif_results cur_source_files call_graph ~diff results)
      else
        (* Non-incremental: verify all non-internal procs/lemmas, optionally
           restricted by the user's --procs/--lemma selections. *)
        let cur_source_files =
          Option.value ~default:(SourceFiles.make ()) source_files
        in
        let procs_to_verify =
          SS.of_list (Prog.get_noninternal_proc_names prog)
        in
        let lemmas_to_verify =
          SS.of_list (Prog.get_noninternal_lemma_names prog)
        in
        let procs_to_verify, lemmas_to_verify =
          if !Config.Verification.verify_only_some_of_the_things then
            ( SS.inter procs_to_verify
                (SS.of_list !Config.Verification.procs_to_verify),
              SS.inter lemmas_to_verify
                (SS.of_list !Config.Verification.lemmas_to_verify) )
          else (procs_to_verify, lemmas_to_verify)
        in
        let () =
          verify_procs ~init_data prog procs_to_verify lemmas_to_verify
        in
        let call_graph = SAInterpreter.call_graph in
        write_verif_results cur_source_files call_graph ~diff:"" global_results
    in
    L.Phase.with_normal ~title:"Program verification" (fun () ->
        f prog incremental source_files)
  (* Debugger support: builds one symbolic test per spec without running
     verification, so the debugger can step through a chosen test. *)
  module Debug = struct
    (* Returns [(spec_name, test)] pairs, one per spec in [prog] (specs
       whose testification yields no test are dropped via [let+]/Option).
       Side effect: replaces each proc's spec with its testified version.
       NOTE(review): [Result.get_ok] assumes predicate UP initialisation
       cannot fail here — confirm. *)
    let get_tests_for_prog ~init_data (prog : prog_t) =
      let open Syntaxes.Option in
      let ipreds = UP.init_preds prog.preds in
      let preds = Result.get_ok ipreds in
      let pred_ins =
        Hashtbl.fold
          (fun name (pred : UP.pred) pred_ins ->
            Hashtbl.add pred_ins name pred.pred.pred_ins;
            pred_ins)
          preds
          (Hashtbl.create Config.medium_tbl_size)
      in
      let specs = Prog.get_specs prog in
      let tests =
        specs
        |> List.filter_map (fun (spec : Spec.t) ->
               let tests, new_spec =
                 testify_spec ~init_data spec.spec_name preds pred_ins spec
               in
               (* Only the first test is kept; log when there were more. *)
               if List.length tests > 1 then
                 DL.log (fun m ->
                     let tests_json =
                       ("tests", `List (List.map to_yojson tests))
                     in
                     let spec_json = ("spec", Spec.to_yojson spec) in
                     m ~json:[ tests_json; spec_json ]
                       "Spec for %s gave multiple tests???" spec.spec_name);
               let+ test = List_utils.hd_opt tests in
               let proc = Prog.get_proc_exn prog spec.spec_name in
               Hashtbl.replace prog.procs proc.proc_name
                 { proc with proc_spec = Some new_spec };
               (spec.spec_name, test))
      in
      DL.log (fun m ->
          m
            ~json:[ ("tests", proc_tests_to_yojson tests) ]
            "Verifier.Debug.get_tests_for_prog: Got tests");
      tests
    (* Analyses a single proc-test result under the given report parent. *)
    let analyse_result test parent_id result =
      analyse_proc_result test Normal ~parent_id result
  end
end
(* Convenience functor: instantiates the verifier [Make] from just a
   symbolic memory model, wiring [SState.Make (SMemory)] into a [PState]
   with the standard symbolic values, substitutions, store and preds. *)
module From_scratch
    (SMemory : SMemory.S)
    (PC : ParserAndCompiler.S)
    (External : External.T(PC.Annot).S) =
struct
  module INTERNAL__ = struct
    module SState = SState.Make (SMemory)
  end
  include
    Make
      (INTERNAL__.SState)
      (PState.Make (SVal.M) (SVal.SESubst) (SStore) (INTERNAL__.SState)
         (Preds.SPreds))
      (PC)
      (External)
end
|
0f44f8b9a07d192b83991f7869372c148ed6b9a629dbd5457179d547113ecc16 | clojure-interop/aws-api | AbstractAmazonInspectorAsync.clj | (ns com.amazonaws.services.inspector.AbstractAmazonInspectorAsync
"Abstract implementation of AmazonInspectorAsync. Convenient method forms pass through to the corresponding
overload that takes a request object and an AsyncHandler, which throws an
UnsupportedOperationException."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.inspector AbstractAmazonInspectorAsync]))
;; Thin wrappers delegating to the underlying AbstractAmazonInspectorAsync;
;; written as direct interop calls rather than `->` threading.
(defn stop-assessment-run-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.StopAssessmentRunRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the StopAssessmentRun operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.StopAssessmentRunResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.StopAssessmentRunRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.stopAssessmentRunAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.StopAssessmentRunRequest request]
   (.stopAssessmentRunAsync this request)))

(defn list-rules-packages-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.ListRulesPackagesRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the ListRulesPackages operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListRulesPackagesResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListRulesPackagesRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listRulesPackagesAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListRulesPackagesRequest request]
   (.listRulesPackagesAsync this request)))

(defn create-assessment-template-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.CreateAssessmentTemplateRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the CreateAssessmentTemplate operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.CreateAssessmentTemplateResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.CreateAssessmentTemplateRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.createAssessmentTemplateAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.CreateAssessmentTemplateRequest request]
   (.createAssessmentTemplateAsync this request)))

(defn describe-exclusions-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.DescribeExclusionsRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the DescribeExclusions operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeExclusionsResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DescribeExclusionsRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.describeExclusionsAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DescribeExclusionsRequest request]
   (.describeExclusionsAsync this request)))
;; Direct interop delegation; behavior identical to the threaded originals.
(defn delete-assessment-target-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.DeleteAssessmentTargetRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the DeleteAssessmentTarget operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DeleteAssessmentTargetResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DeleteAssessmentTargetRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.deleteAssessmentTargetAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DeleteAssessmentTargetRequest request]
   (.deleteAssessmentTargetAsync this request)))

(defn remove-attributes-from-findings-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.RemoveAttributesFromFindingsRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the RemoveAttributesFromFindings operation returned by the
  service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.RemoveAttributesFromFindingsResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.RemoveAttributesFromFindingsRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.removeAttributesFromFindingsAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.RemoveAttributesFromFindingsRequest request]
   (.removeAttributesFromFindingsAsync this request)))

(defn list-event-subscriptions-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.ListEventSubscriptionsRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the ListEventSubscriptions operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListEventSubscriptionsResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListEventSubscriptionsRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listEventSubscriptionsAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListEventSubscriptionsRequest request]
   (.listEventSubscriptionsAsync this request)))

(defn describe-resource-groups-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.DescribeResourceGroupsRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the DescribeResourceGroups operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeResourceGroupsResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DescribeResourceGroupsRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.describeResourceGroupsAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DescribeResourceGroupsRequest request]
   (.describeResourceGroupsAsync this request)))
;; Direct interop delegation; behavior identical to the threaded originals.
(defn list-assessment-targets-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.ListAssessmentTargetsRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the ListAssessmentTargets operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListAssessmentTargetsResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListAssessmentTargetsRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listAssessmentTargetsAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListAssessmentTargetsRequest request]
   (.listAssessmentTargetsAsync this request)))

(defn get-assessment-report-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.GetAssessmentReportRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the GetAssessmentReport operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.GetAssessmentReportResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.GetAssessmentReportRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.getAssessmentReportAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.GetAssessmentReportRequest request]
   (.getAssessmentReportAsync this request)))

(defn get-exclusions-preview-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.GetExclusionsPreviewRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the GetExclusionsPreview operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.GetExclusionsPreviewResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.GetExclusionsPreviewRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.getExclusionsPreviewAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.GetExclusionsPreviewRequest request]
   (.getExclusionsPreviewAsync this request)))

(defn add-attributes-to-findings-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.AddAttributesToFindingsRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the AddAttributesToFindings operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.AddAttributesToFindingsResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.AddAttributesToFindingsRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.addAttributesToFindingsAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.AddAttributesToFindingsRequest request]
   (.addAttributesToFindingsAsync this request)))
;; Direct interop delegation; behavior identical to the threaded originals.
(defn list-assessment-run-agents-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.ListAssessmentRunAgentsRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the ListAssessmentRunAgents operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListAssessmentRunAgentsResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListAssessmentRunAgentsRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listAssessmentRunAgentsAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListAssessmentRunAgentsRequest request]
   (.listAssessmentRunAgentsAsync this request)))

(defn create-assessment-target-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.CreateAssessmentTargetRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the CreateAssessmentTarget operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.CreateAssessmentTargetResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.CreateAssessmentTargetRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.createAssessmentTargetAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.CreateAssessmentTargetRequest request]
   (.createAssessmentTargetAsync this request)))

(defn unsubscribe-from-event-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.UnsubscribeFromEventRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the UnsubscribeFromEvent operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.UnsubscribeFromEventResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.UnsubscribeFromEventRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.unsubscribeFromEventAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.UnsubscribeFromEventRequest request]
   (.unsubscribeFromEventAsync this request)))

(defn describe-assessment-templates-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.DescribeAssessmentTemplatesRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the DescribeAssessmentTemplates operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeAssessmentTemplatesResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DescribeAssessmentTemplatesRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.describeAssessmentTemplatesAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DescribeAssessmentTemplatesRequest request]
   (.describeAssessmentTemplatesAsync this request)))
;; Direct interop delegation; behavior identical to the threaded originals.
(defn create-exclusions-preview-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.CreateExclusionsPreviewRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the CreateExclusionsPreview operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.CreateExclusionsPreviewResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.CreateExclusionsPreviewRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.createExclusionsPreviewAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.CreateExclusionsPreviewRequest request]
   (.createExclusionsPreviewAsync this request)))

(defn describe-rules-packages-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.DescribeRulesPackagesRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the DescribeRulesPackages operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeRulesPackagesResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DescribeRulesPackagesRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.describeRulesPackagesAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DescribeRulesPackagesRequest request]
   (.describeRulesPackagesAsync this request)))

(defn list-assessment-templates-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.ListAssessmentTemplatesRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the ListAssessmentTemplates operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListAssessmentTemplatesResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListAssessmentTemplatesRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listAssessmentTemplatesAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListAssessmentTemplatesRequest request]
   (.listAssessmentTemplatesAsync this request)))

(defn start-assessment-run-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.StartAssessmentRunRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the StartAssessmentRun operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.StartAssessmentRunResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.StartAssessmentRunRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.startAssessmentRunAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.StartAssessmentRunRequest request]
   (.startAssessmentRunAsync this request)))
;; Direct interop delegation; behavior identical to the threaded originals.
(defn list-assessment-runs-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.ListAssessmentRunsRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the ListAssessmentRuns operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListAssessmentRunsResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListAssessmentRunsRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listAssessmentRunsAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListAssessmentRunsRequest request]
   (.listAssessmentRunsAsync this request)))

(defn delete-assessment-run-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.DeleteAssessmentRunRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the DeleteAssessmentRun operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DeleteAssessmentRunResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DeleteAssessmentRunRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.deleteAssessmentRunAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.DeleteAssessmentRunRequest request]
   (.deleteAssessmentRunAsync this request)))

(defn update-assessment-target-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.UpdateAssessmentTargetRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the UpdateAssessmentTarget operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.UpdateAssessmentTargetResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.UpdateAssessmentTargetRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.updateAssessmentTargetAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.UpdateAssessmentTargetRequest request]
   (.updateAssessmentTargetAsync this request)))

(defn list-tags-for-resource-async
  "Description copied from interface: AmazonInspectorAsync
  request - `com.amazonaws.services.inspector.model.ListTagsForResourceRequest`
  async-handler - `com.amazonaws.handlers.AsyncHandler`
  returns: A Java Future containing the result of the ListTagsForResource operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListTagsForResourceResult>`"
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListTagsForResourceRequest request
    ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listTagsForResourceAsync this request async-handler))
  (^java.util.concurrent.Future
   [^AbstractAmazonInspectorAsync this
    ^com.amazonaws.services.inspector.model.ListTagsForResourceRequest request]
   (.listTagsForResourceAsync this request)))
(defn describe-assessment-targets-async
  "Delegates to AmazonInspectorAsync/describeAssessmentTargetsAsync.
  request - `com.amazonaws.services.inspector.model.DescribeAssessmentTargetsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeAssessmentTargetsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeAssessmentTargetsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.describeAssessmentTargetsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeAssessmentTargetsRequest request]
   (.describeAssessmentTargetsAsync this request)))
(defn list-exclusions-async
  "Delegates to AmazonInspectorAsync/listExclusionsAsync.
  request - `com.amazonaws.services.inspector.model.ListExclusionsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListExclusionsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListExclusionsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.listExclusionsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListExclusionsRequest request]
   (.listExclusionsAsync this request)))
(defn get-telemetry-metadata-async
  "Delegates to AmazonInspectorAsync/getTelemetryMetadataAsync.
  request - `com.amazonaws.services.inspector.model.GetTelemetryMetadataRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.GetTelemetryMetadataResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.GetTelemetryMetadataRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.getTelemetryMetadataAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.GetTelemetryMetadataRequest request]
   (.getTelemetryMetadataAsync this request)))
(defn describe-assessment-runs-async
  "Delegates to AmazonInspectorAsync/describeAssessmentRunsAsync.
  request - `com.amazonaws.services.inspector.model.DescribeAssessmentRunsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeAssessmentRunsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeAssessmentRunsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.describeAssessmentRunsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeAssessmentRunsRequest request]
   (.describeAssessmentRunsAsync this request)))
(defn subscribe-to-event-async
  "Delegates to AmazonInspectorAsync/subscribeToEventAsync.
  request - `com.amazonaws.services.inspector.model.SubscribeToEventRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.SubscribeToEventResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.SubscribeToEventRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.subscribeToEventAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.SubscribeToEventRequest request]
   (.subscribeToEventAsync this request)))
(defn describe-findings-async
  "Delegates to AmazonInspectorAsync/describeFindingsAsync.
  request - `com.amazonaws.services.inspector.model.DescribeFindingsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeFindingsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeFindingsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.describeFindingsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeFindingsRequest request]
   (.describeFindingsAsync this request)))
(defn set-tags-for-resource-async
  "Delegates to AmazonInspectorAsync/setTagsForResourceAsync.
  request - `com.amazonaws.services.inspector.model.SetTagsForResourceRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.SetTagsForResourceResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.SetTagsForResourceRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.setTagsForResourceAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.SetTagsForResourceRequest request]
   (.setTagsForResourceAsync this request)))
(defn delete-assessment-template-async
  "Delegates to AmazonInspectorAsync/deleteAssessmentTemplateAsync.
  request - `com.amazonaws.services.inspector.model.DeleteAssessmentTemplateRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DeleteAssessmentTemplateResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DeleteAssessmentTemplateRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.deleteAssessmentTemplateAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DeleteAssessmentTemplateRequest request]
   (.deleteAssessmentTemplateAsync this request)))
(defn list-findings-async
  "Delegates to AmazonInspectorAsync/listFindingsAsync.
  request - `com.amazonaws.services.inspector.model.ListFindingsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListFindingsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListFindingsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.listFindingsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListFindingsRequest request]
   (.listFindingsAsync this request)))
(defn register-cross-account-access-role-async
  "Delegates to AmazonInspectorAsync/registerCrossAccountAccessRoleAsync.
  request - `com.amazonaws.services.inspector.model.RegisterCrossAccountAccessRoleRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.RegisterCrossAccountAccessRoleResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.RegisterCrossAccountAccessRoleRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.registerCrossAccountAccessRoleAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.RegisterCrossAccountAccessRoleRequest request]
   (.registerCrossAccountAccessRoleAsync this request)))
(defn preview-agents-async
  "Delegates to AmazonInspectorAsync/previewAgentsAsync.
  request - `com.amazonaws.services.inspector.model.PreviewAgentsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.PreviewAgentsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.PreviewAgentsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.previewAgentsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.PreviewAgentsRequest request]
   (.previewAgentsAsync this request)))
(defn create-resource-group-async
  "Delegates to AmazonInspectorAsync/createResourceGroupAsync.
  request - `com.amazonaws.services.inspector.model.CreateResourceGroupRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.CreateResourceGroupResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.CreateResourceGroupRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.createResourceGroupAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.CreateResourceGroupRequest request]
   (.createResourceGroupAsync this request)))
(defn describe-cross-account-access-role-async
  "Delegates to AmazonInspectorAsync/describeCrossAccountAccessRoleAsync.
  request - `com.amazonaws.services.inspector.model.DescribeCrossAccountAccessRoleRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeCrossAccountAccessRoleResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeCrossAccountAccessRoleRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.describeCrossAccountAccessRoleAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeCrossAccountAccessRoleRequest request]
   (.describeCrossAccountAccessRoleAsync this request)))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.inspector/src/com/amazonaws/services/inspector/AbstractAmazonInspectorAsync.clj | clojure | (ns com.amazonaws.services.inspector.AbstractAmazonInspectorAsync
"Abstract implementation of AmazonInspectorAsync. Convenient method forms pass through to the corresponding
overload that takes a request object and an AsyncHandler, which throws an
UnsupportedOperationException."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.inspector AbstractAmazonInspectorAsync]))
(defn stop-assessment-run-async
  "Delegates to AmazonInspectorAsync/stopAssessmentRunAsync.
  request - `com.amazonaws.services.inspector.model.StopAssessmentRunRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.StopAssessmentRunResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.StopAssessmentRunRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.stopAssessmentRunAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.StopAssessmentRunRequest request]
   (.stopAssessmentRunAsync this request)))
(defn list-rules-packages-async
  "Delegates to AmazonInspectorAsync/listRulesPackagesAsync.
  request - `com.amazonaws.services.inspector.model.ListRulesPackagesRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListRulesPackagesResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListRulesPackagesRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.listRulesPackagesAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListRulesPackagesRequest request]
   (.listRulesPackagesAsync this request)))
(defn create-assessment-template-async
  "Delegates to AmazonInspectorAsync/createAssessmentTemplateAsync.
  request - `com.amazonaws.services.inspector.model.CreateAssessmentTemplateRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.CreateAssessmentTemplateResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.CreateAssessmentTemplateRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.createAssessmentTemplateAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.CreateAssessmentTemplateRequest request]
   (.createAssessmentTemplateAsync this request)))
(defn describe-exclusions-async
  "Delegates to AmazonInspectorAsync/describeExclusionsAsync.
  request - `com.amazonaws.services.inspector.model.DescribeExclusionsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeExclusionsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeExclusionsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.describeExclusionsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeExclusionsRequest request]
   (.describeExclusionsAsync this request)))
(defn delete-assessment-target-async
  "Delegates to AmazonInspectorAsync/deleteAssessmentTargetAsync.
  request - `com.amazonaws.services.inspector.model.DeleteAssessmentTargetRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DeleteAssessmentTargetResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DeleteAssessmentTargetRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.deleteAssessmentTargetAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DeleteAssessmentTargetRequest request]
   (.deleteAssessmentTargetAsync this request)))
(defn remove-attributes-from-findings-async
  "Delegates to AmazonInspectorAsync/removeAttributesFromFindingsAsync.
  request - `com.amazonaws.services.inspector.model.RemoveAttributesFromFindingsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.RemoveAttributesFromFindingsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.RemoveAttributesFromFindingsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.removeAttributesFromFindingsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.RemoveAttributesFromFindingsRequest request]
   (.removeAttributesFromFindingsAsync this request)))
(defn list-event-subscriptions-async
  "Delegates to AmazonInspectorAsync/listEventSubscriptionsAsync.
  request - `com.amazonaws.services.inspector.model.ListEventSubscriptionsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListEventSubscriptionsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListEventSubscriptionsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.listEventSubscriptionsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListEventSubscriptionsRequest request]
   (.listEventSubscriptionsAsync this request)))
(defn describe-resource-groups-async
  "Delegates to AmazonInspectorAsync/describeResourceGroupsAsync.
  request - `com.amazonaws.services.inspector.model.DescribeResourceGroupsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeResourceGroupsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeResourceGroupsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.describeResourceGroupsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeResourceGroupsRequest request]
   (.describeResourceGroupsAsync this request)))
(defn list-assessment-targets-async
  "Delegates to AmazonInspectorAsync/listAssessmentTargetsAsync.
  request - `com.amazonaws.services.inspector.model.ListAssessmentTargetsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListAssessmentTargetsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListAssessmentTargetsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.listAssessmentTargetsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListAssessmentTargetsRequest request]
   (.listAssessmentTargetsAsync this request)))
(defn get-assessment-report-async
  "Delegates to AmazonInspectorAsync/getAssessmentReportAsync.
  request - `com.amazonaws.services.inspector.model.GetAssessmentReportRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.GetAssessmentReportResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.GetAssessmentReportRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.getAssessmentReportAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.GetAssessmentReportRequest request]
   (.getAssessmentReportAsync this request)))
(defn get-exclusions-preview-async
  "Delegates to AmazonInspectorAsync/getExclusionsPreviewAsync.
  request - `com.amazonaws.services.inspector.model.GetExclusionsPreviewRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.GetExclusionsPreviewResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.GetExclusionsPreviewRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.getExclusionsPreviewAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.GetExclusionsPreviewRequest request]
   (.getExclusionsPreviewAsync this request)))
(defn add-attributes-to-findings-async
  "Delegates to AmazonInspectorAsync/addAttributesToFindingsAsync.
  request - `com.amazonaws.services.inspector.model.AddAttributesToFindingsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.AddAttributesToFindingsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.AddAttributesToFindingsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.addAttributesToFindingsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.AddAttributesToFindingsRequest request]
   (.addAttributesToFindingsAsync this request)))
(defn list-assessment-run-agents-async
  "Delegates to AmazonInspectorAsync/listAssessmentRunAgentsAsync.
  request - `com.amazonaws.services.inspector.model.ListAssessmentRunAgentsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListAssessmentRunAgentsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListAssessmentRunAgentsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.listAssessmentRunAgentsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListAssessmentRunAgentsRequest request]
   (.listAssessmentRunAgentsAsync this request)))
(defn create-assessment-target-async
  "Delegates to AmazonInspectorAsync/createAssessmentTargetAsync.
  request - `com.amazonaws.services.inspector.model.CreateAssessmentTargetRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.CreateAssessmentTargetResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.CreateAssessmentTargetRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.createAssessmentTargetAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.CreateAssessmentTargetRequest request]
   (.createAssessmentTargetAsync this request)))
(defn unsubscribe-from-event-async
  "Delegates to AmazonInspectorAsync/unsubscribeFromEventAsync.
  request - `com.amazonaws.services.inspector.model.UnsubscribeFromEventRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.UnsubscribeFromEventResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.UnsubscribeFromEventRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.unsubscribeFromEventAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.UnsubscribeFromEventRequest request]
   (.unsubscribeFromEventAsync this request)))
(defn describe-assessment-templates-async
  "Delegates to AmazonInspectorAsync/describeAssessmentTemplatesAsync.
  request - `com.amazonaws.services.inspector.model.DescribeAssessmentTemplatesRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeAssessmentTemplatesResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeAssessmentTemplatesRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.describeAssessmentTemplatesAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeAssessmentTemplatesRequest request]
   (.describeAssessmentTemplatesAsync this request)))
(defn create-exclusions-preview-async
  "Delegates to AmazonInspectorAsync/createExclusionsPreviewAsync.
  request - `com.amazonaws.services.inspector.model.CreateExclusionsPreviewRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.CreateExclusionsPreviewResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.CreateExclusionsPreviewRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.createExclusionsPreviewAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.CreateExclusionsPreviewRequest request]
   (.createExclusionsPreviewAsync this request)))
(defn describe-rules-packages-async
  "Delegates to AmazonInspectorAsync/describeRulesPackagesAsync.
  request - `com.amazonaws.services.inspector.model.DescribeRulesPackagesRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeRulesPackagesResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeRulesPackagesRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.describeRulesPackagesAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeRulesPackagesRequest request]
   (.describeRulesPackagesAsync this request)))
(defn list-assessment-templates-async
  "Delegates to AmazonInspectorAsync/listAssessmentTemplatesAsync.
  request - `com.amazonaws.services.inspector.model.ListAssessmentTemplatesRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListAssessmentTemplatesResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListAssessmentTemplatesRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.listAssessmentTemplatesAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListAssessmentTemplatesRequest request]
   (.listAssessmentTemplatesAsync this request)))
(defn start-assessment-run-async
  "Delegates to AmazonInspectorAsync/startAssessmentRunAsync.
  request - `com.amazonaws.services.inspector.model.StartAssessmentRunRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.StartAssessmentRunResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.StartAssessmentRunRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.startAssessmentRunAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.StartAssessmentRunRequest request]
   (.startAssessmentRunAsync this request)))
(defn list-assessment-runs-async
  "Delegates to AmazonInspectorAsync/listAssessmentRunsAsync.
  request - `com.amazonaws.services.inspector.model.ListAssessmentRunsRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListAssessmentRunsResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListAssessmentRunsRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.listAssessmentRunsAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListAssessmentRunsRequest request]
   (.listAssessmentRunsAsync this request)))
(defn delete-assessment-run-async
  "Delegates to AmazonInspectorAsync/deleteAssessmentRunAsync.
  request - `com.amazonaws.services.inspector.model.DeleteAssessmentRunRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DeleteAssessmentRunResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DeleteAssessmentRunRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.deleteAssessmentRunAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DeleteAssessmentRunRequest request]
   (.deleteAssessmentRunAsync this request)))
(defn update-assessment-target-async
  "Delegates to AmazonInspectorAsync/updateAssessmentTargetAsync.
  request - `com.amazonaws.services.inspector.model.UpdateAssessmentTargetRequest`
  handler - optional `com.amazonaws.handlers.AsyncHandler` notified on completion
  returns: `java.util.concurrent.Future<com.amazonaws.services.inspector.model.UpdateAssessmentTargetResult>`"
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.UpdateAssessmentTargetRequest request ^com.amazonaws.handlers.AsyncHandler handler]
   (.updateAssessmentTargetAsync this request handler))
  (^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.UpdateAssessmentTargetRequest request]
   (.updateAssessmentTargetAsync this request)))
(defn list-tags-for-resource-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.ListTagsForResourceRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the ListTagsForResource operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListTagsForResourceResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListTagsForResourceRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.listTagsForResourceAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListTagsForResourceRequest request]
(-> this (.listTagsForResourceAsync request))))
(defn describe-assessment-targets-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.DescribeAssessmentTargetsRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the DescribeAssessmentTargets operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeAssessmentTargetsResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeAssessmentTargetsRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.describeAssessmentTargetsAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeAssessmentTargetsRequest request]
(-> this (.describeAssessmentTargetsAsync request))))
(defn list-exclusions-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.ListExclusionsRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the ListExclusions operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListExclusionsResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListExclusionsRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.listExclusionsAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListExclusionsRequest request]
(-> this (.listExclusionsAsync request))))
(defn get-telemetry-metadata-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.GetTelemetryMetadataRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the GetTelemetryMetadata operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.GetTelemetryMetadataResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.GetTelemetryMetadataRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.getTelemetryMetadataAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.GetTelemetryMetadataRequest request]
(-> this (.getTelemetryMetadataAsync request))))
(defn describe-assessment-runs-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.DescribeAssessmentRunsRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the DescribeAssessmentRuns operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeAssessmentRunsResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeAssessmentRunsRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.describeAssessmentRunsAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeAssessmentRunsRequest request]
(-> this (.describeAssessmentRunsAsync request))))
(defn subscribe-to-event-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.SubscribeToEventRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the SubscribeToEvent operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.SubscribeToEventResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.SubscribeToEventRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.subscribeToEventAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.SubscribeToEventRequest request]
(-> this (.subscribeToEventAsync request))))
(defn describe-findings-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.DescribeFindingsRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the DescribeFindings operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeFindingsResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeFindingsRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.describeFindingsAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeFindingsRequest request]
(-> this (.describeFindingsAsync request))))
(defn set-tags-for-resource-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.SetTagsForResourceRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the SetTagsForResource operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.SetTagsForResourceResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.SetTagsForResourceRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.setTagsForResourceAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.SetTagsForResourceRequest request]
(-> this (.setTagsForResourceAsync request))))
(defn delete-assessment-template-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.DeleteAssessmentTemplateRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the DeleteAssessmentTemplate operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DeleteAssessmentTemplateResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DeleteAssessmentTemplateRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.deleteAssessmentTemplateAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DeleteAssessmentTemplateRequest request]
(-> this (.deleteAssessmentTemplateAsync request))))
(defn list-findings-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.ListFindingsRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the ListFindings operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.ListFindingsResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListFindingsRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.listFindingsAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.ListFindingsRequest request]
(-> this (.listFindingsAsync request))))
(defn register-cross-account-access-role-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.RegisterCrossAccountAccessRoleRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the RegisterCrossAccountAccessRole operation returned by the
service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.RegisterCrossAccountAccessRoleResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.RegisterCrossAccountAccessRoleRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.registerCrossAccountAccessRoleAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.RegisterCrossAccountAccessRoleRequest request]
(-> this (.registerCrossAccountAccessRoleAsync request))))
(defn preview-agents-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.PreviewAgentsRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the PreviewAgents operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.PreviewAgentsResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.PreviewAgentsRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.previewAgentsAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.PreviewAgentsRequest request]
(-> this (.previewAgentsAsync request))))
(defn create-resource-group-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.CreateResourceGroupRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the CreateResourceGroup operation returned by the service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.CreateResourceGroupResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.CreateResourceGroupRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.createResourceGroupAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.CreateResourceGroupRequest request]
(-> this (.createResourceGroupAsync request))))
(defn describe-cross-account-access-role-async
"Description copied from interface: AmazonInspectorAsync
request - `com.amazonaws.services.inspector.model.DescribeCrossAccountAccessRoleRequest`
async-handler - `com.amazonaws.handlers.AsyncHandler`
returns: A Java Future containing the result of the DescribeCrossAccountAccessRole operation returned by the
service. - `java.util.concurrent.Future<com.amazonaws.services.inspector.model.DescribeCrossAccountAccessRoleResult>`"
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeCrossAccountAccessRoleRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
(-> this (.describeCrossAccountAccessRoleAsync request async-handler)))
(^java.util.concurrent.Future [^AbstractAmazonInspectorAsync this ^com.amazonaws.services.inspector.model.DescribeCrossAccountAccessRoleRequest request]
(-> this (.describeCrossAccountAccessRoleAsync request))))
| |
db3ad9846c66ab8d72baa2ef0e89d016302f14aa1f29023b68175b9a1d1f4d70 | stackbuilders/stache | Parser.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
Module : Text . Mustache .
Copyright : © 2016 – present Stack Builders
License : BSD 3 clause
--
Maintainer : < >
-- Stability : experimental
-- Portability : portable
--
parser for Mustache templates . You do n't usually need to
-- import the module, because "Text.Mustache" re-exports everything you may
-- need, import that module instead.
module Text.Mustache.Parser
( parseMustache,
)
where
import Control.Monad
import Control.Monad.State.Strict
import Data.Char (isSpace)
import Data.Maybe (catMaybes)
import Data.Text (Text, stripEnd)
import qualified Data.Text as T
import Data.Void
import Text.Megaparsec
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
import Text.Mustache.Type
----------------------------------------------------------------------------
Parser
-- | Parse a given Mustache template.
parseMustache ::
-- | Location of the file to parse
FilePath ->
-- | File contents (Mustache template)
Text ->
| nodes or parse error
Either (ParseErrorBundle Text Void) [Node]
parseMustache =
parse $
evalStateT (pMustache eof) (St "{{" "}}" 0)
pMustache :: Parser () -> Parser [Node]
pMustache = fmap catMaybes . manyTill (choice alts)
where
alts =
[ Nothing <$ withStandalone pComment,
Just <$> pSection "#" Section,
Just <$> pSection "^" InvertedSection,
Just <$> pStandalone (pPartial Just),
Just <$> pPartial (const Nothing),
Nothing <$ withStandalone pSetDelimiters,
Just <$> pUnescapedVariable,
Just <$> pUnescapedSpecial,
Just <$> pEscapedVariable,
Just <$> pTextBlock
]
# INLINE pMustache #
pTextBlock :: Parser Node
pTextBlock = do
start <- gets openingDel
txt <- fmap T.concat . many $ do
(void . notFollowedBy . string) start
let textChar x = x /= T.head start && x /= '\n'
string (T.take 1 start) <|> takeWhile1P (Just "text char") textChar
meol <- optional eol'
return $ case meol of
Nothing -> TextBlock txt
Just txt' -> TextBlock (txt <> txt')
# INLINE pTextBlock #
pUnescapedVariable :: Parser Node
pUnescapedVariable = UnescapedVar <$> pTag "&"
# INLINE pUnescapedVariable #
pUnescapedSpecial :: Parser Node
pUnescapedSpecial = do
start <- gets openingDel
end <- gets closingDel
between (symbol $ start <> "{") (string $ "}" <> end) $
UnescapedVar <$> pKey
{-# INLINE pUnescapedSpecial #-}
pSection :: Text -> (Key -> [Node] -> Node) -> Parser Node
pSection suffix f = do
key <- withStandalone (pTag suffix)
nodes <- (pMustache . withStandalone . pClosingTag) key
return (f key nodes)
# INLINE pSection #
pPartial :: (Pos -> Maybe Pos) -> Parser Node
pPartial f = do
pos <- f <$> L.indentLevel
key <- pTag ">"
let pname = PName $ T.intercalate (T.pack ".") (unKey key)
return (Partial pname pos)
{-# INLINE pPartial #-}
pComment :: Parser ()
pComment = void $ do
start <- gets openingDel
end <- gets closingDel
(void . symbol) (start <> "!")
manyTill (anySingle <?> "character") (string end)
# INLINE pComment #
pSetDelimiters :: Parser ()
pSetDelimiters = void $ do
start <- gets openingDel
end <- gets closingDel
(void . symbol) (start <> "=")
start' <- pDelimiter <* scn
end' <- pDelimiter <* scn
(void . string) ("=" <> end)
modify' $ \st ->
st
{ openingDel = start',
closingDel = end'
}
# INLINE pSetDelimiters #
pEscapedVariable :: Parser Node
pEscapedVariable = EscapedVar <$> pTag ""
# INLINE pEscapedVariable #
withStandalone :: Parser a -> Parser a
withStandalone p = pStandalone p <|> p
# INLINE withStandalone #
pStandalone :: Parser a -> Parser a
pStandalone p = pBol *> try (between sc (sc <* (void eol' <|> eof)) p)
# INLINE pStandalone #
pTag :: Text -> Parser Key
pTag suffix = do
start <- gets openingDel
end <- gets closingDel
between (symbol $ start <> suffix) (string end) pKey
# INLINE pTag #
pClosingTag :: Key -> Parser ()
pClosingTag key = do
start <- gets openingDel
end <- gets closingDel
let str = keyToText key
void $ between (symbol $ start <> "/") (string end) (symbol str)
# INLINE pClosingTag #
pKey :: Parser Key
pKey = (fmap Key . lexeme . label "key") (implicit <|> other)
where
implicit = [] <$ char '.'
other = do
end <- gets closingDel
let f x = x `notElem` ('.' : '}' : T.unpack end)
lbl = "key-constituent characters"
stripLast <$> sepBy1 (takeWhile1P (Just lbl) f) (char '.')
stripLast [] = []
stripLast [x] = [stripEnd x]
stripLast (x0 : x1 : xs) = x0 : stripLast (x1 : xs)
# INLINE pKey #
pDelimiter :: Parser Text
pDelimiter = takeWhile1P (Just "delimiter char") delChar <?> "delimiter"
where
delChar x = not (isSpace x) && x /= '='
# INLINE pDelimiter #
pBol :: Parser ()
pBol = do
o <- getOffset
o' <- gets newlineOffset
unless (o == o') empty
# INLINE pBol #
----------------------------------------------------------------------------
-- Auxiliary types
-- | Type of Mustache parser monad stack.
type Parser = StateT St (Parsec Void Text)
-- | State used in the parser.
data St = St
{ -- | Opening delimiter
openingDel :: Text,
-- | Closing delimiter
closingDel :: Text,
-- | The offset at which last newline character was parsed
newlineOffset :: !Int
}
----------------------------------------------------------------------------
helpers and other
scn :: Parser ()
scn = L.space space1 empty empty
# INLINE scn #
sc :: Parser ()
sc = L.space (void $ takeWhile1P Nothing f) empty empty
where
f x = x == ' ' || x == '\t'
# INLINE sc #
lexeme :: Parser a -> Parser a
lexeme = L.lexeme scn
# INLINE lexeme #
symbol :: Text -> Parser Text
symbol = L.symbol scn
# INLINE symbol #
keyToText :: Key -> Text
keyToText (Key []) = "."
keyToText (Key ks) = T.intercalate "." ks
# INLINE keyToText #
eol' :: Parser Text
eol' = do
x <- eol
o <- getOffset
modify' (\st -> st {newlineOffset = o})
return x
{-# INLINE eol' #-}
| null | https://raw.githubusercontent.com/stackbuilders/stache/e201682835d87a9e2f51ccba7b5f47866cf05f87/Text/Mustache/Parser.hs | haskell | # LANGUAGE OverloadedStrings #
|
Stability : experimental
Portability : portable
import the module, because "Text.Mustache" re-exports everything you may
need, import that module instead.
--------------------------------------------------------------------------
| Parse a given Mustache template.
| Location of the file to parse
| File contents (Mustache template)
# INLINE pUnescapedSpecial #
# INLINE pPartial #
--------------------------------------------------------------------------
Auxiliary types
| Type of Mustache parser monad stack.
| State used in the parser.
| Opening delimiter
| Closing delimiter
| The offset at which last newline character was parsed
--------------------------------------------------------------------------
# INLINE eol' # |
Module : Text . Mustache .
Copyright : © 2016 – present Stack Builders
License : BSD 3 clause
Maintainer : < >
parser for Mustache templates . You do n't usually need to
module Text.Mustache.Parser
( parseMustache,
)
where
import Control.Monad
import Control.Monad.State.Strict
import Data.Char (isSpace)
import Data.Maybe (catMaybes)
import Data.Text (Text, stripEnd)
import qualified Data.Text as T
import Data.Void
import Text.Megaparsec
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
import Text.Mustache.Type
Parser
parseMustache ::
FilePath ->
Text ->
| nodes or parse error
Either (ParseErrorBundle Text Void) [Node]
parseMustache =
parse $
evalStateT (pMustache eof) (St "{{" "}}" 0)
pMustache :: Parser () -> Parser [Node]
pMustache = fmap catMaybes . manyTill (choice alts)
where
alts =
[ Nothing <$ withStandalone pComment,
Just <$> pSection "#" Section,
Just <$> pSection "^" InvertedSection,
Just <$> pStandalone (pPartial Just),
Just <$> pPartial (const Nothing),
Nothing <$ withStandalone pSetDelimiters,
Just <$> pUnescapedVariable,
Just <$> pUnescapedSpecial,
Just <$> pEscapedVariable,
Just <$> pTextBlock
]
# INLINE pMustache #
pTextBlock :: Parser Node
pTextBlock = do
start <- gets openingDel
txt <- fmap T.concat . many $ do
(void . notFollowedBy . string) start
let textChar x = x /= T.head start && x /= '\n'
string (T.take 1 start) <|> takeWhile1P (Just "text char") textChar
meol <- optional eol'
return $ case meol of
Nothing -> TextBlock txt
Just txt' -> TextBlock (txt <> txt')
# INLINE pTextBlock #
pUnescapedVariable :: Parser Node
pUnescapedVariable = UnescapedVar <$> pTag "&"
# INLINE pUnescapedVariable #
pUnescapedSpecial :: Parser Node
pUnescapedSpecial = do
start <- gets openingDel
end <- gets closingDel
between (symbol $ start <> "{") (string $ "}" <> end) $
UnescapedVar <$> pKey
pSection :: Text -> (Key -> [Node] -> Node) -> Parser Node
pSection suffix f = do
key <- withStandalone (pTag suffix)
nodes <- (pMustache . withStandalone . pClosingTag) key
return (f key nodes)
# INLINE pSection #
pPartial :: (Pos -> Maybe Pos) -> Parser Node
pPartial f = do
pos <- f <$> L.indentLevel
key <- pTag ">"
let pname = PName $ T.intercalate (T.pack ".") (unKey key)
return (Partial pname pos)
pComment :: Parser ()
pComment = void $ do
start <- gets openingDel
end <- gets closingDel
(void . symbol) (start <> "!")
manyTill (anySingle <?> "character") (string end)
# INLINE pComment #
pSetDelimiters :: Parser ()
pSetDelimiters = void $ do
start <- gets openingDel
end <- gets closingDel
(void . symbol) (start <> "=")
start' <- pDelimiter <* scn
end' <- pDelimiter <* scn
(void . string) ("=" <> end)
modify' $ \st ->
st
{ openingDel = start',
closingDel = end'
}
# INLINE pSetDelimiters #
pEscapedVariable :: Parser Node
pEscapedVariable = EscapedVar <$> pTag ""
# INLINE pEscapedVariable #
withStandalone :: Parser a -> Parser a
withStandalone p = pStandalone p <|> p
# INLINE withStandalone #
pStandalone :: Parser a -> Parser a
pStandalone p = pBol *> try (between sc (sc <* (void eol' <|> eof)) p)
# INLINE pStandalone #
pTag :: Text -> Parser Key
pTag suffix = do
start <- gets openingDel
end <- gets closingDel
between (symbol $ start <> suffix) (string end) pKey
# INLINE pTag #
pClosingTag :: Key -> Parser ()
pClosingTag key = do
start <- gets openingDel
end <- gets closingDel
let str = keyToText key
void $ between (symbol $ start <> "/") (string end) (symbol str)
# INLINE pClosingTag #
pKey :: Parser Key
pKey = (fmap Key . lexeme . label "key") (implicit <|> other)
where
implicit = [] <$ char '.'
other = do
end <- gets closingDel
let f x = x `notElem` ('.' : '}' : T.unpack end)
lbl = "key-constituent characters"
stripLast <$> sepBy1 (takeWhile1P (Just lbl) f) (char '.')
stripLast [] = []
stripLast [x] = [stripEnd x]
stripLast (x0 : x1 : xs) = x0 : stripLast (x1 : xs)
# INLINE pKey #
pDelimiter :: Parser Text
pDelimiter = takeWhile1P (Just "delimiter char") delChar <?> "delimiter"
where
delChar x = not (isSpace x) && x /= '='
# INLINE pDelimiter #
pBol :: Parser ()
pBol = do
o <- getOffset
o' <- gets newlineOffset
unless (o == o') empty
# INLINE pBol #
type Parser = StateT St (Parsec Void Text)
data St = St
openingDel :: Text,
closingDel :: Text,
newlineOffset :: !Int
}
helpers and other
scn :: Parser ()
scn = L.space space1 empty empty
# INLINE scn #
sc :: Parser ()
sc = L.space (void $ takeWhile1P Nothing f) empty empty
where
f x = x == ' ' || x == '\t'
# INLINE sc #
lexeme :: Parser a -> Parser a
lexeme = L.lexeme scn
# INLINE lexeme #
symbol :: Text -> Parser Text
symbol = L.symbol scn
# INLINE symbol #
keyToText :: Key -> Text
keyToText (Key []) = "."
keyToText (Key ks) = T.intercalate "." ks
# INLINE keyToText #
eol' :: Parser Text
eol' = do
x <- eol
o <- getOffset
modify' (\st -> st {newlineOffset = o})
return x
|
b00153c7c6fae22015a65dce89520118d53c89b3bc612efd7cfa433667b4f853 | bmeurer/ocaml-arm | stdLabels.mli | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
(* Jacques Garrigue, Kyoto University RIMS *)
(* *)
(* Copyright 2001 Institut National de Recherche en Informatique et *)
(* en Automatique.  All rights reserved.  This file is distributed *)
(* under the terms of the GNU Library General Public License, with *)
(* the special exception on linking described in file ../LICENSE. *)
(* *)
(***********************************************************************)
(* $Id$ *)
(** Standard labeled libraries.
This meta-module provides labelized version of the {!Array},
{!List} and {!String} modules.
They only differ by their labels. Detailed interfaces can be found
in [arrayLabels.mli], [listLabels.mli] and [stringLabels.mli].
*)
(** Labeled version of the {!Array} module.  Every value here behaves
    like its {!Array} counterpart; only the argument labels (such as
    [~f], [~pos], [~len], [~cmp]) differ.  See [arrayLabels.mli] for the
    detailed per-function documentation. *)
module Array :
  sig
    external length : 'a array -> int = "%array_length"
    (* [get]/[set] are bound to the bounds-checked primitives
       ("%array_safe_get"/"%array_safe_set"); compare the [unsafe_*]
       variants at the end of this signature. *)
    external get : 'a array -> int -> 'a = "%array_safe_get"
    external set : 'a array -> int -> 'a -> unit = "%array_safe_set"
    (* [create] is an alias for [make]: both are bound to the same
       primitive, "caml_make_vect". *)
    external make : int -> 'a -> 'a array = "caml_make_vect"
    external create : int -> 'a -> 'a array = "caml_make_vect"
    val init : int -> f:(int -> 'a) -> 'a array
    (* [create_matrix] has the same signature as [make_matrix]
       (presumably an alias, like [create]/[make] above). *)
    val make_matrix : dimx:int -> dimy:int -> 'a -> 'a array array
    val create_matrix : dimx:int -> dimy:int -> 'a -> 'a array array
    val append : 'a array -> 'a array -> 'a array
    val concat : 'a array list -> 'a array
    val sub : 'a array -> pos:int -> len:int -> 'a array
    val copy : 'a array -> 'a array
    (* [fill] and [blit] mutate the destination array in place (both
       return [unit]). *)
    val fill : 'a array -> pos:int -> len:int -> 'a -> unit
    val blit :
      src:'a array -> src_pos:int -> dst:'a array -> dst_pos:int -> len:int ->
      unit
    val to_list : 'a array -> 'a list
    val of_list : 'a list -> 'a array
    (* Iteration/transformation functions: the callback is always passed
       under the [~f] label. *)
    val iter : f:('a -> unit) -> 'a array -> unit
    val map : f:('a -> 'b) -> 'a array -> 'b array
    val iteri : f:(int -> 'a -> unit) -> 'a array -> unit
    val mapi : f:(int -> 'a -> 'b) -> 'a array -> 'b array
    val fold_left : f:('a -> 'b -> 'a) -> init:'a -> 'b array -> 'a
    val fold_right : f:('a -> 'b -> 'b) -> 'a array -> init:'b -> 'b
    (* The three sorts mutate their argument in place (result type is
       [unit]); the comparison function is passed under [~cmp]. *)
    val sort : cmp:('a -> 'a -> int) -> 'a array -> unit
    val stable_sort : cmp:('a -> 'a -> int) -> 'a array -> unit
    val fast_sort : cmp:('a -> 'a -> int) -> 'a array -> unit
    (* Unchecked element access ("%array_unsafe_get"/"%array_unsafe_set"):
       no bounds check is performed, so an out-of-range index is undefined
       behaviour.  Use only when the index is already known to be valid. *)
    external unsafe_get : 'a array -> int -> 'a = "%array_unsafe_get"
    external unsafe_set : 'a array -> int -> 'a -> unit = "%array_unsafe_set"
  end
(** Labeled version of the {!List} module.  Every value here behaves
    like its {!List} counterpart; only the argument labels (such as
    [~f], [~init], [~cmp], [~set], [~map]) differ.  See [listLabels.mli]
    for the detailed per-function documentation. *)
module List :
  sig
    val length : 'a list -> int
    (* NOTE(review): [hd], [tl] and [nth] mirror the unlabeled {!List}
       functions, which are partial (they raise on an empty or too-short
       list); the signature alone cannot express that. *)
    val hd : 'a list -> 'a
    val tl : 'a list -> 'a list
    val nth : 'a list -> int -> 'a
    val rev : 'a list -> 'a list
    val append : 'a list -> 'a list -> 'a list
    val rev_append : 'a list -> 'a list -> 'a list
    (* [concat] and [flatten] share the same signature (aliases in
       {!List}). *)
    val concat : 'a list list -> 'a list
    val flatten : 'a list list -> 'a list
    (* One-list iteration/transformation: the callback is always passed
       under the [~f] label. *)
    val iter : f:('a -> unit) -> 'a list -> unit
    val map : f:('a -> 'b) -> 'a list -> 'b list
    val rev_map : f:('a -> 'b) -> 'a list -> 'b list
    val fold_left : f:('a -> 'b -> 'a) -> init:'a -> 'b list -> 'a
    val fold_right : f:('a -> 'b -> 'b) -> 'a list -> init:'b -> 'b
    (* Two-list variants: the callback receives one element from each
       list in step. *)
    val iter2 : f:('a -> 'b -> unit) -> 'a list -> 'b list -> unit
    val map2 : f:('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
    val rev_map2 : f:('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
    val fold_left2 :
      f:('a -> 'b -> 'c -> 'a) -> init:'a -> 'b list -> 'c list -> 'a
    val fold_right2 :
      f:('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> init:'c -> 'c
    val for_all : f:('a -> bool) -> 'a list -> bool
    val exists : f:('a -> bool) -> 'a list -> bool
    val for_all2 : f:('a -> 'b -> bool) -> 'a list -> 'b list -> bool
    val exists2 : f:('a -> 'b -> bool) -> 'a list -> 'b list -> bool
    (* Membership and association lookups: by convention in the stdlib,
       the [q]-suffixed variants ([memq], [assq], ...) use physical
       equality (==) where the others use structural equality (=). *)
    val mem : 'a -> set:'a list -> bool
    val memq : 'a -> set:'a list -> bool
    val find : f:('a -> bool) -> 'a list -> 'a
    (* [filter] and [find_all] share the same signature (aliases in
       {!List}). *)
    val filter : f:('a -> bool) -> 'a list -> 'a list
    val find_all : f:('a -> bool) -> 'a list -> 'a list
    val partition : f:('a -> bool) -> 'a list -> 'a list * 'a list
    val assoc : 'a -> ('a * 'b) list -> 'b
    val assq : 'a -> ('a * 'b) list -> 'b
    val mem_assoc : 'a -> map:('a * 'b) list -> bool
    val mem_assq : 'a -> map:('a * 'b) list -> bool
    val remove_assoc : 'a -> ('a * 'b) list -> ('a * 'b) list
    val remove_assq : 'a -> ('a * 'b) list -> ('a * 'b) list
    val split : ('a * 'b) list -> 'a list * 'b list
    val combine : 'a list -> 'b list -> ('a * 'b) list
    (* Unlike {!Array}'s in-place sorts, these return a freshly sorted
       list; the comparison function is passed under [~cmp]. *)
    val sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
    val stable_sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
    val fast_sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
    val merge : cmp:('a -> 'a -> int) -> 'a list -> 'a list -> 'a list
  end
(** Labeled version of the {!String} module.  Every value here behaves
    like its {!String} counterpart; only the argument labels (such as
    [~pos], [~len], [~sep], [~f]) differ.  See [stringLabels.mli] for
    the detailed per-function documentation.

    Note: in this version of the stdlib, strings are mutable — [set],
    [fill] and [blit] below modify their argument in place. *)
module String :
  sig
    external length : string -> int = "%string_length"
    (* [get]/[set] are bound to the bounds-checked primitives
       ("%string_safe_get"/"%string_safe_set"); compare the [unsafe_*]
       variants at the end of this signature. *)
    external get : string -> int -> char = "%string_safe_get"
    external set : string -> int -> char -> unit = "%string_safe_set"
    (* [create n] allocates a fresh string of length [n]; its initial
       contents are unspecified.  Use [make] to get a string filled with
       a known character. *)
    external create : int -> string = "caml_create_string"
    val make : int -> char -> string
    val copy : string -> string
    val sub : string -> pos:int -> len:int -> string
    val fill : string -> pos:int -> len:int -> char -> unit
    val blit :
      src:string -> src_pos:int -> dst:string -> dst_pos:int -> len:int ->
      unit
    val concat : sep:string -> string list -> string
    val iter : f:(char -> unit) -> string -> unit
    val iteri : f:(int -> char -> unit) -> string -> unit
    val map : f:(char -> char) -> string -> string
    val trim : string -> string
    val escaped : string -> string
    (* Character searches: the [index]/[rindex] family returns a
       position; [contains]/[contains_from] only report presence.
       NOTE(review): like their {!String} counterparts, the [index]
       functions presumably raise [Not_found] when the character is
       absent — the signature cannot express that. *)
    val index : string -> char -> int
    val rindex : string -> char -> int
    val index_from : string -> int -> char -> int
    val rindex_from : string -> int -> char -> int
    val contains : string -> char -> bool
    val contains_from : string -> int -> char -> bool
    val rcontains_from : string -> int -> char -> bool
    val uppercase : string -> string
    val lowercase : string -> string
    val capitalize : string -> string
    val uncapitalize : string -> string
    (* [t] and [compare] make this module directly usable as an argument
       to functors such as [Map.Make] and [Set.Make]. *)
    type t = string
    val compare: t -> t -> int
    (* Unchecked access and bulk operations: no bounds check is
       performed, so out-of-range positions are undefined behaviour.
       The "noalloc" attribute declares that the C stubs neither
       allocate nor raise, letting the compiler skip the usual GC
       set-up around the call. *)
    external unsafe_get : string -> int -> char = "%string_unsafe_get"
    external unsafe_set : string -> int -> char -> unit = "%string_unsafe_set"
    external unsafe_blit :
      src:string -> src_pos:int -> dst:string -> dst_pos:int -> len:int ->
      unit = "caml_blit_string" "noalloc"
    external unsafe_fill : string -> pos:int -> len:int -> char -> unit
      = "caml_fill_string" "noalloc"
  end
| null | https://raw.githubusercontent.com/bmeurer/ocaml-arm/43f7689c76a349febe3d06ae7a4fc1d52984fd8b/stdlib/stdLabels.mli | ocaml | *********************************************************************
OCaml
the special exception on linking described in file ../LICENSE.
*********************************************************************
* Standard labeled libraries.
This meta-module provides labelized version of the {!Array},
{!List} and {!String} modules.
They only differ by their labels. Detailed interfaces can be found
in [arrayLabels.mli], [listLabels.mli] and [stringLabels.mli].
| , Kyoto University RIMS
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
$ Id$
module Array :
sig
external length : 'a array -> int = "%array_length"
external get : 'a array -> int -> 'a = "%array_safe_get"
external set : 'a array -> int -> 'a -> unit = "%array_safe_set"
external make : int -> 'a -> 'a array = "caml_make_vect"
external create : int -> 'a -> 'a array = "caml_make_vect"
val init : int -> f:(int -> 'a) -> 'a array
val make_matrix : dimx:int -> dimy:int -> 'a -> 'a array array
val create_matrix : dimx:int -> dimy:int -> 'a -> 'a array array
val append : 'a array -> 'a array -> 'a array
val concat : 'a array list -> 'a array
val sub : 'a array -> pos:int -> len:int -> 'a array
val copy : 'a array -> 'a array
val fill : 'a array -> pos:int -> len:int -> 'a -> unit
val blit :
src:'a array -> src_pos:int -> dst:'a array -> dst_pos:int -> len:int ->
unit
val to_list : 'a array -> 'a list
val of_list : 'a list -> 'a array
val iter : f:('a -> unit) -> 'a array -> unit
val map : f:('a -> 'b) -> 'a array -> 'b array
val iteri : f:(int -> 'a -> unit) -> 'a array -> unit
val mapi : f:(int -> 'a -> 'b) -> 'a array -> 'b array
val fold_left : f:('a -> 'b -> 'a) -> init:'a -> 'b array -> 'a
val fold_right : f:('a -> 'b -> 'b) -> 'a array -> init:'b -> 'b
val sort : cmp:('a -> 'a -> int) -> 'a array -> unit
val stable_sort : cmp:('a -> 'a -> int) -> 'a array -> unit
val fast_sort : cmp:('a -> 'a -> int) -> 'a array -> unit
external unsafe_get : 'a array -> int -> 'a = "%array_unsafe_get"
external unsafe_set : 'a array -> int -> 'a -> unit = "%array_unsafe_set"
end
(* Labelized version of the standard [List] module. *)
module List :
  sig
    val length : 'a list -> int
    val hd : 'a list -> 'a
    val tl : 'a list -> 'a list
    val nth : 'a list -> int -> 'a
    val rev : 'a list -> 'a list
    val append : 'a list -> 'a list -> 'a list
    val rev_append : 'a list -> 'a list -> 'a list
    val concat : 'a list list -> 'a list
    val flatten : 'a list list -> 'a list
    (* Iterators over one list. *)
    val iter : f:('a -> unit) -> 'a list -> unit
    val map : f:('a -> 'b) -> 'a list -> 'b list
    val rev_map : f:('a -> 'b) -> 'a list -> 'b list
    val fold_left : f:('a -> 'b -> 'a) -> init:'a -> 'b list -> 'a
    val fold_right : f:('a -> 'b -> 'b) -> 'a list -> init:'b -> 'b
    (* Iterators over two lists in lockstep. *)
    val iter2 : f:('a -> 'b -> unit) -> 'a list -> 'b list -> unit
    val map2 : f:('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
    val rev_map2 : f:('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
    val fold_left2 :
      f:('a -> 'b -> 'c -> 'a) -> init:'a -> 'b list -> 'c list -> 'a
    val fold_right2 :
      f:('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> init:'c -> 'c
    (* Scanning predicates. *)
    val for_all : f:('a -> bool) -> 'a list -> bool
    val exists : f:('a -> bool) -> 'a list -> bool
    val for_all2 : f:('a -> 'b -> bool) -> 'a list -> 'b list -> bool
    val exists2 : f:('a -> 'b -> bool) -> 'a list -> 'b list -> bool
    val mem : 'a -> set:'a list -> bool
    val memq : 'a -> set:'a list -> bool
    val find : f:('a -> bool) -> 'a list -> 'a
    val filter : f:('a -> bool) -> 'a list -> 'a list
    val find_all : f:('a -> bool) -> 'a list -> 'a list
    val partition : f:('a -> bool) -> 'a list -> 'a list * 'a list
    (* Association lists ([_q] variants use physical equality). *)
    val assoc : 'a -> ('a * 'b) list -> 'b
    val assq : 'a -> ('a * 'b) list -> 'b
    val mem_assoc : 'a -> map:('a * 'b) list -> bool
    val mem_assq : 'a -> map:('a * 'b) list -> bool
    val remove_assoc : 'a -> ('a * 'b) list -> ('a * 'b) list
    val remove_assq : 'a -> ('a * 'b) list -> ('a * 'b) list
    val split : ('a * 'b) list -> 'a list * 'b list
    val combine : 'a list -> 'b list -> ('a * 'b) list
    (* Sorting and merging. *)
    val sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
    val stable_sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
    val fast_sort : cmp:('a -> 'a -> int) -> 'a list -> 'a list
    val merge : cmp:('a -> 'a -> int) -> 'a list -> 'a list -> 'a list
  end
(* Labelized version of the standard [String] module. *)
module String :
  sig
    external length : string -> int = "%string_length"
    (* Bounds-checked character access. *)
    external get : string -> int -> char = "%string_safe_get"
    external set : string -> int -> char -> unit = "%string_safe_set"
    external create : int -> string = "caml_create_string"
    val make : int -> char -> string
    val copy : string -> string
    val sub : string -> pos:int -> len:int -> string
    val fill : string -> pos:int -> len:int -> char -> unit
    val blit :
      src:string -> src_pos:int -> dst:string -> dst_pos:int -> len:int ->
        unit
    val concat : sep:string -> string list -> string
    val iter : f:(char -> unit) -> string -> unit
    val iteri : f:(int -> char -> unit) -> string -> unit
    val map : f:(char -> char) -> string -> string
    val trim : string -> string
    val escaped : string -> string
    (* Searching for characters. *)
    val index : string -> char -> int
    val rindex : string -> char -> int
    val index_from : string -> int -> char -> int
    val rindex_from : string -> int -> char -> int
    val contains : string -> char -> bool
    val contains_from : string -> int -> char -> bool
    val rcontains_from : string -> int -> char -> bool
    (* Case transformations. *)
    val uppercase : string -> string
    val lowercase : string -> string
    val capitalize : string -> string
    val uncapitalize : string -> string
    type t = string
    val compare: t -> t -> int
    (* Unchecked primitives; no bounds checks. *)
    external unsafe_get : string -> int -> char = "%string_unsafe_get"
    external unsafe_set : string -> int -> char -> unit = "%string_unsafe_set"
    external unsafe_blit :
      src:string -> src_pos:int -> dst:string -> dst_pos:int -> len:int ->
        unit = "caml_blit_string" "noalloc"
    external unsafe_fill : string -> pos:int -> len:int -> char -> unit
      = "caml_fill_string" "noalloc"
  end
|
7241dfbc17e2433b1c41ea7d64562667c4446c382b3ca5ead7f370073c99d451 | mokus0/junkbox | STLC.hs | {-# LANGUAGE GADTs #-}
module Math.STLC where
import qualified Data.Map as M
import Data.Monoid
-- | Simple types over base types @b@: a base type, or a function arrow
-- between two types.
data Type b
  = BaseType b
  | Arrow (Type b) (Type b)
  deriving (Eq, Show)

-- | 'fmap' rewrites every base type, leaving the arrow structure intact.
instance Functor Type where
  fmap f = go
    where
      go (BaseType b)  = BaseType (f b)
      go (Arrow t1 t2) = Arrow (go t1) (go t2)
-- | Untyped lambda terms with constants drawn from @k@ and variable names
-- drawn from @v@.
data Term k v
  = Const k                    -- ^ constant
  | Var v                      -- ^ variable occurrence
  | Abs v (Term k v)           -- ^ lambda abstraction binding @v@
  | App (Term k v) (Term k v)  -- ^ application
-- | A typing context: a (partial) typing for constants plus a finite map
-- giving the types of in-scope variables.
data Context b k v = Cxt
  { constType :: k -> Maybe (Type b)  -- ^ type of each constant, if known
  , varTypes :: M.Map v (Type b)      -- ^ types of bound variables
  }
-- | Build a context with the given constant typing and no variable bindings.
emptyCxt :: (k -> Maybe (Type b)) -> Context b k v
emptyCxt f = Cxt f M.empty

-- | Bind variable @v@ to type @ty@, shadowing any previous binding for @v@.
extend :: Ord v => v -> Type b -> Context b k v -> Context b k v
extend v ty cxt = cxt {varTypes = M.insert v ty (varTypes cxt)}
-- | Weaken a context so its base types live in 'Maybe': every base type
-- @b@ becomes @Just b@.  No bindings are added or removed.
liftCxt :: Context b k v -> Context (Maybe b) k v
liftCxt (Cxt f m) = Cxt f' m'
  where
    inject = fmap Just          -- wrap each base type inside a Type
    f' k   = fmap inject (f k)
    m'     = M.map inject m
-- | A substitution for variables.  NOTE(review): the map runs from @v@ to
-- @k@ even though the parameters read @Subst k v@ -- confirm the intended
-- orientation; no use site is visible in this file.
newtype Subst k v = Subst (M.Map v k)
-- | Type checking: does the term have the expected type under the context?
-- All failures are collapsed to 'False' (no diagnostics).
check :: (Eq b, Ord v) => Context b k v -> Term k v -> Type b -> Bool
-- Constants and variables: infer and compare with the expectation.
check cxt (Const k) ty = infer cxt (Const k) == Just ty
check cxt (Var v) ty = infer cxt (Var v) == Just ty
-- Lambda against an arrow: check the body under the extended context.
check cxt (Abs v e) (Arrow vt et) = check (extend v vt cxt) e et
-- Application: infer the argument, then check the function against the
-- arrow that the argument implies (inferring the function instead would
-- require unification; see the note on 'infer').
check cxt (App e1 e2) ty = case infer cxt e2 of
  Nothing -> False
  Just e2ty -> check cxt e1 (Arrow e2ty ty)
-- Everything else (e.g. a lambda checked against a base type) fails.
check cxt _ _ = False
-- this strategy isn't gonna work. Need to introduce unification type variables
-- | Type inference.  Only constants and variables are currently inferable;
-- the 'Abs' and 'App' cases are left 'undefined' -- as the note above says,
-- doing them properly needs unification variables.
infer :: Ord v => Context b k v -> Term k v -> Maybe (Type b)
infer cxt (Const k) = constType cxt k
infer cxt (Var v) = M.lookup v (varTypes cxt)
infer cxt (Abs v e) = undefined
infer cxt (App e1 e2) = undefined
this strategy isn't gonna work. Need to introduce unification type variables | module Math.STLC where
import qualified Data.Map as M
import Data.Monoid
data Type b
= BaseType b
| Arrow (Type b) (Type b)
deriving (Eq, Show)
instance Functor Type where
fmap f (BaseType b) = BaseType (f b)
fmap f (Arrow a b) = Arrow (fmap f a) (fmap f b)
data Term k v
= Const k
| Var v
| Abs v (Term k v)
| App (Term k v) (Term k v)
data Context b k v = Cxt
{ constType :: k -> Maybe (Type b)
, varTypes :: M.Map v (Type b)
}
emptyCxt f = Cxt f M.empty
extend v ty cxt = cxt {varTypes = M.insert v ty (varTypes cxt)}
liftCxt :: Context b k v -> Context (Maybe b) k v
liftCxt (Cxt f m) = Cxt (fmap (fmap Just) . f) (M.map (fmap Just) m)
newtype Subst k v = Subst (M.Map v k)
check :: (Eq b, Ord v) => Context b k v -> Term k v -> Type b -> Bool
check cxt (Const k) ty = infer cxt (Const k) == Just ty
check cxt (Var v) ty = infer cxt (Var v) == Just ty
check cxt (Abs v e) (Arrow vt et) = check (extend v vt cxt) e et
check cxt (App e1 e2) ty = case infer cxt e2 of
Nothing -> False
Just e2ty -> check cxt e1 (Arrow e2ty ty)
check cxt _ _ = False
infer :: Ord v => Context b k v -> Term k v -> Maybe (Type b)
infer cxt (Const k) = constType cxt k
infer cxt (Var v) = M.lookup v (varTypes cxt)
infer cxt (Abs v e) = undefined
infer cxt (App e1 e2) = undefined |
307a9085f00761ccbca415171f80b46668b2284b03e806f2e33e7c028f06c5ab | garrigue/labltk | main.ml | (*************************************************************************)
(*************************************************************************)
(*                                                                       *)
(*                        OCaml LablTk library                           *)
(*                                                                       *)
(*             Jacques Garrigue, Kyoto University RIMS                   *)
(*                                                                       *)
(*   Copyright 1999 Institut National de Recherche en Informatique et    *)
(*   en Automatique and Kyoto University.  All rights reserved.          *)
(*   This file is distributed under the terms of the GNU Library         *)
(*   General Public License, with the special exception on linking       *)
(*   described in file ../../../LICENSE.                                 *)
(*                                                                       *)
(*************************************************************************)

(* $Id$ *)
open StdLabels
module Unix = UnixLabels
open Tk
(* Pop up a minimal Tk window showing [text] with an OK button, run the
   event loop until the user dismisses it, then exit the whole program.
   Used for unrecoverable startup errors (a GUI app may have no stderr). *)
let fatal_error text =
  let top = openTk ~clas:"OCamlBrowser" () in
  let mw = Message.create top ~text ~padx:20 ~pady:10
      ~width:400 ~justify:`Left ~aspect:400 ~anchor:`W
  and b = Button.create top ~text:"OK" ~command:(fun () -> destroy top) in
  pack [mw] ~side:`Top ~fill:`Both;
  pack [b] ~side:`Bottom;
  mainLoop ();
  exit 0
(* [get_incr key spec] looks [key] up in the [Arg] spec list and reports
   whether that option consumes a following command-line argument:
   [false] for Set/Clear/Unit options, [true] for all other kinds.
   Raises [Not_found] if [key] is not declared in [spec].
   (The third tuple component is the doc string, unused here: bind it
   as [_doc] rather than a live name.) *)
let rec get_incr key = function
    [] -> raise Not_found
  | (k, c, _doc) :: rem ->
      if k = key then
        match c with Arg.Set _ | Arg.Clear _ | Arg.Unit _ -> false | _ -> true
      else get_incr key rem
(* [check ~spec argv] is true when every element of [argv] after the
   program name is a known option key of [spec], skipping the extra
   argument consumed by options that take one.  Unknown keys, or a final
   option whose required argument is missing, yield [false]. *)
let check ~spec argv =
  let len = Array.length argv in
  let rec scan i =
    if i >= len then i = len
    else
      match get_incr argv.(i) spec with
      | exception Not_found -> false
      | true -> scan (i + 2)    (* option plus its argument *)
      | false -> scan (i + 1)
  in
  scan 1
open Printf
(* Emit the human-readable version banner on stdout, then terminate
   successfully. *)
let print_version () =
  Printf.printf "The OCaml browser, version %s\n" Sys.ocaml_version;
  exit 0
;;
(* Emit just the bare version number on stdout, then terminate
   successfully. *)
let print_version_num () =
  Printf.printf "%s\n" Sys.ocaml_version;
  exit 0
;;
(* Render [errmsg] followed by one "key doc" line per option of [spec].
   Used as the text of the fatal-error dialog on a bad command line. *)
let usage ~spec errmsg =
  let b = Buffer.create 1024 in
  bprintf b "%s\n" errmsg;
  List.iter spec ~f:(function (key, _, doc) -> bprintf b " %s %s\n" key doc);
  Buffer.contents b
(* Entry point: validate and parse the command line, initialise the
   compiler's load path and environment, build the Tk UI, then run the
   event loop forever. *)
let _ =
  let is_win32 = Sys.os_type = "Win32" in
  (* A Windows GUI app has no useful stderr: silence the error formatter. *)
  if is_win32 then
    Format.pp_set_formatter_output_functions Format.err_formatter
      (fun _ _ _ -> ()) (fun _ -> ());
  let path = ref [] in          (* -I include directories, most recent first *)
  let st = ref true in          (* structured (new) viewer UI by default *)
  let spec =
    [ "-I", Arg.String (fun s -> path := s :: !path),
      "<dir> Add <dir> to the list of include directories";
      "-labels", Arg.Clear Clflags.classic, " <obsolete>";
      "-nolabels", Arg.Set Clflags.classic,
      " Ignore non-optional labels in types";
      "-oldui", Arg.Clear st, " Revert back to old UI";
      "-pp", Arg.String (fun s -> Clflags.preprocessor := Some s),
      "<command> Pipe sources through preprocessor <command>";
      "-rectypes", Arg.Set Clflags.recursive_types,
      " Allow arbitrary recursive types";
      "-short-paths", Arg.Clear Clflags.real_paths, " Shorten paths in types";
      "-version", Arg.Unit print_version,
      " Print version and exit";
      "-vnum", Arg.Unit print_version_num, " Print version number and exit";
      "-w", Arg.String (fun s -> Shell.warnings := s),
      "<flags> Enable or disable warnings according to <flags>"; ]
  and errmsg = "Command line: ocamlbrowser <options>" in
  (* Pre-validate so errors show up in a dialog, not on a lost stderr. *)
  if not (check ~spec Sys.argv) then fatal_error (usage ~spec errmsg);
  Arg.parse spec
    (fun name -> raise(Arg.Bad("don't know what to do with " ^ name)))
    errmsg;
  Load_path.init ~auto_include:Load_path.no_auto_include
    (Sys.getcwd ()
     :: List.rev_map ~f:(Misc.expand_directory Config.standard_library) !path
     @ [Config.standard_library]);
  ignore (Warnings.parse_options false !Shell.warnings);
  (* Subshells must not believe they run inside a terminal. *)
  Unix.putenv "TERM" "noterminal";
  begin
    try Searchid.start_env := Compmisc.initial_env ()
    with _ ->
      fatal_error
        (Printf.sprintf "%s\nPlease check that %s %s\nCurrent value is `%s'"
           "Couldn't initialize environment."
           (if is_win32 then "%OCAMLLIB%" else "$OCAMLLIB")
           "points to the OCaml library."
           Config.standard_library)
  end;
  (* Late-bound hooks, breaking module dependency cycles. *)
  Searchpos.view_defined_ref := (fun s ~env -> Viewer.view_defined s ~env);
  Searchpos.editor_ref := Editor.f;
  let top = openTk ~clas:"OCamlBrowser" () in
  Jg_config.init ();
  (* bind top ~events:[`Destroy] ~action:(fun _ -> exit 0); *)
  at_exit Shell.kill_all;
  if !st then Viewer.st_viewer ~on:top ()
  else Viewer.f ~on:top ();
  (* Keep the event loop alive across Tk protocol errors. *)
  while true do
    try
      if is_win32 then mainLoop ()
      else Printexc.print mainLoop ()
    with Protocol.TkError _ ->
      if not is_win32 then flush stderr
  done
| null | https://raw.githubusercontent.com/garrigue/labltk/441705df2d88de01bc6aa28c31cc45e40751ee20/browser/main.ml | ocaml | ***********************************************************************
OCaml LablTk library
General Public License, with the special exception on linking
described in file ../../../LICENSE.
***********************************************************************
bind top ~events:[`Destroy] ~action:(fun _ -> exit 0); | , Kyoto University RIMS
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
$ Id$
open StdLabels
module Unix = UnixLabels
open Tk
let fatal_error text =
let top = openTk ~clas:"OCamlBrowser" () in
let mw = Message.create top ~text ~padx:20 ~pady:10
~width:400 ~justify:`Left ~aspect:400 ~anchor:`W
and b = Button.create top ~text:"OK" ~command:(fun () -> destroy top) in
pack [mw] ~side:`Top ~fill:`Both;
pack [b] ~side:`Bottom;
mainLoop ();
exit 0
let rec get_incr key = function
[] -> raise Not_found
| (k, c, d) :: rem ->
if k = key then
match c with Arg.Set _ | Arg.Clear _ | Arg.Unit _ -> false | _ -> true
else get_incr key rem
let check ~spec argv =
let i = ref 1 in
while !i < Array.length argv do
try
let a = get_incr argv.(!i) spec in
incr i; if a then incr i
with Not_found ->
i := Array.length argv + 1
done;
!i = Array.length argv
open Printf
let print_version () =
printf "The OCaml browser, version %s\n" Sys.ocaml_version;
exit 0;
;;
let print_version_num () =
printf "%s\n" Sys.ocaml_version;
exit 0;
;;
let usage ~spec errmsg =
let b = Buffer.create 1024 in
bprintf b "%s\n" errmsg;
List.iter spec ~f:(function (key, _, doc) -> bprintf b " %s %s\n" key doc);
Buffer.contents b
let _ =
let is_win32 = Sys.os_type = "Win32" in
if is_win32 then
Format.pp_set_formatter_output_functions Format.err_formatter
(fun _ _ _ -> ()) (fun _ -> ());
let path = ref [] in
let st = ref true in
let spec =
[ "-I", Arg.String (fun s -> path := s :: !path),
"<dir> Add <dir> to the list of include directories";
"-labels", Arg.Clear Clflags.classic, " <obsolete>";
"-nolabels", Arg.Set Clflags.classic,
" Ignore non-optional labels in types";
"-oldui", Arg.Clear st, " Revert back to old UI";
"-pp", Arg.String (fun s -> Clflags.preprocessor := Some s),
"<command> Pipe sources through preprocessor <command>";
"-rectypes", Arg.Set Clflags.recursive_types,
" Allow arbitrary recursive types";
"-short-paths", Arg.Clear Clflags.real_paths, " Shorten paths in types";
"-version", Arg.Unit print_version,
" Print version and exit";
"-vnum", Arg.Unit print_version_num, " Print version number and exit";
"-w", Arg.String (fun s -> Shell.warnings := s),
"<flags> Enable or disable warnings according to <flags>"; ]
and errmsg = "Command line: ocamlbrowser <options>" in
if not (check ~spec Sys.argv) then fatal_error (usage ~spec errmsg);
Arg.parse spec
(fun name -> raise(Arg.Bad("don't know what to do with " ^ name)))
errmsg;
Load_path.init ~auto_include:Load_path.no_auto_include
(Sys.getcwd ()
:: List.rev_map ~f:(Misc.expand_directory Config.standard_library) !path
@ [Config.standard_library]);
ignore (Warnings.parse_options false !Shell.warnings);
Unix.putenv "TERM" "noterminal";
begin
try Searchid.start_env := Compmisc.initial_env ()
with _ ->
fatal_error
(Printf.sprintf "%s\nPlease check that %s %s\nCurrent value is `%s'"
"Couldn't initialize environment."
(if is_win32 then "%OCAMLLIB%" else "$OCAMLLIB")
"points to the OCaml library."
Config.standard_library)
end;
Searchpos.view_defined_ref := (fun s ~env -> Viewer.view_defined s ~env);
Searchpos.editor_ref := Editor.f;
let top = openTk ~clas:"OCamlBrowser" () in
Jg_config.init ();
at_exit Shell.kill_all;
if !st then Viewer.st_viewer ~on:top ()
else Viewer.f ~on:top ();
while true do
try
if is_win32 then mainLoop ()
else Printexc.print mainLoop ()
with Protocol.TkError _ ->
if not is_win32 then flush stderr
done
|
4a86a5d8d0447278d7389819aee3f9dbc20909fab8fe12ed51ceea7ba6d7a651 | osa1/sc-plugin | Syntax.hs | # LANGUAGE CPP , FlexibleInstances , Rank2Types , TypeSynonymInstances #
module Supercompile.Core.Syntax (
module Supercompile.Core.Syntax,
Coercion, NormalCo, mkAxInstCo, mkReflCo,
DataCon, Var, Literal, Type, PrimOp
) where
#include "GHCDefs.h"
import Supercompile.StaticFlags
import Supercompile.Utilities
import Coercion (CoVar, Coercion, coercionKind, coercionType, isReflCo,
mkAxInstCo, mkCvSubst, mkReflCo)
import qualified Coercion as Coercion
import DataCon (DataCon, dataConWorkId)
import Id (Id, idInlinePragma, idType, isId)
import Kind
import Literal (Literal, literalType)
import Name (Name, nameOccName)
import OccName (occNameString)
import OptCoercion
import Pair (pSnd)
import PprCore ()
import PrimOp (primOpType)
import PrimOp (PrimOp)
import Type (Type, applyTy, applyTys, eqType, mkForAllTy, mkFunTy, mkTyVarTy,
splitFunTy_maybe)
import TypeRep (Type (..))
import Util
import Var (TyVar, Var, isTyVar, varName, varType)
import VarEnv (InScopeSet)
import qualified Data.Traversable as Traversable
-- | Like 'Coercion.mkSymCo', but runs the result through the coercion
-- optimiser ('optCoercion' with an empty substitution) so the output
-- stays in normal form.
mkSymCo :: InScopeSet -> NormalCo -> NormalCo
mkSymCo iss co = optCoercion (mkCvSubst iss []) (Coercion.mkSymCo co)

-- | Normalising transitive composition of coercions (re-exported logic
-- from "OptCoercion").
mkTransCo :: InScopeSet -> NormalCo -> NormalCo -> NormalCo
mkTransCo = opt_trans
-- | Pretty-printing that can peel off leading lambda binders, so nested
-- lambdas render as a single @\\x y z -> ...@.  Returns the binders plus
-- a printer for the body at a given precedence.
class Outputable a => OutputableLambdas a where
    pprPrecLam :: a -> ([Var], Rational -> SDoc)

-- | Lifted version of 'OutputableLambdas' for annotation functors.
class Outputable1 f => OutputableLambdas1 f where
    pprPrecLam1 :: OutputableLambdas a => f a -> ([Var], Rational -> SDoc)

instance (OutputableLambdas1 f, OutputableLambdas a) => OutputableLambdas (Wrapper1 f a) where
    pprPrecLam = pprPrecLam1 . unWrapper1

instance OutputableLambdas1 Identity where
    pprPrecLam1 (I x) = pprPrecLam x

-- Composition of annotation functors: print the outer layer wrapped.
instance (Functor f, OutputableLambdas1 f, OutputableLambdas1 g) => OutputableLambdas1 (O f g) where
    pprPrecLam1 (Comp x) = pprPrecLam1 (fmap Wrapper1 x)

-- Tags print in braces, sizes in "banana" brackets, prefixed to the body.
instance OutputableLambdas1 Tagged where
    pprPrecLam1 (Tagged tg x) = second ((braces (ppr tg) <+>) .) (pprPrecLam x)

instance OutputableLambdas1 Sized where
    pprPrecLam1 (Sized sz x) = second ((bananas (text (show sz)) <>) .) (pprPrecLam x)
-- | Default 'pprPrec' for anything with an 'OutputableLambdas' instance:
-- peel the binders and render them as one combined lambda.
pprPrecDefault :: OutputableLambdas a => Rational -> a -> SDoc
pprPrecDefault prec e = pPrintPrecLam prec xs (PrettyFunction ppr_prec)
  where (xs, ppr_prec) = pprPrecLam e
NB : do n't use GHC 's pprBndr because its way too noisy , printing unfoldings etc
pPrintBndr :: BindingSite -> Var -> SDoc
pPrintBndr bs x = prettyParen needs_parens $ ppr x <+> superinlinable <+> text "::" <+> ppr (varType x)
where needs_parens = case bs of LambdaBind -> True
CaseBind -> True
LetBind -> False
superinlinable = if isId x then ppr (idInlinePragma x) else empty
-- | Case-alternative patterns.  'DataAlt' binds, in order: type variables,
-- coercion variables, then term variables (see the 'Outputable' instance,
-- which prints them in that order).
data AltCon = DataAlt DataCon [TyVar] [CoVar] [Id] | LiteralAlt Literal | DefaultAlt
            deriving (Eq)
-- Note [Case wildcards]
-- ~~~~~~~~~~~~~~~~~~~~~
--
thought that I should use the variable in the DefaultAlt to agressively rewrite occurences of a scrutinised variable .
-- The motivation is that this lets us do more inlining above the case. For example, take this code fragment from foldl':
--
-- let n' = c n y
-- in case n' of wild -> foldl' c n' ys
--
-- If we rewrite, n' becomes linear:
--
-- let n' = c n y
-- in case n' of wild -> foldl c wild ys
--
-- This lets us potentially inline n' directly into the scrutinee position (operationally, this prevent creation of a thunk for n').
-- However, I don't think that this particular form of improving linearity helps the supercompiler. We only want to inline n' in
-- somewhere if it meets some interesting context, with which it can cancel. But if we are creating an update frame for n' at all,
-- it is *probably* because we had no information about what it evaluated to.
--
-- An interesting exception is when n' binds a case expression:
--
-- let n' = case unk of T -> F; F -> T
-- in case (case n' of T -> F; F -> T) of
-- wild -> e[n']
--
-- You might think that we want n' to be linear so we can inline it into the case on it. However, the splitter will save us and produce:
--
-- case unk of
-- T -> let n' = F
-- in case (case n' of T -> F; F -> T) of wild -> e[n']
-- F -> let n' = T
-- in case (case n' of T -> F; F -> T) of wild -> e[n']
--
-- Since we now know the form of n', everything works out nicely.
--
-- Conclusion: I don't think rewriting to use the case wildcard buys us anything at all.
-- Note [CoApp]
-- ~~~~~~~~~~~~
-- CoApp might seem redundant because we almost never substitute CoVars for Coercions, so you might think we could get away
-- with just reusing the App constructor but having the Var be either an Id or a CoVar. Unfortunately mkCoVarCo sometimes returns Refl so
-- we can't guarantee that all CoVar substitutions will be variable-for-variable. We add CoApp to work around this fragility.
type Term = Identity (TermF Identity)
type TaggedTerm = Tagged (TermF Tagged)

-- | Core term syntax, parameterised over an annotation functor @ann@
-- ('Identity' for plain terms, 'Tagged' for tagged ones).
--
-- NOTE(review): the 'Case' and 'Let' constructors had been lost from this
-- copy of the file; they are restored here with the field types implied by
-- their uses in 'case_'/'let_', 'termType'' and 'reflect' below.
data TermF ann = Var Id
               | Value (ValueF ann)
               | TyApp (ann (TermF ann)) Type
               | CoApp (ann (TermF ann)) Coercion
               | App (ann (TermF ann)) Id
               | PrimOp PrimOp [Type] [ann (TermF ann)]
               | Case (ann (TermF ann)) Id Type [AltF ann]
                 -- NB: unlike GHC, for convenience we allow the list of alternatives to be empty
               | Let Id (ann (TermF ann)) (ann (TermF ann))
                 -- NB: Let might bind an unlifted thing, in which case evaluation changes. Unlike GHC, we do NOT assume the RHSes of unlifted bindings are ok-for-speculation.
               | LetRec [(Id, ann (TermF ann))] (ann (TermF ann))
               | Cast (ann (TermF ann)) Coercion
FIXME : arguably we have just Vars as arguments in PrimOp for better Tag behaviour
-- (otherwise improving the arguments is hidden by the Tag on the whole PrimOp stack frames).
--
-- FIXME: in fact, we need to change this because *NOT ALL PRIMOP ARGUMENTS ARE STRICT* (e.g.
the lazy polymorphic arguments to newMutVar # , newArray # ) .
--
-- FIXME: the reason I haven't done this is because it means I should remove the PrimApply frame,
-- which breaks the "question or answer" evaluator normalisation property. Probably what I should
do is just remove PrimOp and stop generating wrappers for PrimOps , so they are treated as normal Vars .
-- We can then special case them in the evaluator's "force", using rules to pretend like they have a RHS.
-- The only problem with this is that if there are no wrappers there is no guarantee of saturation,
-- but we can probably ignore that.
--
-- FIXME: the way I'm splitting PrimApply isn't right. If we have
-- case ((case [x] of I# x# -> x#) +# (case y of I# y# -> y#)) of
-- 0 -> ...; _ -> e[x, y]
Then I want to eventually split to e[I # x # , I # y # ] . At the moment we will only split to e[I # x , y ] !
-- This could be achieved in the current framework by splitting to
case ( x # + # ( case [ y ] of I # y # - > y # ) ) of ...
( Where the focus is now on y rather than x , and we put x # in the first set of arguments to PrimApply
-- as if x# were an answer.) If we just removed the PrimApply frame then we wouldn't need to worry about this though.
-- Alternatives are pairs of a pattern ('AltCon') and a right-hand side.
type Alt = AltF Identity
type TaggedAlt = AltF Tagged
type AltF ann = AltG (ann (TermF ann))
type AltG term = (AltCon, term)

-- FIXME: I should probably implement a correct operational semantics for TyLambdas!
type Value = ValueF Identity
type TaggedValue = ValueF Tagged
type ValueF ann = ValueG (ann (TermF ann))
-- | Values: syntactic forms the evaluator cannot reduce further.  @term@
-- is the type of subterms (tied back through 'ValueF').
--
-- NOTE(review): the lambda and data constructors had been lost from this
-- copy of the file; restored with the field types implied by the
-- 'Traversable' instance and 'valueType' below.
data ValueG term = Literal Literal | Coercion Coercion
                 | TyLambda TyVar term
                 | Lambda Id term -- NB: Lambda might bind a CoVar
                 | Data DataCon [Type] [Coercion] [Id]
                   -- NB: Data includes universal and existential type arguments, in that order
                   -- NB: not a newtype DataCon
-- 'fmap' and 'foldMap' are both derived from the single 'traverse' below.
instance Functor ValueG where
    fmap = Traversable.fmapDefault

instance Foldable ValueG where
    foldMap = Traversable.foldMapDefault

instance Traversable ValueG where
    traverse f e = case e of
      Literal l -> pure $ Literal l
      Coercion co -> pure $ Coercion co
      TyLambda a e -> fmap (TyLambda a) $ f e
      Lambda x e -> fmap (Lambda x) $ f e
      -- Data carries only binders/arguments, no subterms: nothing to traverse
      Data dc tys cos xs -> pure $ Data dc tys cos xs
instance Outputable AltCon where
    pprPrec prec altcon = case altcon of
        -- Constructor followed by all of its binders (tyvars, covars, ids)
        DataAlt dc as qs xs -> prettyParen (prec >= appPrec) $ ppr dc <+> hsep (map (pPrintBndr CaseBind) as ++ map (pPrintBndr CaseBind) qs ++ map (pPrintBndr CaseBind) xs)
        LiteralAlt l -> pPrint l
        DefaultAlt -> text "_"
instance (Functor ann, OutputableLambdas1 ann) => Outputable (TermF ann) where
    pprPrec = pprPrecDefault

-- Only 'Value's can contribute leading lambda binders; every other
-- syntactic form prints as a self-contained document (empty binder list).
instance (Functor ann, OutputableLambdas1 ann) => OutputableLambdas (TermF ann) where
    pprPrecLam e = case e of
        Let x e1 e2 -> ([], \prec -> pPrintPrecLet prec x (asPrettyFunction1 e1) (asPrettyFunction1 e2))
        LetRec xes e -> ([], \prec -> pPrintPrecLetRec prec (map (second asPrettyFunction1) xes) (asPrettyFunction1 e))
        Var x -> ([], \prec -> pPrintPrec prec x)
        Value v -> pprPrecLam v
        TyApp e ty -> ([], \prec -> pPrintPrecApp prec (asPrettyFunction1 e) ty)
        CoApp e co -> ([], \prec -> pPrintPrecApp prec (asPrettyFunction1 e) co)
        App e x -> ([], \prec -> pPrintPrecApp prec (asPrettyFunction1 e) x)
        PrimOp pop tys es -> ([], \prec -> pPrintPrecPrimOp prec pop (map asPrettyFunction tys) (map asPrettyFunction1 es))
        Case e x _ty alts -> ([], \prec -> pPrintPrecCase prec (asPrettyFunction1 e) x (map (second asPrettyFunction1) alts))
        Cast e co -> ([], \prec -> pPrintPrecCast prec (asPrettyFunction1 e) co)
-- | Render a cast as @e |> co@.
pPrintPrecCast :: Outputable a => Rational -> a -> Coercion -> SDoc
pPrintPrecCast prec e co = prettyParen (prec > noPrec) $ pPrintPrec opPrec e <+> text "|>" <+> pPrintPrec appPrec co

pPrintPrecCoerced :: Outputable a => Rational -> Coerced a -> SDoc
pPrintPrecCoerced prec (CastBy co _, e) = pPrintPrecCast prec e co
pPrintPrecCoerced prec (Uncast, e) = pPrintPrec prec e

pPrintPrecApp :: (Outputable a, Outputable b) => Rational -> a -> b -> SDoc
pPrintPrecApp prec e1 e2 = prettyParen (prec >= appPrec) $ pPrintPrec opPrec e1 <+> pPrintPrec appPrec e2

-- | A primop applied first to its type arguments, then its value arguments.
pPrintPrecPrimOp :: (Outputable a, Outputable b, Outputable c) => Rational -> a -> [b] -> [c] -> SDoc
pPrintPrecPrimOp prec pop as xs = pPrintPrecApps prec (PrettyFunction (\prec -> pPrintPrecApps prec pop as)) xs

pPrintPrecCase :: (Outputable a, Outputable b, Outputable c) => Rational -> a -> Var -> [(b, c)] -> SDoc
pPrintPrecCase prec e x alts = prettyParen (prec > noPrec) $ hang (text "case" <+> pPrintPrec noPrec e <+> text "of" <+> pPrintBndr CaseBind x) 2 $ vcat (map (pPrintPrecAlt noPrec) alts)

pPrintPrecAlt :: (Outputable a, Outputable b) => Rational -> (a, b) -> SDoc
pPrintPrecAlt _ (alt_con, alt_e) = hang (pPrintPrec noPrec alt_con <+> text "->") 2 (pPrintPrec noPrec alt_e)

pPrintPrecLet :: (Outputable a, Outputable b) => Rational -> Var -> a -> b -> SDoc
pPrintPrecLet prec x e e_body = prettyParen (prec > noPrec) $ hang (text "let") 2 (pPrintBndr LetBind x <+> text "=" <+> pPrintPrec noPrec e) $$ text "in" <+> pPrintPrec noPrec e_body

-- | Render the same bindings either as @letrec ... in ...@ or as a trailing
-- @where@ block; both collapse to just the body when there are no bindings.
pPrintPrecLetRec, pPrintPrecWhere :: (Outputable a, Outputable b) => Rational -> [(Var, a)] -> b -> SDoc
pPrintPrecLetRec prec xes e_body
  | [] <- xes = pPrintPrec prec e_body
  | otherwise = prettyParen (prec > noPrec) $ hang (text "letrec") 2 (vcat [hang (pPrintBndr LetBind x) 2 (text "=" <+> pPrintPrec noPrec e) | (x, e) <- xes]) $$ text "in" <+> pPrintPrec noPrec e_body
pPrintPrecWhere prec xes e_body
  | [] <- xes = pPrintPrec prec e_body
  | otherwise = prettyParen (prec > noPrec) $ hang (pPrintPrec noPrec e_body) 1 $ hang (text "where") 1 $ vcat [hang (pPrintBndr LetBind x) 2 (text "=" <+> pPrintPrec noPrec e) | (x, e) <- xes]
instance (Functor ann, OutputableLambdas1 ann) => Outputable (ValueF ann) where
    pprPrec = pprPrecDefault

instance (Functor ann, OutputableLambdas1 ann) => OutputableLambdas (ValueF ann) where
    pprPrecLam v = case v of
      -- Both lambda forms contribute their binder and recurse into the body
      TyLambda x e -> (x:xs, ppr_prec)
        where (xs, ppr_prec) = pprPrecLam1 e
      Lambda x e -> (x:xs, ppr_prec)
        where (xs, ppr_prec) = pprPrecLam1 e
      -- Data: constructor applied to type, coercion and term arguments
      Data dc tys cos xs -> ([], \prec -> pPrintPrecApps prec dc ([asPrettyFunction ty | ty <- tys] ++ [asPrettyFunction co | co <- cos] ++ [asPrettyFunction x | x <- xs]))
      Literal l -> ([], flip pPrintPrec l)
      Coercion co -> ([], flip pPrintPrec co)
-- | Render a (possibly empty) list of binders as a lambda around a body.
pPrintPrecLam :: Outputable a => Rational -> [Var] -> a -> SDoc
pPrintPrecLam prec [] e = pPrintPrec prec e
pPrintPrecLam prec xs e = prettyParen (prec > noPrec) $ text "\\" <> (vcat [pPrintBndr LambdaBind y | y <- xs] $$ text "->" <+> pPrintPrec noPrec e)

-- | Render a head applied to zero or more arguments.
pPrintPrecApps :: (Outputable a, Outputable b) => Rational -> a -> [b] -> SDoc
pPrintPrecApps prec e1 es2 = prettyParen (not (null es2) && prec >= appPrec) $ pPrintPrec opPrec e1 <+> hsep (map (pPrintPrec appPrec) es2)
-- Find those things that are Values and cannot be further evaluated. Primarily used to prevent the
-- speculator from re-speculating values, but also as an approximation for what GHC considers a value.
termIsValue :: Copointed ann => ann (TermF ann) -> Bool
termIsValue = isValue . extract
  where
    isValue (Value _) = True
    isValue (Cast e _) | Value _ <- extract e = True -- a cast wrapped directly around a value still counts
    isValue _ = False
-- Find those things that we are willing to duplicate.
termIsCheap :: Copointed ann => ann (TermF ann) -> Bool
termIsCheap = termIsCheap' . extract

termIsCheap' :: Copointed ann => TermF ann -> Bool
termIsCheap' _ | cALL_BY_NAME = True -- A cunning hack. I think this is all that should be required... (TODO: not for stack bound things..)
termIsCheap' (Var _) = True
termIsCheap' (Value _) = True
termIsCheap' (Cast e _) = termIsCheap e
-- NB: important for pushing down let-bound applications of ``error''
-- NOTE(review): the NB above originally annotated a clause that is not
-- visible in this copy (presumably Case with an empty alternative list);
-- confirm against upstream before relying on the fall-through below.
termIsCheap' _ = False
-- | The user-visible occurrence name of a binder.
varString :: Var -> String
varString v = nameString (varName v)

-- | The user-visible occurrence name of a 'Name'.
nameString :: Name -> String
nameString n = occNameString (nameOccName n)
INVARIANT : NormalCo is not
type Coerced a = (CastBy, a)
-- | Smart constructor: a reflexive coercion becomes 'Uncast'.
castBy :: NormalCo -> Tag -> CastBy
castBy co tg | isReflCo co = Uncast -- TODO: this throws away a tag (and hence a deed). But do I care any longer?
             | otherwise = CastBy co tg

-- | Project out the coercion, if any.
castByCo :: CastBy -> Maybe NormalCo
castByCo Uncast = Nothing
castByCo (CastBy co _) = Just co

-- | Symmetry, preserving the tag of the cast.
mkSymCastBy :: InScopeSet -> CastBy -> CastBy
mkSymCastBy _ Uncast = Uncast
mkSymCastBy ids (CastBy co tg) = CastBy (mkSymCo ids co) tg

-- | Transitive composition; the second cast's tag wins.  Re-normalised via
-- 'castBy' because the composed coercion may optimise to reflexivity.
mkTransCastBy :: InScopeSet -> CastBy -> CastBy -> CastBy
mkTransCastBy _ Uncast cast_by2 = cast_by2
mkTransCastBy _ cast_by1 Uncast = cast_by1
mkTransCastBy ids (CastBy co1 _tg1) (CastBy co2 tg2) = castBy (mkTransCo ids co1 co2) tg2
-- | May we generalise over a type variable of this kind?  Open and
-- unlifted kinds must not be abstracted over.
canAbstractOverTyVarOfKind :: Kind -> Bool
canAbstractOverTyVarOfKind = ok
  where
    -- TODO: I'm not 100% sure of the correctness of this check
    -- In particular, I don't think we need to check for non-conforming
    -- kinds in "negative" positions since they would only appear if the
    -- definition site had erroneously abstracted over a non-conforming
    -- kind. For example, this *should* never be allowed:
    --   data Foo (a :: * -> #) = Bar (a Int)
    --   Foo :: (* -> #) -> *
    -- Bar :: forall (a :: * -> #). a Int -> Foo a
    ok k
      | isOpenTypeKind k
      -- TODO(osa): I can't find these function in GHC, disabling for now.
      {- || isUbxTupleKind k -}
      {- || isArgTypeKind k -}
      || isUnliftedTypeKind k = False
    ok (TyVarTy _) =
      -- This is OK because kinds dont get generalised, and we assume all
      -- incoming kind instantiations satisfy the kind invariant
      True
    ok (AppTy k1 k2) = ok k1 && ok k2
    ok (TyConApp _ ks) = all ok ks
    ok (FunTy k1 k2) = ok k1 && ok k2
    ok (ForAllTy _ k) = ok k
    ok (LitTy _) = True
-- | The type of a value (assuming it is well typed).
valueType :: Copointed ann => ValueF ann -> Type
valueType (TyLambda a e) = mkForAllTy a (termType e)
valueType (Lambda x e) = idType x `mkFunTy` termType e
-- Instantiate the worker Id's type at the type/coercion/term arguments
valueType (Data dc as cos xs) = ((idType (dataConWorkId dc) `applyTys` as) `applyFunTys` map coercionType cos) `applyFunTys` map idType xs
valueType (Literal l) = literalType l
valueType (Coercion co) = coercionType co
-- | The type of a term (assuming it is well typed).
termType :: Copointed ann => ann (TermF ann) -> Type
termType = termType' . extract

termType' :: Copointed ann => TermF ann -> Type
termType' e = case e of
    Var x -> idType x
    Value v -> valueType v
    TyApp e a -> termType e `applyTy` a
    CoApp e co -> termType e `applyFunTy` coercionType co
    App e x -> termType e `applyFunTy` idType x
    PrimOp pop tys es -> (primOpType pop `applyTys` tys) `applyFunTys` map termType es
    Case _ _ ty _ -> ty -- the result type is cached in the Case node itself
    Let _ _ e -> termType e
    LetRec _ e -> termType e
    Cast _ co -> pSnd (coercionKind co) -- the right-hand type of the coercion
-- | Apply a function type to one argument type, asserting (debug builds
-- only, via ASSERT2) that the argument type matches the arrow's domain.
-- Panics if the first type is not an arrow at all.
applyFunTy :: Type -> Type -> Type
applyFunTy fun_ty got_arg_ty = case splitFunTy_maybe fun_ty of
    Just (expected_arg_ty, res_ty) -> ASSERT2(got_arg_ty `eqType` expected_arg_ty, text "applyFunTy:" <+> ppr got_arg_ty <+> ppr expected_arg_ty) res_ty
    Nothing -> pprPanic "applyFunTy" (ppr fun_ty $$ ppr got_arg_ty)

applyFunTys :: Type -> [Type] -> Type
applyFunTys = foldl' applyFunTy
-- | Final-tagless-style term builders: one method per 'TermF' constructor,
-- abstracted over the annotation functor.
class Functor ann => Symantics ann where
    var :: Var -> ann (TermF ann)
    value :: ValueF ann -> ann (TermF ann)
    app :: ann (TermF ann) -> Var -> ann (TermF ann)
    coApp :: ann (TermF ann) -> Coercion -> ann (TermF ann)
    tyApp :: ann (TermF ann) -> Type -> ann (TermF ann)
    primOp :: PrimOp -> [Type] -> [ann (TermF ann)] -> ann (TermF ann)
    case_ :: ann (TermF ann) -> Var -> Type -> [AltF ann] -> ann (TermF ann)
    let_ :: Var -> ann (TermF ann) -> ann (TermF ann) -> ann (TermF ann)
    letRec :: [(Var, ann (TermF ann))] -> ann (TermF ann) -> ann (TermF ann)
    cast :: ann (TermF ann) -> Coercion -> ann (TermF ann)
-- | The trivial interpretation: build plain 'Term's by wrapping in 'I'.
instance Symantics Identity where
    var = I . Var
    value = I . Value
    tyApp e = I . TyApp e
    coApp e = I . CoApp e
    app e = I . App e
    primOp pop tys = I . PrimOp pop tys
    case_ e x ty = I . Case e x ty
    let_ x e1 = I . Let x e1
    letRec xes = I . LetRec xes
    cast e = I . Cast e
-- | Interpret a polymorphic 'Symantics' builder at the concrete 'Identity'
-- interpretation, recovering a plain 'Term'.
reify :: (forall ann. Symantics ann => ann (TermF ann)) -> Term
reify x = x
-- | Re-embed a concrete 'Term' into the polymorphic 'Symantics'
-- representation, so it can be re-interpreted at any annotation type.
reflect :: Term -> (forall ann. Symantics ann => ann (TermF ann))
reflect (I t) = case t of
    Var x              -> var x
    Value v            -> value (reflectValue v)
    TyApp e1 ty        -> tyApp (reflect e1) ty
    App e1 x           -> app (reflect e1) x
    CoApp e1 co        -> coApp (reflect e1) co
    PrimOp pop tys es  -> primOp pop tys (map reflect es)
    Case e1 x ty alts  -> case_ (reflect e1) x ty (map (second reflect) alts)
    Let x e1 e2        -> let_ x (reflect e1) (reflect e2)
    LetRec xes e1      -> letRec (map (second reflect) xes) (reflect e1)
    Cast e1 co         -> cast (reflect e1) co
  where
    -- Values contain subterms (under the lambda forms), so those must be
    -- reflected recursively as well; the leaf forms are copied unchanged.
    reflectValue :: Value -> (forall ann. Symantics ann => ValueF ann)
    reflectValue v = case v of
        TyLambda x body    -> TyLambda x (reflect body)
        Lambda x body      -> Lambda x (reflect body)
        Data dc tys cos xs -> Data dc tys cos xs
        Literal l          -> Literal l
        Coercion co        -> Coercion co
-- | Build a literal value term.
literal :: Symantics ann => Literal -> ann (TermF ann)
literal l = value (Literal l)
-- | Build a coercion value term.
coercion :: Symantics ann => Coercion -> ann (TermF ann)
coercion co = value (Coercion co)
lambda : : Symantics ann = > Var - > ann ( TermF ) - > ann ( TermF )
lambda x = value . Lambda x
data _ : : Symantics ann = > DataCon - > [ Var ] - > ann ( TermF )
data _ dc = value . Data dc
lambda :: Symantics ann => Var -> ann (TermF ann) -> ann (TermF ann)
lambda x = value . Lambda x
data_ :: Symantics ann => DataCon -> [Var] -> ann (TermF ann)
data_ dc = value . Data dc
-}
-- | Wrap a term in a sequence of type lambdas, first binder outermost.
tyLambdas :: Symantics ann => [TyVar] -> ann (TermF ann) -> ann (TermF ann)
tyLambdas as e = foldr (\a body -> value (TyLambda a body)) e as
-- | Wrap a term in a sequence of value lambdas, first binder outermost.
lambdas :: Symantics ann => [Id] -> ann (TermF ann) -> ann (TermF ann)
lambdas xs e = foldr (\x body -> value (Lambda x body)) e xs
-- | Wrap a term in a mixed sequence of type/value lambdas, choosing per
-- binder via 'tyVarIdLambda'; first binder outermost.
tyVarIdLambdas :: Symantics ann => [Var] -> ann (TermF ann) -> ann (TermF ann)
tyVarIdLambdas xs e = foldr tyVarIdLambda e xs
-- | Wrap a term in a single lambda: a type lambda when the binder is a
-- type variable, a value lambda otherwise.
tyVarIdLambda :: Symantics ann => Var -> ann (TermF ann) -> ann (TermF ann)
tyVarIdLambda x e = value (if isTyVar x then TyLambda x e else Lambda x e)
-- | Apply a term to a list of type arguments, left to right.
tyApps :: Symantics ann => ann (TermF ann) -> [Type] -> ann (TermF ann)
tyApps e tys = foldl tyApp e tys
-- | Apply a term to a list of coercion arguments, left to right.
coApps :: Symantics ann => ann (TermF ann) -> [Coercion] -> ann (TermF ann)
coApps e cos = foldl coApp e cos
-- | Apply a term to a list of argument variables, left to right.
apps :: Symantics ann => ann (TermF ann) -> [Id] -> ann (TermF ann)
apps e xs = foldl app e xs
-- | Apply a term to a mixed list of type/value argument variables, left to
-- right, choosing per argument via 'tyVarIdApp'.
tyVarIdApps :: Symantics ann => ann (TermF ann) -> [Var] -> ann (TermF ann)
tyVarIdApps e xs = foldl tyVarIdApp e xs
-- | Apply a term to a single argument variable: a type application when
-- the variable is a type variable, a value application otherwise.
tyVarIdApp :: Symantics ann => ann (TermF ann) -> Var -> ann (TermF ann)
tyVarIdApp e x = if isTyVar x then e `tyApp` mkTyVarTy x else e `app` x
strictLet : : Symantics ann = > Var - > ann ( TermF ) - > ann ( TermF ) - > ann ( TermF )
strictLet x e1 e2 = case _ e1 [ ( DefaultAlt ( Just x ) , e2 ) ]
collectLambdas : : Term - > ( [ Var ] , Term )
collectLambdas ( I ( Value ( Lambda x e ) ) ) = first ( x :) $ collectLambdas e
collectLambdas e = ( [ ] , e )
freshFloatVar : : IdSupply - > String - > Term - > ( IdSupply , Maybe ( Var , Term ) , )
freshFloatVar ids _ ( I ( ) ) = ( ids , Nothing , x )
freshFloatVar ids s e = ( ids ' , Just ( y , e ) , y )
where ( ids ' , y ) = freshName ids s
freshFloatVars : : IdSupply - > String - > [ Term ] - > ( IdSupply , [ ( Var , Term ) ] , [ ] )
freshFloatVars ids s es = reassociate ( \ids - > associate . freshFloatVar ids s ) ids es
where reassociate ( ids , floats_xs ) = let ( mb_floats , xs ) = unzip floats_xs in ( ids , mb_floats , xs )
associate ( ids , mb_float , x ) = ( ids , ( mb_float , x ) )
strictLet :: Symantics ann => Var -> ann (TermF ann) -> ann (TermF ann) -> ann (TermF ann)
strictLet x e1 e2 = case_ e1 [(DefaultAlt (Just x), e2)]
collectLambdas :: Term -> ([Var], Term)
collectLambdas (I (Value (Lambda x e))) = first (x:) $ collectLambdas e
collectLambdas e = ([], e)
freshFloatVar :: IdSupply -> String -> Term -> (IdSupply, Maybe (Var, Term), Var)
freshFloatVar ids _ (I (Var x)) = (ids, Nothing, x)
freshFloatVar ids s e = (ids', Just (y, e), y)
where (ids', y) = freshName ids s
freshFloatVars :: IdSupply -> String -> [Term] -> (IdSupply, [(Var, Term)], [Var])
freshFloatVars ids s es = reassociate $ mapAccumL (\ids -> associate . freshFloatVar ids s) ids es
where reassociate (ids, floats_xs) = let (mb_floats, xs) = unzip floats_xs in (ids, catMaybes mb_floats, xs)
associate (ids, mb_float, x) = (ids, (mb_float, x))
-}
| null | https://raw.githubusercontent.com/osa1/sc-plugin/1969ad81f16ca8ed8110ca8dadfccf6f3d463635/src/Supercompile/Core/Syntax.hs | haskell | Note [Case wildcards]
~~~~~~~~~~~~~~~~~~~~~
The motivation is that this lets us do more inlining above the case. For example, take this code fragment from foldl':
let n' = c n y
in case n' of wild -> foldl' c n' ys
If we rewrite, n' becomes linear:
let n' = c n y
in case n' of wild -> foldl c wild ys
This lets us potentially inline n' directly into the scrutinee position (operationally, this prevent creation of a thunk for n').
However, I don't think that this particular form of improving linearity helps the supercompiler. We only want to inline n' in
somewhere if it meets some interesting context, with which it can cancel. But if we are creating an update frame for n' at all,
it is *probably* because we had no information about what it evaluated to.
An interesting exception is when n' binds a case expression:
let n' = case unk of T -> F; F -> T
in case (case n' of T -> F; F -> T) of
wild -> e[n']
You might think that we want n' to be linear so we can inline it into the case on it. However, the splitter will save us and produce:
case unk of
T -> let n' = F
in case (case n' of T -> F; F -> T) of wild -> e[n']
F -> let n' = T
in case (case n' of T -> F; F -> T) of wild -> e[n']
Since we now know the form of n', everything works out nicely.
Conclusion: I don't think rewriting to use the case wildcard buys us anything at all.
Note [CoApp]
~~~~~~~~~~~~
(otherwise improving the arguments is hidden by the Tag on the whole PrimOp stack frames).
FIXME: in fact, we need to change this because *NOT ALL PRIMOP ARGUMENTS ARE STRICT* (e.g.
FIXME: the reason I haven't done this is because it means I should remove the PrimApply frame,
which breaks the "question or answer" evaluator normalisation property. Probably what I should
We can then special case them in the evaluator's "force", using rules to pretend like they have a RHS.
The only problem with this is that if there are no wrappers there is no guarantee of saturation,
but we can probably ignore that.
FIXME: the way I'm splitting PrimApply isn't right. If we have
case ((case [x] of I# x# -> x#) +# (case y of I# y# -> y#)) of
0 -> ...; _ -> e[x, y]
This could be achieved in the current framework by splitting to
as if x# were an answer.) If we just removed the PrimApply frame then we wouldn't need to worry about this though.
Find those things that are Values and cannot be further evaluated. Primarily used to prevent the
Find those things that we are willing to duplicate.
A cunning hack. I think this is all that should be required... (TODO: not for stack bound things..)
TODO: this throws away a tag (and hence a deed). But do I care any longer?
In particular, I don't think we need to check for non-conforming
kinds in "negative" positions since they would only appear if the
definition site had erroneously abstracted over a non-conforming
kind. For example, this *should* never be allowed:
Bar :: forall (a :: * -> #). a Int -> Foo a
|| isUbxTupleKind k
|| isArgTypeKind k
This is OK because kinds dont get generalised, and we assume all
incoming kind instantiations satisfy the kind invariant | # LANGUAGE CPP , FlexibleInstances , Rank2Types , TypeSynonymInstances #
module Supercompile.Core.Syntax (
module Supercompile.Core.Syntax,
Coercion, NormalCo, mkAxInstCo, mkReflCo,
DataCon, Var, Literal, Type, PrimOp
) where
#include "GHCDefs.h"
import Supercompile.StaticFlags
import Supercompile.Utilities
import Coercion (CoVar, Coercion, coercionKind, coercionType, isReflCo,
mkAxInstCo, mkCvSubst, mkReflCo)
import qualified Coercion as Coercion
import DataCon (DataCon, dataConWorkId)
import Id (Id, idInlinePragma, idType, isId)
import Kind
import Literal (Literal, literalType)
import Name (Name, nameOccName)
import OccName (occNameString)
import OptCoercion
import Pair (pSnd)
import PprCore ()
import PrimOp (primOpType)
import PrimOp (PrimOp)
import Type (Type, applyTy, applyTys, eqType, mkForAllTy, mkFunTy, mkTyVarTy,
splitFunTy_maybe)
import TypeRep (Type (..))
import Util
import Var (TyVar, Var, isTyVar, varName, varType)
import VarEnv (InScopeSet)
import qualified Data.Traversable as Traversable
mkSymCo :: InScopeSet -> NormalCo -> NormalCo
mkSymCo iss co = optCoercion (mkCvSubst iss []) (Coercion.mkSymCo co)
mkTransCo :: InScopeSet -> NormalCo -> NormalCo -> NormalCo
mkTransCo = opt_trans
class Outputable a => OutputableLambdas a where
pprPrecLam :: a -> ([Var], Rational -> SDoc)
class Outputable1 f => OutputableLambdas1 f where
pprPrecLam1 :: OutputableLambdas a => f a -> ([Var], Rational -> SDoc)
instance (OutputableLambdas1 f, OutputableLambdas a) => OutputableLambdas (Wrapper1 f a) where
pprPrecLam = pprPrecLam1 . unWrapper1
instance OutputableLambdas1 Identity where
pprPrecLam1 (I x) = pprPrecLam x
instance (Functor f, OutputableLambdas1 f, OutputableLambdas1 g) => OutputableLambdas1 (O f g) where
pprPrecLam1 (Comp x) = pprPrecLam1 (fmap Wrapper1 x)
instance OutputableLambdas1 Tagged where
pprPrecLam1 (Tagged tg x) = second ((braces (ppr tg) <+>) .) (pprPrecLam x)
instance OutputableLambdas1 Sized where
pprPrecLam1 (Sized sz x) = second ((bananas (text (show sz)) <>) .) (pprPrecLam x)
pprPrecDefault :: OutputableLambdas a => Rational -> a -> SDoc
pprPrecDefault prec e = pPrintPrecLam prec xs (PrettyFunction ppr_prec)
where (xs, ppr_prec) = pprPrecLam e
NB : do n't use GHC 's pprBndr because its way too noisy , printing unfoldings etc
pPrintBndr :: BindingSite -> Var -> SDoc
pPrintBndr bs x = prettyParen needs_parens $ ppr x <+> superinlinable <+> text "::" <+> ppr (varType x)
where needs_parens = case bs of LambdaBind -> True
CaseBind -> True
LetBind -> False
superinlinable = if isId x then ppr (idInlinePragma x) else empty
data AltCon = DataAlt DataCon [TyVar] [CoVar] [Id] | LiteralAlt Literal | DefaultAlt
deriving (Eq)
thought that I should use the variable in the DefaultAlt to agressively rewrite occurences of a scrutinised variable .
CoApp might seem redundant because we almost never substitute CoVars for Coercions , so we you might think we could get away
with just reusing the App constructor but having the Var be either an I d or a CoVar . Unfortunately mkCoVarCo sometimes returns so
we ca n't guarantee that all CoVar substitutions will be variable - for - variable . We add CoApp to work around this fragility .
type Term = Identity (TermF Identity)
type TaggedTerm = Tagged (TermF Tagged)
data TermF ann = Var Id
| Value (ValueF ann)
| TyApp (ann (TermF ann)) Type
| CoApp (ann (TermF ann)) Coercion
| App (ann (TermF ann)) Id
| PrimOp PrimOp [Type] [ann (TermF ann)]
NB : unlike GHC , for convenience we allow the list of alternatives to be empty
NB : might bind an unlifted thing , in which case evaluation changes . Unlike GHC , we do NOT assume the RHSes of unlifted bindings are ok - for - speculation .
| LetRec [(Id, ann (TermF ann))] (ann (TermF ann))
| Cast (ann (TermF ann)) Coercion
FIXME : arguably we have just Vars as arguments in PrimOp for better Tag behaviour
the lazy polymorphic arguments to newMutVar # , newArray # ) .
do is just remove PrimOp and stop generating wrappers for PrimOps , so they are treated as normal Vars .
Then I want to eventually split to e[I # x # , I # y # ] . At the moment we will only split to e[I # x , y ] !
case ( x # + # ( case [ y ] of I # y # - > y # ) ) of ...
( Where the focus is now on y rather than x , and we put x # in the first set of arguments to PrimApply
type Alt = AltF Identity
type TaggedAlt = AltF Tagged
type AltF ann = AltG (ann (TermF ann))
type AltG term = (AltCon, term)
FIXME : I should probably implement a correct operational semantics for TyLambdas !
type Value = ValueF Identity
type TaggedValue = ValueF Tagged
type ValueF ann = ValueG (ann (TermF ann))
data ValueG term = Literal Literal | Coercion Coercion
NB : might bind a CoVar
NB : includes universal and existential type arguments , in that order
NB : not a newtype DataCon
instance Functor ValueG where
fmap = Traversable.fmapDefault
instance Foldable ValueG where
foldMap = Traversable.foldMapDefault
instance Traversable ValueG where
traverse f e = case e of
Literal l -> pure $ Literal l
Coercion co -> pure $ Coercion co
TyLambda a e -> fmap (TyLambda a) $ f e
Lambda x e -> fmap (Lambda x) $ f e
Data dc tys cos xs -> pure $ Data dc tys cos xs
instance Outputable AltCon where
pprPrec prec altcon = case altcon of
DataAlt dc as qs xs -> prettyParen (prec >= appPrec) $ ppr dc <+> hsep (map (pPrintBndr CaseBind) as ++ map (pPrintBndr CaseBind) qs ++ map (pPrintBndr CaseBind) xs)
LiteralAlt l -> pPrint l
DefaultAlt -> text "_"
instance (Functor ann, OutputableLambdas1 ann) => Outputable (TermF ann) where
pprPrec = pprPrecDefault
instance (Functor ann, OutputableLambdas1 ann) => OutputableLambdas (TermF ann) where
pprPrecLam e = case e of
Let x e1 e2 -> ([], \prec -> pPrintPrecLet prec x (asPrettyFunction1 e1) (asPrettyFunction1 e2))
LetRec xes e -> ([], \prec -> pPrintPrecLetRec prec (map (second asPrettyFunction1) xes) (asPrettyFunction1 e))
Var x -> ([], \prec -> pPrintPrec prec x)
Value v -> pprPrecLam v
TyApp e ty -> ([], \prec -> pPrintPrecApp prec (asPrettyFunction1 e) ty)
CoApp e co -> ([], \prec -> pPrintPrecApp prec (asPrettyFunction1 e) co)
App e x -> ([], \prec -> pPrintPrecApp prec (asPrettyFunction1 e) x)
PrimOp pop tys es -> ([], \prec -> pPrintPrecPrimOp prec pop (map asPrettyFunction tys) (map asPrettyFunction1 es))
Case e x _ty alts -> ([], \prec -> pPrintPrecCase prec (asPrettyFunction1 e) x (map (second asPrettyFunction1) alts))
Cast e co -> ([], \prec -> pPrintPrecCast prec (asPrettyFunction1 e) co)
pPrintPrecCast :: Outputable a => Rational -> a -> Coercion -> SDoc
pPrintPrecCast prec e co = prettyParen (prec > noPrec) $ pPrintPrec opPrec e <+> text "|>" <+> pPrintPrec appPrec co
pPrintPrecCoerced :: Outputable a => Rational -> Coerced a -> SDoc
pPrintPrecCoerced prec (CastBy co _, e) = pPrintPrecCast prec e co
pPrintPrecCoerced prec (Uncast, e) = pPrintPrec prec e
pPrintPrecApp :: (Outputable a, Outputable b) => Rational -> a -> b -> SDoc
pPrintPrecApp prec e1 e2 = prettyParen (prec >= appPrec) $ pPrintPrec opPrec e1 <+> pPrintPrec appPrec e2
pPrintPrecPrimOp :: (Outputable a, Outputable b, Outputable c) => Rational -> a -> [b] -> [c] -> SDoc
pPrintPrecPrimOp prec pop as xs = pPrintPrecApps prec (PrettyFunction (\prec -> pPrintPrecApps prec pop as)) xs
pPrintPrecCase :: (Outputable a, Outputable b, Outputable c) => Rational -> a -> Var -> [(b, c)] -> SDoc
pPrintPrecCase prec e x alts = prettyParen (prec > noPrec) $ hang (text "case" <+> pPrintPrec noPrec e <+> text "of" <+> pPrintBndr CaseBind x) 2 $ vcat (map (pPrintPrecAlt noPrec) alts)
pPrintPrecAlt :: (Outputable a, Outputable b) => Rational -> (a, b) -> SDoc
pPrintPrecAlt _ (alt_con, alt_e) = hang (pPrintPrec noPrec alt_con <+> text "->") 2 (pPrintPrec noPrec alt_e)
pPrintPrecLet :: (Outputable a, Outputable b) => Rational -> Var -> a -> b -> SDoc
pPrintPrecLet prec x e e_body = prettyParen (prec > noPrec) $ hang (text "let") 2 (pPrintBndr LetBind x <+> text "=" <+> pPrintPrec noPrec e) $$ text "in" <+> pPrintPrec noPrec e_body
pPrintPrecLetRec, pPrintPrecWhere :: (Outputable a, Outputable b) => Rational -> [(Var, a)] -> b -> SDoc
pPrintPrecLetRec prec xes e_body
| [] <- xes = pPrintPrec prec e_body
| otherwise = prettyParen (prec > noPrec) $ hang (text "letrec") 2 (vcat [hang (pPrintBndr LetBind x) 2 (text "=" <+> pPrintPrec noPrec e) | (x, e) <- xes]) $$ text "in" <+> pPrintPrec noPrec e_body
pPrintPrecWhere prec xes e_body
| [] <- xes = pPrintPrec prec e_body
| otherwise = prettyParen (prec > noPrec) $ hang (pPrintPrec noPrec e_body) 1 $ hang (text "where") 1 $ vcat [hang (pPrintBndr LetBind x) 2 (text "=" <+> pPrintPrec noPrec e) | (x, e) <- xes]
instance (Functor ann, OutputableLambdas1 ann) => Outputable (ValueF ann) where
pprPrec = pprPrecDefault
instance (Functor ann, OutputableLambdas1 ann) => OutputableLambdas (ValueF ann) where
pprPrecLam v = case v of
TyLambda x e -> (x:xs, ppr_prec)
where (xs, ppr_prec) = pprPrecLam1 e
Lambda x e -> (x:xs, ppr_prec)
where (xs, ppr_prec) = pprPrecLam1 e
Data dc tys cos xs -> ([], \prec -> pPrintPrecApps prec dc ([asPrettyFunction ty | ty <- tys] ++ [asPrettyFunction co | co <- cos] ++ [asPrettyFunction x | x <- xs]))
Literal l -> ([], flip pPrintPrec l)
Coercion co -> ([], flip pPrintPrec co)
pPrintPrecLam :: Outputable a => Rational -> [Var] -> a -> SDoc
pPrintPrecLam prec [] e = pPrintPrec prec e
pPrintPrecLam prec xs e = prettyParen (prec > noPrec) $ text "\\" <> (vcat [pPrintBndr LambdaBind y | y <- xs] $$ text "->" <+> pPrintPrec noPrec e)
pPrintPrecApps :: (Outputable a, Outputable b) => Rational -> a -> [b] -> SDoc
pPrintPrecApps prec e1 es2 = prettyParen (not (null es2) && prec >= appPrec) $ pPrintPrec opPrec e1 <+> hsep (map (pPrintPrec appPrec) es2)
speculator from re - speculating values , but also as an approximation for what GHC considers a value .
termIsValue :: Copointed ann => ann (TermF ann) -> Bool
termIsValue = isValue . extract
where
isValue (Value _) = True
isValue (Cast e _) | Value _ <- extract e = True
isValue _ = False
termIsCheap :: Copointed ann => ann (TermF ann) -> Bool
termIsCheap = termIsCheap' . extract
termIsCheap' :: Copointed ann => TermF ann -> Bool
termIsCheap' (Var _) = True
termIsCheap' (Value _) = True
termIsCheap' (Cast e _) = termIsCheap e
NB : important for pushing down let - bound applications of ` ` error ''
termIsCheap' _ = False
varString :: Var -> String
varString = nameString . varName
nameString :: Name -> String
nameString = occNameString . nameOccName
INVARIANT : NormalCo is not
type Coerced a = (CastBy, a)
castBy :: NormalCo -> Tag -> CastBy
| otherwise = CastBy co tg
castByCo :: CastBy -> Maybe NormalCo
castByCo Uncast = Nothing
castByCo (CastBy co _) = Just co
mkSymCastBy :: InScopeSet -> CastBy -> CastBy
mkSymCastBy _ Uncast = Uncast
mkSymCastBy ids (CastBy co tg) = CastBy (mkSymCo ids co) tg
mkTransCastBy :: InScopeSet -> CastBy -> CastBy -> CastBy
mkTransCastBy _ Uncast cast_by2 = cast_by2
mkTransCastBy _ cast_by1 Uncast = cast_by1
mkTransCastBy ids (CastBy co1 _tg1) (CastBy co2 tg2) = castBy (mkTransCo ids co1 co2) tg2
canAbstractOverTyVarOfKind :: Kind -> Bool
canAbstractOverTyVarOfKind = ok
where
TODO : I 'm not 100 % sure of the correctness of this check
data ( a : : * - > # ) = Bar ( a Int )
: : ( * - > # ) - > *
ok k
| isOpenTypeKind k
TODO(osa ): I ca n't find these function in GHC , disabling for now .
|| isUnliftedTypeKind k = False
ok (TyVarTy _) =
True
ok (AppTy k1 k2) = ok k1 && ok k2
ok (TyConApp _ ks) = all ok ks
ok (FunTy k1 k2) = ok k1 && ok k2
ok (ForAllTy _ k) = ok k
ok (LitTy _) = True
valueType :: Copointed ann => ValueF ann -> Type
valueType (TyLambda a e) = mkForAllTy a (termType e)
valueType (Lambda x e) = idType x `mkFunTy` termType e
valueType (Data dc as cos xs) = ((idType (dataConWorkId dc) `applyTys` as) `applyFunTys` map coercionType cos) `applyFunTys` map idType xs
valueType (Literal l) = literalType l
valueType (Coercion co) = coercionType co
termType :: Copointed ann => ann (TermF ann) -> Type
termType = termType' . extract
termType' :: Copointed ann => TermF ann -> Type
termType' e = case e of
Var x -> idType x
Value v -> valueType v
TyApp e a -> termType e `applyTy` a
CoApp e co -> termType e `applyFunTy` coercionType co
App e x -> termType e `applyFunTy` idType x
PrimOp pop tys es -> (primOpType pop `applyTys` tys) `applyFunTys` map termType es
Case _ _ ty _ -> ty
Let _ _ e -> termType e
LetRec _ e -> termType e
Cast _ co -> pSnd (coercionKind co)
applyFunTy :: Type -> Type -> Type
applyFunTy fun_ty got_arg_ty = case splitFunTy_maybe fun_ty of
Just (expected_arg_ty, res_ty) -> ASSERT2(got_arg_ty `eqType` expected_arg_ty, text "applyFunTy:" <+> ppr got_arg_ty <+> ppr expected_arg_ty) res_ty
Nothing -> pprPanic "applyFunTy" (ppr fun_ty $$ ppr got_arg_ty)
applyFunTys :: Type -> [Type] -> Type
applyFunTys = foldl' applyFunTy
class Functor ann => Symantics ann where
var :: Var -> ann (TermF ann)
value :: ValueF ann -> ann (TermF ann)
app :: ann (TermF ann) -> Var -> ann (TermF ann)
coApp :: ann (TermF ann) -> Coercion -> ann (TermF ann)
tyApp :: ann (TermF ann) -> Type -> ann (TermF ann)
primOp :: PrimOp -> [Type] -> [ann (TermF ann)] -> ann (TermF ann)
case_ :: ann (TermF ann) -> Var -> Type -> [AltF ann] -> ann (TermF ann)
let_ :: Var -> ann (TermF ann) -> ann (TermF ann) -> ann (TermF ann)
letRec :: [(Var, ann (TermF ann))] -> ann (TermF ann) -> ann (TermF ann)
cast :: ann (TermF ann) -> Coercion -> ann (TermF ann)
instance Symantics Identity where
var = I . Var
value = I . Value
tyApp e = I . TyApp e
coApp e = I . CoApp e
app e = I . App e
primOp pop tys = I . PrimOp pop tys
case_ e x ty = I . Case e x ty
let_ x e1 = I . Let x e1
letRec xes = I . LetRec xes
cast e = I . Cast e
reify :: (forall ann. Symantics ann => ann (TermF ann)) -> Term
reify x = x
reflect :: Term -> (forall ann. Symantics ann => ann (TermF ann))
reflect (I e) = case e of
Var x -> var x
Value v -> value (reflectValue v)
TyApp e ty -> tyApp (reflect e) ty
App e x -> app (reflect e) x
CoApp e co -> coApp (reflect e) co
PrimOp pop tys es -> primOp pop tys (map reflect es)
Case e x ty alts -> case_ (reflect e) x ty (map (second reflect) alts)
Let x e1 e2 -> let_ x (reflect e1) (reflect e2)
LetRec xes e -> letRec (map (second reflect) xes) (reflect e)
Cast e co -> cast (reflect e) co
where
reflectValue :: Value -> (forall ann. Symantics ann => ValueF ann)
reflectValue v = case v of
TyLambda x e -> TyLambda x (reflect e)
Lambda x e -> Lambda x (reflect e)
Data dc tys cos xs -> Data dc tys cos xs
Literal l -> Literal l
Coercion co -> Coercion co
literal :: Symantics ann => Literal -> ann (TermF ann)
literal = value . Literal
coercion :: Symantics ann => Coercion -> ann (TermF ann)
coercion = value . Coercion
lambda : : Symantics ann = > Var - > ann ( TermF ) - > ann ( TermF )
lambda x = value . Lambda x
data _ : : Symantics ann = > DataCon - > [ Var ] - > ann ( TermF )
data _ dc = value . Data dc
lambda :: Symantics ann => Var -> ann (TermF ann) -> ann (TermF ann)
lambda x = value . Lambda x
data_ :: Symantics ann => DataCon -> [Var] -> ann (TermF ann)
data_ dc = value . Data dc
-}
tyLambdas :: Symantics ann => [TyVar] -> ann (TermF ann) -> ann (TermF ann)
tyLambdas = flip $ foldr (\x -> value . TyLambda x)
lambdas :: Symantics ann => [Id] -> ann (TermF ann) -> ann (TermF ann)
lambdas = flip $ foldr (\x -> value . Lambda x)
tyVarIdLambdas :: Symantics ann => [Var] -> ann (TermF ann) -> ann (TermF ann)
tyVarIdLambdas = flip $ foldr tyVarIdLambda
tyVarIdLambda :: Symantics ann => Var -> ann (TermF ann) -> ann (TermF ann)
tyVarIdLambda x e | isTyVar x = value $ TyLambda x e
| otherwise = value $ Lambda x e
tyApps :: Symantics ann => ann (TermF ann) -> [Type] -> ann (TermF ann)
tyApps = foldl tyApp
coApps :: Symantics ann => ann (TermF ann) -> [Coercion] -> ann (TermF ann)
coApps = foldl coApp
apps :: Symantics ann => ann (TermF ann) -> [Id] -> ann (TermF ann)
apps = foldl app
tyVarIdApps :: Symantics ann => ann (TermF ann) -> [Var] -> ann (TermF ann)
tyVarIdApps = foldl tyVarIdApp
tyVarIdApp :: Symantics ann => ann (TermF ann) -> Var -> ann (TermF ann)
tyVarIdApp e x | isTyVar x = e `tyApp` mkTyVarTy x
| otherwise = e `app` x
strictLet : : Symantics ann = > Var - > ann ( TermF ) - > ann ( TermF ) - > ann ( TermF )
strictLet x e1 e2 = case _ e1 [ ( DefaultAlt ( Just x ) , e2 ) ]
collectLambdas : : Term - > ( [ Var ] , Term )
collectLambdas ( I ( Value ( Lambda x e ) ) ) = first ( x :) $ collectLambdas e
collectLambdas e = ( [ ] , e )
freshFloatVar : : IdSupply - > String - > Term - > ( IdSupply , Maybe ( Var , Term ) , )
freshFloatVar ids _ ( I ( ) ) = ( ids , Nothing , x )
freshFloatVar ids s e = ( ids ' , Just ( y , e ) , y )
where ( ids ' , y ) = freshName ids s
freshFloatVars : : IdSupply - > String - > [ Term ] - > ( IdSupply , [ ( Var , Term ) ] , [ ] )
freshFloatVars ids s es = reassociate ( \ids - > associate . freshFloatVar ids s ) ids es
where reassociate ( ids , floats_xs ) = let ( mb_floats , xs ) = unzip floats_xs in ( ids , mb_floats , xs )
associate ( ids , mb_float , x ) = ( ids , ( mb_float , x ) )
strictLet :: Symantics ann => Var -> ann (TermF ann) -> ann (TermF ann) -> ann (TermF ann)
strictLet x e1 e2 = case_ e1 [(DefaultAlt (Just x), e2)]
collectLambdas :: Term -> ([Var], Term)
collectLambdas (I (Value (Lambda x e))) = first (x:) $ collectLambdas e
collectLambdas e = ([], e)
freshFloatVar :: IdSupply -> String -> Term -> (IdSupply, Maybe (Var, Term), Var)
freshFloatVar ids _ (I (Var x)) = (ids, Nothing, x)
freshFloatVar ids s e = (ids', Just (y, e), y)
where (ids', y) = freshName ids s
freshFloatVars :: IdSupply -> String -> [Term] -> (IdSupply, [(Var, Term)], [Var])
freshFloatVars ids s es = reassociate $ mapAccumL (\ids -> associate . freshFloatVar ids s) ids es
where reassociate (ids, floats_xs) = let (mb_floats, xs) = unzip floats_xs in (ids, catMaybes mb_floats, xs)
associate (ids, mb_float, x) = (ids, (mb_float, x))
-}
|
8579133ab2b6f3acb1d367821ab001b8d6bfb4dc2da24bdfd13767f4190072ce | xclerc/ocamljava | javaStringPrintf.ml |
* This file is part of library .
* Copyright ( C ) 2007 - 2015 .
*
* library is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation ; either version 3 of the License , or
* ( at your option ) any later version .
*
* library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program . If not , see < / > .
* This file is part of OCaml-Java library.
* Copyright (C) 2007-2015 Xavier Clerc.
*
* OCaml-Java library is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* OCaml-Java library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see </>.
*)
let fprintf chan str =
JavaString.output_string chan str
let printf x y = fprintf x y
let eprintf x y = fprintf x y
let sprintf () str =
JavaString.to_string str
let bprintf buff str =
Buffer.add_string buff (JavaString.to_string str)
let ifprintf _dummy _str =
()
let kfprintf x y = fprintf x y
let ikfprintf x y = fprintf x y
let ksprintf x y = sprintf x y
let kbprintf x y = bprintf x y
| null | https://raw.githubusercontent.com/xclerc/ocamljava/8330bfdfd01d0c348f2ba2f0f23d8f5a8f6015b1/library/javalib/src/javaStringPrintf.ml | ocaml |
* This file is part of library .
* Copyright ( C ) 2007 - 2015 .
*
* library is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation ; either version 3 of the License , or
* ( at your option ) any later version .
*
* library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program . If not , see < / > .
* This file is part of OCaml-Java library.
* Copyright (C) 2007-2015 Xavier Clerc.
*
* OCaml-Java library is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* OCaml-Java library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see </>.
*)
let fprintf chan str =
JavaString.output_string chan str
let printf x y = fprintf x y
let eprintf x y = fprintf x y
let sprintf () str =
JavaString.to_string str
let bprintf buff str =
Buffer.add_string buff (JavaString.to_string str)
let ifprintf _dummy _str =
()
let kfprintf x y = fprintf x y
let ikfprintf x y = fprintf x y
let ksprintf x y = sprintf x y
let kbprintf x y = bprintf x y
| |
ad56164643bc0d7e041efffb6be046934c92e7743aa16410951c43168904ee3d | mfoemmel/erlang-otp | wxColourData.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2008 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%% This file is generated DO NOT EDIT
%% @doc See external documentation: <a href="">wxColourData</a>.
%% @type wxColourData(). An object reference, The representation is internal
%% and can be changed without notice. It can't be used for comparsion
%% stored on disc or distributed for use on other nodes.
-module(wxColourData).
-include("wxe.hrl").
-export([destroy/1,getChooseFull/1,getColour/1,getCustomColour/2,new/0,new/1,
setChooseFull/2,setColour/2,setCustomColour/3]).
%% inherited exports
-export([parent_class/1]).
%% @hidden
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
( ) - > wxColourData ( )
%% @doc See <a href="#wxcolourdatawxcolourdata">external documentation</a>.
new() ->
wxe_util:construct(?wxColourData_new_0,
<<>>).
%% @spec (Data::wxColourData()) -> wxColourData()
%% @doc See <a href="#wxcolourdatawxcolourdata">external documentation</a>.
new(#wx_ref{type=DataT,ref=DataRef}) ->
?CLASS(DataT,wxColourData),
wxe_util:construct(?wxColourData_new_1,
<<DataRef:32/?UI>>).
%% @spec (This::wxColourData()) -> bool()
%% @doc See <a href="#wxcolourdatagetchoosefull">external documentation</a>.
getChooseFull(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxColourData),
wxe_util:call(?wxColourData_GetChooseFull,
<<ThisRef:32/?UI>>).
%% @spec (This::wxColourData()) -> wx:colour()
%% @doc See <a href="#wxcolourdatagetcolour">external documentation</a>.
getColour(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxColourData),
wxe_util:call(?wxColourData_GetColour,
<<ThisRef:32/?UI>>).
%% @spec (This::wxColourData(), I::integer()) -> wx:colour()
%% @doc See <a href="#wxcolourdatagetcustomcolour">external documentation</a>.
getCustomColour(#wx_ref{type=ThisT,ref=ThisRef},I)
when is_integer(I) ->
?CLASS(ThisT,wxColourData),
wxe_util:call(?wxColourData_GetCustomColour,
<<ThisRef:32/?UI,I:32/?UI>>).
%% @spec (This::wxColourData(), Flag::bool()) -> ok
%% @doc See <a href="#wxcolourdatasetchoosefull">external documentation</a>.
setChooseFull(#wx_ref{type=ThisT,ref=ThisRef},Flag)
when is_boolean(Flag) ->
?CLASS(ThisT,wxColourData),
wxe_util:cast(?wxColourData_SetChooseFull,
<<ThisRef:32/?UI,(wxe_util:from_bool(Flag)):32/?UI>>).
%% @spec (This::wxColourData(), Colour::wx:colour()) -> ok
%% @doc See <a href="#wxcolourdatasetcolour">external documentation</a>.
setColour(#wx_ref{type=ThisT,ref=ThisRef},Colour)
when tuple_size(Colour) =:= 3; tuple_size(Colour) =:= 4 ->
?CLASS(ThisT,wxColourData),
wxe_util:cast(?wxColourData_SetColour,
<<ThisRef:32/?UI,(wxe_util:colour_bin(Colour)):16/binary>>).
%% @spec (This::wxColourData(), I::integer(), Colour::wx:colour()) -> ok
%% @doc See <a href="#wxcolourdatasetcustomcolour">external documentation</a>.
setCustomColour(#wx_ref{type=ThisT,ref=ThisRef},I,Colour)
when is_integer(I),tuple_size(Colour) =:= 3; tuple_size(Colour) =:= 4 ->
?CLASS(ThisT,wxColourData),
wxe_util:cast(?wxColourData_SetCustomColour,
<<ThisRef:32/?UI,I:32/?UI,(wxe_util:colour_bin(Colour)):16/binary>>).
%% @spec (This::wxColourData()) -> ok
%% @doc Destroys this object, do not use object again
destroy(Obj=#wx_ref{type=Type}) ->
?CLASS(Type,wxColourData),
wxe_util:destroy(?DESTROY_OBJECT,Obj),
ok.
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/wx/src/gen/wxColourData.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
This file is generated DO NOT EDIT
@doc See external documentation: <a href="">wxColourData</a>.
@type wxColourData(). An object reference, The representation is internal
and can be changed without notice. It can't be used for comparsion
stored on disc or distributed for use on other nodes.
inherited exports
@hidden
@doc See <a href="#wxcolourdatawxcolourdata">external documentation</a>.
@spec (Data::wxColourData()) -> wxColourData()
@doc See <a href="#wxcolourdatawxcolourdata">external documentation</a>.
@spec (This::wxColourData()) -> bool()
@doc See <a href="#wxcolourdatagetchoosefull">external documentation</a>.
@spec (This::wxColourData()) -> wx:colour()
@doc See <a href="#wxcolourdatagetcolour">external documentation</a>.
@spec (This::wxColourData(), I::integer()) -> wx:colour()
@doc See <a href="#wxcolourdatagetcustomcolour">external documentation</a>.
@spec (This::wxColourData(), Flag::bool()) -> ok
@doc See <a href="#wxcolourdatasetchoosefull">external documentation</a>.
@spec (This::wxColourData(), Colour::wx:colour()) -> ok
@doc See <a href="#wxcolourdatasetcolour">external documentation</a>.
@spec (This::wxColourData(), I::integer(), Colour::wx:colour()) -> ok
@doc See <a href="#wxcolourdatasetcustomcolour">external documentation</a>.
@spec (This::wxColourData()) -> ok
@doc Destroys this object, do not use object again | Copyright Ericsson AB 2008 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(wxColourData).
-include("wxe.hrl").
-export([destroy/1,getChooseFull/1,getColour/1,getCustomColour/2,new/0,new/1,
setChooseFull/2,setColour/2,setCustomColour/3]).
-export([parent_class/1]).
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
( ) - > wxColourData ( )
new() ->
wxe_util:construct(?wxColourData_new_0,
<<>>).
new(#wx_ref{type=DataT,ref=DataRef}) ->
?CLASS(DataT,wxColourData),
wxe_util:construct(?wxColourData_new_1,
<<DataRef:32/?UI>>).
getChooseFull(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxColourData),
wxe_util:call(?wxColourData_GetChooseFull,
<<ThisRef:32/?UI>>).
getColour(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxColourData),
wxe_util:call(?wxColourData_GetColour,
<<ThisRef:32/?UI>>).
getCustomColour(#wx_ref{type=ThisT,ref=ThisRef},I)
when is_integer(I) ->
?CLASS(ThisT,wxColourData),
wxe_util:call(?wxColourData_GetCustomColour,
<<ThisRef:32/?UI,I:32/?UI>>).
setChooseFull(#wx_ref{type=ThisT,ref=ThisRef},Flag)
when is_boolean(Flag) ->
?CLASS(ThisT,wxColourData),
wxe_util:cast(?wxColourData_SetChooseFull,
<<ThisRef:32/?UI,(wxe_util:from_bool(Flag)):32/?UI>>).
setColour(#wx_ref{type=ThisT,ref=ThisRef},Colour)
when tuple_size(Colour) =:= 3; tuple_size(Colour) =:= 4 ->
?CLASS(ThisT,wxColourData),
wxe_util:cast(?wxColourData_SetColour,
<<ThisRef:32/?UI,(wxe_util:colour_bin(Colour)):16/binary>>).
setCustomColour(#wx_ref{type=ThisT,ref=ThisRef},I,Colour)
when is_integer(I),tuple_size(Colour) =:= 3; tuple_size(Colour) =:= 4 ->
?CLASS(ThisT,wxColourData),
wxe_util:cast(?wxColourData_SetCustomColour,
<<ThisRef:32/?UI,I:32/?UI,(wxe_util:colour_bin(Colour)):16/binary>>).
destroy(Obj=#wx_ref{type=Type}) ->
?CLASS(Type,wxColourData),
wxe_util:destroy(?DESTROY_OBJECT,Obj),
ok.
|
6e7df2eeb27434cf72b9b4120f9a36cf00d9307c85611cf6d0470cced9e529b1 | metaocaml/ber-metaocaml | t200-getfield2.ml | TEST
include tool - ocaml - lib
flags = " -w a "
ocaml_script_as_argument = " true "
* setup - ocaml - build - env
* *
include tool-ocaml-lib
flags = "-w a"
ocaml_script_as_argument = "true"
* setup-ocaml-build-env
** ocaml
*)
open Lib;;
type t = {
a : int;
b : int;
c : int;
};;
if { a = 7; b = 6; c = 5 }.c <> 5 then raise Not_found;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 CONSTINT 5
11 PUSHGETGLOBAL < 0>(7 , 6 , 5 )
13 GETFIELD2
14 NEQ
15 BRANCHIFNOT 22
17 GETGLOBAL Not_found
19 MAKEBLOCK1 0
21 RAISE
22 ATOM0
23
25 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 CONSTINT 5
11 PUSHGETGLOBAL <0>(7, 6, 5)
13 GETFIELD2
14 NEQ
15 BRANCHIFNOT 22
17 GETGLOBAL Not_found
19 MAKEBLOCK1 0
21 RAISE
22 ATOM0
23 SETGLOBAL T200-getfield2
25 STOP
**)
| null | https://raw.githubusercontent.com/metaocaml/ber-metaocaml/4992d1f87fc08ccb958817926cf9d1d739caf3a2/testsuite/tests/tool-ocaml/t200-getfield2.ml | ocaml | TEST
include tool - ocaml - lib
flags = " -w a "
ocaml_script_as_argument = " true "
* setup - ocaml - build - env
* *
include tool-ocaml-lib
flags = "-w a"
ocaml_script_as_argument = "true"
* setup-ocaml-build-env
** ocaml
*)
open Lib;;
type t = {
a : int;
b : int;
c : int;
};;
if { a = 7; b = 6; c = 5 }.c <> 5 then raise Not_found;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 CONSTINT 5
11 PUSHGETGLOBAL < 0>(7 , 6 , 5 )
13 GETFIELD2
14 NEQ
15 BRANCHIFNOT 22
17 GETGLOBAL Not_found
19 MAKEBLOCK1 0
21 RAISE
22 ATOM0
23
25 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 CONSTINT 5
11 PUSHGETGLOBAL <0>(7, 6, 5)
13 GETFIELD2
14 NEQ
15 BRANCHIFNOT 22
17 GETGLOBAL Not_found
19 MAKEBLOCK1 0
21 RAISE
22 ATOM0
23 SETGLOBAL T200-getfield2
25 STOP
**)
| |
8f5fc6fe033d130012586ab93d1b2580a49a99e0a69a17b46459629827e2860a | ralsei/graphite | dodged-bar.rkt | #lang racket
(require data-frame fancy-app
graphite)
(provide (all-defined-out))
(define all-data (df-read/csv "./data/gss_sm.csv"))
(graph #:data all-data
#:title "Where people live, I guess?"
#:x-label "Region"
#:y-label "% of total"
#:mapping (aes #:x "bigregion")
#:width 1200 #:height 600
(bar #:mode 'prop #:group-gap 2 #:mapping (aes #:group "religion")))
| null | https://raw.githubusercontent.com/ralsei/graphite/a57eacf1b9262a9bde220035e303c42a814ecfe3/graphite-examples/dodged-bar.rkt | racket | #lang racket
(require data-frame fancy-app
graphite)
(provide (all-defined-out))
(define all-data (df-read/csv "./data/gss_sm.csv"))
(graph #:data all-data
#:title "Where people live, I guess?"
#:x-label "Region"
#:y-label "% of total"
#:mapping (aes #:x "bigregion")
#:width 1200 #:height 600
(bar #:mode 'prop #:group-gap 2 #:mapping (aes #:group "religion")))
| |
6798f9bd3e7b53ec76453c399b1f6715a24b50ef71ef58026b88791ae018a4a4 | PacktWorkshops/The-Clojure-Workshop | tennis_history.clj | (ns packt-clj.tennis
(:require
[clojure.data.csv :as csv]
[clojure.java.io :as io]
[clojure.math.numeric-tower :as math]
[semantic-csv.core :as sc]))
The first part of this file comes from the tennis_history.clj
from Exercise 5.04 .
(defn match-probability [player-1-rating player-2-rating]
(/ 1
(+ 1 (math/expt 10 (/ (- player-2-rating player-1-rating) 400)))))
(defn recalculate-rating [k previous-rating expected-outcome real-outcome]
(+ previous-rating (* k (- real-outcome expected-outcome))))
(defn elo-db [csv k]
(with-open [r (io/reader csv)]
(->> (csv/read-csv r)
sc/mappify
(sc/cast-with {:winner_sets_won sc/->int
:loser_sets_won sc/->int
:winner_games_won sc/->int
:loser_games_won sc/->int})
(reduce (fn [{:keys [players] :as acc}
{:keys [_winner_name winner_slug
_loser_name loser_slug] :as match}]
(let [winner-rating (get players winner_slug 400)
loser-rating (get players loser_slug 400)
winner-probability (match-probability
winner-rating
loser-rating)
loser-probability (- 1 winner-probability)]
(-> acc
(assoc-in [:players winner_slug]
(recalculate-rating k
winner-rating
winner-probability 1))
(assoc-in [:players loser_slug]
(recalculate-rating k
loser-rating
loser-probability 0))
(update :matches
(fn [ms]
(conj ms
(assoc match
:winner_rating winner-rating
:loser_rating loser-rating)))))))
{:players {}
:matches []})
:matches
reverse)))
(def matches
(elo-db "match_scores_1991-2016_unindexed_csv.csv" 35))
(defn player-in-match? [{:keys [winner_slug loser_slug]} player-slug]
((hash-set winner_slug loser_slug) player-slug))
(defn match-tree-by-player [matches player-slug]
(lazy-seq
(cond (empty? matches)
'()
(player-in-match? (first matches) player-slug)
(cons
(first matches)
(cons
[(match-tree-by-player (rest matches) (:winner_slug (first matches)))
(match-tree-by-player (rest matches) (:loser_slug (first matches)))]
'()))
:else
(match-tree-by-player (rest matches) player-slug))))
(def federer
(match-tree-by-player matches "roger-federer"))
;;; The new functions start here
(defn take-matches
([limit tree]
(take-matches limit tree identity))
([limit tree f]
(cond (zero? limit)
'()
(= 1 limit)
(f (first tree))
:else
(cons
(f (first tree))
(cons
[(take-matches (dec limit) (first (second tree)) f)
(take-matches (dec limit) (second (second tree)) f)]
'())))))
(defn matches-with-ratings [limit tree]
(take-matches limit
tree
(fn [match]
(-> match
(update :winner_rating int)
(update :loser_rating int)
(select-keys [:winner_name
:loser_name
:winner_rating
:loser_rating])
(assoc :winner_probability_percentage
(->> (match-probability
(:winner_rating match)
(:loser_rating match))
(* 100)
(int)))))))
(matches-with-ratings 3 federer)
= > ( { : " " ,
: loser_name " " ,
: winner_rating 1129 ,
: loser_rating 625 ,
: winner_probability_percentage 94 }
[ ( { : " " ,
: loser_name " " ,
: winner_rating 1128 ,
: loser_rating 384 ,
: winner_probability_percentage 98 }
[ { : " " ,
: loser_name " Daniel Evans " ,
: winner_rating 1127 ,
: loser_rating 603 ,
: winner_probability_percentage 95 }
{ : " Pierre - Hugues Herbert " ,
: loser_name " " ,
: winner_rating 587 ,
: loser_rating 392 ,
: winner_probability_percentage 75 } ] )
( { : " " ,
: loser_name " " ,
: winner_rating 638 ,
: loser_rating 643 ,
: winner_probability_percentage 49 }
[ { : " " ,
: loser_name " " ,
: winner_rating 560 ,
: loser_rating 661 ,
: winner_probability_percentage 35 }
{ : " " ,
: loser_name " Diego Schwartzman " ,
: winner_rating 623 ,
: loser_rating 665 ,
: winner_probability_percentage 43 } ] ) ] )
| null | https://raw.githubusercontent.com/PacktWorkshops/The-Clojure-Workshop/3d309bb0e46a41ce2c93737870433b47ce0ba6a2/Chapter07/Exercise7.05/tennis_history.clj | clojure | The new functions start here | (ns packt-clj.tennis
(:require
[clojure.data.csv :as csv]
[clojure.java.io :as io]
[clojure.math.numeric-tower :as math]
[semantic-csv.core :as sc]))
The first part of this file comes from the tennis_history.clj
from Exercise 5.04 .
(defn match-probability [player-1-rating player-2-rating]
(/ 1
(+ 1 (math/expt 10 (/ (- player-2-rating player-1-rating) 400)))))
(defn recalculate-rating [k previous-rating expected-outcome real-outcome]
(+ previous-rating (* k (- real-outcome expected-outcome))))
(defn elo-db [csv k]
(with-open [r (io/reader csv)]
(->> (csv/read-csv r)
sc/mappify
(sc/cast-with {:winner_sets_won sc/->int
:loser_sets_won sc/->int
:winner_games_won sc/->int
:loser_games_won sc/->int})
(reduce (fn [{:keys [players] :as acc}
{:keys [_winner_name winner_slug
_loser_name loser_slug] :as match}]
(let [winner-rating (get players winner_slug 400)
loser-rating (get players loser_slug 400)
winner-probability (match-probability
winner-rating
loser-rating)
loser-probability (- 1 winner-probability)]
(-> acc
(assoc-in [:players winner_slug]
(recalculate-rating k
winner-rating
winner-probability 1))
(assoc-in [:players loser_slug]
(recalculate-rating k
loser-rating
loser-probability 0))
(update :matches
(fn [ms]
(conj ms
(assoc match
:winner_rating winner-rating
:loser_rating loser-rating)))))))
{:players {}
:matches []})
:matches
reverse)))
(def matches
(elo-db "match_scores_1991-2016_unindexed_csv.csv" 35))
(defn player-in-match? [{:keys [winner_slug loser_slug]} player-slug]
((hash-set winner_slug loser_slug) player-slug))
(defn match-tree-by-player [matches player-slug]
(lazy-seq
(cond (empty? matches)
'()
(player-in-match? (first matches) player-slug)
(cons
(first matches)
(cons
[(match-tree-by-player (rest matches) (:winner_slug (first matches)))
(match-tree-by-player (rest matches) (:loser_slug (first matches)))]
'()))
:else
(match-tree-by-player (rest matches) player-slug))))
(def federer
(match-tree-by-player matches "roger-federer"))
(defn take-matches
([limit tree]
(take-matches limit tree identity))
([limit tree f]
(cond (zero? limit)
'()
(= 1 limit)
(f (first tree))
:else
(cons
(f (first tree))
(cons
[(take-matches (dec limit) (first (second tree)) f)
(take-matches (dec limit) (second (second tree)) f)]
'())))))
(defn matches-with-ratings [limit tree]
(take-matches limit
tree
(fn [match]
(-> match
(update :winner_rating int)
(update :loser_rating int)
(select-keys [:winner_name
:loser_name
:winner_rating
:loser_rating])
(assoc :winner_probability_percentage
(->> (match-probability
(:winner_rating match)
(:loser_rating match))
(* 100)
(int)))))))
(matches-with-ratings 3 federer)
= > ( { : " " ,
: loser_name " " ,
: winner_rating 1129 ,
: loser_rating 625 ,
: winner_probability_percentage 94 }
[ ( { : " " ,
: loser_name " " ,
: winner_rating 1128 ,
: loser_rating 384 ,
: winner_probability_percentage 98 }
[ { : " " ,
: loser_name " Daniel Evans " ,
: winner_rating 1127 ,
: loser_rating 603 ,
: winner_probability_percentage 95 }
{ : " Pierre - Hugues Herbert " ,
: loser_name " " ,
: winner_rating 587 ,
: loser_rating 392 ,
: winner_probability_percentage 75 } ] )
( { : " " ,
: loser_name " " ,
: winner_rating 638 ,
: loser_rating 643 ,
: winner_probability_percentage 49 }
[ { : " " ,
: loser_name " " ,
: winner_rating 560 ,
: loser_rating 661 ,
: winner_probability_percentage 35 }
{ : " " ,
: loser_name " Diego Schwartzman " ,
: winner_rating 623 ,
: loser_rating 665 ,
: winner_probability_percentage 43 } ] ) ] )
|
0123bbb32e4ff07b999cfa316b35ac437eca2e5f1dfd22f56a8a0903edbc628d | may-liu/qtalk | mod_recent_contact.erl | %%%----------------------------------------------------------------------
%%% File : mod_ping.erl
Author : < >
Purpose : Support XEP-0199 XMPP Ping and periodic keepalives
Created : 11 Jul 2009 by < >
%%%
%%%
ejabberd , Copyright ( C ) 2002 - 2014 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
%%%
%%%----------------------------------------------------------------------
-module(mod_recent_contact).
-behaviour(gen_mod).
-include("ejabberd.hrl").
-include("logger.hrl").
-include("jlib.hrl").
-define(SUPERVISOR, ejabberd_sup).
-behaviour(gen_server).
%% gen_mod callbacks
-export([start/2, stop/1,start_link/2]).
-export([init/1,handle_call/3, handle_cast/2,
handle_info/2, terminate/2, code_change/3]).
%% Hook callbacks
-export([get_recent_contact/3,
handle_block_user/3]).
-record(recent_contact,{user,recent_contact_list}).
-record(block_user,{user,block_user}).
-record(state,{host,timer}).
%%=================================
gen_svrver callbacks
%%=================================
start_link(Host, Opts) ->
Proc = gen_mod:get_module_proc(Host, ?MODULE),
gen_server:start_link({local, Proc}, ?MODULE,[Host, Opts], []).
init([Host, Opts]) ->
IQDisc = gen_mod:get_opt(iqdisc, Opts, fun gen_iq_handler:check_type/1,
one_queue),
catch ets:new(recent_contact, [named_table, ordered_set, public, {keypos,#recent_contact.user}]),
catch ets:new(block_user, [named_table, bag, public,{keypos, #block_user.user}]),
init_block_user(Host),
gen_iq_handler:add_iq_handler(ejabberd_sm, Host,
?NS_RECENT_CONTACT, ?MODULE, get_recent_contact, IQDisc),
gen_iq_handler:add_iq_handler(ejabberd_local, Host,
?NS_RECENT_CONTACT, ?MODULE, get_recent_contact, IQDisc),
gen_iq_handler:add_iq_handler(ejabberd_sm, Host,
?NS_BLOCK, ?MODULE, handle_block_user, IQDisc),
gen_iq_handler:add_iq_handler(ejabberd_local, Host,
?NS_BLOCK, ?MODULE, handle_block_user, IQDisc),
TRef = erlang:start_timer(7200 * 1000, self(),update),
{ok, #state{host = Host, timer = TRef }}.
terminate(_Reason, #state{host = Host,timer = Timer}) ->
gen_iq_handler:remove_iq_handler(ejabberd_local, Host,
?NS_RECENT_CONTACT),
gen_iq_handler:remove_iq_handler(ejabberd_sm, Host,
?NS_RECENT_CONTACT),
gen_iq_handler:remove_iq_handler(ejabberd_local, Host,
?NS_BLOCK),
gen_iq_handler:remove_iq_handler(ejabberd_sm, Host,
?NS_BLOCK),
{ok, cancel} = timer:cancel(Timer).
handle_call(stop, _From, State) ->
{stop, normal, ok, State};
handle_call(_Request, _From, State) ->
{reply, ignored, State}.
handle_cast(_Msg, State) -> {noreply, State}.
handle_info({timeout, _TRef, update}, #state{host = Server}) ->
init_block_user(Server),
TRef = erlang:start_timer(7200 * 1000, self(),update),
{ok,#state{host = Server, timer = TRef }};
handle_info(_Info, State) -> {noreply, State}.
code_change(_OldVsn, State, _Extra) -> {ok, State}.
%%====================================================================
%% gen_mod callbacks
%%====================================================================
start(Host, Opts) ->
Proc = gen_mod:get_module_proc(Host, ?MODULE),
Recent_contact_Spec = {Proc, {?MODULE, start_link, [Host, Opts]},
transient, 2000, worker, [?MODULE]},
supervisor:start_child(?SUPERVISOR, Recent_contact_Spec).
stop(Host) ->
Proc = gen_mod:get_module_proc(Host, ?MODULE),
gen_server:call(Proc, stop),
supervisor:delete_child(?SUPERVISOR, Proc).
init_block_user(Server) ->
LServer = ejabberd_odbc:escape(Server),
ets:delete_all_objects(recent_contact),
ets:delete_all_objects(block_user),
case catch ejabberd_odbc:sql_query(LServer,[<<"select username,blockuser from user_block_list">>]) of
{selected, [<<"username">>,<<"blockuser">>], SRes} when is_list(SRes) ->
lists:foreach(fun([U,B]) ->
ets:insert(block_user,#block_user{user = U, block_user = B}) end,SRes);
_ ->
ok
end.
get_recent_contact(From, _To, #iq{type = Type, sub_el = SubEl} = IQ) ->
case {Type, SubEl} of
{get, #xmlel{name = <<"recent_contact">>}} ->
IQ#iq{type = result, sub_el = [get_recent_contact_users(From)]};
_ ->
IQ#iq{type = error,
sub_el = [SubEl, ?ERR_FEATURE_NOT_IMPLEMENTED]}
end.
get_recent_contact_users(From) ->
User = From#jid.user,
LServer = jlib:nameprep(From#jid.server),
Rusers = handle_recent_contact(LServer,User),
Fusers = list_to_binary(spell_user(Rusers)),
#xmlel{name = <<"recent_contact">>,
attrs = [{<<"xmlns">>,?NS_RECENT_CONTACT},
{<<"users">>,Fusers}],
children = []}.
handle_recent_contact(Server,User) ->
case catch ets:lookup(recent_contact,User) of
[] ->
get_recent_contact_user(Server,User);
[Ru] when is_record(Ru,recent_contact) ->
#recent_contact.recent_contact_list;
_ ->
[]
end.
get_recent_contact_user(Server,User) ->
Cuser =
case catch odbc_queries:get_concats(Server,User) of
{selected, [<<"u">>], SRes} when is_list(SRes) ->
lists:usort(lists:concat(SRes));
_ ->
[]
end,
Buser =
case catch ets:select(block_user,[{#block_user{user = User,block_user = '$1', _ = '_'},[], ['$1']}]) of
L when is_list(L) ->
L;
_ ->
[]
end,
Fuser =
lists:foldl(fun(U,Acc) ->
case lists:member(U,Acc) of
true ->
lists:delete(U,Acc);
_ ->
Acc
end end,Cuser,Buser),
ets:insert(recent_contact,#recent_contact{user = User ,recent_contact_list = Fuser}),
Fuser.
spell_user(Users) ->
lists:foldl(fun(U,Acc) ->
case Acc of
[] ->
[U];
_ ->
lists:concat([Acc,[<<",">>,U]])
end end,[],Users).
handle_block_user(From, _To, #iq{type = Type, sub_el = SubEl} = IQ) ->
case {Type, SubEl} of
{get, #xmlel{name = <<"block_user">>}} ->
IQ#iq{type = result, sub_el = [get_block_user(From,SubEl)]};
{set, #xmlel{name = <<"block_user">>}} ->
IQ#iq{type = result, sub_el = [set_block_user(From,SubEl)]};
{set, #xmlel{name = <<"cancel_block_user">>}} ->
IQ#iq{type = result, sub_el = [cancel_block_user(From,SubEl)]};
_ ->
IQ#iq{type = error,
sub_el = [SubEl, ?ERR_FEATURE_NOT_IMPLEMENTED]}
end.
get_block_user(From,_El) ->
User = From#jid.user,
Buser =
case catch ets:select(block_user,[{#block_user{user = User,block_user = '$1', _ = '_'},[], ['$1']}]) of
L when is_list(L) ->
L;
_ ->
[]
end,
#xmlel{name = <<"block_user">>,
attrs = [{<<"xmlns">>,?NS_BLOCK},
{<<"users">>,list_to_binary(spell_user(Buser))}],
children = []}.
set_block_user(From,El) ->
User = From#jid.user,
LServer = jlib:nameprep(From#jid.server),
Res =
case catch xml:get_tag_attr_s(<<"jid">>,El) of
<<"">> ->
<<"Sucess">>;
J ->
case catch odbc_queries:set_block_user(LServer,User,J) of
{updated,1} ->
catch ets:insert(block_user,#block_user{user = User, block_user = J}),
catch ets:delete(recent_contact,User),
<<"Sucess">>;
{error,Reason} ->
case proplists:get_value(code,Reason) of
<<"23505">> ->
catch ets:insert(block_user,#block_user{user = User, block_user = J}),
catch ets:delete(recent_contact,User),
<<"Sucess">>;
_ ->
?DEBUG("Reson is ~p ~n",[Reason]),
<<"Failed">>
end
end
end,
#xmlel{name = <<"block_user">>,
attrs = [{<<"xmlns">>,?NS_BLOCK},
{<<"result">>,Res}],
children = []}.
cancel_block_user(From,El) ->
User = From#jid.user,
LServer = jlib:nameprep(From#jid.server),
Res =
case catch xml:get_tag_attr_s(<<"jid">>,El) of
<<"">> ->
<<"Sucess">>;
J ->
case catch ejabberd_odbc:sql_query(LServer,
[<<"delete from user_block_list where username = '">>,User,<<"' and blockuser = '">>,J,<<"';">>]) of
{updated,1} ->
catch ets:delete_object(block_user,#block_user{user = User,block_user = J}),
catch ets:delete(recent_contact,User),
<<"Sucess">>;
_ ->
<<"Failed">>
end
end,
#xmlel{name = <<"cancel_block_user">>,
attrs = [{<<"xmlns">>,?NS_BLOCK},
{<<"result">>,Res}],
children = []}.
| null | https://raw.githubusercontent.com/may-liu/qtalk/f5431e5a7123975e9656e7ab239e674ce33713cd/qtalk_opensource/src/mod_recent_contact.erl | erlang | ----------------------------------------------------------------------
File : mod_ping.erl
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
----------------------------------------------------------------------
gen_mod callbacks
Hook callbacks
=================================
=================================
====================================================================
gen_mod callbacks
==================================================================== | Author : < >
Purpose : Support XEP-0199 XMPP Ping and periodic keepalives
Created : 11 Jul 2009 by < >
ejabberd , Copyright ( C ) 2002 - 2014 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
-module(mod_recent_contact).
-behaviour(gen_mod).
-include("ejabberd.hrl").
-include("logger.hrl").
-include("jlib.hrl").
-define(SUPERVISOR, ejabberd_sup).
-behaviour(gen_server).
-export([start/2, stop/1,start_link/2]).
-export([init/1,handle_call/3, handle_cast/2,
handle_info/2, terminate/2, code_change/3]).
-export([get_recent_contact/3,
handle_block_user/3]).
-record(recent_contact,{user,recent_contact_list}).
-record(block_user,{user,block_user}).
-record(state,{host,timer}).
gen_svrver callbacks
start_link(Host, Opts) ->
Proc = gen_mod:get_module_proc(Host, ?MODULE),
gen_server:start_link({local, Proc}, ?MODULE,[Host, Opts], []).
init([Host, Opts]) ->
IQDisc = gen_mod:get_opt(iqdisc, Opts, fun gen_iq_handler:check_type/1,
one_queue),
catch ets:new(recent_contact, [named_table, ordered_set, public, {keypos,#recent_contact.user}]),
catch ets:new(block_user, [named_table, bag, public,{keypos, #block_user.user}]),
init_block_user(Host),
gen_iq_handler:add_iq_handler(ejabberd_sm, Host,
?NS_RECENT_CONTACT, ?MODULE, get_recent_contact, IQDisc),
gen_iq_handler:add_iq_handler(ejabberd_local, Host,
?NS_RECENT_CONTACT, ?MODULE, get_recent_contact, IQDisc),
gen_iq_handler:add_iq_handler(ejabberd_sm, Host,
?NS_BLOCK, ?MODULE, handle_block_user, IQDisc),
gen_iq_handler:add_iq_handler(ejabberd_local, Host,
?NS_BLOCK, ?MODULE, handle_block_user, IQDisc),
TRef = erlang:start_timer(7200 * 1000, self(),update),
{ok, #state{host = Host, timer = TRef }}.
terminate(_Reason, #state{host = Host,timer = Timer}) ->
gen_iq_handler:remove_iq_handler(ejabberd_local, Host,
?NS_RECENT_CONTACT),
gen_iq_handler:remove_iq_handler(ejabberd_sm, Host,
?NS_RECENT_CONTACT),
gen_iq_handler:remove_iq_handler(ejabberd_local, Host,
?NS_BLOCK),
gen_iq_handler:remove_iq_handler(ejabberd_sm, Host,
?NS_BLOCK),
{ok, cancel} = timer:cancel(Timer).
handle_call(stop, _From, State) ->
{stop, normal, ok, State};
handle_call(_Request, _From, State) ->
{reply, ignored, State}.
handle_cast(_Msg, State) -> {noreply, State}.
handle_info({timeout, _TRef, update}, #state{host = Server}) ->
init_block_user(Server),
TRef = erlang:start_timer(7200 * 1000, self(),update),
{ok,#state{host = Server, timer = TRef }};
handle_info(_Info, State) -> {noreply, State}.
code_change(_OldVsn, State, _Extra) -> {ok, State}.
start(Host, Opts) ->
Proc = gen_mod:get_module_proc(Host, ?MODULE),
Recent_contact_Spec = {Proc, {?MODULE, start_link, [Host, Opts]},
transient, 2000, worker, [?MODULE]},
supervisor:start_child(?SUPERVISOR, Recent_contact_Spec).
stop(Host) ->
Proc = gen_mod:get_module_proc(Host, ?MODULE),
gen_server:call(Proc, stop),
supervisor:delete_child(?SUPERVISOR, Proc).
init_block_user(Server) ->
LServer = ejabberd_odbc:escape(Server),
ets:delete_all_objects(recent_contact),
ets:delete_all_objects(block_user),
case catch ejabberd_odbc:sql_query(LServer,[<<"select username,blockuser from user_block_list">>]) of
{selected, [<<"username">>,<<"blockuser">>], SRes} when is_list(SRes) ->
lists:foreach(fun([U,B]) ->
ets:insert(block_user,#block_user{user = U, block_user = B}) end,SRes);
_ ->
ok
end.
get_recent_contact(From, _To, #iq{type = Type, sub_el = SubEl} = IQ) ->
case {Type, SubEl} of
{get, #xmlel{name = <<"recent_contact">>}} ->
IQ#iq{type = result, sub_el = [get_recent_contact_users(From)]};
_ ->
IQ#iq{type = error,
sub_el = [SubEl, ?ERR_FEATURE_NOT_IMPLEMENTED]}
end.
get_recent_contact_users(From) ->
User = From#jid.user,
LServer = jlib:nameprep(From#jid.server),
Rusers = handle_recent_contact(LServer,User),
Fusers = list_to_binary(spell_user(Rusers)),
#xmlel{name = <<"recent_contact">>,
attrs = [{<<"xmlns">>,?NS_RECENT_CONTACT},
{<<"users">>,Fusers}],
children = []}.
handle_recent_contact(Server,User) ->
case catch ets:lookup(recent_contact,User) of
[] ->
get_recent_contact_user(Server,User);
[Ru] when is_record(Ru,recent_contact) ->
#recent_contact.recent_contact_list;
_ ->
[]
end.
get_recent_contact_user(Server,User) ->
Cuser =
case catch odbc_queries:get_concats(Server,User) of
{selected, [<<"u">>], SRes} when is_list(SRes) ->
lists:usort(lists:concat(SRes));
_ ->
[]
end,
Buser =
case catch ets:select(block_user,[{#block_user{user = User,block_user = '$1', _ = '_'},[], ['$1']}]) of
L when is_list(L) ->
L;
_ ->
[]
end,
Fuser =
lists:foldl(fun(U,Acc) ->
case lists:member(U,Acc) of
true ->
lists:delete(U,Acc);
_ ->
Acc
end end,Cuser,Buser),
ets:insert(recent_contact,#recent_contact{user = User ,recent_contact_list = Fuser}),
Fuser.
spell_user(Users) ->
lists:foldl(fun(U,Acc) ->
case Acc of
[] ->
[U];
_ ->
lists:concat([Acc,[<<",">>,U]])
end end,[],Users).
handle_block_user(From, _To, #iq{type = Type, sub_el = SubEl} = IQ) ->
case {Type, SubEl} of
{get, #xmlel{name = <<"block_user">>}} ->
IQ#iq{type = result, sub_el = [get_block_user(From,SubEl)]};
{set, #xmlel{name = <<"block_user">>}} ->
IQ#iq{type = result, sub_el = [set_block_user(From,SubEl)]};
{set, #xmlel{name = <<"cancel_block_user">>}} ->
IQ#iq{type = result, sub_el = [cancel_block_user(From,SubEl)]};
_ ->
IQ#iq{type = error,
sub_el = [SubEl, ?ERR_FEATURE_NOT_IMPLEMENTED]}
end.
get_block_user(From,_El) ->
User = From#jid.user,
Buser =
case catch ets:select(block_user,[{#block_user{user = User,block_user = '$1', _ = '_'},[], ['$1']}]) of
L when is_list(L) ->
L;
_ ->
[]
end,
#xmlel{name = <<"block_user">>,
attrs = [{<<"xmlns">>,?NS_BLOCK},
{<<"users">>,list_to_binary(spell_user(Buser))}],
children = []}.
set_block_user(From,El) ->
User = From#jid.user,
LServer = jlib:nameprep(From#jid.server),
Res =
case catch xml:get_tag_attr_s(<<"jid">>,El) of
<<"">> ->
<<"Sucess">>;
J ->
case catch odbc_queries:set_block_user(LServer,User,J) of
{updated,1} ->
catch ets:insert(block_user,#block_user{user = User, block_user = J}),
catch ets:delete(recent_contact,User),
<<"Sucess">>;
{error,Reason} ->
case proplists:get_value(code,Reason) of
<<"23505">> ->
catch ets:insert(block_user,#block_user{user = User, block_user = J}),
catch ets:delete(recent_contact,User),
<<"Sucess">>;
_ ->
?DEBUG("Reson is ~p ~n",[Reason]),
<<"Failed">>
end
end
end,
#xmlel{name = <<"block_user">>,
attrs = [{<<"xmlns">>,?NS_BLOCK},
{<<"result">>,Res}],
children = []}.
cancel_block_user(From,El) ->
User = From#jid.user,
LServer = jlib:nameprep(From#jid.server),
Res =
case catch xml:get_tag_attr_s(<<"jid">>,El) of
<<"">> ->
<<"Sucess">>;
J ->
case catch ejabberd_odbc:sql_query(LServer,
[<<"delete from user_block_list where username = '">>,User,<<"' and blockuser = '">>,J,<<"';">>]) of
{updated,1} ->
catch ets:delete_object(block_user,#block_user{user = User,block_user = J}),
catch ets:delete(recent_contact,User),
<<"Sucess">>;
_ ->
<<"Failed">>
end
end,
#xmlel{name = <<"cancel_block_user">>,
attrs = [{<<"xmlns">>,?NS_BLOCK},
{<<"result">>,Res}],
children = []}.
|
91392a46645db11be76bbdf0187063c3c5e37fbe1be698dd21693de4dd5e4203 | esl/erlang-web | e_cache.erl | The contents of this file are subject to the Erlang Web Public License ,
Version 1.0 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
Erlang Web Public License along with this software . If not , it can be
%% retrieved via the world wide web at -consulting.com/.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
The Initial Developer of the Original Code is Erlang Training & Consulting
Ltd. Portions created by Erlang Training & Consulting Ltd are Copyright 2009 ,
Erlang Training & Consulting Ltd. All Rights Reserved .
%%%-------------------------------------------------------------------
%%% File : e_cache.erl
@author < >
%%% @doc Interface module for managing both:
%%% <ul>
%%% <li>XML parsed structures</li>
%%% <li>Generic frontend content</li>
%%% </ul>
%%% The XML parsed structure cache implementation could be changed
%%% in the <i>project.conf</i> file, by placing a proper parameter:
< i>{template_cache , < b > Type</b>}</i > where < b > Type</b > is either
%%% <i>disk</i> or <i>ets</i> (by default it is set to <i>ets</i>).
%%% @end
%%%-------------------------------------------------------------------
-module(e_cache).
-export([read_file/1, install/0]).
%%
read_file(Filename : : string ( ) ) - > XmlStructure : : term ( )
@doc Returns the content of the file parsed by the chosen XML parser .
%% If the file content is not found in the cache, the file is read and
%% put there.
%% @end
%%
-spec(read_file/1 :: (string()) -> term()).
read_file(Filename) ->
e_logger:log({?MODULE, {reading_file, Filename}}),
Mod = e_conf:template_cache_mod(),
Mod:read_file(Filename, e_conf:template_expander()).
%% @hidden
-spec(install/0 :: () -> none()).
install() ->
case e_conf:template_cache_mod() of
e_cache_ets ->
e_cache_ets:install();
_ ->
ok
end.
| null | https://raw.githubusercontent.com/esl/erlang-web/2e5c2c9725465fc5b522250c305a9d553b3b8243/lib/eptic-1.4.1/src/e_cache.erl | erlang | compliance with the License. You should have received a copy of the
retrieved via the world wide web at -consulting.com/.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-------------------------------------------------------------------
File : e_cache.erl
@doc Interface module for managing both:
<ul>
<li>XML parsed structures</li>
<li>Generic frontend content</li>
</ul>
The XML parsed structure cache implementation could be changed
in the <i>project.conf</i> file, by placing a proper parameter:
<i>disk</i> or <i>ets</i> (by default it is set to <i>ets</i>).
@end
-------------------------------------------------------------------
If the file content is not found in the cache, the file is read and
put there.
@end
@hidden | The contents of this file are subject to the Erlang Web Public License ,
Version 1.0 , ( the " License " ) ; you may not use this file except in
Erlang Web Public License along with this software . If not , it can be
Software distributed under the License is distributed on an " AS IS "
The Initial Developer of the Original Code is Erlang Training & Consulting
Ltd. Portions created by Erlang Training & Consulting Ltd are Copyright 2009 ,
Erlang Training & Consulting Ltd. All Rights Reserved .
@author < >
< i>{template_cache , < b > Type</b>}</i > where < b > Type</b > is either
-module(e_cache).
-export([read_file/1, install/0]).
read_file(Filename : : string ( ) ) - > XmlStructure : : term ( )
@doc Returns the content of the file parsed by the chosen XML parser .
-spec(read_file/1 :: (string()) -> term()).
read_file(Filename) ->
e_logger:log({?MODULE, {reading_file, Filename}}),
Mod = e_conf:template_cache_mod(),
Mod:read_file(Filename, e_conf:template_expander()).
-spec(install/0 :: () -> none()).
install() ->
case e_conf:template_cache_mod() of
e_cache_ets ->
e_cache_ets:install();
_ ->
ok
end.
|
bfe2c3ad586979aa1c88ebbf7b2a8c829eaab71cf68d65eaf380b37139715bbb | geophf/1HaskellADay | Solution.hs | module Y2020.M08.D27.Solution where
-
OKAY ! now we can start these documents into word - vectors .
But ... wait .
What , exactly , is a word ? I do n't have a good answer to that quesstion , because
why ? because some characters are word - parts , and some are n't , and given UTF-8
and documents that use words from multiple languages , answering this question
may require some analysis on its own .
Today 's # haskell problem .
Take a document , and analyze its characters . What are the characters ? What
are the character - counts ?
That 's one layer of analysis .
Now , do the same thing , but break up the document into words .
What are the characters of words ? Are there any surprises ? Does the function
` words ` need to be rewritten to accommodate words with special characters ?
( Or , do we need to eliminate or modify special characters to separate words
in inadvertently compound words ? Or , on the other hand , join word - parts
inadvertently separated by special characters ? )
-
OKAY! now we can start vectorizing these documents into word-vectors.
But ... wait.
What, exactly, is a word? I don't have a good answer to that quesstion, because
why? because some characters are word-parts, and some aren't, and given UTF-8
and documents that use words from multiple languages, answering this question
may require some analysis on its own.
Today's #haskell problem.
Take a document, and analyze its characters. What are the characters? What
are the character-counts?
That's one layer of analysis.
Now, do the same thing, but break up the document into words.
What are the characters of words? Are there any surprises? Does the function
`words` need to be rewritten to accommodate words with special characters?
(Or, do we need to eliminate or modify special characters to separate words
in inadvertently compound words? Or, on the other hand, join word-parts
inadvertently separated by special characters?)
--}
import Data.Char (isAlpha)
import Data.Monoid (getSum)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Y2020.M08.D25.Solution (gutenbergIndex, workingDir, gutenbergTop100Index)
import Y2020.M08.D26.Solution (importBook, Text)
import Data.Bag
type CharFreq = Bag Char
charCount :: Text -> CharFreq
charCount = fromList -- wowzorx. That was hard. ... NOT!
-
> > > let the_index = ( workingDir + + gutenbergTop100Index )
> > > let xmas_info = head . Map.toList < $ > the_index
> > > xmas_info
( " A Christmas Carol in ; Being a Ghost Story of Christmas by ( 403 ) " ,
" " )
> > > let xmas_carol = importBook = < < xmas_info
> > > length < $ > xmas_carol
182057
> > > Map.map getSum . charCount < $ > xmas_carol
{ ( ' \n',4238),('\r',4238 ) , ( ' ' , 28438),('!',411),('"',1388),('#',1),('$',2 ) ,
( ' % ' , 1),('\'',456),('(',37),(')',37),('*',28),(',',2906),('-',476),(' . ',1640 ) ,
( ' /',24),('0',20),('1',60),('2',10),('3',13),('4',17),('5',12),('6',11 ) ,
( ' 7',6),('8',14),('9',9),(':',98),(';',370),('?',159),('@',2),('A',270 ) ,
( ' ) , ... }
Now find the ' weird words ' in the book 's text . What are ' weird words ' ?
' weird words ' are words that you did n't expect would be words but are
words , and they are weirding you out . Words like " \138\016\054 " ... and
stuff .
-
>>> let the_index = gutenbergIndex (workingDir ++ gutenbergTop100Index)
>>> let xmas_info = head . Map.toList <$> the_index
>>> xmas_info
("A Christmas Carol in Prose; Being a Ghost Story of Christmas by Charles Dickens (403)",
"")
>>> let xmas_carol = importBook =<< xmas_info
>>> length <$> xmas_carol
182057
>>> Map.map getSum . charCount <$> xmas_carol
{('\n',4238),('\r',4238),(' ',28438),('!',411),('"',1388),('#',1),('$',2),
('%',1),('\'',456),('(',37),(')',37),('*',28),(',',2906),('-',476),('.',1640),
('/',24),('0',20),('1',60),('2',10),('3',13),('4',17),('5',12),('6',11),
('7',6),('8',14),('9',9),(':',98),(';',370),('?',159),('@',2),('A',270),
('B',199),('C',235),('D',108),('E',187),('F',135),('G',254),('H',261),...}
Now find the 'weird words' in the book's text. What are 'weird words'?
'weird words' are words that you didn't expect would be words but are
words, and they are weirding you out. Words like "\138\016\054" ... and
stuff.
--}
weirdWords :: Text -> Set String
weirdWords = Set.fromList
. filter (any (not . isAlpha))
. words
-
> > > let weirds = weirdWords < $ > xmas_carol
> > > weirds
{ " " , ... }
> > > length < $ > weirds
3759
So : what are the weird characters in the weird words ?
-
>>> let weirds = weirdWords <$> xmas_carol
>>> weirds
{"\"'And","\"A","\"Ah!\"","\"All","\"Always","\"Am","\"An","\"And",...}
>>> length <$> weirds
3759
So: what are the weird characters in the weird words?
--}
weirdChars :: Set String -> Set Char
weirdChars = foldl fn Set.empty
where fn set0 = Set.union set0 . Set.fromList . weirdCharsInWord
weirdCharsInWord = filter (not . isAlpha)
{--
>>> weirdChars <$> weirds
{"!\"#$%'()*,-./0123456789:;?@[]\182\187\191"}
--}
with this analysis , we 'll be able to write a word - vectorizer , ... tomorrow .
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2020/M08/D27/Solution.hs | haskell | }
wowzorx. That was hard. ... NOT!
}
}
-
>>> weirdChars <$> weirds
{"!\"#$%'()*,-./0123456789:;?@[]\182\187\191"}
- | module Y2020.M08.D27.Solution where
-
OKAY ! now we can start these documents into word - vectors .
But ... wait .
What , exactly , is a word ? I do n't have a good answer to that quesstion , because
why ? because some characters are word - parts , and some are n't , and given UTF-8
and documents that use words from multiple languages , answering this question
may require some analysis on its own .
Today 's # haskell problem .
Take a document , and analyze its characters . What are the characters ? What
are the character - counts ?
That 's one layer of analysis .
Now , do the same thing , but break up the document into words .
What are the characters of words ? Are there any surprises ? Does the function
` words ` need to be rewritten to accommodate words with special characters ?
( Or , do we need to eliminate or modify special characters to separate words
in inadvertently compound words ? Or , on the other hand , join word - parts
inadvertently separated by special characters ? )
-
OKAY! now we can start vectorizing these documents into word-vectors.
But ... wait.
What, exactly, is a word? I don't have a good answer to that quesstion, because
why? because some characters are word-parts, and some aren't, and given UTF-8
and documents that use words from multiple languages, answering this question
may require some analysis on its own.
Today's #haskell problem.
Take a document, and analyze its characters. What are the characters? What
are the character-counts?
That's one layer of analysis.
Now, do the same thing, but break up the document into words.
What are the characters of words? Are there any surprises? Does the function
`words` need to be rewritten to accommodate words with special characters?
(Or, do we need to eliminate or modify special characters to separate words
in inadvertently compound words? Or, on the other hand, join word-parts
inadvertently separated by special characters?)
import Data.Char (isAlpha)
import Data.Monoid (getSum)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Y2020.M08.D25.Solution (gutenbergIndex, workingDir, gutenbergTop100Index)
import Y2020.M08.D26.Solution (importBook, Text)
import Data.Bag
type CharFreq = Bag Char
charCount :: Text -> CharFreq
-
> > > let the_index = ( workingDir + + gutenbergTop100Index )
> > > let xmas_info = head . Map.toList < $ > the_index
> > > xmas_info
( " A Christmas Carol in ; Being a Ghost Story of Christmas by ( 403 ) " ,
" " )
> > > let xmas_carol = importBook = < < xmas_info
> > > length < $ > xmas_carol
182057
> > > Map.map getSum . charCount < $ > xmas_carol
{ ( ' \n',4238),('\r',4238 ) , ( ' ' , 28438),('!',411),('"',1388),('#',1),('$',2 ) ,
( ' % ' , 1),('\'',456),('(',37),(')',37),('*',28),(',',2906),('-',476),(' . ',1640 ) ,
( ' /',24),('0',20),('1',60),('2',10),('3',13),('4',17),('5',12),('6',11 ) ,
( ' 7',6),('8',14),('9',9),(':',98),(';',370),('?',159),('@',2),('A',270 ) ,
( ' ) , ... }
Now find the ' weird words ' in the book 's text . What are ' weird words ' ?
' weird words ' are words that you did n't expect would be words but are
words , and they are weirding you out . Words like " \138\016\054 " ... and
stuff .
-
>>> let the_index = gutenbergIndex (workingDir ++ gutenbergTop100Index)
>>> let xmas_info = head . Map.toList <$> the_index
>>> xmas_info
("A Christmas Carol in Prose; Being a Ghost Story of Christmas by Charles Dickens (403)",
"")
>>> let xmas_carol = importBook =<< xmas_info
>>> length <$> xmas_carol
182057
>>> Map.map getSum . charCount <$> xmas_carol
{('\n',4238),('\r',4238),(' ',28438),('!',411),('"',1388),('#',1),('$',2),
('%',1),('\'',456),('(',37),(')',37),('*',28),(',',2906),('-',476),('.',1640),
('/',24),('0',20),('1',60),('2',10),('3',13),('4',17),('5',12),('6',11),
('7',6),('8',14),('9',9),(':',98),(';',370),('?',159),('@',2),('A',270),
('B',199),('C',235),('D',108),('E',187),('F',135),('G',254),('H',261),...}
Now find the 'weird words' in the book's text. What are 'weird words'?
'weird words' are words that you didn't expect would be words but are
words, and they are weirding you out. Words like "\138\016\054" ... and
stuff.
weirdWords :: Text -> Set String
weirdWords = Set.fromList
. filter (any (not . isAlpha))
. words
-
> > > let weirds = weirdWords < $ > xmas_carol
> > > weirds
{ " " , ... }
> > > length < $ > weirds
3759
So : what are the weird characters in the weird words ?
-
>>> let weirds = weirdWords <$> xmas_carol
>>> weirds
{"\"'And","\"A","\"Ah!\"","\"All","\"Always","\"Am","\"An","\"And",...}
>>> length <$> weirds
3759
So: what are the weird characters in the weird words?
weirdChars :: Set String -> Set Char
weirdChars = foldl fn Set.empty
where fn set0 = Set.union set0 . Set.fromList . weirdCharsInWord
weirdCharsInWord = filter (not . isAlpha)
with this analysis , we 'll be able to write a word - vectorizer , ... tomorrow .
|
93f19e41fb33029fe4ae11d69c6b7975c2a48a9fb15604446936217b3df134ea | danieljharvey/mimsa | Solve.hs | # LANGUAGE BlockArguments #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
module Language.Mimsa.Typechecker.Solve (solve, runSolveM, SolveM) where
import Control.Monad.Except
import Control.Monad.State
import Language.Mimsa.Typechecker.TcMonad
import Language.Mimsa.Typechecker.Unify
import Language.Mimsa.Types.Error
import Language.Mimsa.Types.Typechecker
import Language.Mimsa.Types.Typechecker.Substitutions
type SolveM = ExceptT TypeError (State TypecheckState)
runSolveM ::
TypecheckState ->
SolveM a ->
Either TypeError (TypecheckState, a)
runSolveM tcState value =
case either' of
(Right a, newTcState) -> Right (newTcState, a)
(Left e, _) -> Left e
where
either' =
runState
(runExceptT value)
tcState
solve ::
( MonadState TypecheckState m,
MonadError TypeError m
) =>
[Constraint] ->
m Substitutions
solve = go mempty
where
go s [] = pure s
go s1 (constraint : rest) =
case constraint of
ShouldEqual a b -> do
s2 <- unify a b
go (s2 <> s1) (applyToConstraint (s1 <> s2) <$> rest)
applyToConstraint :: Substitutions -> Constraint -> Constraint
applyToConstraint subs (ShouldEqual a b) =
ShouldEqual (applySubst subs a) (applySubst subs b)
| null | https://raw.githubusercontent.com/danieljharvey/mimsa/319b18896379c9a1d64f4e636817f8f38a77bbd9/compiler/src/Language/Mimsa/Typechecker/Solve.hs | haskell | # LANGUAGE BlockArguments #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
module Language.Mimsa.Typechecker.Solve (solve, runSolveM, SolveM) where
import Control.Monad.Except
import Control.Monad.State
import Language.Mimsa.Typechecker.TcMonad
import Language.Mimsa.Typechecker.Unify
import Language.Mimsa.Types.Error
import Language.Mimsa.Types.Typechecker
import Language.Mimsa.Types.Typechecker.Substitutions
type SolveM = ExceptT TypeError (State TypecheckState)
runSolveM ::
TypecheckState ->
SolveM a ->
Either TypeError (TypecheckState, a)
runSolveM tcState value =
case either' of
(Right a, newTcState) -> Right (newTcState, a)
(Left e, _) -> Left e
where
either' =
runState
(runExceptT value)
tcState
solve ::
( MonadState TypecheckState m,
MonadError TypeError m
) =>
[Constraint] ->
m Substitutions
solve = go mempty
where
go s [] = pure s
go s1 (constraint : rest) =
case constraint of
ShouldEqual a b -> do
s2 <- unify a b
go (s2 <> s1) (applyToConstraint (s1 <> s2) <$> rest)
applyToConstraint :: Substitutions -> Constraint -> Constraint
applyToConstraint subs (ShouldEqual a b) =
ShouldEqual (applySubst subs a) (applySubst subs b)
| |
56ce93567ae991fe2609ae7062afd261d359e70fe0a2c3c3e458ab7742843493 | jabber-at/ejabberd | node_hometree.erl | %%%----------------------------------------------------------------------
%%% File : node_hometree.erl
Author :
%%% Purpose : Standard tree ordered node plugin
Created : 1 Dec 2007 by
%%%
%%%
ejabberd , Copyright ( C ) 2002 - 2018 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
%%%
%%%----------------------------------------------------------------------
-module(node_hometree).
-behaviour(gen_pubsub_node).
-author('').
-include("pubsub.hrl").
-export([init/3, terminate/2, options/0, features/0,
create_node_permission/6, create_node/2, delete_node/1,
purge_node/2, subscribe_node/8, unsubscribe_node/4,
publish_item/7, delete_item/4, remove_extra_items/3,
get_entity_affiliations/2, get_node_affiliations/1,
get_affiliation/2, set_affiliation/3,
get_entity_subscriptions/2, get_node_subscriptions/1,
get_subscriptions/2, set_subscriptions/4,
get_pending_nodes/2, get_states/1, get_state/2,
set_state/1, get_items/7, get_items/3, get_item/7,
get_last_items/3,
get_item/2, set_item/1, get_item_name/3, node_to_path/1,
path_to_node/1]).
init(Host, ServerHost, Opts) ->
node_flat:init(Host, ServerHost, Opts),
Owner = mod_pubsub:service_jid(Host),
mod_pubsub:create_node(Host, ServerHost, <<"/home">>, Owner, <<"hometree">>),
mod_pubsub:create_node(Host, ServerHost, <<"/home/", ServerHost/binary>>, Owner, <<"hometree">>),
ok.
terminate(Host, ServerHost) ->
node_flat:terminate(Host, ServerHost).
options() ->
node_flat:options().
features() ->
node_flat:features().
%% @doc Checks if the current user has the permission to create the requested node
< p > In hometree node , the permission is decided by the place in the
%% hierarchy where the user is creating the node. The access parameter is also
%% checked. This parameter depends on the value of the
< tt > access_createnode</tt > ACL value in ejabberd config >
%% <p>This function also check that node can be created as a children of its
%% parent node</p>
create_node_permission(Host, ServerHost, Node, _ParentNode, Owner, Access) ->
LOwner = jid:tolower(Owner),
{User, Server, _Resource} = LOwner,
Allowed = case LOwner of
{<<"">>, Host, <<"">>} ->
true; % pubsub service always allowed
_ ->
case acl:match_rule(ServerHost, Access, LOwner) of
allow ->
case node_to_path(Node) of
[<<"home">>, Server, User | _] -> true;
_ -> false
end;
_ -> false
end
end,
{result, Allowed}.
create_node(Nidx, Owner) ->
node_flat:create_node(Nidx, Owner).
delete_node(Nodes) ->
node_flat:delete_node(Nodes).
subscribe_node(Nidx, Sender, Subscriber, AccessModel,
SendLast, PresenceSubscription, RosterGroup, Options) ->
node_flat:subscribe_node(Nidx, Sender, Subscriber,
AccessModel, SendLast, PresenceSubscription,
RosterGroup, Options).
unsubscribe_node(Nidx, Sender, Subscriber, SubId) ->
node_flat:unsubscribe_node(Nidx, Sender, Subscriber, SubId).
publish_item(Nidx, Publisher, Model, MaxItems, ItemId, Payload, PubOpts) ->
node_flat:publish_item(Nidx, Publisher, Model, MaxItems, ItemId,
Payload, PubOpts).
remove_extra_items(Nidx, MaxItems, ItemIds) ->
node_flat:remove_extra_items(Nidx, MaxItems, ItemIds).
delete_item(Nidx, Publisher, PublishModel, ItemId) ->
node_flat:delete_item(Nidx, Publisher, PublishModel, ItemId).
purge_node(Nidx, Owner) ->
node_flat:purge_node(Nidx, Owner).
get_entity_affiliations(Host, Owner) ->
node_flat:get_entity_affiliations(Host, Owner).
get_node_affiliations(Nidx) ->
node_flat:get_node_affiliations(Nidx).
get_affiliation(Nidx, Owner) ->
node_flat:get_affiliation(Nidx, Owner).
set_affiliation(Nidx, Owner, Affiliation) ->
node_flat:set_affiliation(Nidx, Owner, Affiliation).
get_entity_subscriptions(Host, Owner) ->
node_flat:get_entity_subscriptions(Host, Owner).
get_node_subscriptions(Nidx) ->
node_flat:get_node_subscriptions(Nidx).
get_subscriptions(Nidx, Owner) ->
node_flat:get_subscriptions(Nidx, Owner).
set_subscriptions(Nidx, Owner, Subscription, SubId) ->
node_flat:set_subscriptions(Nidx, Owner, Subscription, SubId).
get_pending_nodes(Host, Owner) ->
node_flat:get_pending_nodes(Host, Owner).
get_states(Nidx) ->
node_flat:get_states(Nidx).
get_state(Nidx, JID) ->
node_flat:get_state(Nidx, JID).
set_state(State) ->
node_flat:set_state(State).
get_items(Nidx, From, RSM) ->
node_flat:get_items(Nidx, From, RSM).
get_items(Nidx, JID, AccessModel, PresenceSubscription, RosterGroup, SubId, RSM) ->
node_flat:get_items(Nidx, JID, AccessModel,
PresenceSubscription, RosterGroup, SubId, RSM).
get_last_items(Nidx, From, Count) ->
node_flat:get_last_items(Nidx, From, Count).
get_item(Nidx, ItemId) ->
node_flat:get_item(Nidx, ItemId).
get_item(Nidx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, SubId) ->
node_flat:get_item(Nidx, ItemId, JID, AccessModel,
PresenceSubscription, RosterGroup, SubId).
set_item(Item) ->
node_flat:set_item(Item).
get_item_name(Host, Node, Id) ->
node_flat:get_item_name(Host, Node, Id).
%% @doc <p>Return the path of the node.</p>
node_to_path(Node) ->
str:tokens(Node, <<"/">>).
path_to_node([]) -> <<>>;
path_to_node(Path) -> iolist_to_binary(str:join([<<"">> | Path], <<"/">>)).
| null | https://raw.githubusercontent.com/jabber-at/ejabberd/7bfec36856eaa4df21b26e879d3ba90285bad1aa/src/node_hometree.erl | erlang | ----------------------------------------------------------------------
File : node_hometree.erl
Purpose : Standard tree ordered node plugin
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
----------------------------------------------------------------------
@doc Checks if the current user has the permission to create the requested node
hierarchy where the user is creating the node. The access parameter is also
checked. This parameter depends on the value of the
<p>This function also check that node can be created as a children of its
parent node</p>
pubsub service always allowed
@doc <p>Return the path of the node.</p> | Author :
Created : 1 Dec 2007 by
ejabberd , Copyright ( C ) 2002 - 2018 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
-module(node_hometree).
-behaviour(gen_pubsub_node).
-author('').
-include("pubsub.hrl").
-export([init/3, terminate/2, options/0, features/0,
create_node_permission/6, create_node/2, delete_node/1,
purge_node/2, subscribe_node/8, unsubscribe_node/4,
publish_item/7, delete_item/4, remove_extra_items/3,
get_entity_affiliations/2, get_node_affiliations/1,
get_affiliation/2, set_affiliation/3,
get_entity_subscriptions/2, get_node_subscriptions/1,
get_subscriptions/2, set_subscriptions/4,
get_pending_nodes/2, get_states/1, get_state/2,
set_state/1, get_items/7, get_items/3, get_item/7,
get_last_items/3,
get_item/2, set_item/1, get_item_name/3, node_to_path/1,
path_to_node/1]).
init(Host, ServerHost, Opts) ->
node_flat:init(Host, ServerHost, Opts),
Owner = mod_pubsub:service_jid(Host),
mod_pubsub:create_node(Host, ServerHost, <<"/home">>, Owner, <<"hometree">>),
mod_pubsub:create_node(Host, ServerHost, <<"/home/", ServerHost/binary>>, Owner, <<"hometree">>),
ok.
terminate(Host, ServerHost) ->
node_flat:terminate(Host, ServerHost).
options() ->
node_flat:options().
features() ->
node_flat:features().
< p > In hometree node , the permission is decided by the place in the
< tt > access_createnode</tt > ACL value in ejabberd config >
create_node_permission(Host, ServerHost, Node, _ParentNode, Owner, Access) ->
LOwner = jid:tolower(Owner),
{User, Server, _Resource} = LOwner,
Allowed = case LOwner of
{<<"">>, Host, <<"">>} ->
_ ->
case acl:match_rule(ServerHost, Access, LOwner) of
allow ->
case node_to_path(Node) of
[<<"home">>, Server, User | _] -> true;
_ -> false
end;
_ -> false
end
end,
{result, Allowed}.
create_node(Nidx, Owner) ->
node_flat:create_node(Nidx, Owner).
delete_node(Nodes) ->
node_flat:delete_node(Nodes).
subscribe_node(Nidx, Sender, Subscriber, AccessModel,
SendLast, PresenceSubscription, RosterGroup, Options) ->
node_flat:subscribe_node(Nidx, Sender, Subscriber,
AccessModel, SendLast, PresenceSubscription,
RosterGroup, Options).
unsubscribe_node(Nidx, Sender, Subscriber, SubId) ->
node_flat:unsubscribe_node(Nidx, Sender, Subscriber, SubId).
publish_item(Nidx, Publisher, Model, MaxItems, ItemId, Payload, PubOpts) ->
node_flat:publish_item(Nidx, Publisher, Model, MaxItems, ItemId,
Payload, PubOpts).
remove_extra_items(Nidx, MaxItems, ItemIds) ->
node_flat:remove_extra_items(Nidx, MaxItems, ItemIds).
delete_item(Nidx, Publisher, PublishModel, ItemId) ->
node_flat:delete_item(Nidx, Publisher, PublishModel, ItemId).
purge_node(Nidx, Owner) ->
node_flat:purge_node(Nidx, Owner).
get_entity_affiliations(Host, Owner) ->
node_flat:get_entity_affiliations(Host, Owner).
get_node_affiliations(Nidx) ->
node_flat:get_node_affiliations(Nidx).
get_affiliation(Nidx, Owner) ->
node_flat:get_affiliation(Nidx, Owner).
set_affiliation(Nidx, Owner, Affiliation) ->
node_flat:set_affiliation(Nidx, Owner, Affiliation).
get_entity_subscriptions(Host, Owner) ->
node_flat:get_entity_subscriptions(Host, Owner).
get_node_subscriptions(Nidx) ->
node_flat:get_node_subscriptions(Nidx).
get_subscriptions(Nidx, Owner) ->
node_flat:get_subscriptions(Nidx, Owner).
set_subscriptions(Nidx, Owner, Subscription, SubId) ->
node_flat:set_subscriptions(Nidx, Owner, Subscription, SubId).
get_pending_nodes(Host, Owner) ->
node_flat:get_pending_nodes(Host, Owner).
get_states(Nidx) ->
node_flat:get_states(Nidx).
get_state(Nidx, JID) ->
node_flat:get_state(Nidx, JID).
set_state(State) ->
node_flat:set_state(State).
get_items(Nidx, From, RSM) ->
node_flat:get_items(Nidx, From, RSM).
get_items(Nidx, JID, AccessModel, PresenceSubscription, RosterGroup, SubId, RSM) ->
node_flat:get_items(Nidx, JID, AccessModel,
PresenceSubscription, RosterGroup, SubId, RSM).
get_last_items(Nidx, From, Count) ->
node_flat:get_last_items(Nidx, From, Count).
get_item(Nidx, ItemId) ->
node_flat:get_item(Nidx, ItemId).
get_item(Nidx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, SubId) ->
node_flat:get_item(Nidx, ItemId, JID, AccessModel,
PresenceSubscription, RosterGroup, SubId).
set_item(Item) ->
node_flat:set_item(Item).
get_item_name(Host, Node, Id) ->
node_flat:get_item_name(Host, Node, Id).
node_to_path(Node) ->
str:tokens(Node, <<"/">>).
path_to_node([]) -> <<>>;
path_to_node(Path) -> iolist_to_binary(str:join([<<"">> | Path], <<"/">>)).
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.