_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
ac6368619fec55f191d7fbe58f72d3bf0aa7df306858b10632d8a4ad0ec81a4c | dpiponi/Moodler | negate.hs | do
plane <- currentPlane
(x, y) <- fmap (quantise2 quantum) mouse
panel <- container' "panel_3x1.png" (x, y) (Inside plane)
lab <- label' "negate" (x-25.0, y+75.0) (Inside plane)
parent panel lab
name <- new' "negate"
inp <- plugin' (name ++ ".signal") (x-21, y) (Inside plane)
setColour inp "#sample"
parent panel inp
out <- plugout' (name ++ ".result") (x+20, y) (Inside plane)
setColour out "#sample"
parent panel out
recompile
return ()
| null | https://raw.githubusercontent.com/dpiponi/Moodler/a0c984c36abae52668d00f25eb3749e97e8936d3/Moodler/scripts/negate.hs | haskell | do
plane <- currentPlane
(x, y) <- fmap (quantise2 quantum) mouse
panel <- container' "panel_3x1.png" (x, y) (Inside plane)
lab <- label' "negate" (x-25.0, y+75.0) (Inside plane)
parent panel lab
name <- new' "negate"
inp <- plugin' (name ++ ".signal") (x-21, y) (Inside plane)
setColour inp "#sample"
parent panel inp
out <- plugout' (name ++ ".result") (x+20, y) (Inside plane)
setColour out "#sample"
parent panel out
recompile
return ()
| |
de1612e3c1cec404105428168104690f934cd7e05bd7355176533d5d1c732fbd | janestreet/ppx_accessor | name.ml | open! Base
open! Import
type t = string
let of_string = Fn.id
let to_constructor_string = Fn.id
let to_lowercase_string t =
let string = String.lowercase t in
if Keyword.is_keyword string then string ^ "_" else string
;;
| null | https://raw.githubusercontent.com/janestreet/ppx_accessor/19e3bce0d1b7fccc44826cf1519d053fde3499e1/src/name.ml | ocaml | open! Base
open! Import
type t = string
let of_string = Fn.id
let to_constructor_string = Fn.id
let to_lowercase_string t =
let string = String.lowercase t in
if Keyword.is_keyword string then string ^ "_" else string
;;
| |
8801331241a3814232606dc418496785e07fd8450cd97b0cfb2aae76b694d207 | fetburner/Coq2SML | elim.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Pp
open Util
open Names
open Term
open Termops
open Environ
open Libnames
open Reduction
open Inductiveops
open Proof_type
open Clenv
open Hipattern
open Tacmach
open Tacticals
open Tactics
open Hiddentac
open Genarg
open Tacexpr
let introElimAssumsThen tac ba =
let nassums =
List.fold_left
(fun acc b -> if b then acc+2 else acc+1)
0 ba.branchsign
in
let introElimAssums = tclDO nassums intro in
(tclTHEN introElimAssums (elim_on_ba tac ba))
let introCaseAssumsThen tac ba =
let case_thin_sign =
List.flatten
(List.map (function b -> if b then [false;true] else [false])
ba.branchsign)
in
let n1 = List.length case_thin_sign in
let n2 = List.length ba.branchnames in
let (l1,l2),l3 =
if n1 < n2 then list_chop n1 ba.branchnames, []
else
(ba.branchnames, []),
if n1 > n2 then snd (list_chop n2 case_thin_sign) else [] in
let introCaseAssums =
tclTHEN (intros_pattern no_move l1) (intros_clearing l3) in
(tclTHEN introCaseAssums (case_on_ba (tac l2) ba))
The following tactic Decompose repeatedly applies the
elimination(s ) rule(s ) of the types satisfying the predicate
` ` recognizer '' onto a certain hypothesis . For example :
Require Elim .
Require Le .
Goal ( y : nat){x : | ( le O x)/\(le x y)}->{x : | ( le O x ) } .
Intros y H.
Decompose [ sig and ] H;EAuto .
Qed .
Another example :
Goal ( A , B , C : Prop)(A/\B/\C \/ B/\C \/ C/\A ) - > C.
Intros A B C H ; Decompose [ and or ] H ; Assumption .
Qed .
elimination(s) rule(s) of the types satisfying the predicate
``recognizer'' onto a certain hypothesis. For example :
Require Elim.
Require Le.
Goal (y:nat){x:nat | (le O x)/\(le x y)}->{x:nat | (le O x)}.
Intros y H.
Decompose [sig and] H;EAuto.
Qed.
Another example :
Goal (A,B,C:Prop)(A/\B/\C \/ B/\C \/ C/\A) -> C.
Intros A B C H; Decompose [and or] H; Assumption.
Qed.
*)
let elimHypThen tac id gl =
elimination_then tac ([],[]) (mkVar id) gl
let rec general_decompose_on_hyp recognizer =
ifOnHyp recognizer (general_decompose_aux recognizer) (fun _ -> tclIDTAC)
and general_decompose_aux recognizer id =
elimHypThen
(introElimAssumsThen
(fun bas ->
tclTHEN (clear [id])
(tclMAP (general_decompose_on_hyp recognizer)
(ids_of_named_context bas.assums))))
id
Faudrait ajouter un COMPLETE pour que pas si aucune élimination n'est possible
pas si aucune élimination n'est possible *)
(* Meilleures stratégies mais perte de compatibilité *)
let tmphyp_name = id_of_string "_TmpHyp"
let up_to_delta = ref false (* true *)
let general_decompose recognizer c gl =
let typc = pf_type_of gl c in
tclTHENSV (cut typc)
[| tclTHEN (intro_using tmphyp_name)
(onLastHypId
(ifOnHyp recognizer (general_decompose_aux recognizer)
(fun id -> clear [id])));
exact_no_check c |] gl
let head_in gls indl t =
try
let ity,_ =
if !up_to_delta
then find_mrectype (pf_env gls) (project gls) t
else extract_mrectype t
in List.mem ity indl
with Not_found -> false
let decompose_these c l gls =
List.map inductive_of
general_decompose (fun (_,t) -> head_in gls indl t) c gls
let decompose_nonrec c gls =
general_decompose
(fun (_,t) -> is_non_recursive_type t)
c gls
let decompose_and c gls =
general_decompose
(fun (_,t) -> is_record t)
c gls
let decompose_or c gls =
general_decompose
(fun (_,t) -> is_disjunction t)
c gls
let h_decompose l c =
Refiner.abstract_tactic (TacDecompose (l,c)) (decompose_these c l)
let h_decompose_or c =
Refiner.abstract_tactic (TacDecomposeOr c) (decompose_or c)
let h_decompose_and c =
Refiner.abstract_tactic (TacDecomposeAnd c) (decompose_and c)
(* The tactic Double performs a double induction *)
let simple_elimination c gls =
simple_elimination_then (fun _ -> tclIDTAC) c gls
let induction_trailer abs_i abs_j bargs =
tclTHEN
(tclDO (abs_j - abs_i) intro)
(onLastHypId
(fun id gls ->
let idty = pf_type_of gls (mkVar id) in
let fvty = global_vars (pf_env gls) idty in
let possible_bring_hyps =
(List.tl (nLastDecls (abs_j - abs_i) gls)) @ bargs.assums
in
let (hyps,_) =
List.fold_left
(fun (bring_ids,leave_ids) (cid,_,cidty as d) ->
if not (List.mem cid leave_ids)
then (d::bring_ids,leave_ids)
else (bring_ids,cid::leave_ids))
([],fvty) possible_bring_hyps
in
let ids = List.rev (ids_of_named_context hyps) in
(tclTHENSEQ
[bring_hyps hyps; tclTRY (clear ids);
simple_elimination (mkVar id)])
gls))
let double_ind h1 h2 gls =
let abs_i = depth_of_quantified_hypothesis true h1 gls in
let abs_j = depth_of_quantified_hypothesis true h2 gls in
let (abs_i,abs_j) =
if abs_i < abs_j then (abs_i,abs_j) else
if abs_i > abs_j then (abs_j,abs_i) else
error "Both hypotheses are the same." in
(tclTHEN (tclDO abs_i intro)
(onLastHypId
(fun id ->
elimination_then
(introElimAssumsThen (induction_trailer abs_i abs_j))
([],[]) (mkVar id)))) gls
let h_double_induction h1 h2 =
Refiner.abstract_tactic (TacDoubleInduction (h1,h2)) (double_ind h1 h2)
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/tactics/elim.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
Meilleures stratégies mais perte de compatibilité
true
The tactic Double performs a double induction | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Pp
open Util
open Names
open Term
open Termops
open Environ
open Libnames
open Reduction
open Inductiveops
open Proof_type
open Clenv
open Hipattern
open Tacmach
open Tacticals
open Tactics
open Hiddentac
open Genarg
open Tacexpr
let introElimAssumsThen tac ba =
let nassums =
List.fold_left
(fun acc b -> if b then acc+2 else acc+1)
0 ba.branchsign
in
let introElimAssums = tclDO nassums intro in
(tclTHEN introElimAssums (elim_on_ba tac ba))
let introCaseAssumsThen tac ba =
let case_thin_sign =
List.flatten
(List.map (function b -> if b then [false;true] else [false])
ba.branchsign)
in
let n1 = List.length case_thin_sign in
let n2 = List.length ba.branchnames in
let (l1,l2),l3 =
if n1 < n2 then list_chop n1 ba.branchnames, []
else
(ba.branchnames, []),
if n1 > n2 then snd (list_chop n2 case_thin_sign) else [] in
let introCaseAssums =
tclTHEN (intros_pattern no_move l1) (intros_clearing l3) in
(tclTHEN introCaseAssums (case_on_ba (tac l2) ba))
The following tactic Decompose repeatedly applies the
elimination(s ) rule(s ) of the types satisfying the predicate
` ` recognizer '' onto a certain hypothesis . For example :
Require Elim .
Require Le .
Goal ( y : nat){x : | ( le O x)/\(le x y)}->{x : | ( le O x ) } .
Intros y H.
Decompose [ sig and ] H;EAuto .
Qed .
Another example :
Goal ( A , B , C : Prop)(A/\B/\C \/ B/\C \/ C/\A ) - > C.
Intros A B C H ; Decompose [ and or ] H ; Assumption .
Qed .
elimination(s) rule(s) of the types satisfying the predicate
``recognizer'' onto a certain hypothesis. For example :
Require Elim.
Require Le.
Goal (y:nat){x:nat | (le O x)/\(le x y)}->{x:nat | (le O x)}.
Intros y H.
Decompose [sig and] H;EAuto.
Qed.
Another example :
Goal (A,B,C:Prop)(A/\B/\C \/ B/\C \/ C/\A) -> C.
Intros A B C H; Decompose [and or] H; Assumption.
Qed.
*)
let elimHypThen tac id gl =
elimination_then tac ([],[]) (mkVar id) gl
let rec general_decompose_on_hyp recognizer =
ifOnHyp recognizer (general_decompose_aux recognizer) (fun _ -> tclIDTAC)
and general_decompose_aux recognizer id =
elimHypThen
(introElimAssumsThen
(fun bas ->
tclTHEN (clear [id])
(tclMAP (general_decompose_on_hyp recognizer)
(ids_of_named_context bas.assums))))
id
Faudrait ajouter un COMPLETE pour que pas si aucune élimination n'est possible
pas si aucune élimination n'est possible *)
let tmphyp_name = id_of_string "_TmpHyp"
let general_decompose recognizer c gl =
let typc = pf_type_of gl c in
tclTHENSV (cut typc)
[| tclTHEN (intro_using tmphyp_name)
(onLastHypId
(ifOnHyp recognizer (general_decompose_aux recognizer)
(fun id -> clear [id])));
exact_no_check c |] gl
let head_in gls indl t =
try
let ity,_ =
if !up_to_delta
then find_mrectype (pf_env gls) (project gls) t
else extract_mrectype t
in List.mem ity indl
with Not_found -> false
let decompose_these c l gls =
List.map inductive_of
general_decompose (fun (_,t) -> head_in gls indl t) c gls
let decompose_nonrec c gls =
general_decompose
(fun (_,t) -> is_non_recursive_type t)
c gls
let decompose_and c gls =
general_decompose
(fun (_,t) -> is_record t)
c gls
let decompose_or c gls =
general_decompose
(fun (_,t) -> is_disjunction t)
c gls
let h_decompose l c =
Refiner.abstract_tactic (TacDecompose (l,c)) (decompose_these c l)
let h_decompose_or c =
Refiner.abstract_tactic (TacDecomposeOr c) (decompose_or c)
let h_decompose_and c =
Refiner.abstract_tactic (TacDecomposeAnd c) (decompose_and c)
let simple_elimination c gls =
simple_elimination_then (fun _ -> tclIDTAC) c gls
let induction_trailer abs_i abs_j bargs =
tclTHEN
(tclDO (abs_j - abs_i) intro)
(onLastHypId
(fun id gls ->
let idty = pf_type_of gls (mkVar id) in
let fvty = global_vars (pf_env gls) idty in
let possible_bring_hyps =
(List.tl (nLastDecls (abs_j - abs_i) gls)) @ bargs.assums
in
let (hyps,_) =
List.fold_left
(fun (bring_ids,leave_ids) (cid,_,cidty as d) ->
if not (List.mem cid leave_ids)
then (d::bring_ids,leave_ids)
else (bring_ids,cid::leave_ids))
([],fvty) possible_bring_hyps
in
let ids = List.rev (ids_of_named_context hyps) in
(tclTHENSEQ
[bring_hyps hyps; tclTRY (clear ids);
simple_elimination (mkVar id)])
gls))
let double_ind h1 h2 gls =
let abs_i = depth_of_quantified_hypothesis true h1 gls in
let abs_j = depth_of_quantified_hypothesis true h2 gls in
let (abs_i,abs_j) =
if abs_i < abs_j then (abs_i,abs_j) else
if abs_i > abs_j then (abs_j,abs_i) else
error "Both hypotheses are the same." in
(tclTHEN (tclDO abs_i intro)
(onLastHypId
(fun id ->
elimination_then
(introElimAssumsThen (induction_trailer abs_i abs_j))
([],[]) (mkVar id)))) gls
let h_double_induction h1 h2 =
Refiner.abstract_tactic (TacDoubleInduction (h1,h2)) (double_ind h1 h2)
|
b17efcadffbff47a670b1bd7a1bc774a09d8a6828f206eca00dab46843574517 | softwarelanguageslab/maf | R5RS_ad_prioq-5.scm | ; Changes:
* removed : 0
* added : 3
* swaps : 1
; * negated predicates: 0
; * swapped branches: 0
* calls to i d fun : 4
(letrec ((true #t)
(false #f)
(make-item (lambda (priority element)
(cons priority element)))
(get-priority (lambda (item)
(car item)))
(get-element (lambda (item)
(cdr item)))
(create-priority-queue (lambda ()
(let ((front (cons 'boe ())))
(letrec ((content (lambda ()
(<change>
(cdr front)
((lambda (x) x) (cdr front)))))
(insert-after! (lambda (cell item)
(<change>
(let ((new-cell (cons item ())))
(set-cdr! new-cell (cdr cell))
(set-cdr! cell new-cell))
((lambda (x) x)
(let ((new-cell (cons item ())))
(set-cdr! new-cell (cdr cell))
(set-cdr! cell new-cell))))))
(find-prev-cell (lambda (priority)
(letrec ((find-iter (lambda (rest prev)
(if (null? rest)
prev
(if (> (get-priority (car rest)) priority)
(find-iter (cdr rest) rest)
prev)))))
(find-iter (content) front))))
(empty? (lambda ()
(null? (content))))
(enqueue (lambda (priority element)
(<change>
()
(display (find-prev-cell priority)))
(insert-after! (find-prev-cell priority) (make-item priority element))
true))
(dequeue (lambda ()
(if (null? (content))
false
(let ((temp (car (content))))
(set-cdr! front (cdr (content)))
(get-element temp)))))
(serve (lambda ()
(if (null? (content))
false
(get-element (car (content))))))
(dispatch (lambda (m)
(<change>
(if (eq? m 'empty?)
empty?
(if (eq? m 'enqueue)
enqueue
(if (eq? m 'dequeue)
dequeue
(if (eq? m 'serve)
serve
(error "unknown request
-- create-priority-queue" m)))))
((lambda (x) x)
(if (eq? m 'empty?)
empty?
(if (eq? m 'enqueue)
enqueue
(if (eq? m 'dequeue)
dequeue
(if (eq? m 'serve)
serve
(error "unknown request
-- create-priority-queue" m))))))))))
(<change>
()
(display dispatch))
dispatch))))
(pq (create-priority-queue)))
(<change>
((pq 'enqueue) 66 'Patrick)
((lambda (x) x) ((pq 'enqueue) 66 'Patrick)))
((pq 'enqueue) -106 'Octo)
((pq 'enqueue) 0 'Sandy)
((pq 'enqueue) 89 'Spongebob)
(<change>
()
((pq 'enqueue) -106 'Octo))
(<change>
((pq 'dequeue))
(equal? ((pq 'dequeue)) 'Patrick))
(<change>
(equal? ((pq 'dequeue)) 'Patrick)
((pq 'dequeue)))) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_ad_prioq-5.scm | scheme | Changes:
* negated predicates: 0
* swapped branches: 0 | * removed : 0
* added : 3
* swaps : 1
* calls to i d fun : 4
(letrec ((true #t)
(false #f)
(make-item (lambda (priority element)
(cons priority element)))
(get-priority (lambda (item)
(car item)))
(get-element (lambda (item)
(cdr item)))
(create-priority-queue (lambda ()
(let ((front (cons 'boe ())))
(letrec ((content (lambda ()
(<change>
(cdr front)
((lambda (x) x) (cdr front)))))
(insert-after! (lambda (cell item)
(<change>
(let ((new-cell (cons item ())))
(set-cdr! new-cell (cdr cell))
(set-cdr! cell new-cell))
((lambda (x) x)
(let ((new-cell (cons item ())))
(set-cdr! new-cell (cdr cell))
(set-cdr! cell new-cell))))))
(find-prev-cell (lambda (priority)
(letrec ((find-iter (lambda (rest prev)
(if (null? rest)
prev
(if (> (get-priority (car rest)) priority)
(find-iter (cdr rest) rest)
prev)))))
(find-iter (content) front))))
(empty? (lambda ()
(null? (content))))
(enqueue (lambda (priority element)
(<change>
()
(display (find-prev-cell priority)))
(insert-after! (find-prev-cell priority) (make-item priority element))
true))
(dequeue (lambda ()
(if (null? (content))
false
(let ((temp (car (content))))
(set-cdr! front (cdr (content)))
(get-element temp)))))
(serve (lambda ()
(if (null? (content))
false
(get-element (car (content))))))
(dispatch (lambda (m)
(<change>
(if (eq? m 'empty?)
empty?
(if (eq? m 'enqueue)
enqueue
(if (eq? m 'dequeue)
dequeue
(if (eq? m 'serve)
serve
(error "unknown request
-- create-priority-queue" m)))))
((lambda (x) x)
(if (eq? m 'empty?)
empty?
(if (eq? m 'enqueue)
enqueue
(if (eq? m 'dequeue)
dequeue
(if (eq? m 'serve)
serve
(error "unknown request
-- create-priority-queue" m))))))))))
(<change>
()
(display dispatch))
dispatch))))
(pq (create-priority-queue)))
(<change>
((pq 'enqueue) 66 'Patrick)
((lambda (x) x) ((pq 'enqueue) 66 'Patrick)))
((pq 'enqueue) -106 'Octo)
((pq 'enqueue) 0 'Sandy)
((pq 'enqueue) 89 'Spongebob)
(<change>
()
((pq 'enqueue) -106 'Octo))
(<change>
((pq 'dequeue))
(equal? ((pq 'dequeue)) 'Patrick))
(<change>
(equal? ((pq 'dequeue)) 'Patrick)
((pq 'dequeue)))) |
2ca83a402c6d0fec6a01feea23cea630d0daa767783d35b70c31710a7b04eee7 | input-output-hk/cardano-explorer | BlockPagesTotal.hs | {-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
module Explorer.Web.Api.Legacy.BlockPagesTotal
( blockPagesTotal
) where
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Trans.Reader (ReaderT)
import Data.Maybe (listToMaybe)
import Database.Esqueleto ((^.), countRows, from, select, unValue, where_)
import Database.Persist.Sql (SqlBackend)
import Explorer.DB (EntityField (..), isJust)
import Explorer.Web.Error (ExplorerError (..))
import Explorer.Web.Api.Legacy (PageNumber)
import Explorer.Web.Api.Legacy.Util (divRoundUp, runQuery, toPageSize)
import Explorer.Web.Api.Legacy.Types (PageSize (..))
import Servant (Handler)
blockPagesTotal
:: SqlBackend -> Maybe PageSize
-> Handler (Either ExplorerError PageNumber)
blockPagesTotal backend mPageSize =
runQuery backend $ do
blockCount <- queryMainBlockCount
if | blockCount < 1 -> pure $ Left (Internal "There are currently no block to display.")
| pageSize < 1 -> pure $ Left (Internal "Page size must be greater than 1 if you want to display blocks.")
| otherwise -> pure $ Right $ divRoundUp blockCount pageSize
where
pageSize = unPageSize $ toPageSize mPageSize
queryMainBlockCount :: MonadIO m => ReaderT SqlBackend m Word
queryMainBlockCount = do
res <- select . from $ \ blk -> do
where_ (isJust $ blk ^. BlockBlockNo)
pure countRows
pure $ maybe 0 unValue (listToMaybe res)
| null | https://raw.githubusercontent.com/input-output-hk/cardano-explorer/3abcb32339edac7c2397114a1d170cc76b82e9b6/cardano-explorer-webapi/src/Explorer/Web/Api/Legacy/BlockPagesTotal.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings # | module Explorer.Web.Api.Legacy.BlockPagesTotal
( blockPagesTotal
) where
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Trans.Reader (ReaderT)
import Data.Maybe (listToMaybe)
import Database.Esqueleto ((^.), countRows, from, select, unValue, where_)
import Database.Persist.Sql (SqlBackend)
import Explorer.DB (EntityField (..), isJust)
import Explorer.Web.Error (ExplorerError (..))
import Explorer.Web.Api.Legacy (PageNumber)
import Explorer.Web.Api.Legacy.Util (divRoundUp, runQuery, toPageSize)
import Explorer.Web.Api.Legacy.Types (PageSize (..))
import Servant (Handler)
blockPagesTotal
:: SqlBackend -> Maybe PageSize
-> Handler (Either ExplorerError PageNumber)
blockPagesTotal backend mPageSize =
runQuery backend $ do
blockCount <- queryMainBlockCount
if | blockCount < 1 -> pure $ Left (Internal "There are currently no block to display.")
| pageSize < 1 -> pure $ Left (Internal "Page size must be greater than 1 if you want to display blocks.")
| otherwise -> pure $ Right $ divRoundUp blockCount pageSize
where
pageSize = unPageSize $ toPageSize mPageSize
queryMainBlockCount :: MonadIO m => ReaderT SqlBackend m Word
queryMainBlockCount = do
res <- select . from $ \ blk -> do
where_ (isJust $ blk ^. BlockBlockNo)
pure countRows
pure $ maybe 0 unValue (listToMaybe res)
|
a8a544e20dc13eb8e2a13c533094d93d12566ee84d78930e536b56e64a1729d0 | scravy/simplex | Specials.hs | {-# LANGUAGE Haskell2010 #-}
module Simplex.Specials (
processSpecials,
newSpec, Spec(..)
) where
import Simplex.Config
import Simplex.ConfigData
import Simplex.CmdLineOpts
import Simplex.Parser
import Simplex.Util
import System.Directory
import System.Random
import Data.Maybe
import Data.List
import Data.List.Split
data Spec = Spec {
sRemoveFiles :: [String]
}
newSpec = Spec {
sRemoveFiles = []
}
processSpecials :: Opts -> Spec -> Document -> IO (Spec, Document)
processSpecials o s (Document b m) = do
(s', b') <- processSpecials' o s b
return (s', Document b' m)
processSpecials' :: Opts -> Spec -> [Block] -> IO (Spec, [Block])
processSpecials' opts spec (BVerbatim "digraph" b : xs) = do
(spec', pdf) <- mkGraph "dot" "digraph" opts spec b
(spec'', rest) <- processSpecials' opts spec' xs
return (spec'', (if null pdf
then BVerbatim "error" "Graphviz .digraph failed"
else BCommand "image" [pdf]) : rest)
processSpecials' opts spec (BVerbatim "graph" b : xs) = do
(spec', pdf) <- mkGraph "neato" "graph" opts spec b
(spec'', rest) <- processSpecials' opts spec' xs
return (spec'', (if null pdf
then BVerbatim "error" "Graphviz .graph failed"
else BCommand "image" [pdf]) : rest)
processSpecials' opts spec (BVerbatim "neato" b : xs) = do
(spec', pdf) <- mkGraph "neato" "" opts spec b
(spec'', rest) <- processSpecials' opts spec' xs
return (spec'', (if null pdf
then BVerbatim "error" "Graphviz .neato failed"
else BCommand "image" [pdf]) : rest)
processSpecials' opts spec (BVerbatim "dot" b : xs) = do
(spec', pdf) <- mkGraph "dot" "" opts spec b
(spec'', rest) <- processSpecials' opts spec' xs
return (spec'', (if null pdf
then BVerbatim "error" "Graphviz .dot failed"
else BCommand "image" [pdf]) : rest)
processSpecials' opts spec (x : xs) = do
(spec', rest) <- processSpecials' opts spec xs
return (spec', x : rest)
processSpecials' _ spec [] = return (spec, [])
randomString :: Int -> IO String
randomString 0 = return ""
randomString n = do
char <- getStdRandom (randomR ('a', 'z'))
str <- randomString (n-1)
return $ char : str
mkGraph e g opts spec c = do
file <- randomString 10
let spec' = spec { sRemoveFiles = (file ++ ".pdf") : (file ++ ".dot") : sRemoveFiles spec }
writeFile (file ++ ".dot") (if null g then c else g ++ " G {\n" ++ c ++ "\n}\n")
r <- exec (optVerbose opts) (optGraphviz opts) ["-Tpdf", "-K" ++ e, file ++ ".dot", "-o" ++ file ++ ".pdf"]
return (spec', (either (const "") (const $ file ++ ".pdf") r))
| null | https://raw.githubusercontent.com/scravy/simplex/520e17ab5838c45c73c6a3c1601d4c7a386044b4/src/Simplex/Specials.hs | haskell | # LANGUAGE Haskell2010 # |
module Simplex.Specials (
processSpecials,
newSpec, Spec(..)
) where
import Simplex.Config
import Simplex.ConfigData
import Simplex.CmdLineOpts
import Simplex.Parser
import Simplex.Util
import System.Directory
import System.Random
import Data.Maybe
import Data.List
import Data.List.Split
data Spec = Spec {
sRemoveFiles :: [String]
}
newSpec = Spec {
sRemoveFiles = []
}
processSpecials :: Opts -> Spec -> Document -> IO (Spec, Document)
processSpecials o s (Document b m) = do
(s', b') <- processSpecials' o s b
return (s', Document b' m)
processSpecials' :: Opts -> Spec -> [Block] -> IO (Spec, [Block])
processSpecials' opts spec (BVerbatim "digraph" b : xs) = do
(spec', pdf) <- mkGraph "dot" "digraph" opts spec b
(spec'', rest) <- processSpecials' opts spec' xs
return (spec'', (if null pdf
then BVerbatim "error" "Graphviz .digraph failed"
else BCommand "image" [pdf]) : rest)
processSpecials' opts spec (BVerbatim "graph" b : xs) = do
(spec', pdf) <- mkGraph "neato" "graph" opts spec b
(spec'', rest) <- processSpecials' opts spec' xs
return (spec'', (if null pdf
then BVerbatim "error" "Graphviz .graph failed"
else BCommand "image" [pdf]) : rest)
processSpecials' opts spec (BVerbatim "neato" b : xs) = do
(spec', pdf) <- mkGraph "neato" "" opts spec b
(spec'', rest) <- processSpecials' opts spec' xs
return (spec'', (if null pdf
then BVerbatim "error" "Graphviz .neato failed"
else BCommand "image" [pdf]) : rest)
processSpecials' opts spec (BVerbatim "dot" b : xs) = do
(spec', pdf) <- mkGraph "dot" "" opts spec b
(spec'', rest) <- processSpecials' opts spec' xs
return (spec'', (if null pdf
then BVerbatim "error" "Graphviz .dot failed"
else BCommand "image" [pdf]) : rest)
processSpecials' opts spec (x : xs) = do
(spec', rest) <- processSpecials' opts spec xs
return (spec', x : rest)
processSpecials' _ spec [] = return (spec, [])
randomString :: Int -> IO String
randomString 0 = return ""
randomString n = do
char <- getStdRandom (randomR ('a', 'z'))
str <- randomString (n-1)
return $ char : str
mkGraph e g opts spec c = do
file <- randomString 10
let spec' = spec { sRemoveFiles = (file ++ ".pdf") : (file ++ ".dot") : sRemoveFiles spec }
writeFile (file ++ ".dot") (if null g then c else g ++ " G {\n" ++ c ++ "\n}\n")
r <- exec (optVerbose opts) (optGraphviz opts) ["-Tpdf", "-K" ++ e, file ++ ".dot", "-o" ++ file ++ ".pdf"]
return (spec', (either (const "") (const $ file ++ ".pdf") r))
|
5a546f55c75ed345bcbf9bea62d5f43804c73881b9f70321c7eab18b7fffce29 | vaibhavsagar/experiments | XPathNamespaceScope.hs | module Dotnet.System.Xml.XPath.XPathNamespaceScope where
import Dotnet
import qualified IOExts
import qualified Dotnet.System.Type
import qualified Dotnet.System.Enum
data XPathNamespaceScope_ a
type XPathNamespaceScope a = Dotnet.System.Enum.Enum (XPathNamespaceScope_ a)
data XPathNamespaceScopeTy
= All
| ExcludeXml
| Local
deriving ( Enum, Show, Read )
toXPathNamespaceScope :: XPathNamespaceScopeTy -> XPathNamespaceScope ()
toXPathNamespaceScope tag = IOExts.unsafePerformIO (Dotnet.System.Enum.parse (IOExts.unsafePerformIO (Dotnet.System.Type.getType "System.Xml.XPath.XPathNamespaceScope, System.Xml, Version=1.0.3300.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")) (show tag))
fromXPathNamespaceScope :: XPathNamespaceScope () -> XPathNamespaceScopeTy
fromXPathNamespaceScope obj = IOExts.unsafePerformIO (toString obj >>= return.read)
| null | https://raw.githubusercontent.com/vaibhavsagar/experiments/378d7ba97eabfc7bbeaa4116380369ea6612bfeb/hugs/dotnet/lib/Dotnet/System/Xml/XPath/XPathNamespaceScope.hs | haskell | module Dotnet.System.Xml.XPath.XPathNamespaceScope where
import Dotnet
import qualified IOExts
import qualified Dotnet.System.Type
import qualified Dotnet.System.Enum
data XPathNamespaceScope_ a
type XPathNamespaceScope a = Dotnet.System.Enum.Enum (XPathNamespaceScope_ a)
data XPathNamespaceScopeTy
= All
| ExcludeXml
| Local
deriving ( Enum, Show, Read )
toXPathNamespaceScope :: XPathNamespaceScopeTy -> XPathNamespaceScope ()
toXPathNamespaceScope tag = IOExts.unsafePerformIO (Dotnet.System.Enum.parse (IOExts.unsafePerformIO (Dotnet.System.Type.getType "System.Xml.XPath.XPathNamespaceScope, System.Xml, Version=1.0.3300.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")) (show tag))
fromXPathNamespaceScope :: XPathNamespaceScope () -> XPathNamespaceScopeTy
fromXPathNamespaceScope obj = IOExts.unsafePerformIO (toString obj >>= return.read)
| |
c318d0fb666eab901be79b01b6f6e58bd27ae02cbffabcc1d257c224150f143a | Guest0x0/normalization-bench | gen_random_terms.ml |
let _ =
let _ = Random.self_init () in
let size = int_of_string Sys.argv.(1) in
let env_len = int_of_string Sys.argv.(2) in
let num_terms = int_of_string Sys.argv.(3) in
let target_file = Sys.argv.(4) in
let term_count_file =
open_in_bin (try Sys.argv.(5) with _ -> "data/term_counts")
in
let table : Common.ApproxNum.t array array = input_value term_count_file in
close_in term_count_file;
let max_size = Array.length table in
let max_env_len = Array.length table.(0) in
if size <= 0 then
raise(Invalid_argument "gen_random_terms: non-positive size");
if env_len < 0 then
raise(Invalid_argument "gen_random_terms: negative number of free variables");
if size >= max_size then
raise(Invalid_argument "gen_random_terms: size too large");
if env_len >= max_env_len then
raise(Invalid_argument "gen_random_terms: free variable number too large");
let exception Fail in
let rec gen size env_len =
let open Common.ApproxNum in
let open Common.Syntax in
match size with
| 1 ->
Idx(Random.int env_len)
| _ ->
let count = table.(size).(env_len) in
let p = Random.float 1. in
let nth = p *> count in
let n_lam =
if env_len + 1 < max_env_len
then table.(size - 1).(env_len + 1)
else of_int 0
in
if compare nth n_lam <= 0
then
Lam(gen (size - 1) (env_len + 1))
else if size > 2 then
let rec loop acc i =
let n_func = table.(i ).(env_len) in
let n_arg = table.(size - 1 - i).(env_len) in
let acc' = acc <+> (n_func <*> n_arg) in
if compare nth acc' <= 0
then App( gen i env_len
, gen (size - 1 - i) env_len )
else loop acc' (i + 1)
in
loop n_lam 1
else
raise Fail
in
let out = open_out target_file in
let rec loop i =
if i >= num_terms
then ()
else
let i' =
match gen size env_len with
| tm ->
let buf = Common.Syntax.serialize tm in
Buffer.output_buffer out buf;
i + 1
| exception Fail ->
i
in
loop i'
in
loop 1;
close_out out
| null | https://raw.githubusercontent.com/Guest0x0/normalization-bench/a51e41b4be30fd950f46b489a3ce02bb8face672/gen_random_terms.ml | ocaml |
let _ =
let _ = Random.self_init () in
let size = int_of_string Sys.argv.(1) in
let env_len = int_of_string Sys.argv.(2) in
let num_terms = int_of_string Sys.argv.(3) in
let target_file = Sys.argv.(4) in
let term_count_file =
open_in_bin (try Sys.argv.(5) with _ -> "data/term_counts")
in
let table : Common.ApproxNum.t array array = input_value term_count_file in
close_in term_count_file;
let max_size = Array.length table in
let max_env_len = Array.length table.(0) in
if size <= 0 then
raise(Invalid_argument "gen_random_terms: non-positive size");
if env_len < 0 then
raise(Invalid_argument "gen_random_terms: negative number of free variables");
if size >= max_size then
raise(Invalid_argument "gen_random_terms: size too large");
if env_len >= max_env_len then
raise(Invalid_argument "gen_random_terms: free variable number too large");
let exception Fail in
let rec gen size env_len =
let open Common.ApproxNum in
let open Common.Syntax in
match size with
| 1 ->
Idx(Random.int env_len)
| _ ->
let count = table.(size).(env_len) in
let p = Random.float 1. in
let nth = p *> count in
let n_lam =
if env_len + 1 < max_env_len
then table.(size - 1).(env_len + 1)
else of_int 0
in
if compare nth n_lam <= 0
then
Lam(gen (size - 1) (env_len + 1))
else if size > 2 then
let rec loop acc i =
let n_func = table.(i ).(env_len) in
let n_arg = table.(size - 1 - i).(env_len) in
let acc' = acc <+> (n_func <*> n_arg) in
if compare nth acc' <= 0
then App( gen i env_len
, gen (size - 1 - i) env_len )
else loop acc' (i + 1)
in
loop n_lam 1
else
raise Fail
in
let out = open_out target_file in
let rec loop i =
if i >= num_terms
then ()
else
let i' =
match gen size env_len with
| tm ->
let buf = Common.Syntax.serialize tm in
Buffer.output_buffer out buf;
i + 1
| exception Fail ->
i
in
loop i'
in
loop 1;
close_out out
| |
181dff6a7c90bdbe9ce59556d20d012fc80cf466c71858bc9a01b958282a0bc4 | nikodemus/SBCL | host-c-call.lisp | This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!ALIEN")
(/show0 "host-c-call.lisp 12")
(define-alien-type-class (c-string :include pointer :include-args (to))
(external-format :default :type keyword)
(element-type 'character :type (member character base-char))
(not-null nil :type boolean))
(define-alien-type-translator c-string
(&key (external-format :default)
(element-type 'character)
(not-null nil))
(make-alien-c-string-type
:to (parse-alien-type 'char (sb!kernel:make-null-lexenv))
:element-type element-type
:external-format external-format
:not-null not-null))
(defun c-string-external-format (type)
(let ((external-format (alien-c-string-type-external-format type)))
(if (eq external-format :default)
(default-c-string-external-format)
external-format)))
(define-alien-type-method (c-string :unparse) (type)
(let* ((external-format (alien-c-string-type-external-format type))
(element-type (alien-c-string-type-element-type type))
(not-null (alien-c-string-type-not-null type))
(tail
(append (unless (eq :default external-format)
(list :external-format external-format))
(unless (eq 'character element-type)
(list :element-type element-type))
(when not-null
(list :not-null t)))))
(if tail
(cons 'c-string tail)
'c-string)))
(define-alien-type-method (c-string :lisp-rep) (type)
(let ((possibilities '(simple-string (alien (* char)) (simple-array (unsigned-byte 8)))))
(if (alien-c-string-type-not-null type)
`(or ,@possibilities)
`(or null ,@possibilities))))
(define-alien-type-method (c-string :deport-pin-p) (type)
(declare (ignore type))
t)
(defun c-string-needs-conversion-p (type)
#+sb-xc-host
(declare (ignore type))
#+sb-xc-host
t
#-sb-xc-host
(let ((external-format (sb!impl::get-external-format
;; Can't use C-STRING-EXTERNAL-FORMAT here,
since the meaning of : DEFAULT can change
when * - FORMAT *
;; changes.
(alien-c-string-type-external-format type))))
(not (and external-format
(or (eq (first (sb!impl::ef-names external-format)) :ascii)
On all latin-1 codepoints will fit
into a base - char , on SB - UNICODE they wo n't .
#!-sb-unicode
(eq (first (sb!impl::ef-names external-format)) :latin-1))))))
(declaim (ftype (sfunction (t) nil) null-error))
(defun null-error (type)
(aver (alien-c-string-type-not-null type))
(error 'type-error
:expected-type `(alien ,(unparse-alien-type type))
:datum nil))
(define-alien-type-method (c-string :naturalize-gen) (type alien)
`(if (zerop (sap-int ,alien))
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
nil)
;; Check whether we need to do a full external-format
;; conversion, or whether we can just do a cheap byte-by-byte
;; copy of the c-string data.
;;
;; On SB-UNICODE we can never do the cheap copy, even if the
;; external format and element-type are suitable, since
simple - base - strings may not contain ISO-8859 - 1 characters .
;; If we need to check for non-ascii data in the input, we
;; might as well go through the usual external-format machinery
;; instead of rewriting another version of it.
,(if #!+sb-unicode t
#!-sb-unicode (c-string-needs-conversion-p type)
`(sb!alien::c-string-to-string ,alien
(c-string-external-format ,type)
(alien-c-string-type-element-type
,type))
`(%naturalize-c-string ,alien))))
(define-alien-type-method (c-string :deport-gen) (type value)
This SAP taking is safe as DEPORT callers pin the VALUE when
;; necessary.
`(etypecase ,value
(null
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
`(int-sap 0)))
((alien (* char)) (alien-sap ,value))
(vector (vector-sap ,value))))
(define-alien-type-method (c-string :deport-alloc-gen) (type value)
`(etypecase ,value
(null
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
nil))
((alien (* char)) ,value)
(simple-base-string
,(if (c-string-needs-conversion-p type)
;; If the alien type is not ascii-compatible (+SB-UNICODE)
;; or latin-1-compatible (-SB-UNICODE), we need to do
;; external format conversion.
`(string-to-c-string ,value
(c-string-external-format ,type))
;; Otherwise we can just pass it uncopied.
value))
(simple-string
(string-to-c-string ,value
(c-string-external-format ,type)))))
(/show0 "host-c-call.lisp end of file")
| null | https://raw.githubusercontent.com/nikodemus/SBCL/3c11847d1e12db89b24a7887b18a137c45ed4661/src/code/host-c-call.lisp | lisp | more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
Can't use C-STRING-EXTERNAL-FORMAT here,
changes.
Check whether we need to do a full external-format
conversion, or whether we can just do a cheap byte-by-byte
copy of the c-string data.
On SB-UNICODE we can never do the cheap copy, even if the
external format and element-type are suitable, since
If we need to check for non-ascii data in the input, we
might as well go through the usual external-format machinery
instead of rewriting another version of it.
necessary.
If the alien type is not ascii-compatible (+SB-UNICODE)
or latin-1-compatible (-SB-UNICODE), we need to do
external format conversion.
Otherwise we can just pass it uncopied. | This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!ALIEN")
(/show0 "host-c-call.lisp 12")
(define-alien-type-class (c-string :include pointer :include-args (to))
(external-format :default :type keyword)
(element-type 'character :type (member character base-char))
(not-null nil :type boolean))
(define-alien-type-translator c-string
(&key (external-format :default)
(element-type 'character)
(not-null nil))
(make-alien-c-string-type
:to (parse-alien-type 'char (sb!kernel:make-null-lexenv))
:element-type element-type
:external-format external-format
:not-null not-null))
(defun c-string-external-format (type)
(let ((external-format (alien-c-string-type-external-format type)))
(if (eq external-format :default)
(default-c-string-external-format)
external-format)))
(define-alien-type-method (c-string :unparse) (type)
(let* ((external-format (alien-c-string-type-external-format type))
(element-type (alien-c-string-type-element-type type))
(not-null (alien-c-string-type-not-null type))
(tail
(append (unless (eq :default external-format)
(list :external-format external-format))
(unless (eq 'character element-type)
(list :element-type element-type))
(when not-null
(list :not-null t)))))
(if tail
(cons 'c-string tail)
'c-string)))
(define-alien-type-method (c-string :lisp-rep) (type)
(let ((possibilities '(simple-string (alien (* char)) (simple-array (unsigned-byte 8)))))
(if (alien-c-string-type-not-null type)
`(or ,@possibilities)
`(or null ,@possibilities))))
(define-alien-type-method (c-string :deport-pin-p) (type)
(declare (ignore type))
t)
(defun c-string-needs-conversion-p (type)
#+sb-xc-host
(declare (ignore type))
#+sb-xc-host
t
#-sb-xc-host
(let ((external-format (sb!impl::get-external-format
since the meaning of : DEFAULT can change
when * - FORMAT *
(alien-c-string-type-external-format type))))
(not (and external-format
(or (eq (first (sb!impl::ef-names external-format)) :ascii)
On all latin-1 codepoints will fit
into a base - char , on SB - UNICODE they wo n't .
#!-sb-unicode
(eq (first (sb!impl::ef-names external-format)) :latin-1))))))
(declaim (ftype (sfunction (t) nil) null-error))
(defun null-error (type)
(aver (alien-c-string-type-not-null type))
(error 'type-error
:expected-type `(alien ,(unparse-alien-type type))
:datum nil))
(define-alien-type-method (c-string :naturalize-gen) (type alien)
`(if (zerop (sap-int ,alien))
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
nil)
simple - base - strings may not contain ISO-8859 - 1 characters .
,(if #!+sb-unicode t
#!-sb-unicode (c-string-needs-conversion-p type)
`(sb!alien::c-string-to-string ,alien
(c-string-external-format ,type)
(alien-c-string-type-element-type
,type))
`(%naturalize-c-string ,alien))))
(define-alien-type-method (c-string :deport-gen) (type value)
This SAP taking is safe as DEPORT callers pin the VALUE when
`(etypecase ,value
(null
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
`(int-sap 0)))
((alien (* char)) (alien-sap ,value))
(vector (vector-sap ,value))))
(define-alien-type-method (c-string :deport-alloc-gen) (type value)
`(etypecase ,value
(null
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
nil))
((alien (* char)) ,value)
(simple-base-string
,(if (c-string-needs-conversion-p type)
`(string-to-c-string ,value
(c-string-external-format ,type))
value))
(simple-string
(string-to-c-string ,value
(c-string-external-format ,type)))))
(/show0 "host-c-call.lisp end of file")
|
1f2f4c2a54180566dd9dffd9749c3428bc887bee7a2cb67b902bce6190d0a3f4 | erlydtl/erlydtl | erlydtl_lib_test2.erl | -module(erlydtl_lib_test2).
%% test multiple behaviours
-behaviour(erlydtl_lib_test1).
-behaviour(erlydtl_library).
-export([version/0, inventory/1, reverse/1]).
version() -> 1.
inventory(filters) -> [reverse];
inventory(tags) -> [].
reverse(String) when is_list(String) ->
lists:reverse(String);
reverse(String) when is_binary(String) ->
reverse(binary_to_list(String)).
| null | https://raw.githubusercontent.com/erlydtl/erlydtl/c1f3df8379b09894d333de4e9a3ca2f3e260cba3/test/erlydtl_lib_test2.erl | erlang | test multiple behaviours | -module(erlydtl_lib_test2).
-behaviour(erlydtl_lib_test1).
-behaviour(erlydtl_library).
-export([version/0, inventory/1, reverse/1]).
version() -> 1.
inventory(filters) -> [reverse];
inventory(tags) -> [].
reverse(String) when is_list(String) ->
lists:reverse(String);
reverse(String) when is_binary(String) ->
reverse(binary_to_list(String)).
|
9d2a5909817ab8040f123423a87b08760c22b3dcbafce761995ddf1ae6254b58 | coccinelle/coccinelle | pycocci_aux.ml |
* This file is part of Coccinelle , licensed under the terms of the GPL v2 .
* See copyright.txt in the Coccinelle source code for more information .
* The Coccinelle source code can be obtained at
* This file is part of Coccinelle, licensed under the terms of the GPL v2.
* See copyright.txt in the Coccinelle source code for more information.
* The Coccinelle source code can be obtained at
*)
open Ast_c
open Common
let caller s f a =
let str = ref ([] : string list) in
let pr_elem info =
let comments_before =
List.map Token_c.str_of_token (Ast_c.get_comments_before info) in
let comments_after =
List.map Token_c.str_of_token (Ast_c.get_comments_after info) in
(* constructed backwards *)
str := (List.rev comments_after) @ (Ast_c.str_of_info info) ::
(List.rev comments_before) @ !str in
let pr_sp _ = () in
f ~pr_elem ~pr_space:pr_sp a;
String.concat s (List.rev !str)
let call_pretty f a = caller " " f a
let call_pretty0 f a = caller "" f a
let exprrep = call_pretty Pretty_print_c.pp_expression_gen
let commalistrep list_printer elem_printer comma_printer x =
(call_pretty list_printer x,
List.map
(function x ->
call_pretty elem_printer (comma_printer x) (* drop commas *))
x)
let exprlistrep =
commalistrep Pretty_print_c.pp_arg_list_gen Pretty_print_c.pp_arg_gen
Ast_c.unwrap
let paramlistrep =
commalistrep Pretty_print_c.pp_param_list_gen Pretty_print_c.pp_param_gen
Ast_c.unwrap
let initlistrep (newlines,inits) =
(call_pretty Pretty_print_c.pp_init_list_gen (newlines,inits),
List.map
(function x ->
call_pretty Pretty_print_c.pp_init_gen (Ast_c.unwrap x) (* drop commas *))
inits)
let fieldlistrep =
commalistrep Pretty_print_c.pp_field_list_gen Pretty_print_c.pp_field_gen
(function x -> x)
let stringrep = function
Ast_c.MetaIdVal s -> s
| Ast_c.MetaAssignOpVal op -> call_pretty Pretty_print_c.pp_assignOp_gen op
| Ast_c.MetaBinaryOpVal op -> call_pretty Pretty_print_c.pp_binaryOp_gen op
| Ast_c.MetaFuncVal s -> s
| Ast_c.MetaLocalFuncVal s -> s
| Ast_c.MetaExprVal (expr,_,_) -> exprrep expr
| Ast_c.MetaExprListVal expr_list ->
call_pretty Pretty_print_c.pp_arg_list_gen expr_list
| Ast_c.MetaTypeVal typ -> call_pretty Pretty_print_c.pp_type_gen typ
| Ast_c.MetaInitVal ini -> call_pretty Pretty_print_c.pp_init_gen ini
| Ast_c.MetaInitListVal (newlines,ini) ->
call_pretty Pretty_print_c.pp_init_list_gen (newlines,ini)
| Ast_c.MetaDeclVal (decl,_) ->
call_pretty Pretty_print_c.pp_decl_gen decl
| Ast_c.MetaFieldVal field ->
call_pretty Pretty_print_c.pp_field_gen field
| Ast_c.MetaFieldListVal field ->
call_pretty Pretty_print_c.pp_field_list_gen field
| Ast_c.MetaStmtVal (statement,_,_) ->
call_pretty Pretty_print_c.pp_statement_gen statement
| Ast_c.MetaStmtListVal (statxs,_) ->
call_pretty Pretty_print_c.pp_statement_seq_list_gen statxs
| Ast_c.MetaParamVal param ->
call_pretty Pretty_print_c.pp_param_gen param
| Ast_c.MetaParamListVal params ->
call_pretty Pretty_print_c.pp_param_list_gen params
| Ast_c.MetaDParamListVal params ->
call_pretty Pretty_print_c.pp_define_param_list_gen params
| Ast_c.MetaFragListVal frags ->
call_pretty0 Pretty_print_c.pp_string_fragment_list_gen frags
| Ast_c.MetaFmtVal fmt ->
call_pretty0 Pretty_print_c.pp_string_format_gen fmt
| Ast_c.MetaAttrArgVal name ->
call_pretty0 Pretty_print_c.pp_attr_arg_gen name
| Ast_c.MetaListlenVal n -> string_of_int n
| Ast_c.MetaPosVal (pos1, pos2) ->
let print_pos = function
Ast_cocci.Real x -> string_of_int x
| Ast_cocci.Virt(x,off) -> Printf.sprintf "%d+%d" x off in
Printf.sprintf ("pos(%s,%s)") (print_pos pos1) (print_pos pos2)
| Ast_c.MetaPosValList positions -> "TODO: <<postvallist>>"
| Ast_c.MetaComValList _ -> "TODO: <<postvallist>>"
| Ast_c.MetaNoVal -> failwith "no value, should not occur"
| null | https://raw.githubusercontent.com/coccinelle/coccinelle/94b827ed272cb10c03da467c009b2e381162e5ad/python/pycocci_aux.ml | ocaml | constructed backwards
drop commas
drop commas |
* This file is part of Coccinelle , licensed under the terms of the GPL v2 .
* See copyright.txt in the Coccinelle source code for more information .
* The Coccinelle source code can be obtained at
* This file is part of Coccinelle, licensed under the terms of the GPL v2.
* See copyright.txt in the Coccinelle source code for more information.
* The Coccinelle source code can be obtained at
*)
open Ast_c
open Common
let caller s f a =
let str = ref ([] : string list) in
let pr_elem info =
let comments_before =
List.map Token_c.str_of_token (Ast_c.get_comments_before info) in
let comments_after =
List.map Token_c.str_of_token (Ast_c.get_comments_after info) in
str := (List.rev comments_after) @ (Ast_c.str_of_info info) ::
(List.rev comments_before) @ !str in
let pr_sp _ = () in
f ~pr_elem ~pr_space:pr_sp a;
String.concat s (List.rev !str)
let call_pretty f a = caller " " f a
let call_pretty0 f a = caller "" f a
let exprrep = call_pretty Pretty_print_c.pp_expression_gen
let commalistrep list_printer elem_printer comma_printer x =
(call_pretty list_printer x,
List.map
(function x ->
x)
let exprlistrep =
commalistrep Pretty_print_c.pp_arg_list_gen Pretty_print_c.pp_arg_gen
Ast_c.unwrap
let paramlistrep =
commalistrep Pretty_print_c.pp_param_list_gen Pretty_print_c.pp_param_gen
Ast_c.unwrap
let initlistrep (newlines,inits) =
(call_pretty Pretty_print_c.pp_init_list_gen (newlines,inits),
List.map
(function x ->
inits)
let fieldlistrep =
commalistrep Pretty_print_c.pp_field_list_gen Pretty_print_c.pp_field_gen
(function x -> x)
let stringrep = function
Ast_c.MetaIdVal s -> s
| Ast_c.MetaAssignOpVal op -> call_pretty Pretty_print_c.pp_assignOp_gen op
| Ast_c.MetaBinaryOpVal op -> call_pretty Pretty_print_c.pp_binaryOp_gen op
| Ast_c.MetaFuncVal s -> s
| Ast_c.MetaLocalFuncVal s -> s
| Ast_c.MetaExprVal (expr,_,_) -> exprrep expr
| Ast_c.MetaExprListVal expr_list ->
call_pretty Pretty_print_c.pp_arg_list_gen expr_list
| Ast_c.MetaTypeVal typ -> call_pretty Pretty_print_c.pp_type_gen typ
| Ast_c.MetaInitVal ini -> call_pretty Pretty_print_c.pp_init_gen ini
| Ast_c.MetaInitListVal (newlines,ini) ->
call_pretty Pretty_print_c.pp_init_list_gen (newlines,ini)
| Ast_c.MetaDeclVal (decl,_) ->
call_pretty Pretty_print_c.pp_decl_gen decl
| Ast_c.MetaFieldVal field ->
call_pretty Pretty_print_c.pp_field_gen field
| Ast_c.MetaFieldListVal field ->
call_pretty Pretty_print_c.pp_field_list_gen field
| Ast_c.MetaStmtVal (statement,_,_) ->
call_pretty Pretty_print_c.pp_statement_gen statement
| Ast_c.MetaStmtListVal (statxs,_) ->
call_pretty Pretty_print_c.pp_statement_seq_list_gen statxs
| Ast_c.MetaParamVal param ->
call_pretty Pretty_print_c.pp_param_gen param
| Ast_c.MetaParamListVal params ->
call_pretty Pretty_print_c.pp_param_list_gen params
| Ast_c.MetaDParamListVal params ->
call_pretty Pretty_print_c.pp_define_param_list_gen params
| Ast_c.MetaFragListVal frags ->
call_pretty0 Pretty_print_c.pp_string_fragment_list_gen frags
| Ast_c.MetaFmtVal fmt ->
call_pretty0 Pretty_print_c.pp_string_format_gen fmt
| Ast_c.MetaAttrArgVal name ->
call_pretty0 Pretty_print_c.pp_attr_arg_gen name
| Ast_c.MetaListlenVal n -> string_of_int n
| Ast_c.MetaPosVal (pos1, pos2) ->
let print_pos = function
Ast_cocci.Real x -> string_of_int x
| Ast_cocci.Virt(x,off) -> Printf.sprintf "%d+%d" x off in
Printf.sprintf ("pos(%s,%s)") (print_pos pos1) (print_pos pos2)
| Ast_c.MetaPosValList positions -> "TODO: <<postvallist>>"
| Ast_c.MetaComValList _ -> "TODO: <<postvallist>>"
| Ast_c.MetaNoVal -> failwith "no value, should not occur"
|
068444687cb5001d24768d1230168a56efab7c8b9a565853df422d1c1adcc8d0 | rescript-lang/rescript-compiler | classify_function.ml | Copyright ( C ) 2020- , Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
let rec is_obj_literal (x : _ Flow_ast.Expression.t) : bool =
match snd x with
| Identifier (_, { name = "undefined" }) | Literal _ -> true
| Unary { operator = Minus; argument } -> is_obj_literal argument
| Object { properties } -> Ext_list.for_all properties is_literal_kv
| Array { elements } ->
Ext_list.for_all elements (fun x ->
match x with Expression x -> is_obj_literal x | _ -> false)
| _ -> false
and is_literal_kv (x : _ Flow_ast.Expression.Object.property) =
match x with
| Property (_, Init { value }) -> is_obj_literal value
| _ -> false
let classify_exp (prog : _ Flow_ast.Expression.t) : Js_raw_info.exp =
match prog with
| ( _,
Function
{
id = _;
params = _, { params };
async = false;
generator = false;
predicate = None;
} ) ->
Js_function { arity = List.length params; arrow = false }
| ( _,
ArrowFunction
{
id = None;
params = _, { params };
async = false;
generator = false;
predicate = None;
} ) ->
Js_function { arity = List.length params; arrow = true }
| _, Literal { comments } ->
let comment =
match comments with
| None -> None
| Some { leading = [ (_, { kind = Block; text = comment }) ] } ->
Some ("/*" ^ comment ^ "*/")
| Some { leading = [ (_, { kind = Line; text = comment }) ] } ->
Some ("//" ^ comment)
| Some _ -> None
in
Js_literal { comment }
| _, Identifier (_, { name = "undefined" }) -> Js_literal { comment = None }
| _, _ ->
if is_obj_literal prog then Js_literal { comment = None }
else Js_exp_unknown
| exception _ -> Js_exp_unknown
(** It seems we do the parse twice
- in parsing
- in code generation
*)
let classify ?(check : (Location.t * int) option) (prog : string) :
Js_raw_info.exp =
let prog, errors =
Parser_flow.parse_expression (Parser_env.init_env None prog) false
in
match (check, errors) with
| Some (loc, offset), _ :: _ ->
Bs_flow_ast_utils.check_flow_errors ~loc ~offset errors;
Js_exp_unknown
| Some _, [] | None, [] -> classify_exp prog
| None, _ :: _ -> Js_exp_unknown
let classify_stmt (prog : string) : Js_raw_info.stmt =
let result = Parser_flow.parse_program false None prog in
match fst result with
| _loc, { statements = [] } -> Js_stmt_comment
| _ -> Js_stmt_unknown
we can also throw
x.x pure access
x.x pure access
*)
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/62ad5bc54c811f1a5ec6db8ee333c737160ff6b9/jscomp/frontend/classify_function.ml | ocaml | * It seems we do the parse twice
- in parsing
- in code generation
| Copyright ( C ) 2020- , Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
let rec is_obj_literal (x : _ Flow_ast.Expression.t) : bool =
match snd x with
| Identifier (_, { name = "undefined" }) | Literal _ -> true
| Unary { operator = Minus; argument } -> is_obj_literal argument
| Object { properties } -> Ext_list.for_all properties is_literal_kv
| Array { elements } ->
Ext_list.for_all elements (fun x ->
match x with Expression x -> is_obj_literal x | _ -> false)
| _ -> false
and is_literal_kv (x : _ Flow_ast.Expression.Object.property) =
match x with
| Property (_, Init { value }) -> is_obj_literal value
| _ -> false
let classify_exp (prog : _ Flow_ast.Expression.t) : Js_raw_info.exp =
match prog with
| ( _,
Function
{
id = _;
params = _, { params };
async = false;
generator = false;
predicate = None;
} ) ->
Js_function { arity = List.length params; arrow = false }
| ( _,
ArrowFunction
{
id = None;
params = _, { params };
async = false;
generator = false;
predicate = None;
} ) ->
Js_function { arity = List.length params; arrow = true }
| _, Literal { comments } ->
let comment =
match comments with
| None -> None
| Some { leading = [ (_, { kind = Block; text = comment }) ] } ->
Some ("/*" ^ comment ^ "*/")
| Some { leading = [ (_, { kind = Line; text = comment }) ] } ->
Some ("//" ^ comment)
| Some _ -> None
in
Js_literal { comment }
| _, Identifier (_, { name = "undefined" }) -> Js_literal { comment = None }
| _, _ ->
if is_obj_literal prog then Js_literal { comment = None }
else Js_exp_unknown
| exception _ -> Js_exp_unknown
let classify ?(check : (Location.t * int) option) (prog : string) :
Js_raw_info.exp =
let prog, errors =
Parser_flow.parse_expression (Parser_env.init_env None prog) false
in
match (check, errors) with
| Some (loc, offset), _ :: _ ->
Bs_flow_ast_utils.check_flow_errors ~loc ~offset errors;
Js_exp_unknown
| Some _, [] | None, [] -> classify_exp prog
| None, _ :: _ -> Js_exp_unknown
let classify_stmt (prog : string) : Js_raw_info.stmt =
let result = Parser_flow.parse_program false None prog in
match fst result with
| _loc, { statements = [] } -> Js_stmt_comment
| _ -> Js_stmt_unknown
we can also throw
x.x pure access
x.x pure access
*)
|
22f4b63ea6ed2d430576b3c24358950ef675194cac1a3e58d09e06adc8a3ddf8 | input-output-hk/plutus | Let.hs | -- editorconfig-checker-disable-file
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
-- | Functions for compiling PIR let terms.
module PlutusIR.Compiler.Let (compileLets, LetKind(..)) where
import PlutusIR
import PlutusIR.Compiler.Datatype
import PlutusIR.Compiler.Definitions
import PlutusIR.Compiler.Provenance
import PlutusIR.Compiler.Recursion
import PlutusIR.Compiler.Types
import PlutusIR.Error
import PlutusIR.MkPir qualified as PIR
import Control.Monad
import Control.Monad.Error.Lens
import Control.Monad.Trans
import Control.Lens hiding (Strict)
import Data.List.NonEmpty hiding (partition, reverse)
import Data.List.NonEmpty qualified as NE
Note [ Extra definitions while compiling let - bindings ]
The let - compiling passes can generate some additional definitions , so we use the
support from ' Definitions ' to ease this .
Specifically , only the recursive term pass should do this ( it helps to share fixpoint combinators ) .
So putting in the definitions should mostly be a no - op , and we 'll get errors if it 's not .
It would be more elegant to somehow indicate that only one of the let - compiling passes needs
this , but this does the job .
Also we should pull out more stuff ( e.g. see ' NonStrict ' which uses unit ) .
The let-compiling passes can generate some additional definitions, so we use the
support from 'Definitions' to ease this.
Specifically, only the recursive term pass should do this (it helps to share fixpoint combinators).
So putting in the definitions should mostly be a no-op, and we'll get errors if it's not.
It would be more elegant to somehow indicate that only one of the let-compiling passes needs
this, but this does the job.
Also we should pull out more stuff (e.g. see 'NonStrict' which uses unit).
-}
Note [ Right - associative compilation of let - bindings for linear scoping ]
The ' foldM ' function for lists is left - associative , but we need right - associative for our case , i.e.
every right let must be wrapped / scoped by its left let
An pseudocode PIR example :
let b1 = rhs1 ;
b2 = rhs2 ( b1 is visible in rhs2 ) ;
in ...
must be treated the same as let b1 = rhs in ( let b2 = rhs2 in ... )
Since there is no ' foldrM ' in the stdlib , so we first reverse the bindings list ,
and then apply the left - associative ' foldM ' on them ,
which yields the same results as doing a right - associative fold .
The 'foldM' function for lists is left-associative, but we need right-associative for our case, i.e.
every right let must be wrapped/scoped by its left let
An pseudocode PIR example:
let b1 = rhs1;
b2 = rhs2 (b1 is visible in rhs2);
in ...
must be treated the same as let b1 = rhs in (let b2 = rhs2 in ... )
Since there is no 'foldrM' in the stdlib, so we first reverse the bindings list,
and then apply the left-associative 'foldM' on them,
which yields the same results as doing a right-associative fold.
-}
-- | Which style of let-binding a given compilation pass eliminates.
data LetKind = RecTerms | NonRecTerms | Types | DataTypes
-- | Compile the let terms out of a 'Term'. Note: the result does *not* have globally unique names.
-- Which lets are eliminated is selected by the 'LetKind' argument; the
-- traversal visits every subterm, so nested lets are handled too.
compileLets :: Compiling m e uni fun a => LetKind -> PIRTerm uni fun a -> m (PIRTerm uni fun a)
compileLets kind t = getEnclosing >>= \p ->
    -- See Note [Extra definitions while compiling let-bindings]
    runDefT p $ transformMOf termSubterms (compileLet kind) t
-- | Compile a single 'Let' node according to the given 'LetKind'; any
-- non-'Let' term is returned unchanged.
compileLet :: Compiling m e uni fun a => LetKind -> PIRTerm uni fun a -> DefT SharedName uni fun (Provenance a) m (PIRTerm uni fun a)
compileLet kind = \case
    Let p r bs body -> withEnclosing (const $ LetBinding r p) $ case r of
        -- See Note [Right-associative compilation of let-bindings for linear scoping]
        NonRec -> lift $ foldM (compileNonRecBinding kind) body (NE.reverse bs)
        Rec -> compileRecBindings kind body bs
    x -> pure x
-- | Compile a recursive group of bindings. All bindings of the group must be
-- of a single style: term bindings and datatype bindings are dispatched to
-- the dedicated helpers; recursive type bindings and mixed groups are errors.
compileRecBindings
    :: Compiling m e uni fun a
    => LetKind
    -> PIRTerm uni fun a
    -> NE.NonEmpty (Binding TyName Name uni fun (Provenance a))
    -> DefT SharedName uni fun (Provenance a) m (PIRTerm uni fun a)
compileRecBindings kind body bs =
    case grouped of
        singleGroup :| [] ->
            case NE.head singleGroup of
                TermBind {} -> compileRecTermBindings kind body singleGroup
                DatatypeBind {} -> lift $ compileRecDataBindings kind body singleGroup
                TypeBind {} -> lift $ getEnclosing >>= \p -> throwing _Error $ CompilationError p "Type bindings cannot appear in recursive let, use datatypebind instead"
        -- only one single group should appear, we do not allow mixing of bind styles
        _ -> lift $ getEnclosing >>= \p -> throwing _Error $ CompilationError p "Mixed term/type/data bindings in recursive let"
    where
      -- We group the bindings by their binding style, i.e.: term , data or type bindingstyle
      -- All bindings of a let should be of the same style; for that, we make use of the `groupWith1`
      -- and we expect to see exactly 1 group returned by it.
      -- `NE.groupWith1` returns N>=1 "adjacent" groupings, compared to the similar `NE.groupAllWith1`
      -- which returns at most 3 groups (1 => termbind, 2 -> typebind, 3 -> databind).
      -- `NE.groupAllWith1` is overkill here, since we don't care about the minimal
      -- number of groups, just that there is exactly 1 group.
      grouped = NE.groupWith1 (\case { TermBind {} -> 1 ::Int ; TypeBind {} -> 2; _ -> 3 }) bs
-- | Compile a recursive group of strict term bindings. Only fires in the
-- 'RecTerms' pass; every other pass reconstructs the recursive let unchanged.
compileRecTermBindings
    :: Compiling m e uni fun a
    => LetKind
    -> PIRTerm uni fun a
    -> NE.NonEmpty (Binding TyName Name uni fun (Provenance a))
    -> DefT SharedName uni fun (Provenance a) m (PIRTerm uni fun a)
compileRecTermBindings RecTerms body bs = do
    binds <- forM bs $ \case
        -- only strict term binds are expected here; anything else in a term
        -- group indicates an upstream compiler bug
        TermBind _ Strict vd rhs -> pure $ PIR.Def vd rhs
        _ -> lift $ getEnclosing >>= \p -> throwing _Error $ CompilationError p "Internal error: type binding in term binding group"
    compileRecTerms body binds
compileRecTermBindings _ body bs = lift $ getEnclosing >>= \p -> pure $ Let p Rec bs body
-- | Compile a recursive group of datatype bindings. Only fires in the
-- 'DataTypes' pass; every other pass reconstructs the recursive let unchanged.
compileRecDataBindings :: Compiling m e uni fun a => LetKind -> PIRTerm uni fun a -> NE.NonEmpty (Binding TyName Name uni fun (Provenance a)) -> m (PIRTerm uni fun a)
compileRecDataBindings DataTypes body bs = do
    binds <- forM bs $ \case
        DatatypeBind _ d -> pure d
        _ -> getEnclosing >>= \p -> throwing _Error $ CompilationError p "Internal error: term or type binding in datatype binding group"
    compileRecDatatypes body binds
compileRecDataBindings _ body bs = getEnclosing >>= \p -> pure $ Let p Rec bs body
-- | Compile a single non-recursive binding into the body term, provided the
-- binding's style matches the current pass; otherwise rebuild the 'Let'.
compileNonRecBinding :: Compiling m e uni fun a => LetKind -> PIRTerm uni fun a -> Binding TyName Name uni fun (Provenance a) -> m (PIRTerm uni fun a)
-- a strict term binding becomes an immediately-applied lambda
compileNonRecBinding NonRecTerms body (TermBind x Strict d rhs) = withEnclosing (const $ TermBinding (varDeclNameString d) x) $
   PIR.mkImmediateLamAbs <$> getEnclosing <*> pure (PIR.Def d rhs) <*> pure body
-- a type binding becomes an immediately-applied type abstraction
compileNonRecBinding Types body (TypeBind x d rhs) = withEnclosing (const $ TypeBinding (tyVarDeclNameString d) x) $
   PIR.mkImmediateTyAbs <$> getEnclosing <*> pure (PIR.Def d rhs) <*> pure body
compileNonRecBinding DataTypes body (DatatypeBind x d) = withEnclosing (const $ TypeBinding (datatypeNameString d) x) $
   compileDatatype NonRec body d
compileNonRecBinding _ body b = getEnclosing >>= \p -> pure $ Let p NonRec (pure b) body
| null | https://raw.githubusercontent.com/input-output-hk/plutus/c8d4364d0e639fef4d5b93f7d6c0912d992b54f9/plutus-core/plutus-ir/src/PlutusIR/Compiler/Let.hs | haskell | editorconfig-checker-disable-file
# LANGUAGE OverloadedStrings #
| Functions for compiling PIR let terms.
| Compile the let terms out of a 'Term'. Note: the result does *not* have globally unique names.
See Note [Extra definitions while compiling let-bindings]
See Note [Right-associative compilation of let-bindings for linear scoping]
We group the bindings by their binding style, i.e.: term , data or type bindingstyle
All bindings of a let should be of the same style; for that, we make use of the `groupWith1` | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
module PlutusIR.Compiler.Let (compileLets, LetKind(..)) where
import PlutusIR
import PlutusIR.Compiler.Datatype
import PlutusIR.Compiler.Definitions
import PlutusIR.Compiler.Provenance
import PlutusIR.Compiler.Recursion
import PlutusIR.Compiler.Types
import PlutusIR.Error
import PlutusIR.MkPir qualified as PIR
import Control.Monad
import Control.Monad.Error.Lens
import Control.Monad.Trans
import Control.Lens hiding (Strict)
import Data.List.NonEmpty hiding (partition, reverse)
import Data.List.NonEmpty qualified as NE
Note [ Extra definitions while compiling let - bindings ]
The let - compiling passes can generate some additional definitions , so we use the
support from ' Definitions ' to ease this .
Specifically , only the recursive term pass should do this ( it helps to share fixpoint combinators ) .
So putting in the definitions should mostly be a no - op , and we 'll get errors if it 's not .
It would be more elegant to somehow indicate that only one of the let - compiling passes needs
this , but this does the job .
Also we should pull out more stuff ( e.g. see ' NonStrict ' which uses unit ) .
The let-compiling passes can generate some additional definitions, so we use the
support from 'Definitions' to ease this.
Specifically, only the recursive term pass should do this (it helps to share fixpoint combinators).
So putting in the definitions should mostly be a no-op, and we'll get errors if it's not.
It would be more elegant to somehow indicate that only one of the let-compiling passes needs
this, but this does the job.
Also we should pull out more stuff (e.g. see 'NonStrict' which uses unit).
-}
Note [ Right - associative compilation of let - bindings for linear scoping ]
The ' foldM ' function for lists is left - associative , but we need right - associative for our case , i.e.
every right let must be wrapped / scoped by its left let
An pseudocode PIR example :
let b1 = rhs1 ;
b2 = rhs2 ( b1 is visible in rhs2 ) ;
in ...
must be treated the same as let b1 = rhs in ( let b2 = rhs2 in ... )
Since there is no ' foldrM ' in the stdlib , so we first reverse the bindings list ,
and then apply the left - associative ' foldM ' on them ,
which yields the same results as doing a right - associative fold .
The 'foldM' function for lists is left-associative, but we need right-associative for our case, i.e.
every right let must be wrapped/scoped by its left let
An pseudocode PIR example:
let b1 = rhs1;
b2 = rhs2 (b1 is visible in rhs2);
in ...
must be treated the same as let b1 = rhs in (let b2 = rhs2 in ... )
Since there is no 'foldrM' in the stdlib, so we first reverse the bindings list,
and then apply the left-associative 'foldM' on them,
which yields the same results as doing a right-associative fold.
-}
data LetKind = RecTerms | NonRecTerms | Types | DataTypes
compileLets :: Compiling m e uni fun a => LetKind -> PIRTerm uni fun a -> m (PIRTerm uni fun a)
compileLets kind t = getEnclosing >>= \p ->
runDefT p $ transformMOf termSubterms (compileLet kind) t
compileLet :: Compiling m e uni fun a => LetKind -> PIRTerm uni fun a -> DefT SharedName uni fun (Provenance a) m (PIRTerm uni fun a)
compileLet kind = \case
Let p r bs body -> withEnclosing (const $ LetBinding r p) $ case r of
NonRec -> lift $ foldM (compileNonRecBinding kind) body (NE.reverse bs)
Rec -> compileRecBindings kind body bs
x -> pure x
compileRecBindings
:: Compiling m e uni fun a
=> LetKind
-> PIRTerm uni fun a
-> NE.NonEmpty (Binding TyName Name uni fun (Provenance a))
-> DefT SharedName uni fun (Provenance a) m (PIRTerm uni fun a)
compileRecBindings kind body bs =
case grouped of
singleGroup :| [] ->
case NE.head singleGroup of
TermBind {} -> compileRecTermBindings kind body singleGroup
DatatypeBind {} -> lift $ compileRecDataBindings kind body singleGroup
TypeBind {} -> lift $ getEnclosing >>= \p -> throwing _Error $ CompilationError p "Type bindings cannot appear in recursive let, use datatypebind instead"
only one single group should appear , we do not allow mixing of bind styles
_ -> lift $ getEnclosing >>= \p -> throwing _Error $ CompilationError p "Mixed term/type/data bindings in recursive let"
where
and we expect to see exactly 1 group returned by it .
The ` NE.groupWith1 ` returns N>=1 of " adjacent " grouppings , compared to the similar ` NE.groupAllWith1 `
which returns at most 3 groups ( 1 = > termbind , 2 - > typebind , 3 - > databind ) .
` NE.groupAllWith1 ` is an overkill here , since we do n't care about the minimal number of groups , just that there is exactly 1 group .
grouped = NE.groupWith1 (\case { TermBind {} -> 1 ::Int ; TypeBind {} -> 2; _ -> 3 }) bs
compileRecTermBindings
:: Compiling m e uni fun a
=> LetKind
-> PIRTerm uni fun a
-> NE.NonEmpty (Binding TyName Name uni fun (Provenance a))
-> DefT SharedName uni fun (Provenance a) m (PIRTerm uni fun a)
compileRecTermBindings RecTerms body bs = do
binds <- forM bs $ \case
TermBind _ Strict vd rhs -> pure $ PIR.Def vd rhs
_ -> lift $ getEnclosing >>= \p -> throwing _Error $ CompilationError p "Internal error: type binding in term binding group"
compileRecTerms body binds
compileRecTermBindings _ body bs = lift $ getEnclosing >>= \p -> pure $ Let p Rec bs body
compileRecDataBindings :: Compiling m e uni fun a => LetKind -> PIRTerm uni fun a -> NE.NonEmpty (Binding TyName Name uni fun (Provenance a)) -> m (PIRTerm uni fun a)
compileRecDataBindings DataTypes body bs = do
binds <- forM bs $ \case
DatatypeBind _ d -> pure d
_ -> getEnclosing >>= \p -> throwing _Error $ CompilationError p "Internal error: term or type binding in datatype binding group"
compileRecDatatypes body binds
compileRecDataBindings _ body bs = getEnclosing >>= \p -> pure $ Let p Rec bs body
compileNonRecBinding :: Compiling m e uni fun a => LetKind -> PIRTerm uni fun a -> Binding TyName Name uni fun (Provenance a) -> m (PIRTerm uni fun a)
compileNonRecBinding NonRecTerms body (TermBind x Strict d rhs) = withEnclosing (const $ TermBinding (varDeclNameString d) x) $
PIR.mkImmediateLamAbs <$> getEnclosing <*> pure (PIR.Def d rhs) <*> pure body
compileNonRecBinding Types body (TypeBind x d rhs) = withEnclosing (const $ TypeBinding (tyVarDeclNameString d) x) $
PIR.mkImmediateTyAbs <$> getEnclosing <*> pure (PIR.Def d rhs) <*> pure body
compileNonRecBinding DataTypes body (DatatypeBind x d) = withEnclosing (const $ TypeBinding (datatypeNameString d) x) $
compileDatatype NonRec body d
compileNonRecBinding _ body b = getEnclosing >>= \p -> pure $ Let p NonRec (pure b) body
|
1a9e19a17597bb062cca9eb6154f0e41ddf6239b7a48d5c07538124a83df4964 | mattaudesse/haskell-99-problems | H04.hs | module Problems.H04 where
-- |
-- Find the number of elements of a list.
--
> > > myLength [ 123 , 456 , 789 ]
3
--
> > > " Hello , world ! "
13
--
prop > [ a ] = = length [ a ]
-- | Find the number of elements of a list, counting one per element
-- via a right fold.
--
-- >>> myLength [123, 456, 789]
-- 3
--
-- >>> myLength "Hello, world!"
-- 13
myLength :: [a] -> Int
myLength = foldr (\_ n -> n + 1) 0
| null | https://raw.githubusercontent.com/mattaudesse/haskell-99-problems/f7d57c0bd45c245f10073cf708fbc5e2107e0e23/Problems/H04.hs | haskell | |
Find the number of elements of a list.
| module Problems.H04 where
> > > myLength [ 123 , 456 , 789 ]
3
> > > " Hello , world ! "
13
prop > [ a ] = = length [ a ]
myLength :: [a] -> Int
myLength [] = 0
myLength (_:xs) = 1 + sum [1 | _ <- xs]
|
967a051b8f8f6c0c8541382e3c4c6cb393a3e58ee1cee009b63ce3d57256dcca | theodormoroianu/SecondYearCourses | varEither.hs | import Data.Maybe
-- Limbajul si Interpretorul (the language and its interpreter)
type M = Either String
-- | Render an interpreter result: 'show' output on success, or an
-- annotated "<wrong: ...>" marker carrying the error message on failure.
showM :: Show a => M a -> String
showM = either (\msg -> "<wrong: " ++ msg ++ ">") show
type Name = String
data Term = Var Name
| Con Integer
| Term :+: Term
| Lam Name Term
| App Term Term
deriving (Show)
pgm :: Term
pgm = App
(Lam "y"
(App
(App
(Lam "f"
(Lam "y"
(App (Var "f") (Var "y"))
)
)
(Lam "x"
(Var "x" :+: Var "z")
)
)
(Con 3)
)
)
(Con 4)
data Value = Num Integer
| Fun (Value -> M Value)
instance Show Value where
show (Num x) = show x
show (Fun _) = "<function>"
type Environment = [(Name, Value)]
-- | Evaluate a 'Term' in the given environment. 'Left' carries a
-- human-readable error message; 'Right' carries the resulting 'Value'.
interp :: Term -> Environment -> M Value
interp (Var name) env =
    case lookup name env of
        Just x -> Right x
        Nothing -> Left $ "Unbound variable " ++ name
interp (Con x) _ = Right $ Num x
interp (a :+: b) env = do
    -- evaluate both operands first; a failure in either aborts the addition
    v1 <- interp a env
    v2 <- interp b env
    case (v1, v2) of
        (Num x, Num y) -> Right $ Num (x + y)
        _ -> Left $ "Should be numbers: " ++ show v1 ++ show v2
interp (Lam name exp) env =
    -- the lambda captures the current environment; the parameter binding is
    -- consed in front so it shadows any outer binding of the same name
    Right $ Fun (\x -> interp exp ((name, x) : env))
interp (App f v) env = do
    intf <- interp f env
    intv <- interp v env
    case intf of
        Fun a -> a intv
        _ -> Left $ "Should be function: " ++ show f
-- | Interpret a term in the empty environment and render the outcome.
test :: Term -> String
test t = showM $ interp t []
pgm1:: Term
pgm1 = App
(Lam "x" ((Var "x") :+: (Var "x")))
((Con 10) :+: (Con 11))
s1 = test pgm
s2 = test pgm1
| null | https://raw.githubusercontent.com/theodormoroianu/SecondYearCourses/99185b0e97119135e7301c2c7be0f07ae7258006/FLP/laborator/lab4/varEither.hs | haskell | import Data.Maybe
- Limbajul si Interpretorul
type M = Either String
showM :: Show a => M a -> String
showM (Right a) = show a
showM (Left s) = "<wrong: " ++ s ++ ">"
type Name = String
data Term = Var Name
| Con Integer
| Term :+: Term
| Lam Name Term
| App Term Term
deriving (Show)
pgm :: Term
pgm = App
(Lam "y"
(App
(App
(Lam "f"
(Lam "y"
(App (Var "f") (Var "y"))
)
)
(Lam "x"
(Var "x" :+: Var "z")
)
)
(Con 3)
)
)
(Con 4)
data Value = Num Integer
| Fun (Value -> M Value)
instance Show Value where
show (Num x) = show x
show (Fun _) = "<function>"
type Environment = [(Name, Value)]
interp :: Term -> Environment -> M Value
interp (Var name) env =
case lookup name env of
Just x -> Right x
Nothing -> Left $ "Unbound variable " ++ name
interp (Con x) _ = Right $ Num x
interp (a :+: b) env = do
v1 <- interp a env
v2 <- interp b env
case (v1, v2) of
(Num x, Num y) -> Right $ Num (x + y)
_ -> Left $ "Should be numbers: " ++ show v1 ++ show v2
interp (Lam name exp) env =
Right $ Fun (\x -> interp exp ((name, x) : env))
interp (App f v) env = do
intf <- interp f env
intv <- interp v env
case intf of
Fun a -> a intv
_ -> Left $ "Should be function: " ++ show f
test :: Term -> String
test t = showM $ interp t []
pgm1:: Term
pgm1 = App
(Lam "x" ((Var "x") :+: (Var "x")))
((Con 10) :+: (Con 11))
s1 = test pgm
s2 = test pgm1
| |
7d201012c10e4d7f8d2dd24f80ecaab5d734caf1cda0479b49c49b5f4a2f858f | Helium4Haskell/helium | SynUnusedTyvar.hs | module SynUnusedTyvar where
type A a = Int
| null | https://raw.githubusercontent.com/Helium4Haskell/helium/5928bff479e6f151b4ceb6c69bbc15d71e29eb47/test/staticwarnings/SynUnusedTyvar.hs | haskell | module SynUnusedTyvar where
type A a = Int
| |
e4632cda306e0943795f65f53f1be1288b2ecfbb227827ab4b881e1cb63cda67 | travelping/hello | hello_log.erl | Copyright ( c ) 2010 - 2015 by Travelping GmbH < >
% Permission is hereby granted, free of charge, to any person obtaining a
% copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction , including without limitation
% the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software , and to permit persons to whom the
% Software is furnished to do so, subject to the following conditions:
% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
% DEALINGS IN THE SOFTWARE.
@private
-module(hello_log).
-export([format/1, get_id/1, get_method/1]).
-include("hello.hrl").
%% --------------------------------------------------------------------------------
%% -- Formatters for messages
%% -- request formatting
%% @doc Format hello requests/responses for logging. Lists are rendered
%% element-wise, each element wrapped in "[ ... ]"; bare records become
%% "ID/METHOD/ARGS" or "ID/RESPONSE" summaries; anything else falls through
%% to stringify/1.
format([ Request = #request{} ]) ->
    "[ " ++ format(Request) ++ " ]";
format([ Request = #request{} | Requests]) ->
    "[ " ++ format(Request) ++ " ], " ++ format(Requests);
format(#request{id = ID, method = Method, args = Args}) ->
    lists:append(["ID: ", stringify(ID), "; METHOD: ", stringify(Method),
                  "; ARGS: ", stringify(Args)]);
%% -- response formatting; first for record responses, then for arbitrary data blobs
format([ Response = #response{} ]) ->
    "[ " ++ format(Response) ++ " ]";
format([ Response = #response{} | Responses]) ->
    "[ " ++ format(Response) ++ " ], " ++ format(Responses);
format(#response{id = ID, response = CallbackResponse}) ->
    lists:append(["ID: ", stringify(ID), "; RESPONSE: ", stringify(CallbackResponse)]);
format(ignore) -> ["ignored"];
format({ok, CallbackResponse}) -> stringify(CallbackResponse);
format(Msg) -> stringify(Msg).
%% -- get internal hello request id
%% @doc Render the hello id(s) of a request/response (or a list of them)
%% as a comma-separated string.
get_id([ #request{id = Id} ]) -> stringify(Id);
get_id([ #request{id = Id} | Requests]) -> stringify(Id) ++ ", " ++ get_id(Requests);
get_id(#request{id = Id}) -> stringify(Id);
get_id([ #response{id = Id} ]) -> stringify(Id);
get_id([ #response{id = Id} | Responses]) -> stringify(Id) ++ ", " ++ get_id(Responses);
get_id(#response{id = Id}) -> stringify(Id).
%% -- get request method
%% @doc Render the method name(s) of a request (or a list of requests)
%% as a comma-separated string.
get_method([ #request{method = Method} ]) ->
    stringify(Method);
get_method([ #request{method = Method} | Requests]) ->
    stringify(Method) ++ ", " ++ get_method(Requests);
get_method(#request{method = Method}) ->
    stringify(Method).
%% Render an arbitrary term as a readable string: binaries are converted to
%% their character-list form first; anything else is pretty-printed with ~p
%% and has every double-quote character stripped for readability.
stringify(Bin) when is_binary(Bin) ->
    stringify(binary_to_list(Bin));
stringify(Term) ->
    Flat = lists:flatten(io_lib:format("~p", [Term])),
    %% drop the quotes that ~p puts around printable strings
    [C || C <- Flat, C =/= $"].
| null | https://raw.githubusercontent.com/travelping/hello/b2697428efe777e8be657d31ca22d80378041d7c/src/hello_log.erl | erlang | Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
-- request formatting
-- get internal hello request id
-- get request method
remove quotes to enhance readability | Copyright ( c ) 2010 - 2015 by Travelping GmbH < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
@private
-module(hello_log).
-export([format/1, get_id/1, get_method/1]).
-include("hello.hrl").
-- Formaters for messages
format([ Request = #request{} ]) ->
"[ " ++ format(Request) ++ " ]";
format([ Request = #request{} | Requests]) ->
"[ " ++ format(Request) ++ " ], " ++ format(Requests);
format(#request{id = ID, method = Method, args = Args}) ->
lists:append(["ID: ", stringify(ID), "; METHOD: ", stringify(Method),
"; ARGS: ", stringify(Args)]);
-- response formatting ; first for record responses , then for arbitrary data blobs
format([ Response = #response{} ]) ->
"[ " ++ format(Response) ++ " ]";
format([ Response = #response{} | Responses]) ->
"[ " ++ format(Response) ++ " ], " ++ format(Responses);
format(#response{id = ID, response = CallbackResponse}) ->
lists:append(["ID: ", stringify(ID), "; RESPONSE: ", stringify(CallbackResponse)]);
format(ignore) -> ["ignored"];
format({ok, CallbackResponse}) -> stringify(CallbackResponse);
format(Msg) -> stringify(Msg).
get_id([ #request{id = Id} ]) -> stringify(Id);
get_id([ #request{id = Id} | Requests]) -> stringify(Id) ++ ", " ++ get_id(Requests);
get_id(#request{id = Id}) -> stringify(Id);
get_id([ #response{id = Id} ]) -> stringify(Id);
get_id([ #response{id = Id} | Responses]) -> stringify(Id) ++ ", " ++ get_id(Responses);
get_id(#response{id = Id}) -> stringify(Id).
get_method([ #request{method = Method} ]) ->
stringify(Method);
get_method([ #request{method = Method} | Requests]) ->
stringify(Method) ++ ", " ++ get_method(Requests);
get_method(#request{method = Method}) ->
stringify(Method).
stringify(Term) when is_binary(Term) ->
stringify(binary_to_list(Term));
stringify(Term) ->
String = lists:flatten(io_lib:format("~p", [Term])),
re:replace(String, "\"", "", [global,{return,list}]).
|
d571f0f6051313d3bad98164041eb7911edd89b3e57b758eeedcd24384d2ee32 | lilactown/7-humble-guis | gui_6.clj | (ns town.lilac.humble.app.gui-6
(:require
[io.github.humbleui.app :as app]
[io.github.humbleui.canvas :as canvas]
[io.github.humbleui.core :as core]
[io.github.humbleui.paint :as paint]
[io.github.humbleui.protocols :as protocols]
[io.github.humbleui.ui :as ui]
[io.github.humbleui.window :as window]
[town.lilac.humble.app.state :as state]
[town.lilac.humble.ui :as ui2])
(:import
[io.github.humbleui.skija Canvas]
[io.github.humbleui.types IRect]))
;; A HumbleUI component that draws a circle inscribed in whatever rect the
;; layout gives it. `fill` is an optional fill paint (nil means outline
;; only), `stroke` is always drawn, and `my-rect` mutably caches the last
;; rect passed to -draw.
(core/deftype+ Circle [fill stroke ^:mut my-rect]
  protocols/IComponent
  (-measure
    [_ ctx cs]
    ;; greedily take all the space the parent offers
    cs)
  (-draw
    [this ctx ^IRect rect ^Canvas canvas]
    (set! my-rect rect)
    (let [{:keys [x y right bottom]} rect
          width (- right x)
          height (- bottom y)
          ;; radius of the largest circle that fits inside the rect
          r (/ (min width height) 2)]
      (canvas/draw-circle
       canvas
       ;; TODO scale
       (+ (:x rect) r) (+ (:y rect) r) r
       stroke)
      ;; fill is painted on top of the stroke, and only when provided
      (when fill
        (canvas/draw-circle
         canvas
         ;; TODO scale
         (+ (:x rect) r) (+ (:y rect) r) r
         fill))))
  (-event [_ ctx event])
  (-iterate
    [this ctx cb]
    (cb this)))
(defn circle
  "Build a Circle component filled with `fill` (nil for outline only) and
  stroked with a default 2px grey outline."
  [fill]
  (->Circle fill (paint/stroke 0xFF999999 2) nil))
(defn app
[{:keys [on-add-circle
on-adjust-size
on-undo
on-redo
on-select
on-show-menu
on-show-modal
on-hide-modal]} *state]
(ui2/with-theme
(ui/stack
(ui/padding
10
(ui/column
(ui/center
(ui/row
(ui/dynamic
_ctx
[disabled? (empty? (:undo-history @*state))]
(ui2/disabled
disabled?
(ui2/button on-undo (ui/label "Undo"))))
(ui/gap 10 10)
(ui/dynamic
_ctx
[disabled? (empty? (:redo-history @*state))]
(ui2/disabled
disabled?
(ui2/button on-redo (ui/label "Redo"))))))
(ui/gap 10 10)
[:stretch 1
(ui/clickable
{:on-click (fn [e] (on-add-circle (:x e) (:y e)))}
(ui/rounded-rect
{:radius 4}
(paint/stroke 0xFFCCCCCC 2)
(ui/row
(ui/dynamic
_ctx [circles (:circles @*state)
selected (:selected @*state)
menu? (:menu? @*state)]
(ui2/fragment
(for [[i rect] (map-indexed vector circles)]
(ui2/absolute-rect
(select-keys rect [:x :y :bottom :right])
(ui/clickable
{:on-click (fn [e]
(case (:button e)
:primary (on-select i)
:secondary (on-show-menu)
nil))}
(ui2/relative-rect
{:shackle :bottom-right}
(if (and (= selected i) menu?)
(ui/clickable
{:on-click (fn [_] (on-show-modal))}
(ui/rect
(paint/fill 0xFFE9E9E9)
(ui/padding 10 10 (ui/label "Adjust diameter"))))
(ui/gap 0 0))
(circle
(when (= selected i)
(paint/fill 0xFFDDDDDD))))))))))))]))
credit to @oakmac for their humble - modal - example
;; -modal-example
(ui/dynamic
_ctx
[show-modal? (:show-modal? @*state)]
(when show-modal?
(ui/stack
(ui/clickable
{:on-click (fn [_] (on-hide-modal))}
(ui/rect
(paint/fill 0x44000000)
(ui/label "")))
(ui/valign
0.8
(ui/halign
0.5
(ui/clip-rrect
5
(ui/rect
(paint/fill 0xFFFFFFFF)
(ui/padding
30 30
(ui/column
(ui/slider
(let [selected (:selected @*state)
{:keys [x right]} (get-in @*state [:circles selected])
*state (atom {:value (- right x)
:min 10
:max 500})]
(add-watch
*state
::size-change
(fn [_ _ _ {:keys [value]}]
(on-adjust-size value)))
*state))
(ui/gap 0 20)
(ui/button on-hide-modal (ui/label "Done"))))))))))))))
(defn start!
[]
(let [*state (atom {:circles [{:x 200 :y 200 :bottom 240 :right 240}]
:undo-history ()
:redo-history ()
:selected nil
:menu? false
:show-modal? false})]
(reset!
state/*app
(app
{:on-add-circle
(fn [x y]
(swap!
*state
(fn [state]
(-> state
(update :circles conj {:x (- x 20)
:y (- y 20)
:right (+ x 20)
:bottom (+ y 20)})
(update :undo-history conj
(:circles state))
(assoc :redo-history ()
:selected nil)))))
:on-adjust-size
(fn [d']
(swap!
*state
(fn [state]
(let [selected (:selected state)
{:keys [x y right bottom]} (get-in state [:circles selected])
d (- right x)
delta (/ (- d' d) 2)]
(-> state
(update-in [:circles selected :x] - delta)
(update-in [:circles selected :y] - delta)
(update-in [:circles selected :bottom] + delta)
(update-in [:circles selected :right] + delta))))))
:on-hide-modal
#(swap! *state assoc :show-modal? false)
:on-undo
#(swap!
*state
(fn [state]
(if-let [circles (peek (:undo-history state))]
(-> state
(assoc :circles circles
:selected nil)
(update :undo-history pop)
(update :redo-history conj (:circles state)))
state)))
:on-redo
#(swap!
*state
(fn [state]
(if-let [circles (peek (:redo-history state))]
(-> state
(update :redo-history pop)
(update :undo-history conj (:circles state))
(assoc :circles circles
:selected nil))
state)))
:on-select (fn [i] (swap! *state assoc :selected i))
:on-show-menu #(swap! *state assoc :menu? true)
:on-show-modal
#(swap! *state
(fn [state]
(-> state
(assoc :menu? false
:show-modal? true
:redo-history ())
(update :undo-history conj (:circles state)))))}
*state)))
(state/redraw!)
(app/doui
(window/set-content-size @state/*window 1000 800)))
(start!)
| null | https://raw.githubusercontent.com/lilactown/7-humble-guis/b65561017b2ad0df26dba567d4095108187f6c19/src/town/lilac/humble/app/gui_6.clj | clojure | TODO scale
TODO scale
-modal-example | (ns town.lilac.humble.app.gui-6
(:require
[io.github.humbleui.app :as app]
[io.github.humbleui.canvas :as canvas]
[io.github.humbleui.core :as core]
[io.github.humbleui.paint :as paint]
[io.github.humbleui.protocols :as protocols]
[io.github.humbleui.ui :as ui]
[io.github.humbleui.window :as window]
[town.lilac.humble.app.state :as state]
[town.lilac.humble.ui :as ui2])
(:import
[io.github.humbleui.skija Canvas]
[io.github.humbleui.types IRect]))
(core/deftype+ Circle [fill stroke ^:mut my-rect]
protocols/IComponent
(-measure
[_ ctx cs]
cs)
(-draw
[this ctx ^IRect rect ^Canvas canvas]
(set! my-rect rect)
(let [{:keys [x y right bottom]} rect
width (- right x)
height (- bottom y)
r (/ (min width height) 2)]
(canvas/draw-circle
canvas
(+ (:x rect) r) (+ (:y rect) r) r
stroke)
(when fill
(canvas/draw-circle
canvas
(+ (:x rect) r) (+ (:y rect) r) r
fill))))
(-event [_ ctx event])
(-iterate
[this ctx cb]
(cb this)))
(defn circle
[fill]
(->Circle fill (paint/stroke 0xFF999999 2) nil))
(defn app
[{:keys [on-add-circle
on-adjust-size
on-undo
on-redo
on-select
on-show-menu
on-show-modal
on-hide-modal]} *state]
(ui2/with-theme
(ui/stack
(ui/padding
10
(ui/column
(ui/center
(ui/row
(ui/dynamic
_ctx
[disabled? (empty? (:undo-history @*state))]
(ui2/disabled
disabled?
(ui2/button on-undo (ui/label "Undo"))))
(ui/gap 10 10)
(ui/dynamic
_ctx
[disabled? (empty? (:redo-history @*state))]
(ui2/disabled
disabled?
(ui2/button on-redo (ui/label "Redo"))))))
(ui/gap 10 10)
[:stretch 1
(ui/clickable
{:on-click (fn [e] (on-add-circle (:x e) (:y e)))}
(ui/rounded-rect
{:radius 4}
(paint/stroke 0xFFCCCCCC 2)
(ui/row
(ui/dynamic
_ctx [circles (:circles @*state)
selected (:selected @*state)
menu? (:menu? @*state)]
(ui2/fragment
(for [[i rect] (map-indexed vector circles)]
(ui2/absolute-rect
(select-keys rect [:x :y :bottom :right])
(ui/clickable
{:on-click (fn [e]
(case (:button e)
:primary (on-select i)
:secondary (on-show-menu)
nil))}
(ui2/relative-rect
{:shackle :bottom-right}
(if (and (= selected i) menu?)
(ui/clickable
{:on-click (fn [_] (on-show-modal))}
(ui/rect
(paint/fill 0xFFE9E9E9)
(ui/padding 10 10 (ui/label "Adjust diameter"))))
(ui/gap 0 0))
(circle
(when (= selected i)
(paint/fill 0xFFDDDDDD))))))))))))]))
credit to @oakmac for their humble - modal - example
(ui/dynamic
_ctx
[show-modal? (:show-modal? @*state)]
(when show-modal?
(ui/stack
(ui/clickable
{:on-click (fn [_] (on-hide-modal))}
(ui/rect
(paint/fill 0x44000000)
(ui/label "")))
(ui/valign
0.8
(ui/halign
0.5
(ui/clip-rrect
5
(ui/rect
(paint/fill 0xFFFFFFFF)
(ui/padding
30 30
(ui/column
(ui/slider
(let [selected (:selected @*state)
{:keys [x right]} (get-in @*state [:circles selected])
*state (atom {:value (- right x)
:min 10
:max 500})]
(add-watch
*state
::size-change
(fn [_ _ _ {:keys [value]}]
(on-adjust-size value)))
*state))
(ui/gap 0 20)
(ui/button on-hide-modal (ui/label "Done"))))))))))))))
(defn start!
[]
(let [*state (atom {:circles [{:x 200 :y 200 :bottom 240 :right 240}]
:undo-history ()
:redo-history ()
:selected nil
:menu? false
:show-modal? false})]
(reset!
state/*app
(app
{:on-add-circle
(fn [x y]
(swap!
*state
(fn [state]
(-> state
(update :circles conj {:x (- x 20)
:y (- y 20)
:right (+ x 20)
:bottom (+ y 20)})
(update :undo-history conj
(:circles state))
(assoc :redo-history ()
:selected nil)))))
:on-adjust-size
(fn [d']
(swap!
*state
(fn [state]
(let [selected (:selected state)
{:keys [x y right bottom]} (get-in state [:circles selected])
d (- right x)
delta (/ (- d' d) 2)]
(-> state
(update-in [:circles selected :x] - delta)
(update-in [:circles selected :y] - delta)
(update-in [:circles selected :bottom] + delta)
(update-in [:circles selected :right] + delta))))))
:on-hide-modal
#(swap! *state assoc :show-modal? false)
:on-undo
#(swap!
*state
(fn [state]
(if-let [circles (peek (:undo-history state))]
(-> state
(assoc :circles circles
:selected nil)
(update :undo-history pop)
(update :redo-history conj (:circles state)))
state)))
:on-redo
#(swap!
*state
(fn [state]
(if-let [circles (peek (:redo-history state))]
(-> state
(update :redo-history pop)
(update :undo-history conj (:circles state))
(assoc :circles circles
:selected nil))
state)))
:on-select (fn [i] (swap! *state assoc :selected i))
:on-show-menu #(swap! *state assoc :menu? true)
:on-show-modal
#(swap! *state
(fn [state]
(-> state
(assoc :menu? false
:show-modal? true
:redo-history ())
(update :undo-history conj (:circles state)))))}
*state)))
(state/redraw!)
(app/doui
(window/set-content-size @state/*window 1000 800)))
(start!)
|
1cae03da3c76640a3622f83753b4d82e6c571cea8c7ad090b6819432652e85da | mariachris/Concuerror | etsi_7.erl | -module(etsi_7).
-export([etsi_7/0]).
-export([scenarios/0]).
%% @doc Concuerror scenario: run etsi_7/0 with unbounded preemptions under DPOR.
scenarios() -> [{?MODULE, inf, dpor}].
%% @doc Concuerror regression test. Five processes race over a shared,
%% public ETS table: P1 writes y, P2 writes x, P3 writes z depending on
%% the value of x it observed, P4 snapshots {x,y}, and an anonymous fifth
%% process copies z into z5. The ok-message chain (fifth -> P4 -> P3 ->
%% P2 -> P1 -> parent) guarantees all processes have finished before the
%% parent reads; the observed state is thrown so distinct interleavings
%% surface as distinct exceptions.
etsi_7() ->
    Parent = self(),
    ets:new(table, [public, named_table]),
    ets:insert(table, {x, 0}),
    ets:insert(table, {y, 0}),
    ets:insert(table, {z, 0}),
    ets:insert(table, {z5, 0}),
    ets:insert(table, {xy, 0}),
    P1 =
        spawn(fun() ->
                      cover(?LINE),
                      ets:insert(table, {y, 1}),
                      receive
                          ok -> Parent ! ok
                      end
              end),
    P2 =
        spawn(fun() ->
                      cover(?LINE),
                      ets:insert(table, {x, 1}),
                      receive
                          ok -> P1 ! ok
                      end
              end),
    P3 =
        spawn(fun() ->
                      cover(?LINE),
                      [{x,Y}] = ets:lookup(table, x),
                      cover(?LINE),
                      case Y of
                          1 -> ok;
                          0 -> ets:insert(table, {z, 1})
                      end,
                      receive
                          ok -> P2 ! ok
                      end
              end),
    P4 =
        spawn(fun() ->
                      cover(?LINE),
                      [{x,X}] = ets:lookup(table, x),
                      cover(?LINE),
                      [{y,Y}] = ets:lookup(table, y),
                      ets:insert(table, {xy, {X,Y}}),
                      receive
                          ok -> P3 ! ok
                      end
              end),
    spawn(fun() ->
                  cover(?LINE),
                  [{z,Z}] = ets:lookup(table, z),
                  ets:insert(table, {z5, Z}),
                  P4 ! ok
          end),
    receive
        ok -> ok
    end,
    %% collect the final observable state and throw it so Concuerror can
    %% distinguish the possible outcomes
    P3D = ets:lookup(table, z),
    P4D = ets:lookup(table, xy),
    P5D = ets:lookup(table, z5),
    throw(P3D++P4D++P5D).
%% @doc Mark source line L as covered by inserting a {L, ok} marker tuple.
cover(L) ->
    ets:insert(table, {L, ok}).
| null | https://raw.githubusercontent.com/mariachris/Concuerror/87e63f10ac615bf2eeac5b0916ef54d11a933e0b/testsuite/suites/dpor/src/etsi_7.erl | erlang | -module(etsi_7).
-export([etsi_7/0]).
-export([scenarios/0]).
scenarios() -> [{?MODULE, inf, dpor}].
etsi_7() ->
Parent = self(),
ets:new(table, [public, named_table]),
ets:insert(table, {x, 0}),
ets:insert(table, {y, 0}),
ets:insert(table, {z, 0}),
ets:insert(table, {z5, 0}),
ets:insert(table, {xy, 0}),
P1 =
spawn(fun() ->
cover(?LINE),
ets:insert(table, {y, 1}),
receive
ok -> Parent ! ok
end
end),
P2 =
spawn(fun() ->
cover(?LINE),
ets:insert(table, {x, 1}),
receive
ok -> P1 ! ok
end
end),
P3 =
spawn(fun() ->
cover(?LINE),
[{x,Y}] = ets:lookup(table, x),
cover(?LINE),
case Y of
1 -> ok;
0 -> ets:insert(table, {z, 1})
end,
receive
ok -> P2 ! ok
end
end),
P4 =
spawn(fun() ->
cover(?LINE),
[{x,X}] = ets:lookup(table, x),
cover(?LINE),
[{y,Y}] = ets:lookup(table, y),
ets:insert(table, {xy, {X,Y}}),
receive
ok -> P3 ! ok
end
end),
spawn(fun() ->
cover(?LINE),
[{z,Z}] = ets:lookup(table, z),
ets:insert(table, {z5, Z}),
P4 ! ok
end),
receive
ok -> ok
end,
P3D = ets:lookup(table, z),
P4D = ets:lookup(table, xy),
P5D = ets:lookup(table, z5),
throw(P3D++P4D++P5D).
cover(L) ->
ets:insert(table, {L, ok}).
| |
593bc4e3230ab2f59adfae711c91ad541b9f05602c58990afa5eda7c732258ad | ghc/nofib | Main.hs | module Main where
import TG_iter
import Data8
import Gen_net
import S_Array
import Defs
import Quad_def
-- | Entry point: render the result of the tg iteration to stdout.
main = putStr call_tg

-- | Run 'tg_iter' over the generated node lists with the parameters
-- imported above (mon, simpl, iteration counts, tolerances, relaxation,
-- time step), the factorised matrix and the initial vector.
call_tg =
    tg_iter
        mon simpl m_iter m_toler max_jcb_iter jcb_toler relax
        dlt_t node_lists (tri_fac ()) (init_vec ())
  where
    -- Node bookkeeping derived from the mesh description.
    node_lists =
        get_node_list p_total n_total (coord ()) (v_steer ()) (bry_nodes ()) p_fixed
| null | https://raw.githubusercontent.com/ghc/nofib/f34b90b5a6ce46284693119a06d1133908b11856/parallel/cfd/Main.hs | haskell | module Main where
import TG_iter
import Data8
import Gen_net
import S_Array
import Defs
import Quad_def
main = putStr call_tg
call_tg =
tg_iter
mon simpl m_iter m_toler max_jcb_iter jcb_toler relax
dlt_t node_lists (tri_fac ()) (init_vec ())
where
node_lists =
get_node_list p_total n_total (coord ()) (v_steer ()) (bry_nodes ()) p_fixed
| |
76d0e9ac075fa32d40c5fd835eef76318f976cc21221cb8566cdc56c05175f75 | mirage/jitsu | test_options.ml |
* Copyright ( c ) 2015 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2015 Magnus Skjegstad <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
(* Shared fixture for all tests below: a hashtable exercising every accessor
   in [Options]. Note that [Hashtbl.add] keeps earlier bindings, so keys such
   as "str_list", "tuple_str_list" and "bool_list" intentionally carry
   multiple values; insertion order here is significant. *)
let hashtbl =
  let t = Hashtbl.create 15 in
  (* scalar values *)
  Hashtbl.add t "int" "100";
  Hashtbl.add t "not_int" "xxx";
  Hashtbl.add t "string" "hello world";
  Hashtbl.add t "dns_name" "www.example.org";
  (* multi-valued string key *)
  Hashtbl.add t "str_list" "val1";
  Hashtbl.add t "str_list" "val2";
  Hashtbl.add t "str_list" "val3";
  Hashtbl.add t "str_list" "val4";
  (* "left:right" tuples, including degenerate separators and empties *)
  Hashtbl.add t "tuple_str_list" "left1";
  Hashtbl.add t "tuple_str_list" "left2:";
  Hashtbl.add t "tuple_str_list" "left3:r";
  Hashtbl.add t "tuple_str_list" "left4:right4";
  Hashtbl.add t "tuple_str_list" ":right5";
  Hashtbl.add t "tuple_str_list" "l:r";
  Hashtbl.add t "tuple_str_list" ":";
  Hashtbl.add t "tuple_str_list" "";
  (* booleans in both numeric and literal spellings *)
  Hashtbl.add t "bool" "0";
  Hashtbl.add t "bool_list" "true";
  Hashtbl.add t "bool_list" "false";
  Hashtbl.add t "bool_list" "0";
  Hashtbl.add t "bool_list" "1";
  t
(* Each test drives one [Options] accessor against the shared [hashtbl]
   fixture and checks the decoded value via Alcotest. *)

(* An "int" value parses to the expected integer. *)
let test_get_int () =
  match Options.get_int hashtbl "int" with
  | `Ok v -> Alcotest.(check int) "int" 100 v
  | `Error err -> Alcotest.fail (Options.string_of_error err)

(* A non-numeric value must be rejected by [get_int]. *)
let test_get_int_str () =
  match Options.get_int hashtbl "not_int" with
  | `Ok _ -> Alcotest.fail "get_int succeeded for string value"
  | `Error _ -> ()

(* A plain string round-trips unchanged. *)
let test_get_str () =
  match Options.get_str hashtbl "string" with
  | `Ok v -> Alcotest.(check string) "string" "hello world" v
  | `Error err -> Alcotest.fail (Options.string_of_error err)

(* A multi-valued key is returned in insertion order. *)
let test_get_str_list () =
  match Options.get_str_list hashtbl "str_list" with
  | `Ok vs ->
    Alcotest.(check (list string)) "string list" ["val1"; "val2"; "val3"; "val4"] vs
  | `Error err -> Alcotest.fail (Options.string_of_error err)

(* "0" decodes to [false]. *)
let test_get_bool () =
  match Options.get_bool hashtbl "bool" with
  | `Ok v -> if v then Alcotest.fail "expected false"
  | `Error err -> Alcotest.fail (Options.string_of_error err)

(* Mixed literal/numeric booleans decode element-wise. *)
let test_get_bool_list () =
  let expected = [ true ; false ; false ; true ] in
  match Options.get_bool_list hashtbl "bool_list" with
  | `Ok vs ->
    List.iter2
      (fun a b ->
         if a <> b then
           Alcotest.fail
             (Printf.sprintf "Bool lists are not equal. Expected %B, got %B" a b))
      expected vs
  | `Error err -> Alcotest.fail (Options.string_of_error err)

(* A DNS name parses and prints back to the original string. *)
let test_get_dns_name () =
  match Options.get_dns_name hashtbl "dns_name" with
  | `Ok name ->
    Alcotest.(check string) "dns_name" "www.example.org" (Dns.Name.to_string name)
  | `Error err -> Alcotest.fail (Options.string_of_error err)
(* Decoding "left:right" strings into (string option * string option) tuples:
   empty sides become [None], a missing separator yields (Some left, None). *)
let test_get_str_tuple_list () =
  let is_eq a b = (* compare string option *)
    match a,b with
    | Some a, Some b -> String.compare a b = 0
    | None, None -> true
    | _ -> false
  in
  let is_eq_tup a b = (* compare (string option * string option) *)
    let a_l,a_r = a in
    let b_l,b_r = b in
    (is_eq a_l b_l) && (is_eq a_r b_r)
  in
  let opt_s a = (* string option to string *)
    match a with
    | Some s -> (Printf.sprintf "Some '%s'" s)
    | None -> "None"
  in
  let t_s t = (* (string option * string option) to string *)
    let left, right = t in
    Printf.sprintf "(%s, %s)" (opt_s left) (opt_s right)
  in
  (* One expected tuple per fixture entry, in insertion order. *)
  let expected = [ (Some "left1", None) ;
                   (Some "left2", None) ;
                   (Some "left3", Some "r") ;
                   (Some "left4", Some "right4") ;
                   (None, Some "right5") ;
                   (Some "l", Some "r") ;
                   (None, None) ;
                   (None, None) ] in
  match (Options.get_str_tuple_list hashtbl "tuple_str_list" ~sep:':' ()) with
  | `Ok s -> let _ = List.map2 (fun a b ->
      if not (is_eq_tup a b) then
        Alcotest.fail (Printf.sprintf "Tuple lists are not equal. Expected %s, got %s" (t_s a) (t_s b))
      else
        ()) expected s in
    ()
  | `Error e -> Alcotest.fail (Options.string_of_error e)
(* Run it *)
(* The Alcotest suite: one quick test per [Options] accessor. *)
let test_options =
  ["Test options",
   [ "get_int on int", `Quick, test_get_int ;
     "get_int on string", `Quick, test_get_int_str ;
     "get_str", `Quick, test_get_str ;
     "get_str_list", `Quick, test_get_str_list ;
     "get_bool", `Quick, test_get_bool ;
     "get_bool_list", `Quick, test_get_bool_list ;
     "get_dns_name", `Quick, test_get_dns_name ;
     "get_str_tuple_list", `Quick, test_get_str_tuple_list ;
   ]
  ]
| null | https://raw.githubusercontent.com/mirage/jitsu/6be8a950d831a6055b7611747497bd418f4ff1ba/lib_test/test_options.ml | ocaml | compare string option
compare (string option * string option)
string option to string
(string option * string option) to string
Run it |
* Copyright ( c ) 2015 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2015 Magnus Skjegstad <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
let hashtbl =
let t = Hashtbl.create 15 in
Hashtbl.add t "int" "100";
Hashtbl.add t "not_int" "xxx";
Hashtbl.add t "string" "hello world";
Hashtbl.add t "dns_name" "www.example.org";
Hashtbl.add t "str_list" "val1";
Hashtbl.add t "str_list" "val2";
Hashtbl.add t "str_list" "val3";
Hashtbl.add t "str_list" "val4";
Hashtbl.add t "tuple_str_list" "left1";
Hashtbl.add t "tuple_str_list" "left2:";
Hashtbl.add t "tuple_str_list" "left3:r";
Hashtbl.add t "tuple_str_list" "left4:right4";
Hashtbl.add t "tuple_str_list" ":right5";
Hashtbl.add t "tuple_str_list" "l:r";
Hashtbl.add t "tuple_str_list" ":";
Hashtbl.add t "tuple_str_list" "";
Hashtbl.add t "bool" "0";
Hashtbl.add t "bool_list" "true";
Hashtbl.add t "bool_list" "false";
Hashtbl.add t "bool_list" "0";
Hashtbl.add t "bool_list" "1";
t
let test_get_int () =
match (Options.get_int hashtbl "int") with
| `Ok i -> Alcotest.(check int) "int" 100 i
| `Error e -> Alcotest.fail (Options.string_of_error e)
let test_get_int_str () =
match (Options.get_int hashtbl "not_int") with
| `Ok _ -> Alcotest.fail "get_int succeeded for string value"
| `Error _ -> ()
let test_get_str () =
match (Options.get_str hashtbl "string") with
| `Ok s -> Alcotest.(check string) "string" "hello world" s
| `Error e -> Alcotest.fail (Options.string_of_error e)
let test_get_str_list () =
let expected = ["val1";"val2";"val3";"val4"] in
match (Options.get_str_list hashtbl "str_list") with
| `Ok s -> Alcotest.(check (list string)) "string list" expected s
| `Error e -> Alcotest.fail (Options.string_of_error e)
let test_get_bool () =
match (Options.get_bool hashtbl "bool") with
| `Ok s -> if not s = false then Alcotest.fail "expected false"
| `Error e -> Alcotest.fail (Options.string_of_error e)
let test_get_bool_list () =
let expected = [ true ; false ; false ; true ] in
match (Options.get_bool_list hashtbl "bool_list") with
| `Ok s -> let _ = List.map2 (fun a b ->
if not a=b then
Alcotest.fail (Printf.sprintf "Bool lists are not equal. Expected %B, got %B" a b)
else
()) expected s in
()
| `Error e -> Alcotest.fail (Options.string_of_error e)
let test_get_dns_name () =
match (Options.get_dns_name hashtbl "dns_name") with
| `Ok i -> Alcotest.(check string) "dns_name" "www.example.org" (Dns.Name.to_string i)
| `Error e -> Alcotest.fail (Options.string_of_error e)
let test_get_str_tuple_list () =
match a,b with
| Some a, Some b -> String.compare a b = 0
| None, None -> true
| _ -> false
in
let a_l,a_r = a in
let b_l,b_r = b in
(is_eq a_l b_l) && (is_eq a_r b_r)
in
match a with
| Some s -> (Printf.sprintf "Some '%s'" s)
| None -> "None"
in
let left, right = t in
Printf.sprintf "(%s, %s)" (opt_s left) (opt_s right)
in
let expected = [ (Some "left1", None) ;
(Some "left2", None) ;
(Some "left3", Some "r") ;
(Some "left4", Some "right4") ;
(None, Some "right5") ;
(Some "l", Some "r") ;
(None, None) ;
(None, None) ] in
match (Options.get_str_tuple_list hashtbl "tuple_str_list" ~sep:':' ()) with
| `Ok s -> let _ = List.map2 (fun a b ->
if not (is_eq_tup a b) then
Alcotest.fail (Printf.sprintf "Tuple lists are not equal. Expected %s, got %s" (t_s a) (t_s b))
else
()) expected s in
()
| `Error e -> Alcotest.fail (Options.string_of_error e)
let test_options =
["Test options",
[ "get_int on int", `Quick, test_get_int ;
"get_int on string", `Quick, test_get_int_str ;
"get_str", `Quick, test_get_str ;
"get_str_list", `Quick, test_get_str_list ;
"get_bool", `Quick, test_get_bool ;
"get_bool_list", `Quick, test_get_bool_list ;
"get_dns_name", `Quick, test_get_dns_name ;
"get_str_tuple_list", `Quick, test_get_str_tuple_list ;
]
]
|
992ca6d6fc9f938ee77ed6c3492696e34acd0036c3b00f23acfd8cdc163e7f7d | uber/queryparser | Columns.hs | Copyright ( c ) 2017 Uber Technologies , Inc.
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
--
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-- THE SOFTWARE.
# LANGUAGE FlexibleContexts #
module Database.Sql.Util.Columns ( Clause, ColumnAccess
, HasColumns(..), getColumns
, bindClause, clauseObservation
) where
import Data.Either
import Data.Map (Map)
import qualified Data.Map as M
import Data.List.NonEmpty (NonEmpty(..))
import Data.List (transpose)
import Data.Set (Set)
import qualified Data.Set as S
import Data.Text.Lazy (Text)
import Control.Monad.Identity
import Control.Monad.Reader
import Control.Monad.Writer
import Database.Sql.Type
import Database.Sql.Util.Scope (queryColumnNames)
-- | The clause in which a column was referenced: "SELECT", "WHERE", etc.
-- For nested clauses, we report the innermost clause.
-- NOTE(review): this synonym is reconstructed; 'Clause' is exported by this
-- module and 'Text' is imported solely for it, so the definition was
-- evidently dropped from this copy of the file.
type Clause = Text

-- | A fully qualified column paired with the clause it was accessed in.
type ColumnAccess = (FQCN, Clause)
-- To support dereferencing of column aliases, employ the following algorithm:
--
-- 1. "alias map" which is Map ColumnAlias (Set RColumnRef)
--
-- To populate the alias map, emit at the site of every alias definition,
-- i.e. for every SelectExpr. The key is always the ColumnAlias. The value is
-- the set of columns/aliases referenced in the expr.
--
-- 2. "clause map" which is Map RColumnRef (Set Clause)
--
-- To populate the clause map, emit the current-clause for every RColumnRef.
--
-- Then at the end, stitch the results together by walking over the clause
-- map. If the key is an RColumnRef/FQColumnName, emit the column, for every
-- clause. If the key is an RColumnAlias/ColumnAlias, look it up recursively
-- into the alias map until everything is an RColumnRef/FQColumnName, and then
-- emit every column for every clause.

-- | One alias definition site: the alias' id plus the column refs its
-- defining expression mentions.
type AliasInfo = (ColumnAliasId, Set (RColumnRef ()))

-- | All alias definitions, keyed by alias id.
type AliasMap = Map ColumnAliasId (Set (RColumnRef ()))

-- | One column reference plus the clauses it was seen in.
type ClauseInfo = (RColumnRef (), Set Clause)

-- | All clause accesses, keyed by (annotation-stripped) column reference.
type ClauseMap = Map (RColumnRef ()) (Set Clause)
-- Stuff both info-types into an Either, so we only traverse the AST once.
-- NOTE(review): this synonym is reconstructed; it was evidently dropped from
-- this copy of the file -- its shape follows from the 'lefts'/'rights' calls
-- in 'toAliasMap'/'toClauseMap' below.
type Observation = Either AliasInfo ClauseInfo

-- | Record an alias definition: the alias was defined in terms of @refs@.
-- Annotations are stripped ('void') so refs compare structurally.
aliasObservation :: ColumnAlias a -> Set (RColumnRef b) -> Observation
aliasObservation (ColumnAlias _ _ cid) refs = Left (cid, S.map void refs)

-- | Record that @ref@ was accessed while the given clause was in effect.
clauseObservation :: RColumnRef a -> Clause -> Observation
clauseObservation ref clause = Right (void ref, S.singleton clause)
-- | Fold the alias-definition observations into one map, unioning the
-- reference sets recorded for the same alias id.
toAliasMap :: [Observation] -> AliasMap
toAliasMap observations = M.fromListWith S.union (lefts observations)

-- | Fold the clause-access observations into one map, unioning the clause
-- sets recorded for the same column reference.
toClauseMap :: [Observation] -> ClauseMap
toClauseMap observations = M.fromListWith S.union (rights observations)
-- | The traversal monad: reads the clause currently in effect and writes
-- observations as they are encountered.
type Observer = ReaderT Clause (Writer [Observation]) ()

-- | Things whose column accesses can be collected.
class HasColumns q where
    goColumns :: q -> Observer

-- | Clause assumed before any real clause has been entered.
baseClause :: Clause
baseClause = "BASE"

-- | Run an action with the current clause replaced by the given one.
bindClause :: MonadReader Clause m => Clause -> m r -> m r
bindClause clause = local (const clause)
-- | Collect every (fully qualified column, clause) access in @q@: run the
-- traversal once, then dereference aliases transitively through the alias
-- map so that only real columns are reported.
getColumns :: HasColumns q => q -> Set ColumnAccess
getColumns q = foldMap columnAccesses $ M.toList clauseMap
  where
    observations = execWriter $ runReaderT (goColumns q) baseClause
    aliasMap = toAliasMap observations
    clauseMap = toClauseMap observations
    -- Expand one clause-map entry into concrete column accesses.
    columnAccesses :: ClauseInfo -> Set ColumnAccess
    columnAccesses (ref, clauses) =
        S.fromList [(fqcn, clause) | fqcn <- S.toList $ getAllFQCNs ref
                                   , clause <- S.toList clauses]
    -- Dereference a (possibly aliased) reference to all underlying columns.
    getAllFQCNs :: RColumnRef () -> Set FQCN
    getAllFQCNs ref = recur [ref] [] S.empty
    -- recur :: refsToVisit -> allRefsVisited -> fqcnsVisited -> all the fqcns!
    -- The visited list guards against revisiting (and cyclic alias chains).
    recur :: [RColumnRef ()] -> [RColumnRef ()] -> Set FQCN -> Set FQCN
    recur [] _ fqcns = fqcns
    recur (ref:refs) visited fqcns =
        if ref `elem` visited
        then recur refs visited fqcns
        else case ref of
            RColumnRef fqcn -> recur refs (ref:visited) (S.insert (fqcnToFQCN fqcn) fqcns)
            RColumnAlias (ColumnAlias _ _ cid) -> case M.lookup cid aliasMap of
                -- Invariant violation: every alias must have a definition site.
                Nothing -> error $ "column alias missing from aliasMap: " ++ show ref ++ ", " ++ show aliasMap
                Just moarRefs -> recur (refs ++ S.toList moarRefs) (ref:visited) fqcns
Just moarRefs -> recur (refs ++ S.toList moarRefs) (ref:visited) fqcns
-- | Non-empty collections: visit every element in order.
instance HasColumns a => HasColumns (NonEmpty a) where
    goColumns = mapM_ goColumns

-- | Optional values: visit the payload when present, do nothing otherwise.
instance HasColumns a => HasColumns (Maybe a) where
    goColumns = maybe (return ()) goColumns
-- | Dispatch over every kind of statement. Statements that cannot contain
-- column references (pure DDL, grants, transaction control) emit nothing.
instance HasColumns (Statement d ResolvedNames a) where
    goColumns (QueryStmt q) = goColumns q
    goColumns (InsertStmt i) = goColumns i
    goColumns (UpdateStmt u) = goColumns u
    goColumns (DeleteStmt d) = goColumns d
    goColumns (TruncateStmt _) = return ()
    goColumns (CreateTableStmt c) = goColumns c
    goColumns (AlterTableStmt a) = goColumns a
    goColumns (DropTableStmt _) = return ()
    goColumns (CreateViewStmt c) = goColumns c
    goColumns (DropViewStmt _) = return ()
    goColumns (CreateSchemaStmt _) = return ()
    goColumns (GrantStmt _) = return ()
    goColumns (RevokeStmt _) = return ()
    goColumns (BeginStmt _) = return ()
    goColumns (CommitStmt _) = return ()
    goColumns (RollbackStmt _) = return ()
    goColumns (ExplainStmt _ s) = goColumns s
    goColumns (EmptyStmt _) = return ()
-- | Queries: composed queries (UNION/EXCEPT/INTERSECT) additionally tie the
-- composition's output aliases to the column dependencies of each arm.
instance HasColumns (Query ResolvedNames a) where
    goColumns (QuerySelect _ select) = goColumns select
    goColumns (QueryExcept _ cc lhs rhs) = goColumnsComposed cc [lhs, rhs]
    goColumns (QueryUnion _ _ cc lhs rhs) = goColumnsComposed cc [lhs, rhs]
    goColumns (QueryIntersect _ cc lhs rhs) = goColumnsComposed cc [lhs, rhs]
    goColumns (QueryWith _ ctes query) = goColumns query >> mapM_ goColumns ctes
    goColumns (QueryOrder _ orders query) = sequence_
        [ bindClause "ORDER" $ mapM_ (handleOrderTopLevel query) orders
        , goColumns query
        ]
    goColumns (QueryLimit _ _ query) = goColumns query
    goColumns (QueryOffset _ _ query) = goColumns query

-- | Visit each arm of a composed query, then record, per output position,
-- that the composition's alias depends on the union of the arms' columns
-- at that position ('transpose' pairs up positions across arms).
goColumnsComposed :: ColumnAliasList a -> [Query ResolvedNames a] -> Observer
goColumnsComposed (ColumnAliasList as) qs = do
    mapM_ goColumns qs
    let deps = map S.unions $ transpose $ map queryColumnDeps qs
    tell $ zipWith aliasObservation as deps
-- | An ORDER BY item is either an ordinal position (resolved against the
-- query's select list) or a plain expression (visited directly).
handleOrderTopLevel :: Query ResolvedNames a -> Order ResolvedNames a -> Observer
handleOrderTopLevel query (Order _ posOrExpr _ _) = case posOrExpr of
    PositionOrExprPosition _ pos _ -> handlePos pos query
    PositionOrExprExpr expr -> goColumns expr
-- | Resolve an ordinal position (ORDER BY 2, GROUP BY 1, ...) against the
-- query's select list and record the referenced column in the current
-- clause. Compound/decorated queries push the position down to the
-- underlying select(s).
-- NOTE(review): the 'posRef' binding was evidently dropped from this copy
-- (it is used below and described by the indexing comment); restored.
handlePos :: Int -> Query ResolvedNames a -> Observer
handlePos pos (QuerySelect _ select) = do
    let selections = selectColumnsList $ selectCols select
        -- Flatten the select list: a star contributes all its expanded
        -- refs, an expression contributes its aliases.
        starsConcatted = selections >>= (\case
            SelectStar _ _ (StarColumnNames refs) -> refs
            SelectExpr _ cAliases _ -> map RColumnAlias cAliases)
        -- SQL positions are 1-indexed; (!!) is 0-indexed.
        posRef = starsConcatted !! (pos - 1)
    clause <- ask
    tell $ [clauseObservation posRef clause]
handlePos pos (QueryExcept _ _ lhs rhs) = handlePos pos lhs >> handlePos pos rhs
handlePos pos (QueryUnion _ _ _ lhs rhs) = handlePos pos lhs >> handlePos pos rhs
handlePos pos (QueryIntersect _ _ lhs rhs) = handlePos pos lhs >> handlePos pos rhs
handlePos pos (QueryWith _ _ q) = handlePos pos q
handlePos pos (QueryOrder _ _ q) = handlePos pos q
handlePos pos (QueryLimit _ _ q) = handlePos pos q
handlePos pos (QueryOffset _ _ q) = handlePos pos q
-- | CTEs: visit the body, and when the CTE declares column aliases, tie
-- each alias to the corresponding output column of the body.
instance HasColumns (CTE ResolvedNames a) where
    goColumns CTE{..} = do
        -- recurse to emit clause infos
        goColumns cteQuery
        -- also emit alias infos
        case cteColumns of
            [] -> return ()
            aliases -> tell $ zipWith aliasObservation aliases (queryColumnDeps cteQuery)

-- for every column returned by the query, what columns did it depend on?
queryColumnDeps :: Query ResolvedNames a -> [Set (RColumnRef ())]
queryColumnDeps = map (S.singleton . void) . queryColumnNames
-- | INSERT: everything inside the values is attributed to the INSERT clause.
instance HasColumns (Insert ResolvedNames a) where
    goColumns Insert{..} = bindClause "INSERT" $ goColumns insertValues

instance HasColumns (InsertValues ResolvedNames a) where
    goColumns (InsertExprValues _ e) = goColumns e
    goColumns (InsertSelectValues q) = goColumns q
    goColumns (InsertDefaultValues _) = return ()
    goColumns (InsertDataFromFile _ _) = return ()

instance HasColumns (DefaultExpr ResolvedNames a) where
    goColumns (DefaultValue _) = return ()
    goColumns (ExprValue e) = goColumns e

-- | UPDATE: SET expressions, FROM, and WHERE all run under "UPDATE".
instance HasColumns (Update ResolvedNames a) where
    goColumns Update{..} = bindClause "UPDATE" $ do
        mapM_ (goColumns . snd) updateSetExprs
        mapM_ goColumns updateFrom
        mapM_ goColumns updateWhere

-- | DELETE: only the (optional) predicate references columns.
instance HasColumns (Delete ResolvedNames a) where
    goColumns (Delete _ _ expr) = bindClause "WHERE" $ goColumns expr
-- | CREATE TABLE: column references (CTAS query, column defaults) are
-- attributed to the CREATE clause.
instance HasColumns (CreateTable d ResolvedNames a) where
    goColumns CreateTable{..} = bindClause "CREATE" $ do
        -- TODO handle createTableExtra, and the dialect instances
        goColumns createTableDefinition

instance HasColumns (TableDefinition d ResolvedNames a) where
    goColumns (TableColumns _ cs) = goColumns cs
    goColumns (TableLike _ _) = return ()
    goColumns (TableAs _ _ query) = goColumns query
    goColumns (TableNoColumnInfo _) = return ()

instance HasColumns (ColumnOrConstraint d ResolvedNames a) where
    goColumns (ColumnOrConstraintColumn c) = goColumns c
    goColumns (ColumnOrConstraintConstraint _) = return ()

-- | Only a column's default expression can reference other columns.
instance HasColumns (ColumnDefinition d ResolvedNames a) where
    goColumns ColumnDefinition{..} = goColumns columnDefinitionDefault

-- | ALTER TABLE variants carry no column-referencing expressions.
instance HasColumns (AlterTable ResolvedNames a) where
    goColumns (AlterTableRenameTable _ _ _) = return ()
    goColumns (AlterTableRenameColumn _ _ _ _) = return ()
    goColumns (AlterTableAddColumns _ _ _) = return ()

instance HasColumns (CreateView ResolvedNames a) where
    goColumns CreateView{..} = bindClause "CREATE" $ goColumns createViewQuery
-- | SELECT: each component runs under its own clause name.
instance HasColumns (Select ResolvedNames a) where
    goColumns select@(Select {..}) = sequence_
        [ bindClause "SELECT" $ goColumns $ selectCols
        , bindClause "FROM" $ goColumns selectFrom
        , bindClause "WHERE" $ goColumns selectWhere
        , bindClause "TIMESERIES" $ goColumns selectTimeseries
        , bindClause "GROUPBY" $ handleGroup select selectGroup
        , bindClause "HAVING" $ goColumns selectHaving
        , bindClause "NAMEDWINDOW" $ goColumns selectNamedWindow
        ]

instance HasColumns (SelectColumns ResolvedNames a) where
    goColumns (SelectColumns _ selections) = mapM_ goColumns selections

instance HasColumns (SelectFrom ResolvedNames a) where
    goColumns (SelectFrom _ tablishes) = mapM_ goColumns tablishes

instance HasColumns (SelectWhere ResolvedNames a) where
    goColumns (SelectWhere _ condition) = goColumns condition
-- | Vertica TIMESERIES: visit the partition/order parts, then tie the slice
-- alias to whichever columns the order expression referenced (found by
-- re-running the traversal locally and keeping only the clause infos).
instance HasColumns (SelectTimeseries ResolvedNames a) where
    goColumns (SelectTimeseries _ alias _ partition order) = do
        -- recurse to emit clause infos
        goColumns partition
        bindClause "ORDER" $ goColumns order
        -- also emit alias infos
        clause <- ask
        let observations = execWriter $ runReaderT (goColumns order) clause
            cols = S.fromList $ map fst $ rights observations
        tell $ [aliasObservation alias cols]

instance HasColumns (Partition ResolvedNames a) where
    goColumns (PartitionBy _ exprs) = bindClause "PARTITION" $ mapM_ goColumns exprs
    goColumns (PartitionBest _) = return ()
    goColumns (PartitionNodes _) = return ()
-- | GROUP BY: expressions are visited directly; ordinal positions are
-- resolved against the surrounding select's output list; grouping sets
-- visit each member expression.
handleGroup :: Select ResolvedNames a -> Maybe (SelectGroup ResolvedNames a) -> Observer
handleGroup _ Nothing = return ()
handleGroup select (Just (SelectGroup _ groupingElements)) = mapM_ handleElement groupingElements
  where
    handleElement (GroupingElementExpr _ (PositionOrExprExpr expr)) =
        goColumns expr
    handleElement (GroupingElementExpr _ (PositionOrExprPosition _ pos _)) =
        handlePos pos $ QuerySelect (selectInfo select) select
    handleElement (GroupingElementSet _ exprs) =
        mapM_ goColumns exprs

instance HasColumns (SelectHaving ResolvedNames a) where
    goColumns (SelectHaving _ havings) = mapM_ goColumns havings

instance HasColumns (SelectNamedWindow ResolvedNames a) where
    goColumns (SelectNamedWindow _ windowExprs) = mapM_ goColumns windowExprs
-- | Select-list items: a star emits its expanded columns; an aliased
-- expression additionally ties each alias to the columns the expression
-- referenced (found by re-running the traversal locally).
instance HasColumns (Selection ResolvedNames a) where
    goColumns (SelectStar _ _ starColumns) = goColumns starColumns
    goColumns (SelectExpr _ aliases expr) = do
        -- recurse to emit clause infos
        goColumns expr
        -- also emit alias infos
        clause <- ask
        let observations = execWriter $ runReaderT (goColumns expr) clause
            cols = S.fromList $ map fst $ rights observations
        tell $ map (\a -> aliasObservation a cols) aliases

instance HasColumns (StarColumnNames a) where
    goColumns (StarColumnNames rColumnRefs) = mapM_ goColumns rColumnRefs

instance HasColumns (RColumnRef a) where
    -- treat RColumnRef and RColumnAlias the same, here :)
    goColumns ref = do
        clause <- ask
        tell $ [clauseObservation ref clause]
-- | FROM-clause items (tables, subqueries, joins, lateral views).
instance HasColumns (Tablish ResolvedNames a) where
    goColumns (TablishTable _ tablishAliases tableRef) = do
        -- no clause infos to emit
        -- but there are potentially alias infos
        case tablishAliases of
            TablishAliasesNone -> return ()
            TablishAliasesT _ -> return ()
            TablishAliasesTC _ cAliases ->
                let cRefSets = map S.singleton $ getColumnList tableRef
                in tell $ zipWith aliasObservation cAliases cRefSets
    goColumns (TablishSubQuery _ tablishAliases query) = do
        -- recurse to emit clause infos
        bindClause "SUBQUERY" $ goColumns query
        -- also emit alias infos (if any)
        case tablishAliases of
            TablishAliasesNone -> return ()
            TablishAliasesT _ -> return ()
            TablishAliasesTC _ cAliases ->
                tell $ zipWith aliasObservation cAliases (queryColumnDeps query)
    goColumns (TablishJoin _ _ cond lhs rhs) = do
        bindClause "JOIN" $ goColumns cond
        goColumns lhs
        goColumns rhs
    goColumns (TablishLateralView _ LateralView{..} lhs) = do
        -- recurse to emit clause infos
        bindClause "LATERALVIEW" $ do
            goColumns lhs
            mapM_ goColumns lateralViewExprs
        -- also emit alias infos (if any)
        --
        -- NB this is tricky. In general, lateral views (like UNNEST) can
        -- expand their input exprs into variable numbers of columns. E.g. in
        -- Presto, UNNEST will expand arrays into 1 col and maps into 2
        -- cols. Since we don't keep track of column types, we can't map column
        -- aliases to the (Set RColumnRefs) they refer to in the general case
        -- :-( So let's just handle the particular case where lateralViewExpr
        -- is a singleton list :-)
        case lateralViewAliases of
            TablishAliasesNone -> return ()
            TablishAliasesT _ -> return ()
            TablishAliasesTC _ cAliases -> case lateralViewExprs of
                [FunctionExpr _ _ _ [e] _ _ _] ->
                    let observations = execWriter $ runReaderT (goColumns e) baseClause
                        refs = S.fromList $ map fst $ rights observations
                    in tell $ zipWith aliasObservation cAliases (repeat refs)
                _ -> return () -- alas, the general case

instance HasColumns (LateralView ResolvedNames a) where
    goColumns (LateralView _ _ exprs _ _) = mapM_ goColumns exprs
-- | Join conditions: visit whichever columns the condition mentions.
instance HasColumns (JoinCondition ResolvedNames a) where
    goColumns (JoinNatural _ naturalCols) = goColumns naturalCols
    goColumns (JoinOn condition) = goColumns condition
    goColumns (JoinUsing _ usingCols) = mapM_ goColumns usingCols

-- | Columns implicitly equated by a NATURAL join.
instance HasColumns (RNaturalColumns a) where
    goColumns (RNaturalColumns naturalCols) = mapM_ goColumns naturalCols

-- | A USING column names one column on each side of the join.
instance HasColumns (RUsingColumn a) where
    goColumns (RUsingColumn lhs rhs) = mapM_ goColumns [lhs, rhs]
instance HasColumns (NamedWindowExpr ResolvedNames a) where
    goColumns (NamedWindowExpr _ _ expr) = goColumns expr
    goColumns (NamedPartialWindowExpr _ _ expr) = goColumns expr

-- | Window ORDER BY items: ordinal positions are not expected here, so a
-- positional reference is a hard error.
handleOrderForWindow :: Order ResolvedNames a -> Observer
handleOrderForWindow (Order _ (PositionOrExprPosition _ _ _) _ _) = error "unexpected positional reference"
handleOrderForWindow (Order _ (PositionOrExprExpr expr) _ _) = goColumns expr

instance HasColumns (WindowExpr ResolvedNames a) where
    goColumns (WindowExpr _ partition orders _) = do
        goColumns partition
        bindClause "ORDER" $ mapM_ handleOrderForWindow orders

instance HasColumns (PartialWindowExpr ResolvedNames a) where
    goColumns (PartialWindowExpr _ _ partition orders _) = do
        goColumns partition
        bindClause "ORDER" $ mapM_ handleOrderForWindow orders
-- | Expressions: recurse into every sub-expression; constants and variable
-- substitutions contribute nothing; subqueries switch to the SUBQUERY clause.
instance HasColumns (Expr ResolvedNames a) where
    goColumns (BinOpExpr _ _ lhs rhs) = mapM_ goColumns [lhs, rhs]
    goColumns (CaseExpr _ whens else') = do
        mapM_ ( \ (when', then') -> goColumns when' >> goColumns then') whens
        goColumns else'
    goColumns (UnOpExpr _ _ expr) = goColumns expr
    goColumns (LikeExpr _ _ escape pattern expr) = do
        goColumns escape
        goColumns pattern
        goColumns expr
    goColumns (ConstantExpr _ _) = return ()
    goColumns (ColumnExpr _ c) = goColumns c
    goColumns (InListExpr _ exprs expr) = mapM_ goColumns (expr:exprs)
    goColumns (InSubqueryExpr _ query expr) = do
        goColumns query
        goColumns expr
    goColumns (BetweenExpr _ expr start end) = mapM_ goColumns [expr, start, end]
    goColumns (OverlapsExpr _ (e1, e2) (e3, e4)) = mapM_ goColumns [e1, e2, e3, e4]
    goColumns (FunctionExpr _ _ _ exprs params filter' over) = do
        mapM_ goColumns exprs
        mapM_ (goColumns . snd) params
        goColumns filter'
        goColumns over
    goColumns (AtTimeZoneExpr _ expr tz) = mapM_ goColumns [expr, tz]
    goColumns (SubqueryExpr _ query) = bindClause "SUBQUERY" $ goColumns query
    goColumns (ArrayExpr _ exprs) = mapM_ goColumns exprs
    goColumns (ExistsExpr _ query) = goColumns query
    -- NB we aren't emitting any special info about field access (for now)
    -- NB we aren't emitting any special info about array access (for now)
    -- NOTE(review): the two comments above originally trailed equations for
    -- field-access and array-access constructors; those equations appear to
    -- be missing from this copy -- confirm against upstream whether this
    -- match is still exhaustive.
    goColumns (TypeCastExpr _ _ expr _) = goColumns expr
    goColumns (VariableSubstitutionExpr _) = return ()
-- | LIKE escape clauses simply wrap an expression.
instance HasColumns (Escape ResolvedNames a) where
    goColumns (Escape wrapped) = goColumns wrapped

-- | LIKE patterns simply wrap an expression.
instance HasColumns (Pattern ResolvedNames a) where
    goColumns (Pattern wrapped) = goColumns wrapped

-- | FILTER (WHERE ...) clauses on aggregate calls.
instance HasColumns (Filter ResolvedNames a) where
    goColumns (Filter _ condition) = goColumns condition

-- | OVER clauses: a bare window name carries no column references itself.
instance HasColumns (OverSubExpr ResolvedNames a) where
    goColumns (OverWindowExpr _ window) = goColumns window
    goColumns (OverWindowName _ _) = return ()
    goColumns (OverPartialWindowExpr _ window) = goColumns window
| null | https://raw.githubusercontent.com/uber/queryparser/6ae2e94567189cf842f7134d65b14e1089f06970/src/Database/Sql/Util/Columns.hs | haskell |
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
report the innermost clause.
To support dereferencing of column aliases, employ the following algorithm:
To populate the alias map, emit at the site of every alias definition,
the set of columns/aliases referenced in the expr.
To populate the clause map, emit the current-clause for every RColumnRef.
Then at the end, stitch the results together by walking over the clause
map. If the key is an RColumnRef/FQColumnName, emit the column, for every
into the alias map until everything is an RColumnRef/FQColumnName, and then
emit every column for every clause.
recurse to emit clause infos
also emit alias infos
for every column returned by the query, what columns did it depend on?
recurse to emit clause infos
also emit alias infos
recurse to emit clause infos
also emit alias infos
treat RColumnRef and RColumnAlias the same, here :)
no clause infos to emit
but there are potentially alias infos
recurse to emit clause infos
also emit alias infos (if any)
recurse to emit clause infos
also emit alias infos (if any)
expand their input exprs into variable numbers of columns. E.g. in
cols. Since we don't keep track of column types, we can't map column
aliases to the (Set RColumnRefs) they refer to in the general case
:-( So let's just handle the particular case where lateralViewExpr
is a singleton list :-)
alas, the general case | Copyright ( c ) 2017 Uber Technologies , Inc.
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
# LANGUAGE FlexibleContexts #
module Database.Sql.Util.Columns ( Clause, ColumnAccess
, HasColumns(..), getColumns
, bindClause, clauseObservation
) where
import Data.Either
import Data.Map (Map)
import qualified Data.Map as M
import Data.List.NonEmpty (NonEmpty(..))
import Data.List (transpose)
import Data.Set (Set)
import qualified Data.Set as S
import Data.Text.Lazy (Text)
import Control.Monad.Identity
import Control.Monad.Reader
import Control.Monad.Writer
import Database.Sql.Type
import Database.Sql.Util.Scope (queryColumnNames)
-- | The name of the clause a column reference appears in
-- (SELECT, WHERE, GROUPBY, etc.); for nested clauses, the innermost clause.
type Clause = Text

-- | A fully-qualified column paired with the clause that touches it.
type ColumnAccess = (FQCN, Clause)

-- Traverse the resolved AST to write two maps:
--
-- 1. an "alias map", i.e. Map ColumnAliasId (Set (RColumnRef ())):
--    one entry per SelectExpr; the key is the alias, the value is the set of
--    columns/aliases referenced in the defining expr;
--
-- 2. a "clause map", i.e. Map (RColumnRef ()) (Set Clause):
--    the current clause emitted for every RColumnRef encountered.

-- | Alias-map entry: an alias id and the refs its definition depends on.
type AliasInfo = (ColumnAliasId, Set (RColumnRef ()))
type AliasMap = Map ColumnAliasId (Set (RColumnRef ()))

-- | Clause-map entry: a column ref and the clauses it occurs in.
type ClauseInfo = (RColumnRef (), Set Clause)
type ClauseMap = Map (RColumnRef ()) (Set Clause)
-- Stuff both info-types into an Either, so we only traverse the AST once.
type Observation = Either AliasInfo ClauseInfo

-- | Record that an alias is defined in terms of the given column refs.
-- Annotations are discarded ('void') so refs compare structurally.
aliasObservation :: ColumnAlias a -> Set (RColumnRef b) -> Observation
aliasObservation (ColumnAlias _ _ cid) refs = Left (cid, S.map void refs)

-- | Record that a column ref occurs in the given clause.
clauseObservation :: RColumnRef a -> Clause -> Observation
clauseObservation ref clause = Right (void ref, S.singleton clause)

-- | Collect all alias observations, unioning duplicate alias ids.
toAliasMap :: [Observation] -> AliasMap
toAliasMap = M.fromListWith S.union . lefts

-- | Collect all clause observations, unioning duplicate refs.
toClauseMap :: [Observation] -> ClauseMap
toClauseMap = M.fromListWith S.union . rights
-- | The traversal monad: reads the clause currently in scope, writes the
-- observations collected so far.
type Observer = ReaderT Clause (Writer [Observation]) ()

-- | AST fragments that can be walked to collect column observations.
class HasColumns q where
    goColumns :: q -> Observer

-- | Clause label for refs not enclosed by any more specific clause.
baseClause :: Clause
baseClause = "BASE"

-- | Run an action with the in-scope clause replaced by the given one.
bindClause :: MonadReader Clause m => Clause -> m r -> m r
bindClause clause = local (const clause)
-- | Compute every (column, clause) access in @q@. Runs the traversal once,
-- splits the observations into the alias and clause maps, then resolves
-- every clause-map key down to fully-qualified columns via the alias map.
getColumns :: HasColumns q => q -> Set ColumnAccess
getColumns q = foldMap columnAccesses $ M.toList clauseMap
  where
    observations = execWriter $ runReaderT (goColumns q) baseClause
    aliasMap = toAliasMap observations
    clauseMap = toClauseMap observations

    -- Cross product: every FQCN behind this ref, for every clause it's in.
    columnAccesses :: ClauseInfo -> Set ColumnAccess
    columnAccesses (ref, clauses) =
        S.fromList [(fqcn, clause) | fqcn <- S.toList $ getAllFQCNs ref
                                   , clause <- S.toList clauses]

    -- Chase alias indirections (worklist with a visited set, so cyclic or
    -- shared aliases terminate) down to concrete fully-qualified columns.
    getAllFQCNs :: RColumnRef () -> Set FQCN
    getAllFQCNs ref = recur [ref] [] S.empty

    -- recur :: refsToVisit -> allRefsVisited -> fqcnsVisited -> all the fqcns!
    recur :: [RColumnRef ()] -> [RColumnRef ()] -> Set FQCN -> Set FQCN
    recur [] _ fqcns = fqcns
    recur (ref:refs) visited fqcns =
        if ref `elem` visited
        then recur refs visited fqcns
        else case ref of
            RColumnRef fqcn -> recur refs (ref:visited) (S.insert (fqcnToFQCN fqcn) fqcns)
            -- every alias must have been recorded when its definition was
            -- traversed; a miss indicates a bug in the traversal itself
            RColumnAlias (ColumnAlias _ _ cid) -> case M.lookup cid aliasMap of
                Nothing -> error $ "column alias missing from aliasMap: " ++ show ref ++ ", " ++ show aliasMap
                Just moarRefs -> recur (refs ++ S.toList moarRefs) (ref:visited) fqcns
-- | Walk every element of a non-empty list.
instance HasColumns a => HasColumns (NonEmpty a) where
    goColumns = mapM_ goColumns

-- | Walk the payload of a 'Just'; 'Nothing' contributes no observations.
instance HasColumns a => HasColumns (Maybe a) where
    goColumns = maybe (return ()) goColumns
-- | Dispatch over every top-level statement. Statements that cannot contain
-- queries or expressions (DDL without exprs, transaction control, grants)
-- contribute no observations.
instance HasColumns (Statement d ResolvedNames a) where
    goColumns (QueryStmt q) = goColumns q
    goColumns (InsertStmt i) = goColumns i
    goColumns (UpdateStmt u) = goColumns u
    goColumns (DeleteStmt d) = goColumns d
    goColumns (TruncateStmt _) = return ()
    goColumns (CreateTableStmt c) = goColumns c
    goColumns (AlterTableStmt a) = goColumns a
    goColumns (DropTableStmt _) = return ()
    goColumns (CreateViewStmt c) = goColumns c
    goColumns (DropViewStmt _) = return ()
    goColumns (CreateSchemaStmt _) = return ()
    goColumns (GrantStmt _) = return ()
    goColumns (RevokeStmt _) = return ()
    goColumns (BeginStmt _) = return ()
    goColumns (CommitStmt _) = return ()
    goColumns (RollbackStmt _) = return ()
    goColumns (ExplainStmt _ s) = goColumns s
    goColumns (EmptyStmt _) = return ()
-- | Walk a query. Set operations (EXCEPT/UNION/INTERSECT) also record, for
-- each output alias, the union of the corresponding operand columns.
instance HasColumns (Query ResolvedNames a) where
    goColumns (QuerySelect _ select) = goColumns select
    goColumns (QueryExcept _ cc lhs rhs) = goColumnsComposed cc [lhs, rhs]
    goColumns (QueryUnion _ _ cc lhs rhs) = goColumnsComposed cc [lhs, rhs]
    goColumns (QueryIntersect _ cc lhs rhs) = goColumnsComposed cc [lhs, rhs]
    goColumns (QueryWith _ ctes query) = goColumns query >> mapM_ goColumns ctes
    goColumns (QueryOrder _ orders query) = sequence_
        [ bindClause "ORDER" $ mapM_ (handleOrderTopLevel query) orders
        , goColumns query
        ]
    goColumns (QueryLimit _ _ query) = goColumns query
    goColumns (QueryOffset _ _ query) = goColumns query

-- | For a set-operation query: walk the operands, then map each output
-- alias to the union (column-wise, via 'transpose') of the operands' deps.
goColumnsComposed :: ColumnAliasList a -> [Query ResolvedNames a] -> Observer
goColumnsComposed (ColumnAliasList as) qs = do
    mapM_ goColumns qs
    let deps = map S.unions $ transpose $ map queryColumnDeps qs
    tell $ zipWith aliasObservation as deps
-- | Handle one top-level ORDER BY item: positional references (@ORDER BY 2@)
-- are resolved against the query's select list; expressions are walked.
handleOrderTopLevel :: Query ResolvedNames a -> Order ResolvedNames a -> Observer
handleOrderTopLevel query (Order _ posOrExpr _ _) = case posOrExpr of
    PositionOrExprPosition _ pos _ -> handlePos pos query
    PositionOrExprExpr expr -> goColumns expr

-- | Resolve a 1-based positional reference to the corresponding output
-- column of the query and emit a clause observation for it.
handlePos :: Int -> Query ResolvedNames a -> Observer
handlePos pos (QuerySelect _ select) = do
    let selections = selectColumnsList $ selectCols select
        -- flatten the select list: stars expand to their resolved refs,
        -- expressions contribute their aliases
        starsConcatted = selections >>= (\case
            SelectStar _ _ (StarColumnNames refs) -> refs
            SelectExpr _ cAliases _ -> map RColumnAlias cAliases
          )
        -- SQL is 1 indexed, Haskell is 0 indexed
        posRef = starsConcatted !! (pos - 1)
    clause <- ask
    tell $ [clauseObservation posRef clause]
-- for composite queries, the position refers to each operand's select list
handlePos pos (QueryExcept _ _ lhs rhs) = handlePos pos lhs >> handlePos pos rhs
handlePos pos (QueryUnion _ _ _ lhs rhs) = handlePos pos lhs >> handlePos pos rhs
handlePos pos (QueryIntersect _ _ lhs rhs) = handlePos pos lhs >> handlePos pos rhs
handlePos pos (QueryWith _ _ q) = handlePos pos q
handlePos pos (QueryOrder _ _ q) = handlePos pos q
handlePos pos (QueryLimit _ _ q) = handlePos pos q
handlePos pos (QueryOffset _ _ q) = handlePos pos q
-- | Walk a common table expression; if the CTE declares column aliases,
-- map each alias to the deps of the corresponding query output column.
instance HasColumns (CTE ResolvedNames a) where
    goColumns CTE{..} = do
        goColumns cteQuery
        case cteColumns of
            [] -> return ()
            aliases -> tell $ zipWith aliasObservation aliases (queryColumnDeps cteQuery)

-- | One singleton ref-set per output column of the query.
queryColumnDeps :: Query ResolvedNames a -> [Set (RColumnRef ())]
queryColumnDeps = map (S.singleton . void) . queryColumnNames
-- | INSERT: only the inserted values/query can reference columns.
instance HasColumns (Insert ResolvedNames a) where
    goColumns Insert{..} = bindClause "INSERT" $ goColumns insertValues

instance HasColumns (InsertValues ResolvedNames a) where
    goColumns (InsertExprValues _ e) = goColumns e
    goColumns (InsertSelectValues q) = goColumns q
    goColumns (InsertDefaultValues _) = return ()
    goColumns (InsertDataFromFile _ _) = return ()

instance HasColumns (DefaultExpr ResolvedNames a) where
    goColumns (DefaultValue _) = return ()
    goColumns (ExprValue e) = goColumns e

-- | UPDATE: SET expressions, FROM sources and WHERE all under "UPDATE".
instance HasColumns (Update ResolvedNames a) where
    goColumns Update{..} = bindClause "UPDATE" $ do
        mapM_ (goColumns . snd) updateSetExprs
        mapM_ goColumns updateFrom
        mapM_ goColumns updateWhere

-- | DELETE: only the (optional) WHERE condition references columns.
instance HasColumns (Delete ResolvedNames a) where
    goColumns (Delete _ _ expr) = bindClause "WHERE" $ goColumns expr
-- | CREATE TABLE: column observations come from the table definition
-- (defaults, or a CTAS query) under the "CREATE" clause.
instance HasColumns (CreateTable d ResolvedNames a) where
    goColumns CreateTable{..} = bindClause "CREATE" $ do
        -- TODO handle createTableExtra, and the dialect instances
        goColumns createTableDefinition

instance HasColumns (TableDefinition d ResolvedNames a) where
    goColumns (TableColumns _ cs) = goColumns cs
    goColumns (TableLike _ _) = return ()
    goColumns (TableAs _ _ query) = goColumns query
    goColumns (TableNoColumnInfo _) = return ()

instance HasColumns (ColumnOrConstraint d ResolvedNames a) where
    goColumns (ColumnOrConstraintColumn c) = goColumns c
    goColumns (ColumnOrConstraintConstraint _) = return ()

-- | Only the DEFAULT expression of a column definition can reference columns.
instance HasColumns (ColumnDefinition d ResolvedNames a) where
    goColumns ColumnDefinition{..} = goColumns columnDefinitionDefault

-- | ALTER TABLE variants carry no column-referencing expressions.
instance HasColumns (AlterTable ResolvedNames a) where
    goColumns (AlterTableRenameTable _ _ _) = return ()
    goColumns (AlterTableRenameColumn _ _ _ _) = return ()
    goColumns (AlterTableAddColumns _ _ _) = return ()

instance HasColumns (CreateView ResolvedNames a) where
    goColumns CreateView{..} = bindClause "CREATE" $ goColumns createViewQuery
-- | Walk each clause of a SELECT under its own clause label.
instance HasColumns (Select ResolvedNames a) where
    goColumns select@(Select {..}) = sequence_
        [ bindClause "SELECT" $ goColumns $ selectCols
        , bindClause "FROM" $ goColumns selectFrom
        , bindClause "WHERE" $ goColumns selectWhere
        , bindClause "TIMESERIES" $ goColumns selectTimeseries
        -- GROUP BY needs the whole select to resolve positional references
        , bindClause "GROUPBY" $ handleGroup select selectGroup
        , bindClause "HAVING" $ goColumns selectHaving
        , bindClause "NAMEDWINDOW" $ goColumns selectNamedWindow
        ]
instance HasColumns (SelectColumns ResolvedNames a) where
    goColumns (SelectColumns _ selections) = mapM_ goColumns selections

instance HasColumns (SelectFrom ResolvedNames a) where
    goColumns (SelectFrom _ tablishes) = mapM_ goColumns tablishes

instance HasColumns (SelectWhere ResolvedNames a) where
    goColumns (SelectWhere _ condition) = goColumns condition

-- | Vertica TIMESERIES: walk the partition and order exprs, and record the
-- slice-time alias as depending on the columns of the ORDER expression
-- (obtained by re-running the traversal locally and keeping the refs).
instance HasColumns (SelectTimeseries ResolvedNames a) where
    goColumns (SelectTimeseries _ alias _ partition order) = do
        goColumns partition
        bindClause "ORDER" $ goColumns order
        clause <- ask
        let observations = execWriter $ runReaderT (goColumns order) clause
            cols = S.fromList $ map fst $ rights observations
        tell $ [aliasObservation alias cols]

instance HasColumns (Partition ResolvedNames a) where
    goColumns (PartitionBy _ exprs) = bindClause "PARTITION" $ mapM_ goColumns exprs
    goColumns (PartitionBest _) = return ()
    goColumns (PartitionNodes _) = return ()
-- | Walk GROUP BY elements; positional references (@GROUP BY 1@) are
-- resolved against the enclosing select's output list via 'handlePos'.
handleGroup :: Select ResolvedNames a -> Maybe (SelectGroup ResolvedNames a) -> Observer
handleGroup _ Nothing = return ()
handleGroup select (Just (SelectGroup _ groupingElements)) = mapM_ handleElement groupingElements
  where
    handleElement (GroupingElementExpr _ (PositionOrExprExpr expr)) =
        goColumns expr
    -- wrap the select back into a query so handlePos can inspect it
    handleElement (GroupingElementExpr _ (PositionOrExprPosition _ pos _)) =
        handlePos pos $ QuerySelect (selectInfo select) select
    handleElement (GroupingElementSet _ exprs) =
        mapM_ goColumns exprs
instance HasColumns (SelectHaving ResolvedNames a) where
    goColumns (SelectHaving _ havings) = mapM_ goColumns havings

instance HasColumns (SelectNamedWindow ResolvedNames a) where
    goColumns (SelectNamedWindow _ windowExprs) = mapM_ goColumns windowExprs

-- | A select-list item: walk the expression, and additionally record each
-- of its aliases as depending on the refs inside the expression (obtained
-- by re-running the traversal locally and keeping the refs).
instance HasColumns (Selection ResolvedNames a) where
    goColumns (SelectStar _ _ starColumns) = goColumns starColumns
    goColumns (SelectExpr _ aliases expr) = do
        goColumns expr
        clause <- ask
        let observations = execWriter $ runReaderT (goColumns expr) clause
            cols = S.fromList $ map fst $ rights observations
        tell $ map (\a -> aliasObservation a cols) aliases

-- | A resolved star expands to the refs it stands for.
instance HasColumns (StarColumnNames a) where
    goColumns (StarColumnNames rColumnRefs) = mapM_ goColumns rColumnRefs

-- | The leaf case: emit the current clause for this column reference.
instance HasColumns (RColumnRef a) where
    goColumns ref = do
        clause <- ask
        tell $ [clauseObservation ref clause]
-- | Walk FROM-clause items. Table/subquery column aliases are recorded as
-- depending on the columns they rename.
instance HasColumns (Tablish ResolvedNames a) where
    goColumns (TablishTable _ tablishAliases tableRef) = do
        case tablishAliases of
            TablishAliasesNone -> return ()
            TablishAliasesT _ -> return ()
            TablishAliasesTC _ cAliases ->
                let cRefSets = map S.singleton $ getColumnList tableRef
                in tell $ zipWith aliasObservation cAliases cRefSets
    goColumns (TablishSubQuery _ tablishAliases query) = do
        bindClause "SUBQUERY" $ goColumns query
        case tablishAliases of
            TablishAliasesNone -> return ()
            TablishAliasesT _ -> return ()
            TablishAliasesTC _ cAliases ->
                tell $ zipWith aliasObservation cAliases (queryColumnDeps query)
    goColumns (TablishJoin _ _ cond lhs rhs) = do
        bindClause "JOIN" $ goColumns cond
        goColumns lhs
        goColumns rhs
    goColumns (TablishLateralView _ LateralView{..} lhs) = do
        bindClause "LATERALVIEW" $ do
            goColumns lhs
            mapM_ goColumns lateralViewExprs
        -- NB this is tricky. In general, lateral views (like UNNEST) can
        -- expand their input exprs into variable numbers of columns. E.g. in
        -- Presto, UNNEST will expand arrays into 1 col and maps into 2 cols.
        -- Since we don't keep track of column types, we can't map column
        -- aliases to the (Set RColumnRefs) they refer to in the general case
        -- :-( So let's just handle the particular case where lateralViewExprs
        -- is a singleton list :-)
        case lateralViewAliases of
            TablishAliasesNone -> return ()
            TablishAliasesT _ -> return ()
            TablishAliasesTC _ cAliases -> case lateralViewExprs of
                [FunctionExpr _ _ _ [e] _ _ _] ->
                    let observations = execWriter $ runReaderT (goColumns e) baseClause
                        refs = S.fromList $ map fst $ rights observations
                    in tell $ zipWith aliasObservation cAliases (repeat refs)
                _ -> return () -- alas, the general case
instance HasColumns (LateralView ResolvedNames a) where
    goColumns (LateralView _ _ exprs _ _) = mapM_ goColumns exprs

instance HasColumns (JoinCondition ResolvedNames a) where
    goColumns (JoinNatural _ cs) = goColumns cs
    goColumns (JoinOn expr) = goColumns expr
    goColumns (JoinUsing _ cs) = mapM_ goColumns cs

instance HasColumns (RNaturalColumns a) where
    goColumns (RNaturalColumns cs) = mapM_ goColumns cs

-- | A USING column names one column on each side of the join.
instance HasColumns (RUsingColumn a) where
    goColumns (RUsingColumn c1 c2) = goColumns c1 >> goColumns c2

instance HasColumns (NamedWindowExpr ResolvedNames a) where
    goColumns (NamedWindowExpr _ _ expr) = goColumns expr
    goColumns (NamedPartialWindowExpr _ _ expr) = goColumns expr

-- | Window ORDER items: positional references are not legal here, so any
-- remaining one indicates a resolver bug.
handleOrderForWindow :: Order ResolvedNames a -> Observer
handleOrderForWindow (Order _ (PositionOrExprPosition _ _ _) _ _) = error "unexpected positional reference"
handleOrderForWindow (Order _ (PositionOrExprExpr expr) _ _) = goColumns expr

instance HasColumns (WindowExpr ResolvedNames a) where
    goColumns (WindowExpr _ partition orders _) = do
        goColumns partition
        bindClause "ORDER" $ mapM_ handleOrderForWindow orders

instance HasColumns (PartialWindowExpr ResolvedNames a) where
    goColumns (PartialWindowExpr _ _ partition orders _) = do
        goColumns partition
        bindClause "ORDER" $ mapM_ handleOrderForWindow orders
-- | Walk an expression, emitting the current clause for every column ref
-- reached. Subqueries switch the clause to "SUBQUERY".
instance HasColumns (Expr ResolvedNames a) where
    goColumns (BinOpExpr _ _ lhs rhs) = mapM_ goColumns [lhs, rhs]
    goColumns (CaseExpr _ whens else') = do
        mapM_ (\ (when', then') -> goColumns when' >> goColumns then') whens
        goColumns else'
    goColumns (UnOpExpr _ _ expr) = goColumns expr
    goColumns (LikeExpr _ _ escape pattern expr) = do
        goColumns escape
        goColumns pattern
        goColumns expr
    goColumns (ConstantExpr _ _) = return ()
    goColumns (ColumnExpr _ c) = goColumns c
    goColumns (InListExpr _ exprs expr) = mapM_ goColumns (expr:exprs)
    goColumns (InSubqueryExpr _ query expr) = do
        goColumns query
        goColumns expr
    goColumns (BetweenExpr _ expr start end) = mapM_ goColumns [expr, start, end]
    goColumns (OverlapsExpr _ (e1, e2) (e3, e4)) = mapM_ goColumns [e1, e2, e3, e4]
    goColumns (FunctionExpr _ _ _ exprs params filter' over) = do
        mapM_ goColumns exprs
        mapM_ (goColumns . snd) params
        goColumns filter'
        goColumns over
    goColumns (AtTimeZoneExpr _ expr tz) = mapM_ goColumns [expr, tz]
    goColumns (SubqueryExpr _ query) = bindClause "SUBQUERY" $ goColumns query
    goColumns (ArrayExpr _ exprs) = mapM_ goColumns exprs
    goColumns (ExistsExpr _ query) = goColumns query
    -- NB we aren't emitting any special info about field access (for now)
    goColumns (FieldAccessExpr _ expr _) = goColumns expr
    -- NB we aren't emitting any special info about array access (for now)
    goColumns (ArrayAccessExpr _ expr idx) = goColumns expr >> goColumns idx
    goColumns (TypeCastExpr _ _ expr _) = goColumns expr
    goColumns (VariableSubstitutionExpr _) = return ()
-- | Thin newtype wrappers around expressions: just unwrap and recurse.
instance HasColumns (Escape ResolvedNames a) where
    goColumns (Escape expr) = goColumns expr

instance HasColumns (Pattern ResolvedNames a) where
    goColumns (Pattern expr) = goColumns expr

instance HasColumns (Filter ResolvedNames a) where
    goColumns (Filter _ expr) = goColumns expr

-- | The OVER part of a window function call; a bare window name carries
-- no expressions of its own.
instance HasColumns (OverSubExpr ResolvedNames a) where
    goColumns (OverWindowExpr _ expr) = goColumns expr
    goColumns (OverWindowName _ _) = return ()
    goColumns (OverPartialWindowExpr _ expr) = goColumns expr
|
8a44a71b5e6ca0ef7a895ddcfbb2a4d868094bd1d9e1eede4017c19cd9d00db0 | tolysz/ghcjs-stack | Glob.hs | # LANGUAGE CPP , DeriveGeneric #
--TODO: [code cleanup] plausibly much of this module should be merged with
similar functionality in Cabal .
module Distribution.Client.Glob
( FilePathGlob(..)
, FilePathRoot(..)
, FilePathGlobRel(..)
, Glob
, GlobPiece(..)
, matchFileGlob
, matchFileGlobRel
, matchGlob
, isTrivialFilePathGlob
, getFilePathRootDirectory
) where
import Data.Char (toUpper)
import Data.List (stripPrefix)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Monad
import Distribution.Compat.Binary
import GHC.Generics (Generic)
import Distribution.Text
import Distribution.Compat.ReadP (ReadP, (<++), (+++))
import qualified Distribution.Compat.ReadP as Parse
import qualified Text.PrettyPrint as Disp
import System.FilePath
import System.Directory
-- | A file path specified by globbing
--
data FilePathGlob = FilePathGlob FilePathRoot FilePathGlobRel
  deriving (Eq, Show, Generic)

-- | The relative part of a glob path: a sequence of directory-level globs
-- terminated by either a file-level glob or a trailing directory separator.
data FilePathGlobRel
   = GlobDir  !Glob !FilePathGlobRel
   | GlobFile !Glob
   | GlobDirTrailing                -- ^ trailing dir, a glob ending in @/@
  deriving (Eq, Show, Generic)

-- | A single directory or file component of a globbed path
type Glob = [GlobPiece]

-- | A piece of a globbing pattern
data GlobPiece = WildCard
               | Literal String
               | Union [Glob]
  deriving (Eq, Show, Generic)

-- | Where a glob path is anchored.
data FilePathRoot
   = FilePathRelative
   | FilePathRoot FilePath -- ^ e.g. @"/"@, @"c:\"@ or result of 'takeDrive'
   | FilePathHomeDir
  deriving (Eq, Show, Generic)

instance Binary FilePathGlob
instance Binary FilePathRoot
instance Binary FilePathGlobRel
instance Binary GlobPiece
-- | Check if a 'FilePathGlob' doesn't actually make use of any globbing and
-- is in fact equivalent to a non-glob 'FilePath'.
--
-- If it is trivial in this sense then the result is the equivalent constant
-- 'FilePath'. On the other hand if it is not trivial (so could in principle
-- match more than one file) then the result is @Nothing@.
--
isTrivialFilePathGlob :: FilePathGlob -> Maybe FilePath
isTrivialFilePathGlob (FilePathGlob root pathglob) =
    case root of
      FilePathRelative   -> walk [] pathglob
      FilePathRoot root' -> walk [root'] pathglob
      FilePathHomeDir    -> Nothing
  where
    -- accumulate literal components (reversed); bail on any real glob piece
    walk acc (GlobDir [Literal path] rest) = walk (path:acc) rest
    walk acc (GlobFile [Literal path]) =
        Just (joinPath (reverse (path:acc)))
    walk acc GlobDirTrailing =
        Just (addTrailingPathSeparator (joinPath (reverse acc)))
    walk _ _ = Nothing
-- | Get the 'FilePath' corresponding to a 'FilePathRoot'.
--
-- The 'FilePath' argument is required to supply the path for the
-- 'FilePathRelative' case.
--
getFilePathRootDirectory :: FilePathRoot
                         -> FilePath      -- ^ root for relative paths
                         -> IO FilePath
getFilePathRootDirectory root relroot =
    case root of
      FilePathRelative   -> return relroot
      FilePathRoot root' -> return root'
      FilePathHomeDir    -> getHomeDirectory
------------------------------------------------------------------------------
-- Matching
--
-- | Match a 'FilePathGlob' against the file system, starting from a given
-- root directory for relative paths. The results of relative globs are
-- relative to the given root. Matches for absolute globs are absolute.
--
matchFileGlob :: FilePath -> FilePathGlob -> IO [FilePath]
matchFileGlob relroot (FilePathGlob globroot glob) = do
root <- getFilePathRootDirectory globroot relroot
matches <- matchFileGlobRel root glob
case globroot of
FilePathRelative -> return matches
_ -> return (map (root </>) matches)
-- | Match a 'FilePathGlobRel' against the file system, starting from a
-- given root directory. The results are all relative to the given root.
--
matchFileGlobRel :: FilePath -> FilePathGlobRel -> IO [FilePath]
matchFileGlobRel root glob0 = go glob0 ""
  where
    -- @dir@ is the path matched so far, relative to @root@
    go (GlobFile glob) dir = do
      entries <- getDirectoryContents (root </> dir)
      let files = filter (matchGlob glob) entries
      return (map (dir </>) files)

    go (GlobDir glob globPath) dir = do
      entries <- getDirectoryContents (root </> dir)
      -- only recurse into matching entries that are actually directories
      subdirs <- filterM (\subdir -> doesDirectoryExist
                                       (root </> dir </> subdir))
               $ filter (matchGlob glob) entries
      concat <$> mapM (\subdir -> go globPath (dir </> subdir)) subdirs

    go GlobDirTrailing dir = return [dir]
-- | Match a globbing pattern against a file path component
--
matchGlob :: Glob -> String -> Bool
matchGlob pieces = matchStart pieces
  where
    -- From the man page, glob(7):
    --   "If a filename starts with a '.', this character must be
    --    matched explicitly."
    matchStart, match :: [GlobPiece] -> String -> Bool
    matchStart (WildCard:_)      ('.':_) = False
    matchStart (Union alts:rest) s       = any (\alt -> matchStart (alt ++ rest) s)
                                               alts
    matchStart rest              s       = match rest s

    -- plain backtracking matcher
    match []                 s  = null s
    match (Literal lit:rest) s  = case stripPrefix lit s of
                                    Just s' -> match rest s'
                                    Nothing -> False
    match [WildCard]         [] = True
    match (WildCard:rest) (c:cs) = match rest (c:cs) || match (WildCard:rest) cs
    match (Union alts:rest)  s  = any (\alt -> match (alt ++ rest) s) alts
    match (_:_)              [] = False
------------------------------------------------------------------------------
-- Parsing & printing
--

-- | Round-trippable textual form: root prefix followed by the glob path.
instance Text FilePathGlob where
  disp (FilePathGlob root pathglob) = disp root Disp.<> disp pathglob
  parse =
    parse >>= \root ->
        (FilePathGlob root <$> parse)
    -- a bare root (e.g. "/" or "~/") parses as a trailing-dir glob, but an
    -- empty relative glob is not allowed
    <++ (when (root == FilePathRelative) Parse.pfail >>
         return (FilePathGlob root GlobDirTrailing))

instance Text FilePathRoot where
  disp  FilePathRelative    = Disp.empty
  disp (FilePathRoot root)  = Disp.text root
  disp FilePathHomeDir      = Disp.char '~' Disp.<> Disp.char '/'
  parse =
        -- absolute Unix root, home dir, or a Windows drive like "C:\"
        (    (Parse.char '/' >> return (FilePathRoot "/"))
         +++ (Parse.char '~' >> Parse.char '/' >> return FilePathHomeDir)
         +++ (do drive <- Parse.satisfy (\c -> (c >= 'a' && c <= 'z')
                                            || (c >= 'A' && c <= 'Z'))
                 _ <- Parse.char ':'
                 _ <- Parse.char '/' +++ Parse.char '\\'
                 return (FilePathRoot (toUpper drive : ":\\")))
        )
    <++ return FilePathRelative

instance Text FilePathGlobRel where
  disp (GlobDir  glob pathglob) = dispGlob glob
                          Disp.<> Disp.char '/'
                          Disp.<> disp pathglob
  disp (GlobFile glob)          = dispGlob glob
  disp  GlobDirTrailing         = Disp.empty

  parse = parsePath
    where
      -- try the longest interpretation first: dir/..., then trailing dir,
      -- then plain file glob
      parsePath :: ReadP r FilePathGlobRel
      parsePath =
        parseGlob >>= \globpieces ->
            asDir globpieces
        <++ asTDir globpieces
        <++ asFile globpieces

      asDir  glob = do dirSep
                       globs <- parsePath
                       return (GlobDir glob globs)
      asTDir glob = do dirSep
                       return (GlobDir glob GlobDirTrailing)
      asFile glob = return (GlobFile glob)

      dirSep = (Parse.char '/' >> return ())
           +++ (do _ <- Parse.char '\\'
                   -- check this isn't an escape code
                   following <- Parse.look
                   case following of
                     (c:_) | isGlobEscapedChar c -> Parse.pfail
                     _                           -> return ())
-- | Render one path component's glob, escaping the glob metacharacters.
dispGlob :: Glob -> Disp.Doc
dispGlob = Disp.hcat . map dispPiece
  where
    dispPiece WildCard      = Disp.char '*'
    dispPiece (Literal str) = Disp.text (escape str)
    dispPiece (Union globs) = Disp.braces
                                (Disp.hcat (Disp.punctuate
                                             (Disp.char ',')
                                             (map dispGlob globs)))
    -- backslash-escape characters that would otherwise be glob syntax
    escape []               = []
    escape (c:cs)
      | isGlobEscapedChar c = '\\' : c : escape cs
      | otherwise           =        c : escape cs
-- | Parse one path component's glob: a non-empty sequence of literals,
-- wildcards and brace unions.
parseGlob :: ReadP r Glob
parseGlob = Parse.many1 parsePiece
  where
    parsePiece = literal +++ wildcard +++ union

    wildcard = Parse.char '*' >> return WildCard

    -- e.g. "{foo,bar}" is a union of two alternatives
    union = Parse.between (Parse.char '{') (Parse.char '}') $
              fmap Union (Parse.sepBy1 parseGlob (Parse.char ','))

    literal = Literal `fmap` litchars1

    -- a literal char is anything that isn't glob syntax or a separator,
    -- or a backslash-escaped metacharacter
    litchar = normal +++ escape

    normal  = Parse.satisfy (\c -> not (isGlobEscapedChar c)
                                && c /= '/' && c /= '\\')
    escape  = Parse.char '\\' >> Parse.satisfy isGlobEscapedChar

    litchars1 :: ReadP r [Char]
    litchars1 = liftM2 (:) litchar litchars

    litchars :: ReadP r [Char]
    litchars = litchars1 <++ return []
-- | Characters that are glob syntax and must be backslash-escaped when
-- they appear literally in a pattern.
isGlobEscapedChar :: Char -> Bool
isGlobEscapedChar c = c `elem` "*{},"
| null | https://raw.githubusercontent.com/tolysz/ghcjs-stack/83d5be83e87286d984e89635d5926702c55b9f29/special/cabal/cabal-install/Distribution/Client/Glob.hs | haskell | TODO: [code cleanup] plausibly much of this module should be merged with
| A file path specified by globbing
| A single directory or file component of a globbed path
| A piece of a globbing pattern
^ e.g. @"/"@, @"c:\"@ or result of 'takeDrive'
| Check if a 'FilePathGlob' doesn't actually make use of any globbing and
If it is trivial in this sense then the result is the equivalent constant
| Get the 'FilePath' corresponding to a 'FilePathRoot'.
'FilePathRelative' case.
^ root for relative paths
----------------------------------------------------------------------------
Matching
| Match a 'FilePathGlob' against the file system, starting from a given
root directory for relative paths. The results of relative globs are
relative to the given root. Matches for absolute globs are absolute.
| Match a 'FilePathGlobRel' against the file system, starting from a
given root directory. The results are all relative to the given root.
| Match a globbing pattern against a file path component
From the man page, glob(7):
"If a filename starts with a '.', this character must be
matched explicitly."
----------------------------------------------------------------------------
Parsing & printing
check this isn't an escape code | # LANGUAGE CPP , DeriveGeneric #
similar functionality in Cabal .
module Distribution.Client.Glob
( FilePathGlob(..)
, FilePathRoot(..)
, FilePathGlobRel(..)
, Glob
, GlobPiece(..)
, matchFileGlob
, matchFileGlobRel
, matchGlob
, isTrivialFilePathGlob
, getFilePathRootDirectory
) where
import Data.Char (toUpper)
import Data.List (stripPrefix)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Monad
import Distribution.Compat.Binary
import GHC.Generics (Generic)
import Distribution.Text
import Distribution.Compat.ReadP (ReadP, (<++), (+++))
import qualified Distribution.Compat.ReadP as Parse
import qualified Text.PrettyPrint as Disp
import System.FilePath
import System.Directory
data FilePathGlob = FilePathGlob FilePathRoot FilePathGlobRel
deriving (Eq, Show, Generic)
data FilePathGlobRel
= GlobDir !Glob !FilePathGlobRel
| GlobFile !Glob
^ trailing dir , a glob ending in @/@
deriving (Eq, Show, Generic)
type Glob = [GlobPiece]
data GlobPiece = WildCard
| Literal String
| Union [Glob]
deriving (Eq, Show, Generic)
data FilePathRoot
= FilePathRelative
| FilePathHomeDir
deriving (Eq, Show, Generic)
instance Binary FilePathGlob
instance Binary FilePathRoot
instance Binary FilePathGlobRel
instance Binary GlobPiece
is in fact equivalent to a non - glob ' FilePath ' .
' FilePath ' . On the other hand if it is not trivial ( so could in principle
match more than one file ) then the result is @Nothing@.
isTrivialFilePathGlob :: FilePathGlob -> Maybe FilePath
isTrivialFilePathGlob (FilePathGlob root pathglob) =
case root of
FilePathRelative -> go [] pathglob
FilePathRoot root' -> go [root'] pathglob
FilePathHomeDir -> Nothing
where
go paths (GlobDir [Literal path] globs) = go (path:paths) globs
go paths (GlobFile [Literal path]) = Just (joinPath (reverse (path:paths)))
go paths GlobDirTrailing = Just (addTrailingPathSeparator
(joinPath (reverse paths)))
go _ _ = Nothing
The ' FilePath ' argument is required to supply the path for the
getFilePathRootDirectory :: FilePathRoot
-> IO FilePath
getFilePathRootDirectory FilePathRelative root = return root
getFilePathRootDirectory (FilePathRoot root) _ = return root
getFilePathRootDirectory FilePathHomeDir _ = getHomeDirectory
matchFileGlob :: FilePath -> FilePathGlob -> IO [FilePath]
matchFileGlob relroot (FilePathGlob globroot glob) = do
root <- getFilePathRootDirectory globroot relroot
matches <- matchFileGlobRel root glob
case globroot of
FilePathRelative -> return matches
_ -> return (map (root </>) matches)
matchFileGlobRel :: FilePath -> FilePathGlobRel -> IO [FilePath]
matchFileGlobRel root glob0 = go glob0 ""
where
go (GlobFile glob) dir = do
entries <- getDirectoryContents (root </> dir)
let files = filter (matchGlob glob) entries
return (map (dir </>) files)
go (GlobDir glob globPath) dir = do
entries <- getDirectoryContents (root </> dir)
subdirs <- filterM (\subdir -> doesDirectoryExist
(root </> dir </> subdir))
$ filter (matchGlob glob) entries
concat <$> mapM (\subdir -> go globPath (dir </> subdir)) subdirs
go GlobDirTrailing dir = return [dir]
matchGlob :: Glob -> String -> Bool
matchGlob = goStart
where
go, goStart :: [GlobPiece] -> String -> Bool
goStart (WildCard:_) ('.':_) = False
goStart (Union globs:rest) cs = any (\glob -> goStart (glob ++ rest) cs)
globs
goStart rest cs = go rest cs
go [] "" = True
go (Literal lit:rest) cs
| Just cs' <- stripPrefix lit cs
= go rest cs'
| otherwise = False
go [WildCard] "" = True
go (WildCard:rest) (c:cs) = go rest (c:cs) || go (WildCard:rest) cs
go (Union globs:rest) cs = any (\glob -> go (glob ++ rest) cs) globs
go [] (_:_) = False
go (_:_) "" = False
instance Text FilePathGlob where
disp (FilePathGlob root pathglob) = disp root Disp.<> disp pathglob
parse =
parse >>= \root ->
(FilePathGlob root <$> parse)
<++ (when (root == FilePathRelative) Parse.pfail >>
return (FilePathGlob root GlobDirTrailing))
instance Text FilePathRoot where
disp FilePathRelative = Disp.empty
disp (FilePathRoot root) = Disp.text root
disp FilePathHomeDir = Disp.char '~' Disp.<> Disp.char '/'
parse =
( (Parse.char '/' >> return (FilePathRoot "/"))
+++ (Parse.char '~' >> Parse.char '/' >> return FilePathHomeDir)
+++ (do drive <- Parse.satisfy (\c -> (c >= 'a' && c <= 'z')
|| (c >= 'A' && c <= 'Z'))
_ <- Parse.char ':'
_ <- Parse.char '/' +++ Parse.char '\\'
return (FilePathRoot (toUpper drive : ":\\")))
)
<++ return FilePathRelative
instance Text FilePathGlobRel where
disp (GlobDir glob pathglob) = dispGlob glob
Disp.<> Disp.char '/'
Disp.<> disp pathglob
disp (GlobFile glob) = dispGlob glob
disp GlobDirTrailing = Disp.empty
parse = parsePath
where
parsePath :: ReadP r FilePathGlobRel
parsePath =
parseGlob >>= \globpieces ->
asDir globpieces
<++ asTDir globpieces
<++ asFile globpieces
asDir glob = do dirSep
globs <- parsePath
return (GlobDir glob globs)
asTDir glob = do dirSep
return (GlobDir glob GlobDirTrailing)
asFile glob = return (GlobFile glob)
dirSep = (Parse.char '/' >> return ())
+++ (do _ <- Parse.char '\\'
following <- Parse.look
case following of
(c:_) | isGlobEscapedChar c -> Parse.pfail
_ -> return ())
dispGlob :: Glob -> Disp.Doc
dispGlob = Disp.hcat . map dispPiece
where
dispPiece WildCard = Disp.char '*'
dispPiece (Literal str) = Disp.text (escape str)
dispPiece (Union globs) = Disp.braces
(Disp.hcat (Disp.punctuate
(Disp.char ',')
(map dispGlob globs)))
escape [] = []
escape (c:cs)
| isGlobEscapedChar c = '\\' : c : escape cs
| otherwise = c : escape cs
parseGlob :: ReadP r Glob
parseGlob = Parse.many1 parsePiece
where
parsePiece = literal +++ wildcard +++ union
wildcard = Parse.char '*' >> return WildCard
union = Parse.between (Parse.char '{') (Parse.char '}') $
fmap Union (Parse.sepBy1 parseGlob (Parse.char ','))
literal = Literal `fmap` litchars1
litchar = normal +++ escape
normal = Parse.satisfy (\c -> not (isGlobEscapedChar c)
&& c /= '/' && c /= '\\')
escape = Parse.char '\\' >> Parse.satisfy isGlobEscapedChar
litchars1 :: ReadP r [Char]
litchars1 = liftM2 (:) litchar litchars
litchars :: ReadP r [Char]
litchars = litchars1 <++ return []
isGlobEscapedChar :: Char -> Bool
isGlobEscapedChar '*' = True
isGlobEscapedChar '{' = True
isGlobEscapedChar '}' = True
isGlobEscapedChar ',' = True
isGlobEscapedChar _ = False
|
2d83a0d1878f9724389b623f625720ef6495d50b1bf6844ce0cb0cc68377b059 | clash-lang/clash-compiler | Prelude.hs | |
Copyright : ( C ) 2013 - 2016 , University of Twente ,
2017 - 2019 , Myrtle Software Ltd
2017 , Google Inc. ,
2021 - 2023 , QBayLogic B.V.
License : BSD2 ( see the file LICENSE )
Maintainer : QBayLogic B.V. < >
Clash is a functional hardware description language that borrows both its
syntax and semantics from the functional programming language . The
merits of using a functional language to describe hardware comes from the fact
that combinational circuits can be directly modeled as mathematical functions
and that functional languages lend themselves very well at describing and
( de-)composing mathematical functions .
This package provides :
* Prelude library containing datatypes and functions for circuit design
To use the library :
* Import " Clash . Prelude " ; by default clock and reset lines are implicitly
routed for all the components found in " Clash . Prelude " . You can read more
about implicit clock and reset lines in " Clash . Signal#implicitclockandreset "
* Alternatively , if you want to explicitly route clock and reset ports ,
for more straightforward multi - clock designs , you can import the
" Clash . Explicit . Prelude " module . Note that you should not import
" Clash . Prelude " and " Clash . Explicit . Prelude " at the same time as they
have overlapping definitions .
For now , " Clash . Prelude " is also the best starting point for exploring the
library . A preliminary version of a tutorial can be found in " Clash . Tutorial " .
Some circuit examples can be found in " Clash . Examples " .
Copyright : (C) 2013-2016, University of Twente,
2017-2019, Myrtle Software Ltd
2017 , Google Inc.,
2021-2023, QBayLogic B.V.
License : BSD2 (see the file LICENSE)
Maintainer : QBayLogic B.V. <>
Clash is a functional hardware description language that borrows both its
syntax and semantics from the functional programming language Haskell. The
merits of using a functional language to describe hardware comes from the fact
that combinational circuits can be directly modeled as mathematical functions
and that functional languages lend themselves very well at describing and
(de-)composing mathematical functions.
This package provides:
* Prelude library containing datatypes and functions for circuit design
To use the library:
* Import "Clash.Prelude"; by default clock and reset lines are implicitly
routed for all the components found in "Clash.Prelude". You can read more
about implicit clock and reset lines in "Clash.Signal#implicitclockandreset"
* Alternatively, if you want to explicitly route clock and reset ports,
for more straightforward multi-clock designs, you can import the
"Clash.Explicit.Prelude" module. Note that you should not import
"Clash.Prelude" and "Clash.Explicit.Prelude" at the same time as they
have overlapping definitions.
For now, "Clash.Prelude" is also the best starting point for exploring the
library. A preliminary version of a tutorial can be found in "Clash.Tutorial".
Some circuit examples can be found in "Clash.Examples".
-}
# LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
# LANGUAGE MonoLocalBinds #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE Unsafe #
{-# OPTIONS_HADDOCK show-extensions, not-home #-}
module Clash.Prelude
( -- * Creating synchronous sequential circuits
mealy
, mealyB
, (<^>)
, moore
, mooreB
, registerB
#ifdef CLASH_MULTIPLE_HIDDEN
-- * Synchronizer circuits for safe clock domain crossings
, dualFlipFlopSynchronizer
, asyncFIFOSynchronizer
#endif
-- * ROMs
, asyncRom
, asyncRomPow2
, rom
, romPow2
-- ** ROMs defined by a 'MemBlob'
, asyncRomBlob
, asyncRomBlobPow2
, romBlob
, romBlobPow2
-- ** ROMs defined by a data file
, asyncRomFile
, asyncRomFilePow2
, romFile
, romFilePow2
-- * RAM primitives with a combinational read port
, asyncRam
, asyncRamPow2
-- * Block RAM primitives
, blockRam
, blockRamPow2
, blockRamU
, blockRam1
, E.ResetStrategy(..)
-- ** Block RAM primitives initialized with a 'MemBlob'
, blockRamBlob
, blockRamBlobPow2
-- *** Creating and inspecting 'MemBlob'
, MemBlob
, createMemBlob
, memBlobTH
, unpackMemBlob
-- ** Block RAM primitives initialized with a data file
, blockRamFile
, blockRamFilePow2
-- ** Block RAM read/write conflict resolution
, readNew
* * True dual - port block RAM
, trueDualPortBlockRam
, RamOp(..)
-- * Utility functions
, window
, windowD
, isRising
, isFalling
, riseEvery
, oscillate
-- * Static assertions
, clashCompileError
-- * Tracing
-- ** Simple
, traceSignal1
, traceVecSignal1
-- ** Tracing in a multi-clock environment
, traceSignal
, traceVecSignal
-- ** VCD dump functions
, dumpVCD
-- * Exported modules
-- ** Synchronous signals
, module Clash.Signal
, module Clash.Signal.Delayed
* *
-- *** Bit vectors
, module Clash.Sized.BitVector
-- *** Arbitrary-width numbers
, module Clash.Sized.Signed
, module Clash.Sized.Unsigned
, module Clash.Sized.Index
-- *** Fixed point numbers
, module Clash.Sized.Fixed
-- *** Fixed size vectors
, module Clash.Sized.Vector
-- *** Perfect depth trees
, module Clash.Sized.RTree
-- ** Annotations
, module Clash.Annotations.TopEntity
-- ** Generics type-classes
, Generic
, Generic1
-- ** Type-level natural numbers
, module GHC.TypeLits
, module GHC.TypeLits.Extra
, module Clash.Promoted.Nat
, module Clash.Promoted.Nat.Literals
, module Clash.Promoted.Nat.TH
-- ** Type-level strings
, module Clash.Promoted.Symbol
-- ** Template Haskell
, Lift (..)
-- ** Type classes
-- *** Clash
, AutoReg, autoReg, deriveAutoReg
, module Clash.Class.BitPack
, module Clash.Class.Exp
, module Clash.Class.Num
, module Clash.Class.Parity
, module Clash.Class.Resize
-- *** Other
, module Control.Applicative
, module Data.Bits
, module Data.Default.Class
, module Data.Kind
-- ** Exceptions
, module Clash.XException
-- ** Named types
, module Clash.NamedTypes
-- ** Hidden arguments
, module Clash.Hidden
-- ** Magic
, module Clash.Magic
-- ** Haskell Prelude
-- $hiding
, module Clash.HaskellPrelude
)
where
import Control.Applicative
import Data.Bits
import Data.Default.Class
import Data.Kind (Type, Constraint)
import GHC.Stack (HasCallStack)
import GHC.TypeLits
import GHC.TypeLits.Extra
import Language.Haskell.TH.Syntax (Lift(..))
import Clash.HaskellPrelude
import Clash.Annotations.TopEntity
import Clash.Class.AutoReg (AutoReg, deriveAutoReg)
import Clash.Class.BitPack
import Clash.Class.Exp
import Clash.Class.Num
import Clash.Class.Parity
import Clash.Class.Resize
import qualified Clash.Explicit.Prelude as E
import Clash.Explicit.Prelude (clashCompileError)
import Clash.Hidden
import Clash.Magic
import Clash.NamedTypes
import Clash.Prelude.BlockRam
import Clash.Prelude.BlockRam.Blob
import Clash.Prelude.BlockRam.File
import Clash.Prelude.ROM.Blob
import Clash.Prelude.ROM.File
import Clash.Prelude.Safe
#ifdef CLASH_MULTIPLE_HIDDEN
import Clash.Prelude.Synchronizer
#endif
import Clash.Promoted.Nat
import Clash.Promoted.Nat.TH
import Clash.Promoted.Nat.Literals
import Clash.Promoted.Symbol
import Clash.Sized.BitVector
import Clash.Sized.Fixed
import Clash.Sized.Index
import Clash.Sized.RTree
import Clash.Sized.Signed
import Clash.Sized.Unsigned
import Clash.Sized.Vector hiding (fromList, unsafeFromList)
import Clash.Signal hiding
(HiddenClockName, HiddenResetName, HiddenEnableName)
import Clash.Signal.Delayed
import Clash.Signal.Trace
import Clash.XException
$ setup
> > > : set -XDataKinds -XFlexibleContexts -XTypeApplications
> > > let window4 = window : : HiddenClockResetEnable dom = > Signal dom Int - > Vec 4 ( Signal dom Int )
> > > let windowD3 = windowD : : HiddenClockResetEnable dom = > Signal dom Int - > Vec 3 ( Signal dom Int )
>>> :set -XDataKinds -XFlexibleContexts -XTypeApplications
>>> let window4 = window :: HiddenClockResetEnable dom => Signal dom Int -> Vec 4 (Signal dom Int)
>>> let windowD3 = windowD :: HiddenClockResetEnable dom => Signal dom Int -> Vec 3 (Signal dom Int)
-}
$ hiding
" Clash . Prelude " re - exports most of the " Prelude " with the exception of
those functions that the Clash API defines to work on ' ' from
" Clash . Sized . Vector " instead of on lists as the Haskell Prelude does .
In addition , for the ' Clash.Class.Parity.odd ' and ' Clash.Class.Parity.even '
functions a type class called ' Clash . Class . Parity . Parity ' is available at
" Clash . Class . Parity " .
"Clash.Prelude" re-exports most of the Haskell "Prelude" with the exception of
those functions that the Clash API defines to work on 'Vec' from
"Clash.Sized.Vector" instead of on lists as the Haskell Prelude does.
In addition, for the 'Clash.Class.Parity.odd' and 'Clash.Class.Parity.even'
functions a type class called 'Clash.Class.Parity.Parity' is available at
"Clash.Class.Parity".
-}
| Give a window over a ' Signal '
--
> window4 : : HiddenClockResetEnable
> = > Signal dom Int - > Vec 4 ( Signal dom Int )
-- > window4 = window
--
> > > simulateB [ 1::Int,2,3,4,5 ] : : [ Vec 4 Int ]
[ 1 :> 0 :> 0 :> 0 :> Nil,2 :> 1 :> 0 :> 0 :> Nil,3 :> 2 :> 1 :> 0 :> Nil,4 :> 3 :> 2 :> 1 :> Nil,5 :> 4 :> 3 :> 2 :> Nil , ...
-- ...
window
:: ( HiddenClockResetEnable dom
, KnownNat n
, Default a
, NFDataX a )
=> Signal dom a
-- ^ Signal to create a window over
-> Vec (n + 1) (Signal dom a)
^ Window of at least size 1
window = hideClockResetEnable E.window
{-# INLINE window #-}
| Give a delayed window over a ' Signal '
--
-- > windowD3
> : : HiddenClockResetEnable
-- > => Signal dom Int
> - > Vec 3 ( Signal dom Int )
-- > windowD3 = windowD
--
> > > simulateB windowD3 [ 1::Int,2,3,4 ] : : [ Vec 3 Int ]
[ 0 :> 0 :> 0 :> Nil,1 :> 0 :> 0 :> Nil,2 :> 1 :> 0 :> Nil,3 :> 2 :> 1 :> Nil,4 :> 3 :> 2 :> Nil , ...
-- ...
windowD
:: ( HiddenClockResetEnable dom
, KnownNat n
, Default a
, NFDataX a )
=> Signal dom a
-- ^ Signal to create a window over
-> Vec (n + 1) (Signal dom a)
^ Window of at least size 1
windowD = hideClockResetEnable E.windowD
# INLINE windowD #
| Implicit version of ' Clash . Class . AutoReg.autoReg '
autoReg
:: (HasCallStack, HiddenClockResetEnable dom, AutoReg a)
=> a
-> Signal dom a
-> Signal dom a
autoReg = hideClockResetEnable E.autoReg
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/4ed79bec0794f259db38a3b8b29065546fb75443/clash-prelude/src/Clash/Prelude.hs | haskell | # OPTIONS_HADDOCK show-extensions, not-home #
* Creating synchronous sequential circuits
* Synchronizer circuits for safe clock domain crossings
* ROMs
** ROMs defined by a 'MemBlob'
** ROMs defined by a data file
* RAM primitives with a combinational read port
* Block RAM primitives
** Block RAM primitives initialized with a 'MemBlob'
*** Creating and inspecting 'MemBlob'
** Block RAM primitives initialized with a data file
** Block RAM read/write conflict resolution
* Utility functions
* Static assertions
* Tracing
** Simple
** Tracing in a multi-clock environment
** VCD dump functions
* Exported modules
** Synchronous signals
*** Bit vectors
*** Arbitrary-width numbers
*** Fixed point numbers
*** Fixed size vectors
*** Perfect depth trees
** Annotations
** Generics type-classes
** Type-level natural numbers
** Type-level strings
** Template Haskell
** Type classes
*** Clash
*** Other
** Exceptions
** Named types
** Hidden arguments
** Magic
** Haskell Prelude
$hiding
> window4 = window
...
^ Signal to create a window over
# INLINE window #
> windowD3
> => Signal dom Int
> windowD3 = windowD
...
^ Signal to create a window over | |
Copyright : ( C ) 2013 - 2016 , University of Twente ,
2017 - 2019 , Myrtle Software Ltd
2017 , Google Inc. ,
2021 - 2023 , QBayLogic B.V.
License : BSD2 ( see the file LICENSE )
Maintainer : QBayLogic B.V. < >
Clash is a functional hardware description language that borrows both its
syntax and semantics from the functional programming language . The
merits of using a functional language to describe hardware comes from the fact
that combinational circuits can be directly modeled as mathematical functions
and that functional languages lend themselves very well at describing and
( de-)composing mathematical functions .
This package provides :
* Prelude library containing datatypes and functions for circuit design
To use the library :
* Import " Clash . Prelude " ; by default clock and reset lines are implicitly
routed for all the components found in " Clash . Prelude " . You can read more
about implicit clock and reset lines in " Clash . Signal#implicitclockandreset "
* Alternatively , if you want to explicitly route clock and reset ports ,
for more straightforward multi - clock designs , you can import the
" Clash . Explicit . Prelude " module . Note that you should not import
" Clash . Prelude " and " Clash . Explicit . Prelude " at the same time as they
have overlapping definitions .
For now , " Clash . Prelude " is also the best starting point for exploring the
library . A preliminary version of a tutorial can be found in " Clash . Tutorial " .
Some circuit examples can be found in " Clash . Examples " .
Copyright : (C) 2013-2016, University of Twente,
2017-2019, Myrtle Software Ltd
2017 , Google Inc.,
2021-2023, QBayLogic B.V.
License : BSD2 (see the file LICENSE)
Maintainer : QBayLogic B.V. <>
Clash is a functional hardware description language that borrows both its
syntax and semantics from the functional programming language Haskell. The
merits of using a functional language to describe hardware comes from the fact
that combinational circuits can be directly modeled as mathematical functions
and that functional languages lend themselves very well at describing and
(de-)composing mathematical functions.
This package provides:
* Prelude library containing datatypes and functions for circuit design
To use the library:
* Import "Clash.Prelude"; by default clock and reset lines are implicitly
routed for all the components found in "Clash.Prelude". You can read more
about implicit clock and reset lines in "Clash.Signal#implicitclockandreset"
* Alternatively, if you want to explicitly route clock and reset ports,
for more straightforward multi-clock designs, you can import the
"Clash.Explicit.Prelude" module. Note that you should not import
"Clash.Prelude" and "Clash.Explicit.Prelude" at the same time as they
have overlapping definitions.
For now, "Clash.Prelude" is also the best starting point for exploring the
library. A preliminary version of a tutorial can be found in "Clash.Tutorial".
Some circuit examples can be found in "Clash.Examples".
-}
# LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
# LANGUAGE MonoLocalBinds #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE Unsafe #
module Clash.Prelude
mealy
, mealyB
, (<^>)
, moore
, mooreB
, registerB
#ifdef CLASH_MULTIPLE_HIDDEN
, dualFlipFlopSynchronizer
, asyncFIFOSynchronizer
#endif
, asyncRom
, asyncRomPow2
, rom
, romPow2
, asyncRomBlob
, asyncRomBlobPow2
, romBlob
, romBlobPow2
, asyncRomFile
, asyncRomFilePow2
, romFile
, romFilePow2
, asyncRam
, asyncRamPow2
, blockRam
, blockRamPow2
, blockRamU
, blockRam1
, E.ResetStrategy(..)
, blockRamBlob
, blockRamBlobPow2
, MemBlob
, createMemBlob
, memBlobTH
, unpackMemBlob
, blockRamFile
, blockRamFilePow2
, readNew
* * True dual - port block RAM
, trueDualPortBlockRam
, RamOp(..)
, window
, windowD
, isRising
, isFalling
, riseEvery
, oscillate
, clashCompileError
, traceSignal1
, traceVecSignal1
, traceSignal
, traceVecSignal
, dumpVCD
, module Clash.Signal
, module Clash.Signal.Delayed
* *
, module Clash.Sized.BitVector
, module Clash.Sized.Signed
, module Clash.Sized.Unsigned
, module Clash.Sized.Index
, module Clash.Sized.Fixed
, module Clash.Sized.Vector
, module Clash.Sized.RTree
, module Clash.Annotations.TopEntity
, Generic
, Generic1
, module GHC.TypeLits
, module GHC.TypeLits.Extra
, module Clash.Promoted.Nat
, module Clash.Promoted.Nat.Literals
, module Clash.Promoted.Nat.TH
, module Clash.Promoted.Symbol
, Lift (..)
, AutoReg, autoReg, deriveAutoReg
, module Clash.Class.BitPack
, module Clash.Class.Exp
, module Clash.Class.Num
, module Clash.Class.Parity
, module Clash.Class.Resize
, module Control.Applicative
, module Data.Bits
, module Data.Default.Class
, module Data.Kind
, module Clash.XException
, module Clash.NamedTypes
, module Clash.Hidden
, module Clash.Magic
, module Clash.HaskellPrelude
)
where
import Control.Applicative
import Data.Bits
import Data.Default.Class
import Data.Kind (Type, Constraint)
import GHC.Stack (HasCallStack)
import GHC.TypeLits
import GHC.TypeLits.Extra
import Language.Haskell.TH.Syntax (Lift(..))
import Clash.HaskellPrelude
import Clash.Annotations.TopEntity
import Clash.Class.AutoReg (AutoReg, deriveAutoReg)
import Clash.Class.BitPack
import Clash.Class.Exp
import Clash.Class.Num
import Clash.Class.Parity
import Clash.Class.Resize
import qualified Clash.Explicit.Prelude as E
import Clash.Explicit.Prelude (clashCompileError)
import Clash.Hidden
import Clash.Magic
import Clash.NamedTypes
import Clash.Prelude.BlockRam
import Clash.Prelude.BlockRam.Blob
import Clash.Prelude.BlockRam.File
import Clash.Prelude.ROM.Blob
import Clash.Prelude.ROM.File
import Clash.Prelude.Safe
#ifdef CLASH_MULTIPLE_HIDDEN
import Clash.Prelude.Synchronizer
#endif
import Clash.Promoted.Nat
import Clash.Promoted.Nat.TH
import Clash.Promoted.Nat.Literals
import Clash.Promoted.Symbol
import Clash.Sized.BitVector
import Clash.Sized.Fixed
import Clash.Sized.Index
import Clash.Sized.RTree
import Clash.Sized.Signed
import Clash.Sized.Unsigned
import Clash.Sized.Vector hiding (fromList, unsafeFromList)
import Clash.Signal hiding
(HiddenClockName, HiddenResetName, HiddenEnableName)
import Clash.Signal.Delayed
import Clash.Signal.Trace
import Clash.XException
$ setup
> > > : set -XDataKinds -XFlexibleContexts -XTypeApplications
> > > let window4 = window : : HiddenClockResetEnable dom = > Signal dom Int - > Vec 4 ( Signal dom Int )
> > > let windowD3 = windowD : : HiddenClockResetEnable dom = > Signal dom Int - > Vec 3 ( Signal dom Int )
>>> :set -XDataKinds -XFlexibleContexts -XTypeApplications
>>> let window4 = window :: HiddenClockResetEnable dom => Signal dom Int -> Vec 4 (Signal dom Int)
>>> let windowD3 = windowD :: HiddenClockResetEnable dom => Signal dom Int -> Vec 3 (Signal dom Int)
-}
$ hiding
" Clash . Prelude " re - exports most of the " Prelude " with the exception of
those functions that the Clash API defines to work on ' ' from
" Clash . Sized . Vector " instead of on lists as the Haskell Prelude does .
In addition , for the ' Clash.Class.Parity.odd ' and ' Clash.Class.Parity.even '
functions a type class called ' Clash . Class . Parity . Parity ' is available at
" Clash . Class . Parity " .
"Clash.Prelude" re-exports most of the Haskell "Prelude" with the exception of
those functions that the Clash API defines to work on 'Vec' from
"Clash.Sized.Vector" instead of on lists as the Haskell Prelude does.
In addition, for the 'Clash.Class.Parity.odd' and 'Clash.Class.Parity.even'
functions a type class called 'Clash.Class.Parity.Parity' is available at
"Clash.Class.Parity".
-}
| Give a window over a ' Signal '
> window4 : : HiddenClockResetEnable
> = > Signal dom Int - > Vec 4 ( Signal dom Int )
> > > simulateB [ 1::Int,2,3,4,5 ] : : [ Vec 4 Int ]
[ 1 :> 0 :> 0 :> 0 :> Nil,2 :> 1 :> 0 :> 0 :> Nil,3 :> 2 :> 1 :> 0 :> Nil,4 :> 3 :> 2 :> 1 :> Nil,5 :> 4 :> 3 :> 2 :> Nil , ...
window
:: ( HiddenClockResetEnable dom
, KnownNat n
, Default a
, NFDataX a )
=> Signal dom a
-> Vec (n + 1) (Signal dom a)
^ Window of at least size 1
window = hideClockResetEnable E.window
| Give a delayed window over a ' Signal '
> : : HiddenClockResetEnable
> - > Vec 3 ( Signal dom Int )
> > > simulateB windowD3 [ 1::Int,2,3,4 ] : : [ Vec 3 Int ]
[ 0 :> 0 :> 0 :> Nil,1 :> 0 :> 0 :> Nil,2 :> 1 :> 0 :> Nil,3 :> 2 :> 1 :> Nil,4 :> 3 :> 2 :> Nil , ...
windowD
:: ( HiddenClockResetEnable dom
, KnownNat n
, Default a
, NFDataX a )
=> Signal dom a
-> Vec (n + 1) (Signal dom a)
^ Window of at least size 1
windowD = hideClockResetEnable E.windowD
# INLINE windowD #
| Implicit version of ' Clash . Class . AutoReg.autoReg '
autoReg
:: (HasCallStack, HiddenClockResetEnable dom, AutoReg a)
=> a
-> Signal dom a
-> Signal dom a
autoReg = hideClockResetEnable E.autoReg
|
7d8a4c4e11a8cbe8e851779d626ea33915a0f6ca9fda4dfdc287a13be4b1ded0 | besport/ocaml-aliases | ptree.ml |
* This file is part of Ocaml - aliases .
*
* is free software : you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation .
*
* is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with Ocaml - aliases . If not , see < / > .
*
* Copyright 2012 Be Sport
* This file is part of Ocaml-aliases.
*
* Ocaml-quadtree is free software: you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation.
*
* Ocaml-quadtree is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Ocaml-aliases. If not, see </>.
*
* Copyright 2012 Be Sport
*)
type 'a t = 'a Radix.t ref
let create () = ref Radix.empty
let clear r = r:=Radix.empty
let fold ptree prefix f acc = Radix.fold !ptree prefix f acc
let fold_with_max ptree ~max prefix f acc =
Radix.fold_with_max !ptree ~max prefix f acc
let insert ptree label value =
ptree := Radix.bind (!ptree) label value
let remove ptree label value =
ptree := Radix.remove (!ptree) label value
| null | https://raw.githubusercontent.com/besport/ocaml-aliases/70493e6957b58fd0cbf5d32d1e9b2531d7c6ce60/src/ptree.ml | ocaml |
* This file is part of Ocaml - aliases .
*
* is free software : you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation .
*
* is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with Ocaml - aliases . If not , see < / > .
*
* Copyright 2012 Be Sport
* This file is part of Ocaml-aliases.
*
* Ocaml-quadtree is free software: you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation.
*
* Ocaml-quadtree is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Ocaml-aliases. If not, see </>.
*
* Copyright 2012 Be Sport
*)
type 'a t = 'a Radix.t ref
let create () = ref Radix.empty
let clear r = r:=Radix.empty
let fold ptree prefix f acc = Radix.fold !ptree prefix f acc
let fold_with_max ptree ~max prefix f acc =
Radix.fold_with_max !ptree ~max prefix f acc
let insert ptree label value =
ptree := Radix.bind (!ptree) label value
let remove ptree label value =
ptree := Radix.remove (!ptree) label value
| |
5ecd6c529d7de7ed1b45b27e731cd23135087b95f8bcc1e8a456c83b78729d99 | waddlaw/TAPL | Let.hs | | 図 11.4 let 束縛
module Language.FullSimpleLambda.System.Let
( Term (..),
Ty (..),
Context (..),
eval,
typeof,
)
where
import Language.FullSimpleLambda.Class
import RIO
data Let
type Value = Term Let
instance System Let where
data Term Let
= -- | 変数
TmVar Int
| -- | ラムダ抽象
TmLam VarName (Ty Let) (Term Let)
| 関数適用
TmApp (Term Let) (Term Let)
| -- | let 束縛
TmLet VarName (Term Let) (Term Let)
deriving stock (Show, Eq)
data Ty Let
= -- | 関数の型
TyArr (Ty Let) (Ty Let)
deriving stock (Show, Eq)
data Context Let
| 空の文脈
CtxEmpty
| 項変数の束縛
CtxVar (Context Let) VarName (Ty Let)
deriving stock (Show, Eq)
data Pattern Let
eval :: Term Let -> Term Let
eval = \case
TmApp t1@(TmLam _ _ t12) t2
E - APP1
| not (isValue t1) -> TmApp (eval t1) t2
-- E-APP2
| isValue t1 && not (isValue t2) -> TmApp t1 (eval t2)
E - APPABS
| isValue t1 && isValue t2 -> shift 0 (-1) $ subst 0 (shift 0 1 t2) t12
TmLet x t1 t2
-- E-LETV
| isValue t1 -> shift 0 (-1) $ subst 0 (shift 0 1 t1) t2
-- E-LET
| not (isValue t1) -> TmLet x (eval t1) t2
_ -> error "unexpected term"
typeof :: Context Let -> Term Let -> Ty Let
typeof ctx = \case
-- T-VAR
TmVar i -> case getTypeFromContext i ctx of
Nothing -> error "Not found type variable in Context"
Just ty -> ty
-- T-ABS
TmLam x tyT1 t2 -> TyArr tyT1 tyT2
where
tyT2 = typeof ctx' t2
ctx' = CtxVar ctx x tyT1
-- T-APP
TmApp t1 t2 ->
case tyT1 of
TyArr tyT11 tyT12 ->
if tyT2 == tyT11
then tyT12
else
error . unlines $
[ "parameter type mismatch (T-APP): ",
"tyT2: " <> show tyT2,
"tyT11: " <> show tyT11
]
where
tyT1 = typeof ctx t1
tyT2 = typeof ctx t2
-- T-LET
TmLet x t1 t2 -> typeof ctx' t2
where
tyT1 = typeof ctx t1
ctx' = CtxVar ctx x tyT1
desugar :: Term Let -> Term Let
desugar = id
isValue :: Term Let -> Bool
isValue = \case
TmLam {} -> True -- ラムダ抽象値
_ -> False
subst :: Int -> Value -> Term Let -> Term Let
subst j s = \case
t@(TmVar k)
| k == j -> s
| otherwise -> t
TmLam x ty t -> TmLam x ty $ subst (j + 1) (shift 0 1 s) t
TmApp t1 t2 -> (TmApp `on` subst j s) t1 t2
TODO check
shift :: Int -> Int -> Term Let -> Term Let
shift c d = \case
TmVar k
| k < c -> TmVar k
| otherwise -> TmVar (k + d)
TmLam x ty t -> TmLam x ty $ shift (c + 1) d t
TmApp t1 t2 -> (TmApp `on` shift c d) t1 t2
TODO ( check )
getTypeFromContext :: Int -> Context Let -> Maybe (Ty Let)
getTypeFromContext 0 = \case
CtxEmpty -> Nothing
CtxVar _ _ ty -> Just ty
getTypeFromContext i = \case
CtxEmpty -> Nothing
CtxVar ctx' _ _ -> getTypeFromContext (i -1) ctx'
| null | https://raw.githubusercontent.com/waddlaw/TAPL/94576e46821aaf7abce6d1d828fc3ce6d05a40b8/subs/lambda-fullsimple/src/Language/FullSimpleLambda/System/Let.hs | haskell | | 変数
| ラムダ抽象
| let 束縛
| 関数の型
E-APP2
E-LETV
E-LET
T-VAR
T-ABS
T-APP
T-LET
ラムダ抽象値 | | 図 11.4 let 束縛
module Language.FullSimpleLambda.System.Let
( Term (..),
Ty (..),
Context (..),
eval,
typeof,
)
where
import Language.FullSimpleLambda.Class
import RIO
data Let
type Value = Term Let
instance System Let where
data Term Let
TmVar Int
TmLam VarName (Ty Let) (Term Let)
| 関数適用
TmApp (Term Let) (Term Let)
TmLet VarName (Term Let) (Term Let)
deriving stock (Show, Eq)
data Ty Let
TyArr (Ty Let) (Ty Let)
deriving stock (Show, Eq)
data Context Let
| 空の文脈
CtxEmpty
| 項変数の束縛
CtxVar (Context Let) VarName (Ty Let)
deriving stock (Show, Eq)
data Pattern Let
eval :: Term Let -> Term Let
eval = \case
TmApp t1@(TmLam _ _ t12) t2
E - APP1
| not (isValue t1) -> TmApp (eval t1) t2
| isValue t1 && not (isValue t2) -> TmApp t1 (eval t2)
E - APPABS
| isValue t1 && isValue t2 -> shift 0 (-1) $ subst 0 (shift 0 1 t2) t12
TmLet x t1 t2
| isValue t1 -> shift 0 (-1) $ subst 0 (shift 0 1 t1) t2
| not (isValue t1) -> TmLet x (eval t1) t2
_ -> error "unexpected term"
typeof :: Context Let -> Term Let -> Ty Let
typeof ctx = \case
TmVar i -> case getTypeFromContext i ctx of
Nothing -> error "Not found type variable in Context"
Just ty -> ty
TmLam x tyT1 t2 -> TyArr tyT1 tyT2
where
tyT2 = typeof ctx' t2
ctx' = CtxVar ctx x tyT1
TmApp t1 t2 ->
case tyT1 of
TyArr tyT11 tyT12 ->
if tyT2 == tyT11
then tyT12
else
error . unlines $
[ "parameter type mismatch (T-APP): ",
"tyT2: " <> show tyT2,
"tyT11: " <> show tyT11
]
where
tyT1 = typeof ctx t1
tyT2 = typeof ctx t2
TmLet x t1 t2 -> typeof ctx' t2
where
tyT1 = typeof ctx t1
ctx' = CtxVar ctx x tyT1
desugar :: Term Let -> Term Let
desugar = id
isValue :: Term Let -> Bool
isValue = \case
_ -> False
subst :: Int -> Value -> Term Let -> Term Let
subst j s = \case
t@(TmVar k)
| k == j -> s
| otherwise -> t
TmLam x ty t -> TmLam x ty $ subst (j + 1) (shift 0 1 s) t
TmApp t1 t2 -> (TmApp `on` subst j s) t1 t2
TODO check
shift :: Int -> Int -> Term Let -> Term Let
shift c d = \case
TmVar k
| k < c -> TmVar k
| otherwise -> TmVar (k + d)
TmLam x ty t -> TmLam x ty $ shift (c + 1) d t
TmApp t1 t2 -> (TmApp `on` shift c d) t1 t2
TODO ( check )
getTypeFromContext :: Int -> Context Let -> Maybe (Ty Let)
getTypeFromContext 0 = \case
CtxEmpty -> Nothing
CtxVar _ _ ty -> Just ty
getTypeFromContext i = \case
CtxEmpty -> Nothing
CtxVar ctx' _ _ -> getTypeFromContext (i -1) ctx'
|
d575cca9054a5466290273467be66859805a8a80940ace45706fb4c4cfaeb852 | hellonico/origami-fun | rotating.clj |
(ns opencv4.rotating
(:require
[opencv4.core :refer :all]
[opencv4.utils :as u]
[opencv4.colors.rgb :as color]))
;
; generate image
;
(def rotation-angle (rand 180))
(defn generative-art []
(let [
height 1000
width 1000
img (new-mat width height CV_8UC3)
]
(set-to img color/white)
(dotimes [ i (inc (rand 5)) ]
(line img (new-point (rand width) (rand height)) (new-point (rand width) (rand height)) color/cyan-3 (+ 100 (rand 50))) )
(dotimes [ i (inc (rand 5)) ]
(circle img (new-point (rand width) (rand height)) (+ 50 (rand 50)) color/greenyellow FILLED))
img))
(def img (generative-art))
(def img-2 (u/mat-from img))
(def M2
(get-rotation-matrix-2-d
(new-point (/ (.width img) 2) (/ (.height img) 2)) rotation-angle 1))
(warp-affine img img-2 M2 (.size img))
(def mask (new-mat))
(in-range img-2 (new-scalar 0 0 0) (new-scalar 0 255 255) mask)
(dilate! mask (get-structuring-element MORPH_RECT (new-size 5 5)))
(def img-3 (u/mat-from img-2))
(set-to img-3 color/white)
(copy-to img-3 img-2 mask)
(def output (new-mat))
(hconcat [img (-> mask clone (cvt-color! COLOR_GRAY2RGB)) img-2 ] output)
(u/show
(-> output clone (u/resize-by 0.2))
{:frame {:width 1024 :heighy 200 :title "heavy rotation"}} )
| null | https://raw.githubusercontent.com/hellonico/origami-fun/80117788530d942eaa9a80e2995b37409fa24889/test/opencv4/rotating.clj | clojure |
generate image
|
(ns opencv4.rotating
(:require
[opencv4.core :refer :all]
[opencv4.utils :as u]
[opencv4.colors.rgb :as color]))
(def rotation-angle (rand 180))
(defn generative-art []
(let [
height 1000
width 1000
img (new-mat width height CV_8UC3)
]
(set-to img color/white)
(dotimes [ i (inc (rand 5)) ]
(line img (new-point (rand width) (rand height)) (new-point (rand width) (rand height)) color/cyan-3 (+ 100 (rand 50))) )
(dotimes [ i (inc (rand 5)) ]
(circle img (new-point (rand width) (rand height)) (+ 50 (rand 50)) color/greenyellow FILLED))
img))
(def img (generative-art))
(def img-2 (u/mat-from img))
(def M2
(get-rotation-matrix-2-d
(new-point (/ (.width img) 2) (/ (.height img) 2)) rotation-angle 1))
(warp-affine img img-2 M2 (.size img))
(def mask (new-mat))
(in-range img-2 (new-scalar 0 0 0) (new-scalar 0 255 255) mask)
(dilate! mask (get-structuring-element MORPH_RECT (new-size 5 5)))
(def img-3 (u/mat-from img-2))
(set-to img-3 color/white)
(copy-to img-3 img-2 mask)
(def output (new-mat))
(hconcat [img (-> mask clone (cvt-color! COLOR_GRAY2RGB)) img-2 ] output)
(u/show
(-> output clone (u/resize-by 0.2))
{:frame {:width 1024 :heighy 200 :title "heavy rotation"}} )
|
79324c977014900df74f7080d9dfe894bd559608fe21df797edb063cb561a7af | scrintal/heroicons-reagent | viewfinder_circle.cljs | (ns com.scrintal.heroicons.mini.viewfinder-circle)
(defn render []
[:svg {:xmlns ""
:viewBox "0 0 20 20"
:fill "currentColor"
:aria-hidden "true"}
[:path {:d "M4.25 2A2.25 2.25 0 002 4.25v2a.75.75 0 001.5 0v-2a.75.75 0 01.75-.75h2a.75.75 0 000-1.5h-2zM13.75 2a.75.75 0 000 1.5h2a.75.75 0 01.75.75v2a.75.75 0 001.5 0v-2A2.25 2.25 0 0015.75 2h-2zM3.5 13.75a.75.75 0 00-1.5 0v2A2.25 2.25 0 004.25 18h2a.75.75 0 000-1.5h-2a.75.75 0 01-.75-.75v-2zM18 13.75a.75.75 0 00-1.5 0v2a.75.75 0 01-.75.75h-2a.75.75 0 000 1.5h2A2.25 2.25 0 0018 15.75v-2zM7 10a3 3 0 116 0 3 3 0 01-6 0z"}]]) | null | https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/mini/viewfinder_circle.cljs | clojure | (ns com.scrintal.heroicons.mini.viewfinder-circle)
(defn render []
[:svg {:xmlns ""
:viewBox "0 0 20 20"
:fill "currentColor"
:aria-hidden "true"}
[:path {:d "M4.25 2A2.25 2.25 0 002 4.25v2a.75.75 0 001.5 0v-2a.75.75 0 01.75-.75h2a.75.75 0 000-1.5h-2zM13.75 2a.75.75 0 000 1.5h2a.75.75 0 01.75.75v2a.75.75 0 001.5 0v-2A2.25 2.25 0 0015.75 2h-2zM3.5 13.75a.75.75 0 00-1.5 0v2A2.25 2.25 0 004.25 18h2a.75.75 0 000-1.5h-2a.75.75 0 01-.75-.75v-2zM18 13.75a.75.75 0 00-1.5 0v2a.75.75 0 01-.75.75h-2a.75.75 0 000 1.5h2A2.25 2.25 0 0018 15.75v-2zM7 10a3 3 0 116 0 3 3 0 01-6 0z"}]]) | |
b4c96c4a2eb7a25c35008b5867d8e89ca074808f174247f55d971500026e7d3f | hsyl20/haskus-system | Modules.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DataKinds #
# LANGUAGE TypeApplications #
-- | Kernel module management
module Haskus.System.Linux.Modules
( loadModuleFromFile
, loadModuleFromMemory
, LoadModuleFlag(..)
, LoadModuleFlags
)
where
import Haskus.System.Linux.ErrorCode
import Haskus.System.Linux.Handle
import Haskus.System.Linux.Syscalls
import Haskus.Format.Binary.BitSet as BitSet
import Haskus.Format.Binary.Word
import Foreign.Ptr
import Haskus.Format.String (withCString)
import Haskus.Utils.Flow
-- | Load module flag
data LoadModuleFlag
= IgnoreSymbolVersions
| IgnoreKernelVersion
deriving (Show,Eq,Enum,CBitSet)
-- | Load module flags
type LoadModuleFlags = BitSet Word LoadModuleFlag
-- | Load a module from a file
loadModuleFromFile :: MonadInIO m => Handle -> String -> LoadModuleFlags -> Excepts '[ErrorCode] m ()
loadModuleFromFile (Handle fd) params flags = do
withCString params $ \params' ->
checkErrorCode_ =<< liftIO (syscall_finit_module fd params' (BitSet.toBits flags))
-- | Load a module from memory
loadModuleFromMemory :: MonadInIO m => Ptr () -> Word64 -> String -> Excepts '[ErrorCode] m ()
loadModuleFromMemory ptr sz params =
withCString params $ \params' ->
checkErrorCode_ =<< liftIO (syscall_init_module ptr sz params')
| null | https://raw.githubusercontent.com/hsyl20/haskus-system/2f389c6ecae5b0180b464ddef51e36f6e567d690/haskus-system/src/lib/Haskus/System/Linux/Modules.hs | haskell | # LANGUAGE DeriveAnyClass #
| Kernel module management
| Load module flag
| Load module flags
| Load a module from a file
| Load a module from memory | # LANGUAGE DataKinds #
# LANGUAGE TypeApplications #
module Haskus.System.Linux.Modules
( loadModuleFromFile
, loadModuleFromMemory
, LoadModuleFlag(..)
, LoadModuleFlags
)
where
import Haskus.System.Linux.ErrorCode
import Haskus.System.Linux.Handle
import Haskus.System.Linux.Syscalls
import Haskus.Format.Binary.BitSet as BitSet
import Haskus.Format.Binary.Word
import Foreign.Ptr
import Haskus.Format.String (withCString)
import Haskus.Utils.Flow
data LoadModuleFlag
= IgnoreSymbolVersions
| IgnoreKernelVersion
deriving (Show,Eq,Enum,CBitSet)
type LoadModuleFlags = BitSet Word LoadModuleFlag
loadModuleFromFile :: MonadInIO m => Handle -> String -> LoadModuleFlags -> Excepts '[ErrorCode] m ()
loadModuleFromFile (Handle fd) params flags = do
withCString params $ \params' ->
checkErrorCode_ =<< liftIO (syscall_finit_module fd params' (BitSet.toBits flags))
loadModuleFromMemory :: MonadInIO m => Ptr () -> Word64 -> String -> Excepts '[ErrorCode] m ()
loadModuleFromMemory ptr sz params =
withCString params $ \params' ->
checkErrorCode_ =<< liftIO (syscall_init_module ptr sz params')
|
001ebe3e11635d8a0a4c1e0d3bd6fe4a0b9652ceec9381306763c33ac91ff78c | shortishly/pgmp | pgmp_message_tags_tests.erl | Copyright ( c ) 2022 < >
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(pgmp_message_tags_tests).
-include_lib("eunit/include/eunit.hrl").
name_test_() ->
{setup,
fun pgmp_message_tags:start/0,
fun
({ok, Pid}) when is_pid(Pid) ->
gen_statem:stop(Pid)
end,
lists:map(
t(fun pgmp_message_tags:name/2),
[{authentication, {backend, <<$R>>}},
{close, {frontend, <<$C>>}},
{data_row, {backend, <<$D>>}},
{execute, {frontend, <<$E>>}},
{row_description, {backend, <<$T>>}},
{sasl_initial_response, {frontend, <<$p>>}},
{copy_data, {backend, <<$d>>}}])}.
t(F) ->
fun
({Expected, {Role, Tag}} = Test) ->
{nm(Test), ?_assertEqual(Expected, F(Role, Tag))}
end.
nm(Test) ->
iolist_to_binary(io_lib:fwrite("~p", [Test])).
| null | https://raw.githubusercontent.com/shortishly/pgmp/8176188fe27e5f7048f124e6e8ba6a2b8373d41e/test/pgmp_message_tags_tests.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright ( c ) 2022 < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(pgmp_message_tags_tests).
-include_lib("eunit/include/eunit.hrl").
name_test_() ->
{setup,
fun pgmp_message_tags:start/0,
fun
({ok, Pid}) when is_pid(Pid) ->
gen_statem:stop(Pid)
end,
lists:map(
t(fun pgmp_message_tags:name/2),
[{authentication, {backend, <<$R>>}},
{close, {frontend, <<$C>>}},
{data_row, {backend, <<$D>>}},
{execute, {frontend, <<$E>>}},
{row_description, {backend, <<$T>>}},
{sasl_initial_response, {frontend, <<$p>>}},
{copy_data, {backend, <<$d>>}}])}.
t(F) ->
fun
({Expected, {Role, Tag}} = Test) ->
{nm(Test), ?_assertEqual(Expected, F(Role, Tag))}
end.
nm(Test) ->
iolist_to_binary(io_lib:fwrite("~p", [Test])).
|
e46aff3178966760f26fb442e53e4ab4ebba70e2101d414f65f7de18de10be4d | ftovagliari/ocamleditor | common.ml |
OCamlEditor
Copyright ( C ) 2010 - 2014
This file is part of OCamlEditor .
OCamlEditor is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
OCamlEditor is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
OCamlEditor
Copyright (C) 2010-2014 Francesco Tovagliari
This file is part of OCamlEditor.
OCamlEditor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OCamlEditor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
*)
[@@@warning "-33"]
open Miscellanea
open Shell
open Spawn
open App_config
open File_util
open List_opt
open Argc
open Option_syntax
module Log = Log.Make(struct
let channel = stderr
let verbosity = `DEBUG
let print_timestamp = false
end)
| null | https://raw.githubusercontent.com/ftovagliari/ocamleditor/53284253cf7603b96051e7425e85a731f09abcd1/src/common/common.ml | ocaml |
OCamlEditor
Copyright ( C ) 2010 - 2014
This file is part of OCamlEditor .
OCamlEditor is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
OCamlEditor is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
OCamlEditor
Copyright (C) 2010-2014 Francesco Tovagliari
This file is part of OCamlEditor.
OCamlEditor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OCamlEditor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
*)
[@@@warning "-33"]
open Miscellanea
open Shell
open Spawn
open App_config
open File_util
open List_opt
open Argc
open Option_syntax
module Log = Log.Make(struct
let channel = stderr
let verbosity = `DEBUG
let print_timestamp = false
end)
| |
d321ff22d41b10ff0ee06d73d91c92df84cee7478142d4ffb20213dcf199576d | Rhywun/get-programming-with-haskell | quotes.hs | quotes = ["Quote 1", "Quote 2", "Quote 3", "Quote 4", "Quote 5"]
lookupQuote [ " 2","1","3","n " ] = = [ " Quote 2","Quote 1","Quote 3 " ]
lookupQuote ["2","1","3","n"] == ["Quote 2","Quote 1","Quote 3"]
-}
lookupQuote :: [String] -> [String]
lookupQuote [] = []
lookupQuote ("n" : xs) = []
lookupQuote (x : xs) = quote : lookupQuote xs where quote = quotes !! (read x - 1)
main :: IO ()
main = do
-- How do I print a prompt for each input?
putStrLn " Enter a number from 1 - 5 or n to quit : "
input <- getContents
mapM_ putStrLn (lookupQuote (lines input))
| null | https://raw.githubusercontent.com/Rhywun/get-programming-with-haskell/b9cf06f725b2ef038d69ed49f7d2900f55e98ca3/Unit04/Lesson22/quotes.hs | haskell | How do I print a prompt for each input? | quotes = ["Quote 1", "Quote 2", "Quote 3", "Quote 4", "Quote 5"]
lookupQuote [ " 2","1","3","n " ] = = [ " Quote 2","Quote 1","Quote 3 " ]
lookupQuote ["2","1","3","n"] == ["Quote 2","Quote 1","Quote 3"]
-}
lookupQuote :: [String] -> [String]
lookupQuote [] = []
lookupQuote ("n" : xs) = []
lookupQuote (x : xs) = quote : lookupQuote xs where quote = quotes !! (read x - 1)
main :: IO ()
main = do
putStrLn " Enter a number from 1 - 5 or n to quit : "
input <- getContents
mapM_ putStrLn (lookupQuote (lines input))
|
621685ae35beca2d03864b237e3a9f7471abffcaab6dab7cf04419d7c4e6bb8b | tweag/asterius | cgrun036.hs | -- !! Won't compile unless the compile succeeds on
-- !! the "single occurrence of big thing in a duplicated small thing"
! ! inlining old - chestnut . WDP 95/03
--
module Main ( main, g ) where
main = putStr (shows (g 42 45 45) "\n")
g :: Int -> Int -> Int -> [Int]
g x y z
= let
f a b = a + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b
g c = f c c
in
[g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y]
| null | https://raw.githubusercontent.com/tweag/asterius/e7b823c87499656860f87b9b468eb0567add1de8/asterius/test/ghc-testsuite/codeGen/cgrun036.hs | haskell | !! Won't compile unless the compile succeeds on
!! the "single occurrence of big thing in a duplicated small thing"
| ! ! inlining old - chestnut . WDP 95/03
module Main ( main, g ) where
main = putStr (shows (g 42 45 45) "\n")
g :: Int -> Int -> Int -> [Int]
g x y z
= let
f a b = a + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b + b * a * b - a + a + b
g c = f c c
in
[g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y, g z, g x, g y]
|
83b2e6a9cbcd412baf8d39612761ee92a05a5754dde1705191a9c5d3731a8e9d | kutyel/haskell-book | print3.hs | module Print3 where
myGreeting :: String
myGreeting = "hello" ++ " world!"
hello :: String
hello = "hello"
world :: String
world = "world!"
main :: IO ()
main = do
putStrLn myGreeting
putStrLn secondGreeting
where
secondGreeting = concat [hello, " ", world]
| null | https://raw.githubusercontent.com/kutyel/haskell-book/fd4dc0332b67575cfaf5e3fb0e26687dc01772a0/src/print3.hs | haskell | module Print3 where
myGreeting :: String
myGreeting = "hello" ++ " world!"
hello :: String
hello = "hello"
world :: String
world = "world!"
main :: IO ()
main = do
putStrLn myGreeting
putStrLn secondGreeting
where
secondGreeting = concat [hello, " ", world]
| |
054e484f6d47c0297201182aa8a40807fe79fd225c2a058dd19ca35c2bafaf32 | LighghtEeloo/data-types-a-la-carte | Inject.hs | # LANGUAGE TypeOperators #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances , FlexibleContexts #
module Inject where
import Prelude hiding (Either, Left, Right)
newtype Mu f = In (f (Mu f))
foldExpr :: Functor f => (f a -> a) -> Mu f -> a
foldExpr f (In e) =
f $ fmap (foldExpr f) e
data (a :+: b) e = Left (a e) | Right (b e)
instance (Functor a, Functor b) => Functor (a :+: b) where
fmap f (Left x) = Left $ fmap f x
fmap f (Right x) = Right $ fmap f x
class (Functor sub, Functor sup) => sub :<: sup where
inj :: sub e -> sup e
instance Functor f => f :<: f where
inj = id
instance (Functor f, Functor g) => g :<: (f :+: g) where
inj = Right
instance {-# OVERLAPPABLE #-} (Functor f, Functor g, Functor h, f :<: g) => f :<: (g :+: h) where
inj = Left . inj
inject :: (g :<: f) => g (Mu f) -> Mu f
inject = In . inj
class (Functor f) => Eval f where
eval :: f Int -> Int
instance (Eval a, Eval b) => Eval (a :+: b) where
eval (Left x) = eval x
eval (Right x) = eval x
type Expr = Mu (Val :+: Add)
newtype Val e = Val Int
val :: Int -> Expr
val n = inject $ Val n
instance Functor Val where
fmap _ (Val n) = Val n
instance Eval Val where
eval (Val n) = n
data Add e = Add e e
add :: Expr -> Expr -> Expr
add x y = inject $ Add x y
instance Functor Add where
fmap f (Add a b) = Add (f a) (f b)
instance Eval Add where
eval (Add a b) = a + b
evalExpr :: Expr -> Int
evalExpr = foldExpr eval
main0 :: IO ()
main0 = do
print $ evalExpr $ add (val 1) (val 2)
| null | https://raw.githubusercontent.com/LighghtEeloo/data-types-a-la-carte/7f56995656aeebe3ad3e7e35cfdd4245cefefa18/app/Inject.hs | haskell | # OVERLAPPABLE # | # LANGUAGE TypeOperators #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances , FlexibleContexts #
module Inject where
import Prelude hiding (Either, Left, Right)
newtype Mu f = In (f (Mu f))
foldExpr :: Functor f => (f a -> a) -> Mu f -> a
foldExpr f (In e) =
f $ fmap (foldExpr f) e
data (a :+: b) e = Left (a e) | Right (b e)
instance (Functor a, Functor b) => Functor (a :+: b) where
fmap f (Left x) = Left $ fmap f x
fmap f (Right x) = Right $ fmap f x
class (Functor sub, Functor sup) => sub :<: sup where
inj :: sub e -> sup e
instance Functor f => f :<: f where
inj = id
instance (Functor f, Functor g) => g :<: (f :+: g) where
inj = Right
inj = Left . inj
inject :: (g :<: f) => g (Mu f) -> Mu f
inject = In . inj
class (Functor f) => Eval f where
eval :: f Int -> Int
instance (Eval a, Eval b) => Eval (a :+: b) where
eval (Left x) = eval x
eval (Right x) = eval x
type Expr = Mu (Val :+: Add)
newtype Val e = Val Int
val :: Int -> Expr
val n = inject $ Val n
instance Functor Val where
fmap _ (Val n) = Val n
instance Eval Val where
eval (Val n) = n
data Add e = Add e e
add :: Expr -> Expr -> Expr
add x y = inject $ Add x y
instance Functor Add where
fmap f (Add a b) = Add (f a) (f b)
instance Eval Add where
eval (Add a b) = a + b
evalExpr :: Expr -> Int
evalExpr = foldExpr eval
main0 :: IO ()
main0 = do
print $ evalExpr $ add (val 1) (val 2)
|
61dec848eefbda365ea7ec81d58e42e2c11221f48f6a761c75717be8c7249205 | mkoppmann/eselsohr | Server.hs | module Lib.Ui.Server
( Api
, application
) where
import qualified Network.Wai.Middleware.EnforceHTTPS as EnforceHTTPS
import Network.Wai (Middleware)
import Network.Wai.Handler.Warp (Port)
import Network.Wai.Middleware.AddHeaders (addHeaders)
import Network.Wai.Middleware.AddHsts (addHsts)
import Network.Wai.Middleware.Gzip
( def
, gzip
)
import Network.Wai.Middleware.MethodOverridePost (methodOverridePost)
import Network.Wai.Middleware.NoOp (noOp)
import Network.Wai.Middleware.RealIp (realIpHeader)
import Servant
( Application
, Server
, hoistServer
, serve
, (:<|>) (..)
)
import Servant.API.Generic (toServant)
import Servant.Server.StaticFiles (serveDirectoryWebApp)
import qualified Lib.App.Env as Env
import qualified Lib.Ui.Web.Controller.ArticleList as Controller
import qualified Lib.Ui.Web.Controller.Collection as Controller
import qualified Lib.Ui.Web.Controller.Static as Controller
import Lib.Infra.Log (runAppAsHandler)
import Lib.Infra.Monad (AppEnv)
import Lib.Ui.Web.Route (Api)
server :: FilePath -> AppEnv -> Server Api
server staticFolder env =
hoistServer (Proxy @Api) (runAppAsHandler env) $
toServant Controller.collection
:<|> toServant Controller.articleList
:<|> toServant Controller.static
:<|> serveDirectoryWebApp staticFolder
application :: Port -> FilePath -> AppEnv -> Application
application port staticFolder {..} =
serve (Proxy @Api) (server staticFolder env)
-- Request middlewares
& methodOverridePost
& enforceHttps
-- Response middlewares
& disableCache
& addSecurityHeaders
& realIpHeader "X-Forwarded-For"
& hstsHeader
& gzip def
where
enforceHttps :: Middleware
enforceHttps = case https of
Env.HttpsOn -> EnforceHTTPS.withConfig $ EnforceHTTPS.defaultConfig{EnforceHTTPS.httpsPort = port}
Env.HttpsOff -> noOp
hstsHeader :: Middleware
hstsHeader = case hsts of
Env.HstsOn -> addHsts
Env.HstsOff -> noOp
disableCache :: Middleware
disableCache = addHeaders [("Cache-Control", "no-store, must-revalidate, max-age=0")]
addSecurityHeaders :: Middleware
addSecurityHeaders =
addHeaders
[ ("Referrer-Policy", "no-referrer")
, ("X-Content-Type-Options", "nosniff")
,
( "Content-Security-Policy"
, "default-src 'none';\
\ style-src 'self';\
\ img-src 'self';\
\ form-action 'self';\
\ upgrade-insecure-requests;"
)
]
| null | https://raw.githubusercontent.com/mkoppmann/eselsohr/082da85348d30e092d001e76ffe045468bdddb9f/src/Lib/Ui/Server.hs | haskell | Request middlewares
Response middlewares | module Lib.Ui.Server
( Api
, application
) where
import qualified Network.Wai.Middleware.EnforceHTTPS as EnforceHTTPS
import Network.Wai (Middleware)
import Network.Wai.Handler.Warp (Port)
import Network.Wai.Middleware.AddHeaders (addHeaders)
import Network.Wai.Middleware.AddHsts (addHsts)
import Network.Wai.Middleware.Gzip
( def
, gzip
)
import Network.Wai.Middleware.MethodOverridePost (methodOverridePost)
import Network.Wai.Middleware.NoOp (noOp)
import Network.Wai.Middleware.RealIp (realIpHeader)
import Servant
( Application
, Server
, hoistServer
, serve
, (:<|>) (..)
)
import Servant.API.Generic (toServant)
import Servant.Server.StaticFiles (serveDirectoryWebApp)
import qualified Lib.App.Env as Env
import qualified Lib.Ui.Web.Controller.ArticleList as Controller
import qualified Lib.Ui.Web.Controller.Collection as Controller
import qualified Lib.Ui.Web.Controller.Static as Controller
import Lib.Infra.Log (runAppAsHandler)
import Lib.Infra.Monad (AppEnv)
import Lib.Ui.Web.Route (Api)
server :: FilePath -> AppEnv -> Server Api
server staticFolder env =
hoistServer (Proxy @Api) (runAppAsHandler env) $
toServant Controller.collection
:<|> toServant Controller.articleList
:<|> toServant Controller.static
:<|> serveDirectoryWebApp staticFolder
application :: Port -> FilePath -> AppEnv -> Application
application port staticFolder {..} =
serve (Proxy @Api) (server staticFolder env)
& methodOverridePost
& enforceHttps
& disableCache
& addSecurityHeaders
& realIpHeader "X-Forwarded-For"
& hstsHeader
& gzip def
where
enforceHttps :: Middleware
enforceHttps = case https of
Env.HttpsOn -> EnforceHTTPS.withConfig $ EnforceHTTPS.defaultConfig{EnforceHTTPS.httpsPort = port}
Env.HttpsOff -> noOp
hstsHeader :: Middleware
hstsHeader = case hsts of
Env.HstsOn -> addHsts
Env.HstsOff -> noOp
disableCache :: Middleware
disableCache = addHeaders [("Cache-Control", "no-store, must-revalidate, max-age=0")]
addSecurityHeaders :: Middleware
addSecurityHeaders =
addHeaders
[ ("Referrer-Policy", "no-referrer")
, ("X-Content-Type-Options", "nosniff")
,
( "Content-Security-Policy"
, "default-src 'none';\
\ style-src 'self';\
\ img-src 'self';\
\ form-action 'self';\
\ upgrade-insecure-requests;"
)
]
|
9f4e09377749fce5b0b8b90cb9366a013d6412acd3e6273617dec2951106157d | esl/MongooseIM | mod_event_pusher.erl | %%==============================================================================
Copyright 2016 Erlang Solutions Ltd.
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%==============================================================================
-module(mod_event_pusher).
-author('').
-behaviour(gen_mod).
-behaviour(mongoose_module_metrics).
-include("mod_event_pusher_events.hrl").
-include("mongoose_config_spec.hrl").
-type backend() :: http | push | rabbit | sns.
-type event() :: #user_status_event{} | #chat_event{} | #unack_msg_event{}.
-export_type([event/0]).
-export([deps/2, start/2, stop/1, config_spec/0, push_event/2]).
-export([config_metrics/1]).
-ignore_xref([behaviour_info/1]).
%%--------------------------------------------------------------------
%% Callbacks
%%--------------------------------------------------------------------
-callback push_event(mongoose_acc:t(), event()) -> mongoose_acc:t().
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
@doc Pushes the event to each backend registered with the .
-spec push_event(mongoose_acc:t(), event()) -> mongoose_acc:t().
push_event(Acc, Event) ->
HostType = mongoose_acc:host_type(Acc),
Backends = maps:keys(gen_mod:get_loaded_module_opts(HostType, ?MODULE)),
lists:foldl(fun(B, Acc0) -> (backend_module(B)):push_event(Acc0, Event) end, Acc, Backends).
%%--------------------------------------------------------------------
%% gen_mod API
%%--------------------------------------------------------------------
-spec deps(mongooseim:host_type(), gen_mod:module_opts()) -> gen_mod_deps:deps().
deps(_HostType, Opts) ->
[{backend_module(Backend), BackendOpts, hard} || {Backend, BackendOpts} <- maps:to_list(Opts)].
-spec start(mongooseim:host_type(), gen_mod:module_opts()) -> any().
start(HostType, _Opts) ->
mod_event_pusher_hook_translator:add_hooks(HostType).
-spec stop(mongooseim:host_type()) -> any().
stop(HostType) ->
mod_event_pusher_hook_translator:delete_hooks(HostType).
-spec config_spec() -> mongoose_config_spec:config_section().
config_spec() ->
BackendItems = [{atom_to_binary(B, utf8),
(backend_module(B)):config_spec()} || B <- all_backends()],
#section{items = maps:from_list(BackendItems)}.
-spec config_metrics(mongooseim:host_type()) -> [{gen_mod:opt_key(), gen_mod:opt_value()}].
config_metrics(HostType) ->
case gen_mod:get_module_opts(HostType, ?MODULE) of
Empty when Empty =:= #{};
TODO remove when get_module_opts does not return [ ] anymore
[{none, none}];
Opts ->
[{backend, Backend} || Backend <- maps:keys(Opts)]
end.
%%--------------------------------------------------------------------
%% Helpers
%%--------------------------------------------------------------------
-spec backend_module(backend()) -> module().
backend_module(http) -> mod_event_pusher_http;
backend_module(push) -> mod_event_pusher_push;
backend_module(rabbit) -> mod_event_pusher_rabbit;
backend_module(sns) -> mod_event_pusher_sns.
all_backends() ->
[http, push, rabbit, sns].
| null | https://raw.githubusercontent.com/esl/MongooseIM/a465408fcc98a171cbfbcad242dedbdb5abb022d/src/event_pusher/mod_event_pusher.erl | erlang | ==============================================================================
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================
--------------------------------------------------------------------
Callbacks
--------------------------------------------------------------------
--------------------------------------------------------------------
API
--------------------------------------------------------------------
--------------------------------------------------------------------
gen_mod API
--------------------------------------------------------------------
--------------------------------------------------------------------
Helpers
-------------------------------------------------------------------- | Copyright 2016 Erlang Solutions Ltd.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(mod_event_pusher).
-author('').
-behaviour(gen_mod).
-behaviour(mongoose_module_metrics).
-include("mod_event_pusher_events.hrl").
-include("mongoose_config_spec.hrl").
-type backend() :: http | push | rabbit | sns.
-type event() :: #user_status_event{} | #chat_event{} | #unack_msg_event{}.
-export_type([event/0]).
-export([deps/2, start/2, stop/1, config_spec/0, push_event/2]).
-export([config_metrics/1]).
-ignore_xref([behaviour_info/1]).
-callback push_event(mongoose_acc:t(), event()) -> mongoose_acc:t().
@doc Pushes the event to each backend registered with the .
-spec push_event(mongoose_acc:t(), event()) -> mongoose_acc:t().
push_event(Acc, Event) ->
HostType = mongoose_acc:host_type(Acc),
Backends = maps:keys(gen_mod:get_loaded_module_opts(HostType, ?MODULE)),
lists:foldl(fun(B, Acc0) -> (backend_module(B)):push_event(Acc0, Event) end, Acc, Backends).
-spec deps(mongooseim:host_type(), gen_mod:module_opts()) -> gen_mod_deps:deps().
deps(_HostType, Opts) ->
[{backend_module(Backend), BackendOpts, hard} || {Backend, BackendOpts} <- maps:to_list(Opts)].
-spec start(mongooseim:host_type(), gen_mod:module_opts()) -> any().
start(HostType, _Opts) ->
mod_event_pusher_hook_translator:add_hooks(HostType).
-spec stop(mongooseim:host_type()) -> any().
stop(HostType) ->
mod_event_pusher_hook_translator:delete_hooks(HostType).
-spec config_spec() -> mongoose_config_spec:config_section().
config_spec() ->
BackendItems = [{atom_to_binary(B, utf8),
(backend_module(B)):config_spec()} || B <- all_backends()],
#section{items = maps:from_list(BackendItems)}.
-spec config_metrics(mongooseim:host_type()) -> [{gen_mod:opt_key(), gen_mod:opt_value()}].
config_metrics(HostType) ->
case gen_mod:get_module_opts(HostType, ?MODULE) of
Empty when Empty =:= #{};
TODO remove when get_module_opts does not return [ ] anymore
[{none, none}];
Opts ->
[{backend, Backend} || Backend <- maps:keys(Opts)]
end.
-spec backend_module(backend()) -> module().
backend_module(http) -> mod_event_pusher_http;
backend_module(push) -> mod_event_pusher_push;
backend_module(rabbit) -> mod_event_pusher_rabbit;
backend_module(sns) -> mod_event_pusher_sns.
all_backends() ->
[http, push, rabbit, sns].
|
17df327cbc5c26a7a4378f6e22636ec13e26276ffdad500f92174aa7d92a020e | avsm/mirage-duniverse | topkg_result.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2016 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
% % NAME%% % % ---------------------------------------------------------------------------
Copyright (c) 2016 Daniel C. Bünzli. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
%%NAME%% %%VERSION%%
---------------------------------------------------------------------------*)
open Result
let ( >>= ) v f = match v with Ok v -> f v | Error _ as e -> e
let ( >>| ) v f = match v with Ok v -> Ok (f v) | Error _ as e -> e
type ('a, 'b) r = ('a, 'b) Result.result = Ok of 'a | Error of 'b
type 'a result = ('a, [`Msg of string]) r
module R = struct
type msg = [`Msg of string ]
let msgf fmt =
let kmsg _ = `Msg (Format.flush_str_formatter ()) in
Format.kfprintf kmsg Format.str_formatter fmt
let reword_error reword = function
| Ok _ as r -> r
| Error e -> Error (reword e)
let error_msg m = Error (`Msg m)
let error_msgf fmt =
let kerr _ = Error (`Msg (Format.flush_str_formatter ())) in
Format.kfprintf kerr Format.str_formatter fmt
let reword_error_msg ?(replace = false) reword = function
| Ok _ as r -> r
| Error (`Msg e) ->
let (`Msg e' as v) = reword e in
if replace then Error v else error_msgf "%s\n%s" e e'
end
---------------------------------------------------------------------------
Copyright ( c ) 2016
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2016 Daniel C. Bünzli
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/avsm/mirage-duniverse/983e115ff5a9fb37e3176c373e227e9379f0d777/ocaml_modules/topkg/src/topkg_result.ml | ocaml | ---------------------------------------------------------------------------
Copyright ( c ) 2016 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
% % NAME%% % % ---------------------------------------------------------------------------
Copyright (c) 2016 Daniel C. Bünzli. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
%%NAME%% %%VERSION%%
---------------------------------------------------------------------------*)
open Result
let ( >>= ) v f = match v with Ok v -> f v | Error _ as e -> e
let ( >>| ) v f = match v with Ok v -> Ok (f v) | Error _ as e -> e
type ('a, 'b) r = ('a, 'b) Result.result = Ok of 'a | Error of 'b
type 'a result = ('a, [`Msg of string]) r
module R = struct
type msg = [`Msg of string ]
let msgf fmt =
let kmsg _ = `Msg (Format.flush_str_formatter ()) in
Format.kfprintf kmsg Format.str_formatter fmt
let reword_error reword = function
| Ok _ as r -> r
| Error e -> Error (reword e)
let error_msg m = Error (`Msg m)
let error_msgf fmt =
let kerr _ = Error (`Msg (Format.flush_str_formatter ())) in
Format.kfprintf kerr Format.str_formatter fmt
let reword_error_msg ?(replace = false) reword = function
| Ok _ as r -> r
| Error (`Msg e) ->
let (`Msg e' as v) = reword e in
if replace then Error v else error_msgf "%s\n%s" e e'
end
---------------------------------------------------------------------------
Copyright ( c ) 2016
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2016 Daniel C. Bünzli
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| |
214ee6434207876e618e87dc640cf38094a202ce92f9f872e528fe0fb1d3fc62 | athos/syntactic-closure | util.clj | (ns syntactic-closure.util)
(defn error [& msgs]
(throw (Exception. (apply str msgs))))
(defn var->qualified-symbol [^clojure.lang.Var var]
(let [^clojure.lang.Namespace ns (.ns var)]
(symbol (str (.name ns)) (str (.sym var)))))
(defn macro? [var]
(if-let [m (meta var)]
(:macro m)))
(defn add-meta [x m]
(if (meta x)
(vary-meta x #(into % m))
(with-meta x m)))
| null | https://raw.githubusercontent.com/athos/syntactic-closure/e251b03a199507df4bbc35788230d434d6506634/src/syntactic_closure/util.clj | clojure | (ns syntactic-closure.util)
(defn error [& msgs]
(throw (Exception. (apply str msgs))))
(defn var->qualified-symbol [^clojure.lang.Var var]
(let [^clojure.lang.Namespace ns (.ns var)]
(symbol (str (.name ns)) (str (.sym var)))))
(defn macro? [var]
(if-let [m (meta var)]
(:macro m)))
(defn add-meta [x m]
(if (meta x)
(vary-meta x #(into % m))
(with-meta x m)))
| |
f631f9563b83eda09e84c381a046cc37c4351fd8adff9ab5c31ca61f31103b9d | exoscale/pithos | meta.clj | (ns io.pithos.meta
"The metastore is region-local and stores details of bucket content
(bucket contents are region-local as well)."
(:refer-clojure :exclude [update])
(:require [qbits.alia :as a]
[qbits.hayt :refer [select where set-columns columns
delete update limit map-type
create-table column-definitions
create-index index-name]]
[clojure.tools.logging :refer [debug]]
[clojure.set :refer [union]]
[io.pithos.util :refer [inc-prefix string->pattern]]
[io.pithos.store :as store]))
(defprotocol Metastore
"All necessary functions to manipulate bucket metadata"
(prefixes [this bucket params])
(abort-multipart-upload! [this bucket object upload])
(update-part! [this bucket object upload partno columns])
(initiate-upload! [this bucket object upload metadata])
(get-upload-details [this bucket object upload])
(list-uploads [this bucket prefix])
(list-object-uploads [this bucket object])
(list-upload-parts [this bucket object upload]))
;; schema definition
(def object-table
"Objects are keyed by bucket and object and contain
several direct properties as well as a map of additional
schema-less properties"
(create-table
:object
(column-definitions {:bucket :text
:object :text
:inode :uuid
:version :timeuuid
:atime :text
:size :bigint
:checksum :text
:storageclass :text
:acl :text
:metadata (map-type :text :text)
:primary-key [:bucket :object]})))
(def object_inode-index
"Objects are indexed by inode"
(create-index
:object
:inode
(index-name :object_inode)))
(def upload-table
"Uploads are keyed by bucket, object and upload since several concurrent
uploads can be performed"
(create-table
:upload
(column-definitions {:upload :uuid
:version :uuid
:bucket :text
:object :text
:inode :uuid
:partno :int
:modified :text
:size :bigint
:checksum :text
:primary-key [[:bucket :object :upload] :partno]})))
(def object_uploads-table
"Uploads are also referenced by object"
(create-table
:object_uploads
(column-definitions {:bucket :text
:object :text
:upload :uuid
:metadata (map-type :text :text)
:primary-key [[:bucket :object] :upload]})))
(def upload_bucket-index
"Uploads are indexed by bucket for easy lookup"
(create-index
:object_uploads
:bucket
(index-name :upload_bucket)))
CQL Queries
;; Note: Possible improvements, all of these are preparable, with the
;; exception of initiate-upload-q and update-part-q (unless we freeze
;; the fields to update making them parameters). A function taking a
;; session that returns a map of prepared queries could be invoked from
;; cassandra-meta-store, destructured in a let via {:keys [...]} then
;; used with execute in that scope.
(defn abort-multipart-upload-q
"Delete an upload reference"
[bucket object upload]
(delete :object_uploads (where [[= :bucket bucket]
[= :object object]
[= :upload upload]])))
(defn delete-upload-parts-q
"Delete all upload parts"
[bucket object upload]
(delete :upload (where [[= :bucket bucket]
[= :object object]
[= :upload upload]])))
(defn initiate-upload-q
"Create an upload reference"
[bucket object upload metadata]
(update :object_uploads
(set-columns {:metadata metadata})
(where [[= :bucket bucket]
[= :object object]
[= :upload upload]])))
(defn update-part-q
"Update an upload part's properties"
[bucket object upload partno columns]
(update :upload
(set-columns columns)
(where [[= :bucket bucket]
[= :object object]
[= :upload upload]
[= :partno partno]])))
(defn list-uploads-q
"List all uploads by bucket"
[bucket]
(select :object_uploads (where [[= :bucket bucket]])))
(defn list-upload-parts-q
"List all parts of an upload"
[bucket object upload]
(select :upload (where [[= :bucket bucket]
[= :object object]
[= :upload upload]])))
(defn get-upload-details-q
[bucket object upload]
(select :object_uploads (where [[= :bucket bucket]
[= :object object]
[= :upload upload]])))
(defn list-object-uploads-q
"List all uploads of an object"
[bucket object]
(select :object_uploads (where [[= :bucket bucket]
[= :object object]])))
(defn fetch-object-q
"List objects"
[bucket prefix marker max init?]
(let [object-def [[= :bucket bucket]]
next-prefix (when (seq prefix) (inc-prefix prefix))]
(select :object
(cond
(empty? prefix)
(where [[= :bucket bucket]
[> :object (or marker "")]])
init?
(where [[= :bucket bucket]
[>= :object marker]
[< :object next-prefix]])
:else
(where [[= :bucket bucket]
[> :object marker]
[< :object next-prefix]]))
(limit max))))
(defn get-object-q
"Fetch object properties"
[bucket object]
(select :object
(where [[= :bucket bucket]
[= :object object]])
(limit 1)))
(defn update-object-q
"Update object properties"
[bucket object columns]
(update :object
(set-columns columns)
(where [[= :bucket bucket]
[= :object object]])))
(defn delete-object-q
"Delete an object"
[bucket object]
(delete :object (where [[= :bucket bucket]
[= :object object]])))
# # # Utility functions
(defn filter-keys
"Keep only contents in a list of objects"
[objects prefix delimiter]
(if (seq objects)
(let [prefix (or prefix "")
suffix (if delimiter (str "[^\\" (string->pattern delimiter) "]") ".")
pat (str "^" (string->pattern prefix) suffix "*$")
keep? (comp (partial re-find (re-pattern pat)) :object)]
(filter keep? objects))
objects))
(defn filter-prefixes
"Keep only prefixes from a list of objects"
[objects prefix delim]
(set
(when (and (seq delim) (seq objects))
(let [prefix (or (string->pattern prefix) "")
delim (string->pattern delim)
regex (re-pattern
(str "^(" prefix "[^\\" delim "]*\\" delim ").*$"))
->prefix (comp second
(partial re-find regex)
:object)]
(remove nil? (map ->prefix objects))))))
(defn normalize-params
[{:keys [delimiter] :as params}]
(if (seq delimiter)
params
(dissoc params :delimiter)))
(defn get-prefixes
"Paging logic for keys"
[fetcher {:keys [prefix delimiter max-keys marker]}]
(loop [objects (fetcher prefix (or marker prefix) max-keys true)
prefixes #{}
keys []]
(let [prefixes (if delimiter
(union prefixes (filter-prefixes objects prefix delimiter))
#{})
new-keys (remove prefixes (filter-keys objects prefix delimiter))
keys (concat keys new-keys)
found (count (concat keys prefixes))
next (:object (last objects))
trunc? (boolean (seq next))]
(if (or (>= found max-keys) (not trunc?))
(-> {:keys keys
:prefixes prefixes
:truncated? trunc?}
(cond-> (and delimiter trunc?)
(assoc :next-marker next
:marker (or marker ""))))
(recur (fetcher prefix next max-keys false) prefixes keys)))))
(defn cassandra-meta-store
"Given a cluster configuration, reify an instance of Metastore"
[{:keys [read-consistency write-consistency] :as config}]
(let [copts (dissoc config :read-consistency :write-consistency)
session (store/cassandra-store copts)
rdcty (or (some-> read-consistency keyword) :quorum)
wrcty (or (some-> write-consistency keyword) :quorum)
read! (fn [query] (a/execute session query {:consistency rdcty}))
write! (fn [query] (a/execute session query {:consistency wrcty}))]
(reify
store/Convergeable
(converge! [this]
(write! object-table)
(write! upload-table)
(write! object_uploads-table)
(write! object_inode-index)
(write! upload_bucket-index))
store/Crudable
(fetch [this bucket object fail?]
(or
(first (read! (get-object-q bucket object)))
(when fail?
(throw (ex-info "no such key" {:type :no-such-key
:status-code 404
:key object})))))
(fetch [this bucket object]
(store/fetch this bucket object true))
(update! [this bucket object columns]
(write! (update-object-q bucket object columns)))
(delete! [this bucket object]
(write! (delete-object-q bucket object)))
Metastore
(prefixes [this bucket params]
(get-prefixes
(fn [prefix marker limit init?]
(when (and (number? limit) (pos? limit))
(read! (fetch-object-q bucket prefix marker limit init?))))
(normalize-params params)))
(initiate-upload! [this bucket object upload metadata]
(write! (initiate-upload-q bucket object upload metadata)))
(abort-multipart-upload! [this bucket object upload]
(write! (abort-multipart-upload-q bucket object upload))
(write! (delete-upload-parts-q bucket object upload)))
(update-part! [this bucket object upload partno columns]
(write! (update-part-q bucket object upload partno columns)))
(get-upload-details [this bucket object upload]
(first
(read! (get-upload-details-q bucket object upload))))
(list-uploads [this bucket prefix]
(filter #(.startsWith (:object %) prefix)
(read! (list-uploads-q bucket))))
(list-object-uploads [this bucket object]
(read! (list-object-uploads-q bucket object)))
(list-upload-parts [this bucket object upload]
(read! (list-upload-parts-q bucket object upload))))))
| null | https://raw.githubusercontent.com/exoscale/pithos/54790f00fbfd330c6196d42e5408385028d5e029/src/io/pithos/meta.clj | clojure | schema definition
Note: Possible improvements, all of these are preparable, with the
exception of initiate-upload-q and update-part-q (unless we freeze
the fields to update making them parameters). A function taking a
session that returns a map of prepared queries could be invoked from
cassandra-meta-store, destructured in a let via {:keys [...]} then
used with execute in that scope. | (ns io.pithos.meta
"The metastore is region-local and stores details of bucket content
(bucket contents are region-local as well)."
(:refer-clojure :exclude [update])
(:require [qbits.alia :as a]
[qbits.hayt :refer [select where set-columns columns
delete update limit map-type
create-table column-definitions
create-index index-name]]
[clojure.tools.logging :refer [debug]]
[clojure.set :refer [union]]
[io.pithos.util :refer [inc-prefix string->pattern]]
[io.pithos.store :as store]))
(defprotocol Metastore
"All necessary functions to manipulate bucket metadata"
(prefixes [this bucket params])
(abort-multipart-upload! [this bucket object upload])
(update-part! [this bucket object upload partno columns])
(initiate-upload! [this bucket object upload metadata])
(get-upload-details [this bucket object upload])
(list-uploads [this bucket prefix])
(list-object-uploads [this bucket object])
(list-upload-parts [this bucket object upload]))
(def object-table
"Objects are keyed by bucket and object and contain
several direct properties as well as a map of additional
schema-less properties"
(create-table
:object
(column-definitions {:bucket :text
:object :text
:inode :uuid
:version :timeuuid
:atime :text
:size :bigint
:checksum :text
:storageclass :text
:acl :text
:metadata (map-type :text :text)
:primary-key [:bucket :object]})))
(def object_inode-index
"Objects are indexed by inode"
(create-index
:object
:inode
(index-name :object_inode)))
(def upload-table
"Uploads are keyed by bucket, object and upload since several concurrent
uploads can be performed"
(create-table
:upload
(column-definitions {:upload :uuid
:version :uuid
:bucket :text
:object :text
:inode :uuid
:partno :int
:modified :text
:size :bigint
:checksum :text
:primary-key [[:bucket :object :upload] :partno]})))
(def object_uploads-table
"Uploads are also referenced by object"
(create-table
:object_uploads
(column-definitions {:bucket :text
:object :text
:upload :uuid
:metadata (map-type :text :text)
:primary-key [[:bucket :object] :upload]})))
(def upload_bucket-index
"Uploads are indexed by bucket for easy lookup"
(create-index
:object_uploads
:bucket
(index-name :upload_bucket)))
CQL Queries
(defn abort-multipart-upload-q
"Delete an upload reference"
[bucket object upload]
(delete :object_uploads (where [[= :bucket bucket]
[= :object object]
[= :upload upload]])))
(defn delete-upload-parts-q
"Delete all upload parts"
[bucket object upload]
(delete :upload (where [[= :bucket bucket]
[= :object object]
[= :upload upload]])))
(defn initiate-upload-q
"Create an upload reference"
[bucket object upload metadata]
(update :object_uploads
(set-columns {:metadata metadata})
(where [[= :bucket bucket]
[= :object object]
[= :upload upload]])))
(defn update-part-q
"Update an upload part's properties"
[bucket object upload partno columns]
(update :upload
(set-columns columns)
(where [[= :bucket bucket]
[= :object object]
[= :upload upload]
[= :partno partno]])))
(defn list-uploads-q
"List all uploads by bucket"
[bucket]
(select :object_uploads (where [[= :bucket bucket]])))
(defn list-upload-parts-q
"List all parts of an upload"
[bucket object upload]
(select :upload (where [[= :bucket bucket]
[= :object object]
[= :upload upload]])))
(defn get-upload-details-q
[bucket object upload]
(select :object_uploads (where [[= :bucket bucket]
[= :object object]
[= :upload upload]])))
(defn list-object-uploads-q
"List all uploads of an object"
[bucket object]
(select :object_uploads (where [[= :bucket bucket]
[= :object object]])))
(defn fetch-object-q
"List objects"
[bucket prefix marker max init?]
(let [object-def [[= :bucket bucket]]
next-prefix (when (seq prefix) (inc-prefix prefix))]
(select :object
(cond
(empty? prefix)
(where [[= :bucket bucket]
[> :object (or marker "")]])
init?
(where [[= :bucket bucket]
[>= :object marker]
[< :object next-prefix]])
:else
(where [[= :bucket bucket]
[> :object marker]
[< :object next-prefix]]))
(limit max))))
(defn get-object-q
"Fetch object properties"
[bucket object]
(select :object
(where [[= :bucket bucket]
[= :object object]])
(limit 1)))
(defn update-object-q
"Update object properties"
[bucket object columns]
(update :object
(set-columns columns)
(where [[= :bucket bucket]
[= :object object]])))
(defn delete-object-q
"Delete an object"
[bucket object]
(delete :object (where [[= :bucket bucket]
[= :object object]])))
# # # Utility functions
(defn filter-keys
"Keep only contents in a list of objects"
[objects prefix delimiter]
(if (seq objects)
(let [prefix (or prefix "")
suffix (if delimiter (str "[^\\" (string->pattern delimiter) "]") ".")
pat (str "^" (string->pattern prefix) suffix "*$")
keep? (comp (partial re-find (re-pattern pat)) :object)]
(filter keep? objects))
objects))
(defn filter-prefixes
"Keep only prefixes from a list of objects"
[objects prefix delim]
(set
(when (and (seq delim) (seq objects))
(let [prefix (or (string->pattern prefix) "")
delim (string->pattern delim)
regex (re-pattern
(str "^(" prefix "[^\\" delim "]*\\" delim ").*$"))
->prefix (comp second
(partial re-find regex)
:object)]
(remove nil? (map ->prefix objects))))))
(defn normalize-params
[{:keys [delimiter] :as params}]
(if (seq delimiter)
params
(dissoc params :delimiter)))
(defn get-prefixes
"Paging logic for keys"
[fetcher {:keys [prefix delimiter max-keys marker]}]
(loop [objects (fetcher prefix (or marker prefix) max-keys true)
prefixes #{}
keys []]
(let [prefixes (if delimiter
(union prefixes (filter-prefixes objects prefix delimiter))
#{})
new-keys (remove prefixes (filter-keys objects prefix delimiter))
keys (concat keys new-keys)
found (count (concat keys prefixes))
next (:object (last objects))
trunc? (boolean (seq next))]
(if (or (>= found max-keys) (not trunc?))
(-> {:keys keys
:prefixes prefixes
:truncated? trunc?}
(cond-> (and delimiter trunc?)
(assoc :next-marker next
:marker (or marker ""))))
(recur (fetcher prefix next max-keys false) prefixes keys)))))
(defn cassandra-meta-store
"Given a cluster configuration, reify an instance of Metastore"
[{:keys [read-consistency write-consistency] :as config}]
(let [copts (dissoc config :read-consistency :write-consistency)
session (store/cassandra-store copts)
rdcty (or (some-> read-consistency keyword) :quorum)
wrcty (or (some-> write-consistency keyword) :quorum)
read! (fn [query] (a/execute session query {:consistency rdcty}))
write! (fn [query] (a/execute session query {:consistency wrcty}))]
(reify
store/Convergeable
(converge! [this]
(write! object-table)
(write! upload-table)
(write! object_uploads-table)
(write! object_inode-index)
(write! upload_bucket-index))
store/Crudable
(fetch [this bucket object fail?]
(or
(first (read! (get-object-q bucket object)))
(when fail?
(throw (ex-info "no such key" {:type :no-such-key
:status-code 404
:key object})))))
(fetch [this bucket object]
(store/fetch this bucket object true))
(update! [this bucket object columns]
(write! (update-object-q bucket object columns)))
(delete! [this bucket object]
(write! (delete-object-q bucket object)))
Metastore
(prefixes [this bucket params]
(get-prefixes
(fn [prefix marker limit init?]
(when (and (number? limit) (pos? limit))
(read! (fetch-object-q bucket prefix marker limit init?))))
(normalize-params params)))
(initiate-upload! [this bucket object upload metadata]
(write! (initiate-upload-q bucket object upload metadata)))
(abort-multipart-upload! [this bucket object upload]
(write! (abort-multipart-upload-q bucket object upload))
(write! (delete-upload-parts-q bucket object upload)))
(update-part! [this bucket object upload partno columns]
(write! (update-part-q bucket object upload partno columns)))
(get-upload-details [this bucket object upload]
(first
(read! (get-upload-details-q bucket object upload))))
(list-uploads [this bucket prefix]
(filter #(.startsWith (:object %) prefix)
(read! (list-uploads-q bucket))))
(list-object-uploads [this bucket object]
(read! (list-object-uploads-q bucket object)))
(list-upload-parts [this bucket object upload]
(read! (list-upload-parts-q bucket object upload))))))
|
536db13c456a482e786a435bd2bd6c7d797011c4cabe45e39772968a497579c6 | wiseman/cl-zeroconf | sysdeps.lisp | ;;; ------------------------------------------------- -*- Mode: LISP -*-
CL - ZEROCONF -- A Lisp library for service discovery .
;;;
Copyright 2005
( )
2005 - 02 - 10
;;;
Licensed under the MIT license -- see the accompanying LICENSE.txt
;;; file.
;;;
;;; This file contains implementation-specific code (for locks,
;;; select-like functionality for sockets, callbacks from foreign
;;; code).
(in-package "DNS-SD")
;; Each Lisp implementation needs to provide a MAKE-LOCK function to
;; create a mutex, and a WITH-LOCK macro to use the mutex. The mutex
;; must be recursive.
#+lispworks
(defun make-lock (name)
(mp:make-lock :name name))
#+lispworks
(defmacro with-lock ((lock) &body body)
`(mp:with-lock (,lock)
,@body))
#+allegro
(defun make-lock (name)
(mp:make-process-lock :name name))
#+allegro
(defmacro with-lock ((lock) &body body)
`(mp:with-process-lock (,lock)
,@body))
#+sbcl
(defun make-lock (name)
(sb-thread:make-mutex :name name))
#+sbcl
(defmacro with-lock ((lock) &body body)
`(sb-thread:with-recursive-lock (,lock)
,@body))
;; Each implementation needs to define a function
FDS - INPUT - AVAILABLE - P which acts a little like the UNIX select(2 )
;; system call. It must take a list of file descriptors and an
;; optional timeout, and return the subset of the descriptors for
;; which input is available (or the empty list if the timeout expires
;; without any descriptor being ready for input).
;;
The following implementations of this function for different
fall into two categories : They either use the UNIX select(2 ) system
;; call or they poll all descriptors, sleep for a short time, then
;; loop. The select(2) method should be more efficient, but is less
;; portable (it will probably have to be changed for Windows).
;; We need this in order to do the (ccl::syscall os::select ...) stuff.
#+openmcl
(eval-when (:load-toplevel :compile-toplevel)
#+linuxppc-target
(require "LINUX-SYSCALLS")
#+darwinppc-target
(require "DARWIN-SYSCALLS"))
;; The OpenMCL implementation uses select(2). This code is based on
the CCL::FD - INPUT - AVAILABLE - P function that 's internal to OpenMCL
;; and handles single file descriptors.
#+openmcl
(defun fds-input-available-p (fd-list &optional timeout)
(if (null fd-list)
'()
(ccl:rletZ ((tv :timeval))
(let ((ticks (if timeout (ceiling (* timeout ccl::*ticks-per-second*)) nil)))
(ccl::ticks-to-timeval ticks tv))
(ccl:%stack-block ((infds ccl::*fd-set-size*)
(errfds ccl::*fd-set-size*))
(ccl::fd-zero infds)
(ccl::fd-zero errfds)
(dolist (fd fd-list)
(ccl::fd-set fd infds)
(ccl::fd-set fd errfds))
(let* ((result (ccl::syscall syscalls::select
(1+ (reduce #'max fd-list)) infds
(ccl:%null-ptr) errfds
(if timeout tv (ccl:%null-ptr)))))
(cond ((eql result 0)
;; The select timed out.
'())
((and result (> result 0))
There 's activity on at least one fd .
(remove-if-not #'(lambda (fd)
(or (ccl::fd-is-set fd infds)
(ccl::fd-is-set fd errfds)))
fd-list))
((eql result #.(read-from-string "#$EINTR"))
;; Got an interrupt, try again. I'm no UNIX
;; expert, is this check really required?
(fds-input-available-p fd-list timeout))
(T
(error "select returned the error code ~S." result))))))))
The LispWorks implementation uses the polling approach , using the
;; SOCKET-LISTEN function internal to the COMM package.
#+lispworks
(require "comm")
#+lispworks
(defun fds-input-available-p (fd-list &optional timeout)
(if (and timeout (<= timeout 0))
'()
(if (null fd-list)
'()
(let ((ready-fds (remove-if-not #'comm::socket-listen fd-list)))
(if ready-fds
ready-fds
(progn
(sleep 0.1)
(fds-input-available-p fd-list (if timeout (- timeout 0.1) nil))))))))
;; The ACL implementation uses the polling approach.
#+allegro
(defun fds-input-available-p (fd-list &optional timeout)
(if (or (and timeout (<= timeout 0)) (null fd-list))
'()
(let ((ready-fds (remove-if-not #'excl:stream-listen fd-list)))
(if ready-fds
ready-fds
(progn
(sleep 0.1)
(fds-input-available-p fd-list (if timeout (- timeout 0.1) nil)))))))
The SBCL version uses select(2 ) . This is based on some old version
of CMUCL 's SUB - SERVE - EVENT I had lying around .
#+sbcl
(defun fds-input-available-p (fd-list &optional timeout)
(if (null fd-list)
'()
(multiple-value-bind (secs usecs)
(sb-impl::decode-timeout (/ timeout 1000.0))
(sb-alien:with-alien ((read-fds (sb-alien:struct sb-unix:fd-set))
(write-fds (sb-alien:struct sb-unix:fd-set))
(error-fds (sb-alien:struct sb-unix:fd-set)))
(sb-unix:fd-zero read-fds)
(sb-unix:fd-zero write-fds)
(sb-unix:fd-zero error-fds)
(dolist (fd fd-list)
(sb-unix:fd-set fd read-fds)
(sb-unix:fd-set fd error-fds))
(multiple-value-bind (value error)
(sb-unix:unix-fast-select (1+ (reduce #'max fd-list))
(sb-alien:addr read-fds)
(sb-alien:addr write-fds)
(sb-alien:addr error-fds)
secs usecs)
(cond ((eql value 0)
;; The select timed out.
'())
((and value (> value 0))
There 's activity on at least one fd .
(remove-if-not #'(lambda (fd)
(or (sb-unix:fd-isset fd read-fds)
(sb-unix:fd-isset fd error-fds)))
fd-list))
((eql error sb-posix:eintr)
;; Got an interrupt, try again. We do need to
;; check for this, right?
(fds-input-available-p fd-list timeout))
(T
(error "unix-fast-select returned the error code ~S." error))))))))
#+openmcl
(defun make-lock (name)
(ccl:make-lock name))
#+openmcl
(defmacro with-lock ((lock) &body body)
`(ccl:with-lock-grabbed (,lock)
,@body))
;; ----------
;; Callbacks (implementation-specific)
;; ----------
Lispworks
#+lispworks
(fli:define-foreign-callable (%%publish-callback-trampoline :result-type :void)
((oid dns-service-ref)
(flags dns-service-flags)
(error-code dns-service-error-type)
(name :pointer)
(type :pointer)
(domain :pointer)
(context :pointer))
(publish-callback-trampoline oid flags error-code
(fli:convert-from-foreign-string name)
(fli:convert-from-foreign-string type)
(fli:convert-from-foreign-string domain)
context))
#+lispworks
(defparameter %publish-callback-trampoline
(fli:make-pointer :symbol-name '%%publish-callback-trampoline :type 'dns-service-register-reply))
#+lispworks
(fli:define-foreign-callable (%%browse-callback-trampoline :result-type :void)
((oid dns-service-ref)
(flags dns-service-flags)
(interface-index :long :unsigned)
(error-code dns-service-error-type)
(name :pointer)
(type :pointer)
(domain :pointer)
(context :pointer))
(browse-callback-trampoline oid flags interface-index error-code
(fli:convert-from-foreign-string name)
(fli:convert-from-foreign-string type)
(fli:convert-from-foreign-string domain)
context))
#+lispworks
(defparameter %browse-callback-trampoline
(fli:make-pointer :symbol-name '%%browse-callback-trampoline :type 'dns-service-browse-reply))
#+lispworks
(fli:define-foreign-callable (%%resolve-callback-trampoline :result-type :void)
((oid dns-service-ref)
(flags dns-service-flags)
(interface-index :long :unsigned)
(error-code dns-service-error-type)
(full-name :pointer)
(host-target :pointer)
(port :short :unsigned)
(txt-len :short :unsigned)
(txt-record (:pointer (:unsigned :char)))
(context :pointer))
(resolve-callback-trampoline oid flags interface-index error-code
(fli:convert-from-foreign-string full-name)
(fli:convert-from-foreign-string host-target)
port txt-len txt-record context))
#+lispworks
(defparameter %resolve-callback-trampoline
(fli:make-pointer :symbol-name '%%resolve-callback-trampoline :type 'dns-service-resolve-reply))
SBCL
#+sbcl
(define-alien-function %%publish-callback-trampoline
(void (oid dns-service-ref)
(flags dns-service-flags)
(error-code dns-service-error-type)
(name c-string)
(type c-string)
(domain c-string)
(context (* t)))
(publish-callback-trampoline oid flags error-code name type domain context))
#+sbcl
(defparameter %publish-callback-trampoline (alien-function-sap %%publish-callback-trampoline))
#+sbcl
(define-alien-function %%browse-callback-trampoline
(void (oid dns-service-ref)
(flags dns-service-flags)
(interface-index unsigned-long)
(error-code dns-service-error-type)
(name c-string)
(type c-string)
(domain c-string)
(context (* T)))
(browse-callback-trampoline oid flags interface-index error-code
name type domain context))
#+sbcl
(defparameter %browse-callback-trampoline (alien-function-sap %%browse-callback-trampoline))
#+sbcl
(define-alien-function %%resolve-callback-trampoline
(void (oid dns-service-ref)
(flags dns-service-flags)
(interface-index unsigned-long)
(error-code dns-service-error-type)
(full-name c-string)
(host-target c-string)
(port unsigned-short)
(txt-len unsigned-short)
(txt-record (* unsigned-char))
(context (* T)))
(resolve-callback-trampoline oid flags interface-index error-code
full-name host-target port txt-len txt-record context))
#+sbcl
(defparameter %resolve-callback-trampoline (alien-function-sap %%resolve-callback-trampoline))
;; ACL
#+allegro
(ff:defun-foreign-callable %%publish-callback-trampoline ((oid dns-service-ref)
(flags dns-service-flags)
(error-code dns-service-error-type)
(name (* :char))
(type (* :char))
(domain (* :char))
(context (* :void)))
(publish-callback-trampoline oid flags error-code
(excl:native-to-string name)
(excl:native-to-string type)
(excl:native-to-string domain)
context))
#+allegro
(defparameter %publish-callback-trampoline (ff:register-foreign-callable '%%publish-callback-trampoline))
#+allegro
(ff:defun-foreign-callable %%browse-callback-trampoline ((oid dns-service-ref)
(flags dns-service-flags)
(interface-index :unsigned-long)
(error-code dns-service-error-type)
(name (* :char))
(type (* :char))
(domain (* :char))
(context (* :void)))
(browse-callback-trampoline oid flags interface-index error-code
(excl:native-to-string name)
(excl:native-to-string type)
(excl:native-to-string domain)
context))
#+allegro
(defparameter %browse-callback-trampoline (ff:register-foreign-callable '%%browse-callback-trampoline))
#+allegro
(ff:defun-foreign-callable %%resolve-callback-trampoline ((oid dns-service-ref)
(flags dns-service-flags)
(interface-index :unsigned-long)
(error-code dns-service-error-type)
(full-name (* :char))
(host-target (* :char))
(port :unsigned-short)
(txt-len :unsigned-short)
(txt-record (* :unsigned-char))
(context (* :void)))
(resolve-callback-trampoline oid flags interface-index error-code
(excl:native-to-string full-name)
(excl:native-to-string host-target)
port txt-len txt-record context))
#+allegro
(defparameter %resolve-callback-trampoline (ff:register-foreign-callable '%%resolve-callback-trampoline))
#+allegro
(ff:defun-foreign-callable %%query-callback-trampoline ((oid dns-service-ref)
(flags dns-service-flags)
(interface-index :unsigned-long)
(error-code dns-service-error-type)
(full-name (* :char))
(rrtype :unsigned-short)
(rrclass :unsigned-short)
(rdlen :unsigned-short)
(rdata (* :void))
(ttl :unsigned-long)
(context (* :void)))
(query-callback-trampoline oid flags interface-index error-code
(excl:native-to-string full-name)
rrtype rrclass rdlen rdata ttl context))
#+allegro
(defparameter %query-callback-trampoline (ff:register-foreign-callable '%%query-callback-trampoline))
;; OpenMCL
#+openmcl
(ccl:defcallback %publish-callback-trampoline (dns-service-ref oid
dns-service-flags flags
dns-service-error-type error-code
(:* :char) name
(:* :char) type
(:* :char) domain
(:* :void) context)
(publish-callback-trampoline oid flags error-code
(ccl:%get-cstring name)
(ccl:%get-cstring type)
(ccl:%get-cstring domain)
context))
#+openmcl
(ccl:defcallback %browse-callback-trampoline (dns-service-ref oid
dns-service-flags flags
:unsigned-long interface-index
dns-service-error-type error-code
(:* :char) name
(:* :char) type
(:* :char) domain
(:* :void) context)
(browse-callback-trampoline oid flags interface-index error-code
(ccl:%get-cstring name)
(ccl:%get-cstring type)
(ccl:%get-cstring domain)
context))
#+openmcl
(ccl:defcallback %resolve-callback-trampoline (dns-service-ref oid
dns-service-flags flags
:unsigned-long interface-index
dns-service-error-type error-code
(:* :char) full-name
(:* :char) host-target
:unsigned-short port
:unsigned-short txt-len
(:* :unsigned-char) txt-record
(:* :void) context)
(resolve-callback-trampoline oid flags interface-index error-code
(ccl:%get-cstring full-name)
(ccl:%get-cstring host-target)
port
txt-len txt-record context))
#+openmcl
(ccl:defcallback %query-callback-trampoline (dns-service-ref oid
dns-service-flags flags
:unsigned-long interface-index
dns-service-error-type error-code
(:* :char) full-name
:unsigned-short rrtype
:unsigned-short rrclass
:unsigned-short rdlen
(:* :void) rdata
:unsigned-long ttl
(:* :void) context)
(query-callback-trampoline oid flags interface-index error-code
(ccl:%get-cstring full-name)
rrtype rrclass rdlen rdata ttl context))
| null | https://raw.githubusercontent.com/wiseman/cl-zeroconf/f677c6fd6a86ed58d14a5e64859be23cee6d1e67/sysdeps.lisp | lisp | ------------------------------------------------- -*- Mode: LISP -*-
file.
This file contains implementation-specific code (for locks,
select-like functionality for sockets, callbacks from foreign
code).
Each Lisp implementation needs to provide a MAKE-LOCK function to
create a mutex, and a WITH-LOCK macro to use the mutex. The mutex
must be recursive.
Each implementation needs to define a function
system call. It must take a list of file descriptors and an
optional timeout, and return the subset of the descriptors for
which input is available (or the empty list if the timeout expires
without any descriptor being ready for input).
call or they poll all descriptors, sleep for a short time, then
loop. The select(2) method should be more efficient, but is less
portable (it will probably have to be changed for Windows).
We need this in order to do the (ccl::syscall os::select ...) stuff.
The OpenMCL implementation uses select(2). This code is based on
and handles single file descriptors.
The select timed out.
Got an interrupt, try again. I'm no UNIX
expert, is this check really required?
SOCKET-LISTEN function internal to the COMM package.
The ACL implementation uses the polling approach.
The select timed out.
Got an interrupt, try again. We do need to
check for this, right?
----------
Callbacks (implementation-specific)
----------
ACL
OpenMCL | CL - ZEROCONF -- A Lisp library for service discovery .
Copyright 2005
( )
2005 - 02 - 10
Licensed under the MIT license -- see the accompanying LICENSE.txt
(in-package "DNS-SD")
#+lispworks
(defun make-lock (name)
(mp:make-lock :name name))
#+lispworks
(defmacro with-lock ((lock) &body body)
`(mp:with-lock (,lock)
,@body))
#+allegro
(defun make-lock (name)
(mp:make-process-lock :name name))
#+allegro
(defmacro with-lock ((lock) &body body)
`(mp:with-process-lock (,lock)
,@body))
#+sbcl
(defun make-lock (name)
(sb-thread:make-mutex :name name))
#+sbcl
(defmacro with-lock ((lock) &body body)
`(sb-thread:with-recursive-lock (,lock)
,@body))
FDS - INPUT - AVAILABLE - P which acts a little like the UNIX select(2 )
The following implementations of this function for different
fall into two categories : They either use the UNIX select(2 ) system
#+openmcl
(eval-when (:load-toplevel :compile-toplevel)
#+linuxppc-target
(require "LINUX-SYSCALLS")
#+darwinppc-target
(require "DARWIN-SYSCALLS"))
the CCL::FD - INPUT - AVAILABLE - P function that 's internal to OpenMCL
#+openmcl
(defun fds-input-available-p (fd-list &optional timeout)
(if (null fd-list)
'()
(ccl:rletZ ((tv :timeval))
(let ((ticks (if timeout (ceiling (* timeout ccl::*ticks-per-second*)) nil)))
(ccl::ticks-to-timeval ticks tv))
(ccl:%stack-block ((infds ccl::*fd-set-size*)
(errfds ccl::*fd-set-size*))
(ccl::fd-zero infds)
(ccl::fd-zero errfds)
(dolist (fd fd-list)
(ccl::fd-set fd infds)
(ccl::fd-set fd errfds))
(let* ((result (ccl::syscall syscalls::select
(1+ (reduce #'max fd-list)) infds
(ccl:%null-ptr) errfds
(if timeout tv (ccl:%null-ptr)))))
(cond ((eql result 0)
'())
((and result (> result 0))
There 's activity on at least one fd .
(remove-if-not #'(lambda (fd)
(or (ccl::fd-is-set fd infds)
(ccl::fd-is-set fd errfds)))
fd-list))
((eql result #.(read-from-string "#$EINTR"))
(fds-input-available-p fd-list timeout))
(T
(error "select returned the error code ~S." result))))))))
The LispWorks implementation uses the polling approach , using the
#+lispworks
(require "comm")
#+lispworks
(defun fds-input-available-p (fd-list &optional timeout)
(if (and timeout (<= timeout 0))
'()
(if (null fd-list)
'()
(let ((ready-fds (remove-if-not #'comm::socket-listen fd-list)))
(if ready-fds
ready-fds
(progn
(sleep 0.1)
(fds-input-available-p fd-list (if timeout (- timeout 0.1) nil))))))))
#+allegro
(defun fds-input-available-p (fd-list &optional timeout)
(if (or (and timeout (<= timeout 0)) (null fd-list))
'()
(let ((ready-fds (remove-if-not #'excl:stream-listen fd-list)))
(if ready-fds
ready-fds
(progn
(sleep 0.1)
(fds-input-available-p fd-list (if timeout (- timeout 0.1) nil)))))))
The SBCL version uses select(2 ) . This is based on some old version
of CMUCL 's SUB - SERVE - EVENT I had lying around .
#+sbcl
(defun fds-input-available-p (fd-list &optional timeout)
(if (null fd-list)
'()
(multiple-value-bind (secs usecs)
(sb-impl::decode-timeout (/ timeout 1000.0))
(sb-alien:with-alien ((read-fds (sb-alien:struct sb-unix:fd-set))
(write-fds (sb-alien:struct sb-unix:fd-set))
(error-fds (sb-alien:struct sb-unix:fd-set)))
(sb-unix:fd-zero read-fds)
(sb-unix:fd-zero write-fds)
(sb-unix:fd-zero error-fds)
(dolist (fd fd-list)
(sb-unix:fd-set fd read-fds)
(sb-unix:fd-set fd error-fds))
(multiple-value-bind (value error)
(sb-unix:unix-fast-select (1+ (reduce #'max fd-list))
(sb-alien:addr read-fds)
(sb-alien:addr write-fds)
(sb-alien:addr error-fds)
secs usecs)
(cond ((eql value 0)
'())
((and value (> value 0))
There 's activity on at least one fd .
(remove-if-not #'(lambda (fd)
(or (sb-unix:fd-isset fd read-fds)
(sb-unix:fd-isset fd error-fds)))
fd-list))
((eql error sb-posix:eintr)
(fds-input-available-p fd-list timeout))
(T
(error "unix-fast-select returned the error code ~S." error))))))))
#+openmcl
(defun make-lock (name)
(ccl:make-lock name))
#+openmcl
(defmacro with-lock ((lock) &body body)
`(ccl:with-lock-grabbed (,lock)
,@body))
Lispworks
#+lispworks
(fli:define-foreign-callable (%%publish-callback-trampoline :result-type :void)
((oid dns-service-ref)
(flags dns-service-flags)
(error-code dns-service-error-type)
(name :pointer)
(type :pointer)
(domain :pointer)
(context :pointer))
(publish-callback-trampoline oid flags error-code
(fli:convert-from-foreign-string name)
(fli:convert-from-foreign-string type)
(fli:convert-from-foreign-string domain)
context))
#+lispworks
(defparameter %publish-callback-trampoline
(fli:make-pointer :symbol-name '%%publish-callback-trampoline :type 'dns-service-register-reply))
#+lispworks
(fli:define-foreign-callable (%%browse-callback-trampoline :result-type :void)
((oid dns-service-ref)
(flags dns-service-flags)
(interface-index :long :unsigned)
(error-code dns-service-error-type)
(name :pointer)
(type :pointer)
(domain :pointer)
(context :pointer))
(browse-callback-trampoline oid flags interface-index error-code
(fli:convert-from-foreign-string name)
(fli:convert-from-foreign-string type)
(fli:convert-from-foreign-string domain)
context))
#+lispworks
(defparameter %browse-callback-trampoline
(fli:make-pointer :symbol-name '%%browse-callback-trampoline :type 'dns-service-browse-reply))
#+lispworks
(fli:define-foreign-callable (%%resolve-callback-trampoline :result-type :void)
((oid dns-service-ref)
(flags dns-service-flags)
(interface-index :long :unsigned)
(error-code dns-service-error-type)
(full-name :pointer)
(host-target :pointer)
(port :short :unsigned)
(txt-len :short :unsigned)
(txt-record (:pointer (:unsigned :char)))
(context :pointer))
(resolve-callback-trampoline oid flags interface-index error-code
(fli:convert-from-foreign-string full-name)
(fli:convert-from-foreign-string host-target)
port txt-len txt-record context))
#+lispworks
(defparameter %resolve-callback-trampoline
(fli:make-pointer :symbol-name '%%resolve-callback-trampoline :type 'dns-service-resolve-reply))
SBCL
#+sbcl
(define-alien-function %%publish-callback-trampoline
(void (oid dns-service-ref)
(flags dns-service-flags)
(error-code dns-service-error-type)
(name c-string)
(type c-string)
(domain c-string)
(context (* t)))
(publish-callback-trampoline oid flags error-code name type domain context))
#+sbcl
(defparameter %publish-callback-trampoline (alien-function-sap %%publish-callback-trampoline))
#+sbcl
(define-alien-function %%browse-callback-trampoline
(void (oid dns-service-ref)
(flags dns-service-flags)
(interface-index unsigned-long)
(error-code dns-service-error-type)
(name c-string)
(type c-string)
(domain c-string)
(context (* T)))
(browse-callback-trampoline oid flags interface-index error-code
name type domain context))
#+sbcl
(defparameter %browse-callback-trampoline (alien-function-sap %%browse-callback-trampoline))
#+sbcl
(define-alien-function %%resolve-callback-trampoline
(void (oid dns-service-ref)
(flags dns-service-flags)
(interface-index unsigned-long)
(error-code dns-service-error-type)
(full-name c-string)
(host-target c-string)
(port unsigned-short)
(txt-len unsigned-short)
(txt-record (* unsigned-char))
(context (* T)))
(resolve-callback-trampoline oid flags interface-index error-code
full-name host-target port txt-len txt-record context))
#+sbcl
(defparameter %resolve-callback-trampoline (alien-function-sap %%resolve-callback-trampoline))
#+allegro
(ff:defun-foreign-callable %%publish-callback-trampoline ((oid dns-service-ref)
(flags dns-service-flags)
(error-code dns-service-error-type)
(name (* :char))
(type (* :char))
(domain (* :char))
(context (* :void)))
(publish-callback-trampoline oid flags error-code
(excl:native-to-string name)
(excl:native-to-string type)
(excl:native-to-string domain)
context))
#+allegro
(defparameter %publish-callback-trampoline (ff:register-foreign-callable '%%publish-callback-trampoline))
#+allegro
(ff:defun-foreign-callable %%browse-callback-trampoline ((oid dns-service-ref)
(flags dns-service-flags)
(interface-index :unsigned-long)
(error-code dns-service-error-type)
(name (* :char))
(type (* :char))
(domain (* :char))
(context (* :void)))
(browse-callback-trampoline oid flags interface-index error-code
(excl:native-to-string name)
(excl:native-to-string type)
(excl:native-to-string domain)
context))
#+allegro
(defparameter %browse-callback-trampoline (ff:register-foreign-callable '%%browse-callback-trampoline))
#+allegro
(ff:defun-foreign-callable %%resolve-callback-trampoline ((oid dns-service-ref)
(flags dns-service-flags)
(interface-index :unsigned-long)
(error-code dns-service-error-type)
(full-name (* :char))
(host-target (* :char))
(port :unsigned-short)
(txt-len :unsigned-short)
(txt-record (* :unsigned-char))
(context (* :void)))
(resolve-callback-trampoline oid flags interface-index error-code
(excl:native-to-string full-name)
(excl:native-to-string host-target)
port txt-len txt-record context))
#+allegro
(defparameter %resolve-callback-trampoline (ff:register-foreign-callable '%%resolve-callback-trampoline))
#+allegro
(ff:defun-foreign-callable %%query-callback-trampoline ((oid dns-service-ref)
(flags dns-service-flags)
(interface-index :unsigned-long)
(error-code dns-service-error-type)
(full-name (* :char))
(rrtype :unsigned-short)
(rrclass :unsigned-short)
(rdlen :unsigned-short)
(rdata (* :void))
(ttl :unsigned-long)
(context (* :void)))
(query-callback-trampoline oid flags interface-index error-code
(excl:native-to-string full-name)
rrtype rrclass rdlen rdata ttl context))
#+allegro
(defparameter %query-callback-trampoline (ff:register-foreign-callable '%%query-callback-trampoline))
#+openmcl
(ccl:defcallback %publish-callback-trampoline (dns-service-ref oid
dns-service-flags flags
dns-service-error-type error-code
(:* :char) name
(:* :char) type
(:* :char) domain
(:* :void) context)
(publish-callback-trampoline oid flags error-code
(ccl:%get-cstring name)
(ccl:%get-cstring type)
(ccl:%get-cstring domain)
context))
#+openmcl
(ccl:defcallback %browse-callback-trampoline (dns-service-ref oid
dns-service-flags flags
:unsigned-long interface-index
dns-service-error-type error-code
(:* :char) name
(:* :char) type
(:* :char) domain
(:* :void) context)
(browse-callback-trampoline oid flags interface-index error-code
(ccl:%get-cstring name)
(ccl:%get-cstring type)
(ccl:%get-cstring domain)
context))
#+openmcl
(ccl:defcallback %resolve-callback-trampoline (dns-service-ref oid
dns-service-flags flags
:unsigned-long interface-index
dns-service-error-type error-code
(:* :char) full-name
(:* :char) host-target
:unsigned-short port
:unsigned-short txt-len
(:* :unsigned-char) txt-record
(:* :void) context)
(resolve-callback-trampoline oid flags interface-index error-code
(ccl:%get-cstring full-name)
(ccl:%get-cstring host-target)
port
txt-len txt-record context))
#+openmcl
(ccl:defcallback %query-callback-trampoline (dns-service-ref oid
dns-service-flags flags
:unsigned-long interface-index
dns-service-error-type error-code
(:* :char) full-name
:unsigned-short rrtype
:unsigned-short rrclass
:unsigned-short rdlen
(:* :void) rdata
:unsigned-long ttl
(:* :void) context)
(query-callback-trampoline oid flags interface-index error-code
(ccl:%get-cstring full-name)
rrtype rrclass rdlen rdata ttl context))
|
0763aa7294054707aabf7019aadc6416d7b2539189622ef147c98226aff351ec | turtl/api | sync.lisp | (in-package :turtl)
(defafun get-latest-sync-id (future) ()
"Retrieve the last sync-id from the sync table. This gives a newly-populated
client a reference point to run syncs against."
(alet* ((sock (db-sock))
(query (r:r
(:attr
(:limit
(:order-by
(:table "sync")
:index (:desc "id"))
1)
"id")))
(cursor (r:run sock query))
(sync-item (r:to-array sock cursor))
(sync-item (coerce sync-item 'list)))
(r:stop/disconnect sock cursor)
(finish future (car sync-item))))
(defun make-sync-record (user-id item-type item-id action &key client-id rel-ids fields no-auto-add-user)
"Creates a sync hash record from the given params."
(let* ((sync-record (make-hash-table :test #'equal)))
(add-id sync-record)
(setf (gethash "user_id" sync-record) user-id
(gethash "type" sync-record) item-type
(gethash "item_id" sync-record) item-id
(gethash "action" sync-record) action)
;; the originating user should always be in the relations
(unless no-auto-add-user
(if (listp rel-ids)
(push user-id rel-ids)
(setf rel-ids (concatenate 'vector rel-ids (vector user-id)))))
(setf (gethash "rel" sync-record) (remove-duplicates rel-ids :test #'string=))
;; can store the client id (cid) of a newly-created object
(when client-id (setf (gethash "cid" sync-record) client-id))
;; can be used to specify the public fields changed in an edit
(when (and fields (listp fields)) (setf (gethash "fields" sync-record) fields))
sync-record))
(defun convert-to-sync (item type &key (action "add"))
"Take a piece of data (say, a note) and turn it into a sync item the app can
understand. Very useful for pulling out extra data into a profile that didn't
come through sync but we want to be available to the app.
Defaults to an 'add' but can be specified via :action."
(let ((rec (make-sync-record (gethash "user_id" item)
type
(gethash "id" item)
action)))
(case (intern (string-upcase action) :keyword)
(:delete
(setf (gethash "data" rec) (hash ("id" (gethash "id" item))
("deleted" t))))
(t
(setf (gethash "data" rec) item)))
rec))
(defafun insert-sync-records (future) (sync-records)
"Insert one or more sync-records (list) objects (built with make-sync-record)
into the sync table."
(alet* ((sock (db-sock))
(query (r:r (:insert
(:table "sync")
(if (zerop (length sync-records))
#()
sync-records))))
(nil (r:run sock query)))
(r:disconnect sock)
(finish future t)))
(adefun add-sync-record (user-id item-type item-id action &key sub-action client-id rel-ids fields no-auto-add-user)
"Adds a record to the sync table describing a change to a specific object.
Allows specifying relation ids (:rel-ids) which can be used for filtering on
sync items. Returns the added sync records IDs as the first value and the
full sync records as the second."
(declare (ignore sub-action fields))
;; bomb out if bac action given (should never happen since this function is
;; only used internally, but accidents to happen)
(unless (find action '("add" "edit" "delete" "share" "unshare") :test #'string=)
(error 'server-error :msg (format nil "Bad sync record action: ~s~%" action)))
(alet* ((sync-record (make-sync-record user-id item-type item-id action :client-id client-id :rel-ids rel-ids :no-auto-add-user no-auto-add-user))
(nil (insert-sync-records (list sync-record))))
(list (gethash "id" sync-record))))
(defafun link-sync-items (future) (sync-items link-table)
"Given an array of items pulled from the `sync` table and a string table name
to link the items against, populate the sync items with their linked counter
parts (including the sync_id field for each sync item).
Note that all functions that deal with syncing should call this function. It
not only makes linking sync items to their data counterparts easier, it
uses a standard format for everything."
;; split up the items by deleted (ie, can't link against it, so we return a
;; "fake" record with deleted=true) or present (in which can we grab the
;; present items from the link-table and return them (along with any sync
;; metadata set. this gives us a completed picture of what's been changed
;; and/or deleted.
(let ((deleted-items nil)
(present-items nil))
(loop for sync-item across sync-items do
;; test if the item was deleted
(if (string= (gethash "action" sync-item) "delete")
;; create a return-ready "deleted" record, complete with sync metadata
(let ((item (hash ("id" (gethash "item_id" sync-item))
("deleted" t))))
(setf (gethash "data" sync-item) item)
(push sync-item deleted-items))
;; item is present, so save it (and its sync-id to pull out of the db
;; later).
(push sync-item present-items)))
;; define our finalizing function. this is needed because sometimes we'll
;; call out to the DB to pull out present items, sometimes we won't, and
;; since we're dealing with async, we define a function to handle both
;; instances
(flet ((finish (items)
(let* ((index (make-hash-table :test #'equal))
(synced-items nil))
;; index the present items by id
(loop for item across items do
(setf (gethash (gethash "id" item) index) item))
;; for each item we believe to be present, create a new hash
record for it with the sync_id present
(dolist (rec present-items)
(let* ((item (gethash (gethash "item_id" rec) index)))
;; sync could possibly be nil (if an item is edited and then
;; deleted in the same sync call, then although the edit
;; fools us into thinking the item is present, the delete
;; actually removed it. in this case, it will also be in
;; deleted-items and we don't need to bother tracking it).
(when item
;; create a sync record and save the object into it
(setf (gethash "data" rec) item))
(push rec synced-items)))
return the array of items , sorted by sync_id DESC
(finish future
(coerce (sort (append synced-items deleted-items)
(lambda (a b)
(string> (gethash "id" a)
(gethash "id" b))))
'vector)))))
;; if we have no items to link, just finish with a blank array, otherwise
;; pull out our items and finish with the list
(if (zerop (length present-items))
(finish #())
(alet* ((sock (db-sock))
(query (r:r
(:get-all
(:table link-table)
(mapcar (lambda (x) (gethash "item_id" x)) present-items))))
(cursor (r:run sock query))
(items (r:to-array sock cursor)))
(r:stop/disconnect sock cursor)
(finish items))))))
(adefun sync-scan (user-id from-sync-id &key poll)
"Given a user id, sync id, and item type, pull out all sync records *after*
the given sync-id, where the `rel` field contains the given user-id, and the
sync type matches the passed type. Links grabbed sync items against the given
link-table.
This is useful for boards/notes, because whenever they change (ie add a sync
record) they also record (in the `rel` field) which users are affected by the
change."
(alet* ((sock (db-sock))
(poll-timeout 30)
(sock-poll (when poll (db-sock :timeout poll-timeout)))
(query (r:r
(:between
(:table "sync")
(list user-id from-sync-id)
(list user-id (:maxval))
:index (db-index "sync" "scan_user")
:left-bound "open")))
;; wrap changes around the above query
(query-poll (r:r (:changes query :squash 1))))
;; run the changes query, saving the return promise
(let ((poll-promise (when poll (r:run sock-poll query-poll))))
;; run the non-changes query and grab the results
(alet* ((cursor (r:run sock query))
(sync-items (r:to-array sock cursor))
(sync-size (length sync-items)))
(r:stop/disconnect sock cursor)
(cond ((and poll (zerop sync-size))
;; we got no items returned from the instant query, run the changes
;; query (aka wait on the above promise) and save the results as
;; they come in
(alet* ((cursor-poll poll-promise)
(results nil)
(timer nil))
(as:with-delay ((+ poll-timeout 1))
(r:stop/disconnect sock-poll cursor-poll))
(chain
(r:each sock-poll cursor-poll
(lambda (rec)
(unless timer
(setf timer (as:with-delay (.1) (r:stop/disconnect sock-poll cursor-poll))))
(push rec results)))
(:catch (err)
(unless (typep err 'r:cursor-stopped)
(error err)))
(:finally
(r:stop/disconnect sock-poll cursor-poll)
(coerce (nreverse results) 'list)))))
((zerop sync-size)
#())
(t
(when poll (r:disconnect sock-poll))
sync-items))))))
(adefun sync-all (user-id last-sync-id &key poll)
"Grab all of the sync records for the given user-id since last-sync-id, link
them to their respective objects, and hand back the sorted (ASC) list of sync
items."
(alet* ((types (hash))
(last-sync-id (or last-sync-id (r:r (:maxval))))
(records (sync-scan user-id last-sync-id :poll poll)))
;; group our sync records by type so we can pull them out en-mass
(loop for record across records
for type = (gethash "type" record) do
(push record (gethash type types)))
;; loop through our groups records and link the corresponding objects
(let ((actions nil))
(loop for type being the hash-keys of types
for collection being the hash-values of types
for collection-arr = (coerce collection 'vector)
for table = (case (intern (string-upcase type) :keyword)
(:user "users")
(:keychain "keychain")
(:persona "personas")
(:board "boards")
(:note "notes")
(:file "notes")
(:invite "invites")) do
(unless table
(error (format nil "bad sync type: ~a" type)))
(push (link-sync-items collection-arr table) actions))
;; once our objects finish linking, flatten our groups and sort by sync id
;; ascending
(chain (all actions)
(:then (completed)
(let ((ungrouped nil))
(dolist (collection completed)
(loop for record across collection do
(remhash "rel" record)
(remhash "item_id" record)
(push record ungrouped)))
(let* ((latest-sync-id (if (zerop (length ungrouped))
""
(gethash "id" (car ungrouped))))
(sorted (sort ungrouped (lambda (a b)
(let ((a-sid (hget a '("id")))
(b-sid (hget b '("id"))))
(when (string< latest-sync-id a-sid)
(setf latest-sync-id a-sid))
(when (string< latest-sync-id b-sid)
(setf latest-sync-id b-sid))
(string< a-sid b-sid))))))
(values sorted latest-sync-id))))))))
(adefun process-incoming-sync (user-id sync)
"Applies a single sync item against a user's profile."
(let* ((type (intern (string-upcase (string (gethash "type" sync))) :keyword))
(action (intern (string-upcase (string (gethash "action" sync))) :keyword))
(item (gethash "data" sync))
(item-id (gethash "id" item)))
(unless (find action '(:add :edit :delete))
(error (format nil "Bad action given while syncing (~a)" action)))
(flet ((standard-delete (del-promise)
(alet* ((sync-ids del-promise))
(hash ("id" item-id)
("sync_ids" sync-ids)))))
(vom:debug "bulk sync: ~s ~s ~a" type action item-id)
(case type
(:user
(case action
(:edit
(edit-user item-id user-id item))
;; only allow edit of user via sync
(t (error "Only the `edit` action is allowed when syncing user data"))))
(:keychain
(case action
(:add
(add-keychain-entry user-id item))
(:edit
(edit-keychain-entry user-id item-id item))
(:delete
(standard-delete (delete-keychain-entry user-id item-id)))))
(:persona
(case action
(:add
(add-persona user-id item))
(:edit
(edit-persona user-id item-id item))
(:delete
(standard-delete (delete-persona user-id item-id)))))
(:board
(case action
(:add
(add-board user-id item))
(:edit
(edit-board user-id item-id item))
(:delete
(standard-delete (delete-board user-id item-id)))))
(:note
(case action
(:add
(add-note user-id item))
(:edit
(edit-note user-id item-id item))
(:delete
(standard-delete (delete-note user-id item-id)))))
(:file
(case action
(:delete
(standard-delete (delete-note-file user-id item-id)))))
;; TODO: invites? yes? no?
(t
(error (format nil "Unknown sync record given (~a)" type)))))))
(adefun bulk-sync (user-id sync-items &key request)
"Given a set of bulk items to sync to a user's profile, run them each. This is
done sequentially in order to catch errors (and preserve order in the case of
errors)."
(let ((successes nil)
(track-failed sync-items)
(error nil))
(chain (aeach (lambda (sync)
(alet* ((item (process-incoming-sync user-id sync)))
;; pop the failure that corresponds to this item off the
;; head of the fails list
(setf track-failed (cdr track-failed))
(let* ((sync-ids (gethash "sync_ids" item))
(type (gethash "type" sync))
(action (gethash "action" sync)))
(remhash "sync_ids" item)
(push (hash ("id" (gethash "id" sync))
("type" type)
("action" action)
("sync_ids" sync-ids)
("data" item))
successes))))
sync-items)
(:catch (err) (setf error err))
(:then ()
(dolist (sync successes)
(let* ((type (gethash "type" sync))
(action (gethash "action" sync))
(track-action (string-downcase (format nil "~a-~a" type action))))
(track track-action nil request)))
(hash ("success" (nreverse successes))
("fail" (mapcar (lambda (x) (gethash "id" x)) track-failed))
("error" error))))))
(adefun delete-sync-items (user-id &key only-affects-user)
"Delete sync records by user id, with the option of only deleting records that
affect that one user."
(alet* ((sock (db-sock))
(query (r:r (:delete
(:filter
(:between
(:table "sync")
(list user-id (:minval))
(list user-id (:maxval))
:index (db-index "sync" "scan_user"))
(r:fn (s)
(if only-affects-user
(:== (:count (:attr s "rel")) 1)
t))))))
(nil (r:run sock query)))
(r:disconnect sock)
t))
(adefun convert-board-share-to-sync (board-id)
"Given a board's data, return a promise that is resolved with a VECTOR of
sync items that add the correct board(s) and note(s)."
(multiple-promise-bind (boards notes)
(get-board-tree board-id)
(concatenate
'vector
(map 'vector (lambda (board) (convert-to-sync board "board")) boards)
(map 'vector (lambda (note) (convert-to-sync note "note")) notes))))
(adefun convert-board-unshare-to-sync (user-id board-id)
"Given a board's data, return a promise that is resolved with a VECTOR of
sync items that delete the correct board(s) and note(s).
NOTE that we have to be careful of situations where a note can be in two
shared boards, and if one of the boards is unshared, we do NOT delete the
note because it is still shared via the other board. Same goes for child
boards...Board A owns Board B, if Board A is unshared but Board B has a
separate and valid share to the same persona, Board B must NOT be deleted.
Also, once we find all the items we do want to delete, we need to sync
delete the keychain entries as well.
Adding is much easier than deleting =]."
(multiple-promise-bind (boards notes)
(get-board-tree
board-id
:user-id user-id
:perm-filter (lambda (type user-id data board-perms)
(case type
(:board
(let* ((cur-board-id (gethash "id" data))
(perm-entry (gethash cur-board-id board-perms)))
;; remove any boards we still have some level of
;; permissions for. this includes the board being
;; unshared
(and perm-entry
(< 0 (gethash "perms" perm-entry 0)))))
(:note
(user-can-read-note-p user-id data board-perms)))))
(concatenate
'vector
(map 'vector (lambda (board) (convert-to-sync board "board" :action "delete")) boards)
(map 'vector (lambda (note) (convert-to-sync note "note" :action "delete")) notes))))
(defafun cleanup-sync (future) ()
"Remove all sync items older than 30 days."
(alet* ((timestamp (- (get-timestamp) 2592000))
(sync-id (format nil "~8,'0X0000000000000000" timestamp))
(sock (db-sock))
(query (r:r
(:delete
(:between
(:table "sync")
(:minval)
sync-id))))
(nil (r:run sock query)))
(r:disconnect sock)
(finish future t)))
| null | https://raw.githubusercontent.com/turtl/api/20ab4cc91128921300913b885eb1e201a5e0fc3f/models/sync.lisp | lisp | the originating user should always be in the relations
can store the client id (cid) of a newly-created object
can be used to specify the public fields changed in an edit
bomb out if bac action given (should never happen since this function is
only used internally, but accidents to happen)
split up the items by deleted (ie, can't link against it, so we return a
"fake" record with deleted=true) or present (in which can we grab the
present items from the link-table and return them (along with any sync
metadata set. this gives us a completed picture of what's been changed
and/or deleted.
test if the item was deleted
create a return-ready "deleted" record, complete with sync metadata
item is present, so save it (and its sync-id to pull out of the db
later).
define our finalizing function. this is needed because sometimes we'll
call out to the DB to pull out present items, sometimes we won't, and
since we're dealing with async, we define a function to handle both
instances
index the present items by id
for each item we believe to be present, create a new hash
sync could possibly be nil (if an item is edited and then
deleted in the same sync call, then although the edit
fools us into thinking the item is present, the delete
actually removed it. in this case, it will also be in
deleted-items and we don't need to bother tracking it).
create a sync record and save the object into it
if we have no items to link, just finish with a blank array, otherwise
pull out our items and finish with the list
wrap changes around the above query
run the changes query, saving the return promise
run the non-changes query and grab the results
we got no items returned from the instant query, run the changes
query (aka wait on the above promise) and save the results as
they come in
group our sync records by type so we can pull them out en-mass
loop through our groups records and link the corresponding objects
once our objects finish linking, flatten our groups and sort by sync id
ascending
only allow edit of user via sync
TODO: invites? yes? no?
pop the failure that corresponds to this item off the
head of the fails list
remove any boards we still have some level of
permissions for. this includes the board being
unshared | (in-package :turtl)
(defafun get-latest-sync-id (future) ()
"Retrieve the last sync-id from the sync table. This gives a newly-populated
client a reference point to run syncs against."
(alet* ((sock (db-sock))
(query (r:r
(:attr
(:limit
(:order-by
(:table "sync")
:index (:desc "id"))
1)
"id")))
(cursor (r:run sock query))
(sync-item (r:to-array sock cursor))
(sync-item (coerce sync-item 'list)))
(r:stop/disconnect sock cursor)
(finish future (car sync-item))))
(defun make-sync-record (user-id item-type item-id action &key client-id rel-ids fields no-auto-add-user)
"Creates a sync hash record from the given params."
(let* ((sync-record (make-hash-table :test #'equal)))
(add-id sync-record)
(setf (gethash "user_id" sync-record) user-id
(gethash "type" sync-record) item-type
(gethash "item_id" sync-record) item-id
(gethash "action" sync-record) action)
(unless no-auto-add-user
(if (listp rel-ids)
(push user-id rel-ids)
(setf rel-ids (concatenate 'vector rel-ids (vector user-id)))))
(setf (gethash "rel" sync-record) (remove-duplicates rel-ids :test #'string=))
(when client-id (setf (gethash "cid" sync-record) client-id))
(when (and fields (listp fields)) (setf (gethash "fields" sync-record) fields))
sync-record))
(defun convert-to-sync (item type &key (action "add"))
  "Take a piece of data (say, a note) and turn it into a sync item the app can
   understand. Very useful for pulling out extra data into a profile that didn't
   come through sync but we want to be available to the app.
   Defaults to an 'add' but can be specified via :action."
  (let ((rec (make-sync-record (gethash "user_id" item)
                               type
                               (gethash "id" item)
                               action)))
    ;; deletes only need a {id, deleted} stub; every other action embeds the
    ;; full item as the record's data payload
    (case (intern (string-upcase action) :keyword)
      (:delete
        (setf (gethash "data" rec) (hash ("id" (gethash "id" item))
                                         ("deleted" t))))
      (t
        (setf (gethash "data" rec) item)))
    rec))
(defafun insert-sync-records (future) (sync-records)
  "Insert one or more sync-records (list) objects (built with make-sync-record)
   into the sync table."
  (alet* ((sock (db-sock))
          (query (r:r (:insert
                        (:table "sync")
                        ;; degrade an empty record list to an empty vector so
                        ;; the insert is a clean no-op instead of an error
                        (if (zerop (length sync-records))
                            #()
                            sync-records))))
          (nil (r:run sock query)))
    (r:disconnect sock)
    (finish future t)))
(adefun add-sync-record (user-id item-type item-id action &key sub-action client-id rel-ids fields no-auto-add-user)
  "Adds a record to the sync table describing a change to a specific object.
   Allows specifying relation ids (:rel-ids) which can be used for filtering on
   sync items. Returns the added sync records IDs as the first value and the
   full sync records as the second."
  ;; NOTE(review): sub-action and fields are accepted but deliberately ignored
  ;; (fields is NOT forwarded to make-sync-record), and despite the docstring
  ;; only the list of ids is returned -- confirm callers expect this.
  (declare (ignore sub-action fields))
  ;; reject unknown actions before anything touches the database
  (unless (find action '("add" "edit" "delete" "share" "unshare") :test #'string=)
    (error 'server-error :msg (format nil "Bad sync record action: ~s~%" action)))
  (alet* ((sync-record (make-sync-record user-id item-type item-id action :client-id client-id :rel-ids rel-ids :no-auto-add-user no-auto-add-user))
          (nil (insert-sync-records (list sync-record))))
    (list (gethash "id" sync-record))))
(defafun link-sync-items (future) (sync-items link-table)
  "Given an array of items pulled from the `sync` table and a string table name
   to link the items against, populate the sync items with their linked counter
   parts (including the sync_id field for each sync item).
   Note that all functions that deal with syncing should call this function. It
   not only makes linking sync items to their data counterparts easier, it
   uses a standard format for everything."
  (let ((deleted-items nil)
        (present-items nil))
    ;; split deletes from everything else: a delete has no row left to link
    ;; against, so it gets a {id, deleted} stub as its data payload
    (loop for sync-item across sync-items do
      (if (string= (gethash "action" sync-item) "delete")
          (let ((item (hash ("id" (gethash "item_id" sync-item))
                            ("deleted" t))))
            (setf (gethash "data" sync-item) item)
            (push sync-item deleted-items))
          (push sync-item present-items)))
    ;; NB: inside the flet BODY below, (finish future ...) still refers to the
    ;; global promise finisher (flet bindings are not visible to themselves)
    (flet ((finish (items)
             (let* ((index (make-hash-table :test #'equal))
                    (synced-items nil))
               (loop for item across items do
                 (setf (gethash (gethash "id" item) index) item))
               ;; for each non-delete sync record, attach its freshly-fetched
               ;; record for it with the sync_id present
               (dolist (rec present-items)
                 (let* ((item (gethash (gethash "item_id" rec) index)))
                   (when item
                     (setf (gethash "data" rec) item))
                   (push rec synced-items)))
               ;; return the array of items, sorted by sync_id DESC
               (finish future
                       (coerce (sort (append synced-items deleted-items)
                                     (lambda (a b)
                                       (string> (gethash "id" a)
                                                (gethash "id" b))))
                               'vector)))))
      (if (zerop (length present-items))
          (finish #())
          ;; batch-fetch every linked row in a single get-all, then hand the
          ;; results to the local `finish' above
          (alet* ((sock (db-sock))
                  (query (r:r
                           (:get-all
                             (:table link-table)
                             (mapcar (lambda (x) (gethash "item_id" x)) present-items))))
                  (cursor (r:run sock query))
                  (items (r:to-array sock cursor)))
            (r:stop/disconnect sock cursor)
            (finish items))))))
(adefun sync-scan (user-id from-sync-id &key poll)
  "Given a user id, sync id, and item type, pull out all sync records *after*
   the given sync-id, where the `rel` field contains the given user-id, and the
   sync type matches the passed type. Links grabbed sync items against the given
   link-table.
   This is useful for boards/notes, because whenever they change (ie add a sync
   record) they also record (in the `rel` field) which users are affected by the
   change."
  (alet* ((sock (db-sock))
          (poll-timeout 30)
          ;; long-polling uses a second socket so the changefeed can outlive
          ;; the instant query run below
          (sock-poll (when poll (db-sock :timeout poll-timeout)))
          (query (r:r
                   (:between
                     (:table "sync")
                     (list user-id from-sync-id)
                     (list user-id (:maxval))
                     :index (db-index "sync" "scan_user")
                     :left-bound "open")))
          (query-poll (r:r (:changes query :squash 1))))
    ;; start the changes query first and hold onto its promise; we only wait
    ;; on it if the instant query comes back empty
    (let ((poll-promise (when poll (r:run sock-poll query-poll))))
      (alet* ((cursor (r:run sock query))
              (sync-items (r:to-array sock cursor))
              (sync-size (length sync-items)))
        (r:stop/disconnect sock cursor)
        (cond ((and poll (zerop sync-size))
               ;; nothing yet: block on the changefeed. the cursor is stopped
               ;; either .1s after the first record arrives (to batch any
               ;; records landing together) or at the hard poll timeout.
               (alet* ((cursor-poll poll-promise)
                       (results nil)
                       (timer nil))
                 (as:with-delay ((+ poll-timeout 1))
                   (r:stop/disconnect sock-poll cursor-poll))
                 (chain
                   (r:each sock-poll cursor-poll
                     (lambda (rec)
                       (unless timer
                         (setf timer (as:with-delay (.1) (r:stop/disconnect sock-poll cursor-poll))))
                       (push rec results)))
                   (:catch (err)
                     ;; cursor-stopped just means one of our timers fired
                     (unless (typep err 'r:cursor-stopped)
                       (error err)))
                   (:finally
                     (r:stop/disconnect sock-poll cursor-poll)
                     (coerce (nreverse results) 'list)))))
              ((zerop sync-size)
               #())
              (t
               (when poll (r:disconnect sock-poll))
               sync-items))))))
(adefun sync-all (user-id last-sync-id &key poll)
  "Grab all of the sync records for the given user-id since last-sync-id, link
   them to their respective objects, and hand back the sorted (ASC) list of sync
   items."
  (alet* ((types (hash))
          ;; no reference point means "only records from now on"
          (last-sync-id (or last-sync-id (r:r (:maxval))))
          (records (sync-scan user-id last-sync-id :poll poll)))
    ;; group records by type so each group can be linked against its backing
    ;; table en-masse
    (loop for record across records
          for type = (gethash "type" record) do
      (push record (gethash type types)))
    (let ((actions nil))
      (loop for type being the hash-keys of types
            for collection being the hash-values of types
            for collection-arr = (coerce collection 'vector)
            for table = (case (intern (string-upcase type) :keyword)
                          (:user "users")
                          (:keychain "keychain")
                          (:persona "personas")
                          (:board "boards")
                          (:note "notes")
                          (:file "notes")
                          (:invite "invites")) do
        (unless table
          (error (format nil "bad sync type: ~a" type)))
        (push (link-sync-items collection-arr table) actions))
      (chain (all actions)
        (:then (completed)
          ;; flatten the linked groups back into one list, dropping fields
          ;; the client has no use for
          (let ((ungrouped nil))
            (dolist (collection completed)
              (loop for record across collection do
                (remhash "rel" record)
                (remhash "item_id" record)
                (push record ungrouped)))
            ;; NOTE(review): the sort predicate below also tracks the largest
            ;; id seen as a side effect; latest-sync-id is only correct
            ;; because sort visits every element.
            (let* ((latest-sync-id (if (zerop (length ungrouped))
                                       ""
                                       (gethash "id" (car ungrouped))))
                   (sorted (sort ungrouped (lambda (a b)
                                             (let ((a-sid (hget a '("id")))
                                                   (b-sid (hget b '("id"))))
                                               (when (string< latest-sync-id a-sid)
                                                 (setf latest-sync-id a-sid))
                                               (when (string< latest-sync-id b-sid)
                                                 (setf latest-sync-id b-sid))
                                               (string< a-sid b-sid))))))
              (values sorted latest-sync-id))))))))
(adefun process-incoming-sync (user-id sync)
  "Applies a single sync item against a user's profile."
  (let* ((type (intern (string-upcase (string (gethash "type" sync))) :keyword))
         (action (intern (string-upcase (string (gethash "action" sync))) :keyword))
         (item (gethash "data" sync))
         (item-id (gethash "id" item)))
    (unless (find action '(:add :edit :delete))
      (error (format nil "Bad action given while syncing (~a)" action)))
    ;; deletes resolve to a uniform {id, sync_ids} payload regardless of type
    (flet ((standard-delete (del-promise)
             (alet* ((sync-ids del-promise))
               (hash ("id" item-id)
                     ("sync_ids" sync-ids)))))
      (vom:debug "bulk sync: ~s ~s ~a" type action item-id)
      ;; dispatch on (type, action); an unsupported action for a known type
      ;; silently falls through (result NIL)
      (case type
        (:user
          ;; only the `edit` action is allowed when syncing user data
          (case action
            (:edit
              (edit-user item-id user-id item))
            (t (error "Only the `edit` action is allowed when syncing user data"))))
        (:keychain
          (case action
            (:add
              (add-keychain-entry user-id item))
            (:edit
              (edit-keychain-entry user-id item-id item))
            (:delete
              (standard-delete (delete-keychain-entry user-id item-id)))))
        (:persona
          (case action
            (:add
              (add-persona user-id item))
            (:edit
              (edit-persona user-id item-id item))
            (:delete
              (standard-delete (delete-persona user-id item-id)))))
        (:board
          (case action
            (:add
              (add-board user-id item))
            (:edit
              (edit-board user-id item-id item))
            (:delete
              (standard-delete (delete-board user-id item-id)))))
        (:note
          (case action
            (:add
              (add-note user-id item))
            (:edit
              (edit-note user-id item-id item))
            (:delete
              (standard-delete (delete-note user-id item-id)))))
        (:file
          ;; files only support delete here; uploads flow through another path
          (case action
            (:delete
              (standard-delete (delete-note-file user-id item-id)))))
        (t
          (error (format nil "Unknown sync record given (~a)" type)))))))
(adefun bulk-sync (user-id sync-items &key request)
  "Given a set of bulk items to sync to a user's profile, run them each. This is
   done sequentially in order to catch errors (and preserve order in the case of
   errors)."
  (let ((successes nil)
        ;; items not yet applied: whatever remains here when an error fires
        ;; is reported back as the failure list
        (track-failed sync-items)
        (error nil))
    (chain (aeach (lambda (sync)
                    (alet* ((item (process-incoming-sync user-id sync)))
                      ;; this item applied cleanly; pop it off the head of
                      ;; the pending/failed list
                      (setf track-failed (cdr track-failed))
                      (let* ((sync-ids (gethash "sync_ids" item))
                             (type (gethash "type" sync))
                             (action (gethash "action" sync)))
                        (remhash "sync_ids" item)
                        (push (hash ("id" (gethash "id" sync))
                                    ("type" type)
                                    ("action" action)
                                    ("sync_ids" sync-ids)
                                    ("data" item))
                              successes))))
                  sync-items)
      (:catch (err) (setf error err))
      (:then ()
        ;; analytics: record one event per successfully applied item
        (dolist (sync successes)
          (let* ((type (gethash "type" sync))
                 (action (gethash "action" sync))
                 (track-action (string-downcase (format nil "~a-~a" type action))))
            (track track-action nil request)))
        (hash ("success" (nreverse successes))
              ("fail" (mapcar (lambda (x) (gethash "id" x)) track-failed))
              ("error" error))))))
(adefun delete-sync-items (user-id &key only-affects-user)
  "Delete sync records by user id, with the option of only deleting records that
   affect that one user."
  (alet* ((sock (db-sock))
          (query (r:r (:delete
                        (:filter
                          (:between
                            (:table "sync")
                            (list user-id (:minval))
                            (list user-id (:maxval))
                            :index (db-index "sync" "scan_user"))
                          (r:fn (s)
                            ;; a `rel` of exactly one entry means the record
                            ;; touches nobody but this user
                            (if only-affects-user
                                (:== (:count (:attr s "rel")) 1)
                                t))))))
          (nil (r:run sock query)))
    (r:disconnect sock)
    t))
(adefun convert-board-share-to-sync (board-id)
  "Given a board's data, return a promise that is resolved with a VECTOR of
   sync items that add the correct board(s) and note(s)."
  (multiple-promise-bind (boards notes)
      (get-board-tree board-id)
    ;; every board and note in the tree becomes an `add` sync record
    (concatenate
      'vector
      (map 'vector (lambda (board) (convert-to-sync board "board")) boards)
      (map 'vector (lambda (note) (convert-to-sync note "note")) notes))))
(adefun convert-board-unshare-to-sync (user-id board-id)
  "Given a board's data, return a promise that is resolved with a VECTOR of
   sync items that delete the correct board(s) and note(s).
   NOTE that we have to be careful of situations where a note can be in two
   shared boards, and if one of the boards is unshared, we do NOT delete the
   note because it is still shared via the other board. Same goes for child
   boards...Board A owns Board B, if Board A is unshared but Board B has a
   separate and valid share to the same persona, Board B must NOT be deleted.
   Also, once we find all the items we do want to delete, we need to sync
   delete the keychain entries as well.
   Adding is much easier than deleting =]."
  (multiple-promise-bind (boards notes)
      (get-board-tree
        board-id
        :user-id user-id
        ;; NOTE(review): assumes get-board-tree uses :perm-filter to decide
        ;; which boards/notes belong in the result for this user -- confirm
        ;; its contract before changing these predicates
        :perm-filter (lambda (type user-id data board-perms)
                       (case type
                         (:board
                           (let* ((cur-board-id (gethash "id" data))
                                  (perm-entry (gethash cur-board-id board-perms)))
                             (and perm-entry
                                  (< 0 (gethash "perms" perm-entry 0)))))
                         (:note
                           (user-can-read-note-p user-id data board-perms)))))
    ;; everything that survives the filter becomes a `delete` sync record
    (concatenate
      'vector
      (map 'vector (lambda (board) (convert-to-sync board "board" :action "delete")) boards)
      (map 'vector (lambda (note) (convert-to-sync note "note" :action "delete")) notes))))
(defafun cleanup-sync (future) ()
  "Remove all sync items older than 30 days."
  ;; sync ids sort lexically; presumably their first 8 hex chars encode the
  ;; creation timestamp (cf. add-id), so deleting every id below
  ;; <hex(cutoff)>0000000000000000 drops records older than the cutoff --
  ;; confirm the id format before changing this.
  (alet* ((timestamp (- (get-timestamp) 2592000))  ; 2592000s = 30 days
          (sync-id (format nil "~8,'0X0000000000000000" timestamp))
          (sock (db-sock))
          (query (r:r
                   (:delete
                     (:between
                       (:table "sync")
                       (:minval)
                       sync-id))))
          (nil (r:run sock query)))
    (r:disconnect sock)
    (finish future t)))
|
5ab1740d4a72e8b33c6aad73f990eff07a68c23c920143bb77dfa2d82dda9943 | CardanoSolutions/kupo | Datum.hs | module Kupo.Data.Cardano.Datum where
import Kupo.Prelude
import Kupo.Data.Cardano.BinaryData
( BinaryData
, hashBinaryData
)
import Kupo.Data.Cardano.DatumHash
( DatumHash
)
import qualified Cardano.Ledger.Alonzo.Data as Ledger
-- | A transaction output's datum: either absent, referenced (by hash), or
-- inline. Each non-empty case carries either just the hash ('Left') or the
-- full binary payload ('Right'), depending on what has been resolved so far.
data Datum
    = NoDatum
    | Reference !(Either DatumHash BinaryData)
    | Inline !(Either DatumHash BinaryData)
    deriving (Generic, Show, Eq, Ord)
-- | Convert to the ledger's Babbage-era datum. 'Reference' and 'Inline'
-- collapse onto the same ledger constructors: the ledger type only tracks
-- whether a hash or a full payload is known.
toBabbageDatum
    :: Datum
    -> Ledger.Datum (BabbageEra StandardCrypto)
toBabbageDatum NoDatum = Ledger.NoDatum
toBabbageDatum (Reference (Left ref)) = Ledger.DatumHash ref
toBabbageDatum (Reference (Right bin)) = Ledger.Datum bin
toBabbageDatum (Inline (Left ref)) = Ledger.DatumHash ref
toBabbageDatum (Inline (Right bin)) = Ledger.Datum bin
-- | Read a Babbage-era ledger datum back into our representation. A bare
-- hash is interpreted as a reference and a full payload as inline; the
-- ledger type does not preserve the distinction, so this is a convention.
fromBabbageDatum
    :: Ledger.Datum (BabbageEra StandardCrypto)
    -> Datum
fromBabbageDatum Ledger.NoDatum = NoDatum
fromBabbageDatum (Ledger.DatumHash ref) = Reference (Left ref)
fromBabbageDatum (Ledger.Datum bin) = Inline (Right bin)
-- | Extract the full binary payload when one is held, whether the datum is
-- inline or referenced; 'Nothing' when only a hash (or no datum) is known.
getBinaryData
    :: Datum
    -> Maybe BinaryData
getBinaryData NoDatum = Nothing
getBinaryData (Reference (Right bin)) = Just bin
getBinaryData (Reference (Left _)) = Nothing
getBinaryData (Inline (Right bin)) = Just bin
getBinaryData (Inline (Left _)) = Nothing
-- | The datum's hash, if any: taken verbatim when only the hash is known,
-- or computed from the binary payload otherwise.
hashDatum
    :: Datum
    -> Maybe DatumHash
hashDatum NoDatum = Nothing
hashDatum (Reference (Left ref)) = Just ref
hashDatum (Reference (Right bin)) = Just (hashBinaryData bin)
hashDatum (Inline (Left ref)) = Just ref
hashDatum (Inline (Right bin)) = Just (hashBinaryData bin)
| null | https://raw.githubusercontent.com/CardanoSolutions/kupo/4904123abeed53f672eb34e0ef10c6c710bda61b/src/Kupo/Data/Cardano/Datum.hs | haskell | module Kupo.Data.Cardano.Datum where
import Kupo.Prelude
import Kupo.Data.Cardano.BinaryData
( BinaryData
, hashBinaryData
)
import Kupo.Data.Cardano.DatumHash
( DatumHash
)
import qualified Cardano.Ledger.Alonzo.Data as Ledger
data Datum
= NoDatum
| Reference !(Either DatumHash BinaryData)
| Inline !(Either DatumHash BinaryData)
deriving (Generic, Show, Eq, Ord)
toBabbageDatum
:: Datum
-> Ledger.Datum (BabbageEra StandardCrypto)
toBabbageDatum = \case
NoDatum -> Ledger.NoDatum
Reference (Left ref) -> Ledger.DatumHash ref
Reference (Right bin) -> Ledger.Datum bin
Inline (Left ref) -> Ledger.DatumHash ref
Inline (Right bin) -> Ledger.Datum bin
fromBabbageDatum
:: Ledger.Datum (BabbageEra StandardCrypto)
-> Datum
fromBabbageDatum = \case
Ledger.NoDatum -> NoDatum
Ledger.DatumHash ref -> Reference (Left ref)
Ledger.Datum bin -> Inline (Right bin)
getBinaryData
:: Datum
-> Maybe BinaryData
getBinaryData = \case
NoDatum -> Nothing
Reference (Right bin) -> Just bin
Reference{} -> Nothing
Inline (Right bin) -> Just bin
Inline Left{} -> Nothing
hashDatum
:: Datum
-> Maybe DatumHash
hashDatum = \case
NoDatum -> Nothing
Reference (Left ref) -> Just ref
Reference (Right bin) -> Just (hashBinaryData bin)
Inline (Left ref) -> Just ref
Inline (Right bin) -> Just (hashBinaryData bin)
| |
32d3fb74c51e6b946cf03ccb6e75daecc19797e2f5836ad71e66bd7839b8b432 | synduce/Synduce | slsp.ml | * @synduce --no - lifting -NB -n 10
(* Plain cons-list of ints: the representation the reference function runs on. *)
type list =
  | Elt of int
  | Cons of int * list
(* Concat-list of nested list with pivot *)
type cnlist =
  | Sglt of int
  | Cat of cnlist * int * cnlist
(* Flatten a concat-list into a plain list, left to right. [dec l1 t]
   prepends the flattening of [t] onto the flattening of [l1]; the pivot
   ints stored at [Cat] nodes are dropped during the walk. *)
let rec clist_to_list = function
  | Sglt a -> Elt a
  | Cat (x, piv, y) -> dec y x
and dec l1 = function
  | Sglt a -> Cons (a, clist_to_list l1)
  | Cat (x, piv, y) -> dec (Cat (y, piv, l1)) x
;;
(* Type invariant: partitioned by sum value *)
(* [sorted] is a BST-style invariant: everything left of a pivot is strictly
   smaller than it, everything to its right strictly larger, recursively. *)
let rec sorted = function
  | Sglt a -> true
  | Cat (x, piv, y) -> lmax x < piv && piv < lmin y && sorted x && sorted y
and lmin = function
  | Sglt a -> a
  | Cat (x, piv, y) -> min (lmin x) (lmin y)
and lmax = function
  | Sglt a -> a
  | Cat (x, piv, y) -> max (lmax x) (lmax y)
;;
(* Reference function : sum of the longest suffix of nonnegative elements. *)
(* The boolean is true iff every element of the (sub)list is >= 0; [hd] is
   only added while that still holds for the whole remaining list, so the
   sum ends up covering the longest all-nonnegative suffix. *)
let rec spec = function
  | Elt a -> max 0 a, a >= 0
  | Cons (hd, tl) ->
    let mtss, cond = spec tl in
    let new_cond = hd >= 0 && cond in
    (if new_cond then mtss + hd else mtss), new_cond
  [@@ensures fun (x, b) -> x >= 0]
;;
(* Synthesis target: the recursion skeleton over (sorted) concat-lists.
   The [%synt _] holes are the component functions Synduce must discover so
   that the assertion below relating target and spec holds. *)
let rec target = function
  | Sglt x -> [%synt s0] x
  | Cat (l, piv, r) ->
    if piv <= 0 then [%synt f1] (target r) else [%synt f2] piv (target r) (target l)
  [@@requires sorted]
;;
assert (target = clist_to_list @@ spec)
| null | https://raw.githubusercontent.com/synduce/Synduce/d453b04cfb507395908a270b1906f5ac34298d29/benchmarks/constraints/sortedlist/slsp.ml | ocaml | Type invariant: partitioned by sum value
Reference function : sum of longest suffis of positive elements. | * @synduce --no - lifting -NB -n 10
type list =
| Elt of int
| Cons of int * list
Concat - list of nested list with pivot
type cnlist =
| Sglt of int
| Cat of cnlist * int * cnlist
let rec clist_to_list = function
| Sglt a -> Elt a
| Cat (x, piv, y) -> dec y x
and dec l1 = function
| Sglt a -> Cons (a, clist_to_list l1)
| Cat (x, piv, y) -> dec (Cat (y, piv, l1)) x
;;
let rec sorted = function
| Sglt a -> true
| Cat (x, piv, y) -> lmax x < piv && piv < lmin y && sorted x && sorted y
and lmin = function
| Sglt a -> a
| Cat (x, piv, y) -> min (lmin x) (lmin y)
and lmax = function
| Sglt a -> a
| Cat (x, piv, y) -> max (lmax x) (lmax y)
;;
let rec spec = function
| Elt a -> max 0 a, a >= 0
| Cons (hd, tl) ->
let mtss, cond = spec tl in
let new_cond = hd >= 0 && cond in
(if new_cond then mtss + hd else mtss), new_cond
[@@ensures fun (x, b) -> x >= 0]
;;
let rec target = function
| Sglt x -> [%synt s0] x
| Cat (l, piv, r) ->
if piv <= 0 then [%synt f1] (target r) else [%synt f2] piv (target r) (target l)
[@@requires sorted]
;;
assert (target = clist_to_list @@ spec)
|
d399ce899f456995d6e7f24bf0f0bb919ffe1ac6ae75b37fca73fbaa776618af | compiling-to-categories/concat | VectorSpace.hs | # LANGUAGE TypeApplications #
# LANGUAGE UndecidableInstances #
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE FlexibleContexts #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE CPP #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE PartialTypeSignatures #
# LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DefaultSignatures #
# LANGUAGE ScopedTypeVariables #
# OPTIONS_GHC -Wall #
{-# OPTIONS_GHC -Wno-unused-imports #-} -- TEMP
-- | Vector spaces as zippable functors
module ConCat.Free.VectorSpace where
import Prelude hiding (zipWith)
import Data.Monoid (Sum(..),Product(..))
import Data.Semigroup (Semigroup(..))
-- import GHC.Exts (Coercible,coerce)
import GHC.Generics (U1(..),Par1(..),(:*:)(..),(:+:)(..),(:.:)(..))
#ifdef VectorSized
import GHC.TypeLits (KnownNat)
#endif
import Data.Foldable (fold)
import Data.Pointed
import Data.Key (Zip(..))
import Data . Vector . Sized ( Vector )
import Data . Map ( Map )
import Data.Constraint ((:-)(..),Dict(..))
import Data.Vector.Sized (Vector)
import Control . Newtype . Generics
import ConCat.Orphans ()
import ConCat.Misc ((:*),(:+),(<~),sqr)
import ConCat.Rep
import ConCat . Category ( UT( .. ),Constrained( .. ( .. ) )
import ConCat.AltCat (OpCon(..),Sat,type (|-)(..),fmapC)
{--------------------------------------------------------------------
Vector spaces
--------------------------------------------------------------------}
infixl 7 *^, <.>, >.<
infixl 6 ^+^, ^-^
#if 1
type Zeroable = Pointed
-- | Zero vector: every component is 0.
zeroV :: (Pointed f, Num a) => f a
zeroV = point 0
-- TODO: Maybe use tabulate . const instead of point
#else
Experimental alternative to Pointed
class Functor f => Zeroable f where
zeroV :: Num a => f a
default zeroV :: (Pointed f, Num a) => f a
zeroV = point 0
The Functor superclass is just for convenience .
-- Remove if needed (and fix other signatures).
instance Zeroable U1 where
-- zeroV = U1
{ - # INLINE zeroV # - }
-- The following instance could be defaulted. I'm tracking down what might be an
-- inlining failure.
instance Zeroable Par1 where
zeroV = Par1 0
# INLINE zeroV #
instance Zeroable ((->) k)
instance Ord k => Zeroable (Map k) where
zeroV = mempty
# INLINE zeroV #
instance (Zeroable f, Zeroable g) => Zeroable (f :*: g) where
zeroV = zeroV :*: zeroV
# INLINE zeroV #
instance (Zeroable f, Zeroable g) => Zeroable (g :.: f) where
zeroV = Comp1 (const zeroV <$> (zeroV :: g Int))
# INLINE zeroV #
#endif
-- TODO: Replace Num constraints with Ring or SemiRing
-- | Scale a vector, multiplying every component by @s@.
scaleV, (*^) :: (Functor f, Num s) => s -> f s -> f s
(*^) s = fmap (s *)
scaleV = (*^)
{-# INLINE (*^) #-}
{-# INLINE scaleV #-}
-- | Negate a vector componentwise (scale by -1).
negateV :: (Functor f, Num s) => f s -> f s
negateV = ((-1) *^)
{-# INLINE negateV #-}
-- | Add vectors componentwise.
-- NB: zipWith here is Data.Key's (the Prelude's is hidden above).
addV, (^+^) :: (Zip f, Num s) => f s -> f s -> f s
(^+^) = zipWith (+)
addV = (^+^)
{-# INLINE (^+^) #-}
{-# INLINE addV #-}
-- | Subtract vectors componentwise.
subV, (^-^) :: (Zip f, Num s) => f s -> f s -> f s
(^-^) = zipWith (-)
subV = (^-^)
{-# INLINE (^-^) #-}
{-# INLINE subV #-}
-- | Inner (dot) product: componentwise product, then sum.
dotV, (<.>) :: forall s f. (Zip f, Foldable f, Num s) => f s -> f s -> s
x <.> y = sum (zipWith (*) x y)
dotV = (<.>)
{-# INLINE (<.>) #-}
{-# INLINE dotV #-}
-- | Norm squared: sum of squared components.
#if 1
normSqr :: forall s f. (Functor f, Foldable f, Num s) => f s -> s
normSqr = sum . fmap sqr
#else
normSqr :: forall s f. (Zip f, Foldable f, Num s) => f s -> s
normSqr u = u <.> u
#endif
{-# INLINE normSqr #-}
-- | Distance squared: norm squared of the componentwise difference.
distSqr :: forall s f. (Zip f, Foldable f, Num s) => f s -> f s -> s
distSqr u v = normSqr (u ^-^ v)
{-# INLINE distSqr #-}
-- | Outer product: each component of @x@ scales a full copy of @y@.
outerV, (>.<) :: (Num s, Functor f, Functor g) => g s -> f s -> g (f s)
(>.<) x y = fmap (*^ y) x
outerV = (>.<)
{-# INLINE (>.<) #-}
{-# INLINE outerV #-}
-- | Scale a vector by the reciprocal of its component sum, so the result's
-- components sum to 1.
-- NOTE(review): despite the name, this does NOT produce unit Euclidean
-- magnitude (that would divide by @sqrt (normSqr xs)@), and it yields
-- NaN/Infinity when the component sum is zero -- confirm intended semantics.
normalizeV :: (Functor f, Foldable f, Floating a) => f a -> f a
normalizeV xs = (/ sum xs) <$> xs
{-# INLINE normalizeV #-}
-- Would I rather prefer swapping the arguments (equivalently, transposing the
-- result)?
-- newtype SumV f a = SumV (f a)
-- | Wrapper giving vectors a 'Monoid' under componentwise addition.
data SumV f a = SumV (f a)
instance HasRep (SumV f a) where
  type Rep (SumV f a) = f a
  abst as = SumV as
  repr (SumV as) = as
  {-# INLINE abst #-}
  {-# INLINE repr #-}
instance (Zeroable f, Zip f, Num a) => Semigroup (SumV f a) where
  (<>) = inAbst2 (^+^)
instance (Zeroable f, Zip f, Num a) => Monoid (SumV f a) where
  mempty = abst zeroV
  mappend = (<>)
-- | Sum a collection of vectors componentwise (via the 'SumV' monoid).
sumV :: (Functor m, Foldable m, Zeroable n, Zip n, Num a) => m (n a) -> n a
sumV = repr . fold . fmap SumV
{-# INLINE sumV #-}
{--------------------------------------------------------------------
Conversion
--------------------------------------------------------------------}
-- | Constraint bundle for deriving 'HasV' through a type's 'HasRep' encoding.
type RepHasV s a = (HasRep a, HasV s (Rep a), V s a ~ V s (Rep a))
-- | Types with a free-vector (functor) representation over the scalar @s@.
-- 'toV' and 'unV' convert between a value and its vector encoding.
class HasV s a where
  type V s a :: * -> *
  toV :: a -> V s a s
  unV :: V s a s -> a
  -- Default via Rep.
  type V s a = V s (Rep a)
  default toV :: RepHasV s a => a -> V s a s
  default unV :: RepHasV s a => V s a s -> a
  toV = toV . repr
  unV = abst . unV
  {-# INLINE unV #-}
-- | Run a function on values as a function on their vector encodings
-- (via the (<~) composition combinator from ConCat.Misc).
inV :: forall s a b. (HasV s a, HasV s b) => (a -> b) -> (V s a s -> V s b s)
inV = toV <~ unV
-- | Run a function on vector encodings as a function on the underlying values.
onV :: forall s a b. (HasV s a, HasV s b) => (V s a s -> V s b s) -> (a -> b)
onV = unV <~ toV
-- | Binary version of 'onV': converts the second argument as well.
onV2 :: forall s a b c. (HasV s a, HasV s b, HasV s c) => (V s a s -> V s b s -> V s c s) -> (a -> b -> c)
onV2 = onV <~ toV
Can I replace my HasRep class with Newtype ?
-- -- Replace by special cases as needed
instance HasV s s where
-- type V s s = Par1
-- toV = Par1
-- unV = unPar1
-- | Scalars are exactly the types whose vector encoding is 'Par1'.
type IsScalar s = (HasV s s, V s s ~ Par1)
-- Unit carries no information: it encodes as the empty functor.
instance HasV s () where
  type V s () = U1
  toV () = U1
  unV U1 = ()
-- Scalar instances: a scalar is its own one-component vector.
instance HasV Float Float where
  type V Float Float = Par1
  toV = Par1
  unV = unPar1
instance HasV Double Double where
  type V Double Double = Par1
  toV = Par1
  unV = unPar1
-- etc
-- Products encode as products of the component encodings.
instance (HasV s a, HasV s b) => HasV s (a :* b) where
  type V s (a :* b) = V s a :*: V s b
  toV (a,b) = toV a :*: toV b
  unV (f :*: g) = (unV f,unV g)
  {-# INLINE unV #-}
instance OpCon (:*) (Sat (HasV s)) where
  inOp = Entail (Sub Dict)
  {-# INLINE inOp #-}
-- Sums encode as sums of the component encodings.
instance (HasV s a, HasV s b) => HasV s (a :+ b) where
  type V s (a :+ b) = V s a :+: V s b
  toV (Left a) = L1 (toV a)
  toV (Right b) = R1 (toV b)
  unV (L1 fs) = Left (unV fs)
  unV (R1 gs) = Right (unV gs)
  {-# INLINE unV #-}
instance ( HasV s a , HasV s b , ( V s a ) , ( V s b ) , s )
-- => HasV s (a :+ b) where
-- type V s (a :+ b) = V s a :*: V s b
-- toV (Left a) = toV a :*: zeroV
-- toV (Right b) = zeroV :*: toV b
-- unV (f :*: g) = error "unV on a :+ b undefined" f g
-- Tuple instances go through the default HasRep route.
instance (HasV s a, HasV s b, HasV s c) => HasV s (a,b,c)
instance (HasV s a, HasV s b, HasV s c, HasV s d) => HasV s (a,b,c,d)
-- Sometimes it's better not to use the default. I think the following gives more reuse:
-- instance HasV s a => HasV s (Pair a) where
--   type V s (Pair a) = Pair :.: V s a
--   toV = Comp1 . fmap toV
--   unV = fmap unV . unComp1
-- Similarly for other functors
-- Functor-level building blocks also default via their HasRep encodings.
instance HasV s (U1 a)
instance HasV s a => HasV s (Par1 a)
instance (HasV s (f a), HasV s (g a)) => HasV s ((f :*: g) a)
instance (HasV s (g (f a))) => HasV s ((g :.: f) a)
instance HasV s (f a) => HasV s (SumV f a)
instance HasV s a => HasV s (Sum a)
instance HasV s a => HasV s (Product a)
-- TODO: More newtypes
-- Sometimes it's better not to use the default. I think the following gives more reuse:
-- instance HasV s a => HasV s (Pair a) where
--   type V s (Pair a) = Pair :.: V s a
--   toV = Comp1 . fmap toV
--   unV = fmap unV . unComp1
-- Similarly for other functors
-- | Functors @h@ whose 'HasV' encoding composes: @V s (h c) ~ h :.: V s c@.
class VComp h where
  vcomp :: forall s c. HasV s c :- (HasV s (h c), V s (h c) ~ (h :.: V s c))
#if 1
-- Functions encode pointwise: a function into b is an a-indexed collection
-- of b's encodings.
instance HasV s b => HasV s (a -> b) where
  type V s (a -> b) = (->) a :.: V s b
  toV = Comp1 . fmap toV
  unV = fmap unV . unComp1
  {-# INLINE unV #-}
#else
instance HasV s b => HasV s (a -> b) where
  type V s (a -> b) = Map a :.: V s b
  toV = Comp1 . ??
  unV = ?? . unComp1
#endif
instance VComp ((->) a) where vcomp = Sub Dict
#ifdef VectorSized
#if 0
-- Until I work out HasL (g :.: f) or stop using it, restrict elements to s.
instance KnownNat n => HasV s (Vector n s) where
  type V s (Vector n s) = Vector n
  toV = id
  unV = id
  {-# INLINE toV #-}
  {-# INLINE unV #-}
#else
-- Sized vectors compose elementwise, like functions above.
instance (HasV s b, KnownNat n) => HasV s (Vector n b) where
  type V s (Vector n b) = Vector n :.: V s b
  toV = Comp1 . fmapC toV
  unV = fmapC unV . unComp1
  {-# INLINE toV #-}
  {-# INLINE unV #-}
#endif
#else
instance (HasV s b) => HasV s (Vector n b) where
  type V s (Vector n b) = Vector n :.: V s b
  toV = Comp1 . fmapC toV
  unV = fmapC unV . unComp1
  {-# INLINE toV #-}
  {-# INLINE unV #-}
#endif
-- TODO: find a better alternative to using fmapC explicitly here. I'd like to
-- use fmap instead, but it gets inlined immediately, as do all class
-- operations.
-- instance
-- #ifdef VectorSized
--   KnownNat n =>
-- #endif
--   VComp (Vector n) where vcomp = Sub Dict
#ifndef VectorSized
instance VComp (Vector n) where vcomp = Sub Dict
#endif
#if 0
-- Example default instance
data Pickle a = Pickle a a a
instance HasRep (Pickle a) where
type Rep (Pickle a) = (a :* a) :* a
repr (Pickle a b c) = ((a,b),c)
abst ((a,b),c) = Pickle a b c
instance HasV s a => HasV s (Pickle a)
#endif
#if 0
-- -- | The 'unV' form of 'zeroV'
zeroX : : forall s a. ( HasV s a , ( V s a ) ) = > a
-- zeroX = unV (zeroV :: V s a s)
vfun :: (HasV s a, HasV s b) => (a -> b) -> UT s (V s a) (V s b)
vfun = UT . inV
-- vfun f = UT (toV . f . unV)
-- | Free vector over scalar s
data VFun s
instance FunctorC (VFun s) (Constrained (HasV s) (->)) (UT s) where
type ( VFun s ) = HasV s
type ( VFun s ) a = HasV s a
type ( VFun s ) b a = ( HasV s a , HasV s b )
type VFun s % a = V s a
fmapC (Constrained f) = UT (inV f)
-- vfun f
#endif
#if 0
{--------------------------------------------------------------------
Coercible
--------------------------------------------------------------------}
-- I don't think I need this stuff.
As a second default , we can use coercible types .
coerceToV :: forall s a b. (Coercible a b, HasV s b) => a -> V s b s
coerceToV = toV . (coerce :: a -> b)
coerceUnV :: forall s a b. (Coercible a b, HasV s b) => V s b s -> a
coerceUnV = (coerce :: b -> a) . unV
#if 0
#define CoerceHasV(s,ty,rep) \
instance HasV s (rep) => HasV s (ty) where \
{ type V s (ty) = V s (rep) \
; toV = coerceToV @ s @ (ty) @ (rep) \
; unV = coerceUnV @ s @ (ty) @ (rep) }
newtype Two s = Two (s :* s)
instance HasV s s = > HasV s ( Two s ) where
type V s ( Two s ) = V s ( s :* s )
toV = coerceToV @ s @ ( Two s ) @ ( s :* s )
unV = coerceUnV @ s @ ( Two s ) @ ( s :* s )
CoerceHasV(s,Two s,s :* s)
#endif
#endif
{--------------------------------------------------------------------
Utilities
--------------------------------------------------------------------}
| null | https://raw.githubusercontent.com/compiling-to-categories/concat/49e554856576245f583dfd2484e5f7c19f688028/examples/src/ConCat/Free/VectorSpace.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE TypeSynonymInstances #
# OPTIONS_GHC -Wno-unused-imports #
TEMP
| Vector spaces as zippable functors
import GHC.Exts (Coercible,coerce)
-------------------------------------------------------------------
Vector spaces
-------------------------------------------------------------------
TODO: Maybe use tabulate . const instead of point
Remove if needed (and fix other signatures).
zeroV = U1
The following instance could be defaulted. I'm tracking down what might be an
inlining failure.
TODO: Replace Num constraints with Ring or SemiRing
| Scale a vector
# INLINE (*^) #
| Negate a vector
| Add vectors
# INLINE (^+^) #
| Subtract vectors
| Inner product
# INLINE (<.>) #
| Norm squared
| Distance squared
| Outer product
# INLINE (>.<) #
| Normalize a vector (scale to unit magnitude)
# INLINE normalizeV #
Would I rather prefer swapping the arguments (equivalently, transposing the
result)?
newtype SumV f a = SumV (f a)
# INLINE repr #
-------------------------------------------------------------------
Conversion
-------------------------------------------------------------------
Default via Rep.
-- Replace by special cases as needed
type V s s = Par1
toV = Par1
unV = unPar1
etc
=> HasV s (a :+ b) where
type V s (a :+ b) = V s a :*: V s b
toV (Left a) = toV a :*: zeroV
toV (Right b) = zeroV :*: toV b
unV (f :*: g) = error "unV on a :+ b undefined" f g
Sometimes it's better not to use the default. I think the following gives more reuse:
type V s (Pair a) = Pair :.: V s a
toV = Comp1 . fmap toV
unV = fmap unV . unComp1
Similarly for other functors
Sometimes it's better not to use the default. I think the following gives more reuse:
type V s (Pair a) = Pair :.: V s a
toV = Comp1 . fmap toV
unV = fmap unV . unComp1
Similarly for other functors
TODO: find a better alternative to using fmapC explicitly here. I'd like to
use fmap instead, but it gets inlined immediately, as do all class
operations.
instance
#ifdef VectorSized
#endif
VComp (Vector n) where vcomp = Sub Dict
Example default instance
-- | The 'unV' form of 'zeroV'
zeroX = unV (zeroV :: V s a s)
vfun f = UT (toV . f . unV)
| Free vector over scalar s
vfun f
-------------------------------------------------------------------
Coercible
-------------------------------------------------------------------
I don't think I need this stuff.
-------------------------------------------------------------------
Utilities
------------------------------------------------------------------- | # LANGUAGE TypeApplications #
# LANGUAGE UndecidableInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE CPP #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE PartialTypeSignatures #
# LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DefaultSignatures #
# LANGUAGE ScopedTypeVariables #
# OPTIONS_GHC -Wall #
module ConCat.Free.VectorSpace where
import Prelude hiding (zipWith)
import Data.Monoid (Sum(..),Product(..))
import Data.Semigroup (Semigroup(..))
import GHC.Generics (U1(..),Par1(..),(:*:)(..),(:+:)(..),(:.:)(..))
#ifdef VectorSized
import GHC.TypeLits (KnownNat)
#endif
import Data.Foldable (fold)
import Data.Pointed
import Data.Key (Zip(..))
import Data . Vector . Sized ( Vector )
import Data . Map ( Map )
import Data.Constraint ((:-)(..),Dict(..))
import Data.Vector.Sized (Vector)
import Control . Newtype . Generics
import ConCat.Orphans ()
import ConCat.Misc ((:*),(:+),(<~),sqr)
import ConCat.Rep
import ConCat . Category ( UT( .. ),Constrained( .. ( .. ) )
import ConCat.AltCat (OpCon(..),Sat,type (|-)(..),fmapC)
infixl 7 *^, <.>, >.<
infixl 6 ^+^, ^-^
#if 1
type Zeroable = Pointed
Zero vector
zeroV :: (Pointed f, Num a) => f a
zeroV = point 0
#else
Experimental alternative to Pointed
class Functor f => Zeroable f where
zeroV :: Num a => f a
default zeroV :: (Pointed f, Num a) => f a
zeroV = point 0
The Functor superclass is just for convenience .
instance Zeroable U1 where
{ - # INLINE zeroV # - }
instance Zeroable Par1 where
zeroV = Par1 0
# INLINE zeroV #
instance Zeroable ((->) k)
instance Ord k => Zeroable (Map k) where
zeroV = mempty
# INLINE zeroV #
instance (Zeroable f, Zeroable g) => Zeroable (f :*: g) where
zeroV = zeroV :*: zeroV
# INLINE zeroV #
instance (Zeroable f, Zeroable g) => Zeroable (g :.: f) where
zeroV = Comp1 (const zeroV <$> (zeroV :: g Int))
# INLINE zeroV #
#endif
scaleV, (*^) :: (Functor f, Num s) => s -> f s -> f s
s *^ v = (s *) <$> v
scaleV = (*^)
# INLINE scaleV #
negateV :: (Functor f, Num s) => f s -> f s
negateV = ((-1) *^)
# INLINE negateV #
addV, (^+^) :: (Zip f, Num s) => f s -> f s -> f s
(^+^) = zipWith (+)
addV = (^+^)
# INLINE addV #
subV, (^-^) :: (Zip f, Num s) => f s -> f s -> f s
(^-^) = zipWith (-)
subV = (^-^)
# INLINE ( ^-^ ) #
# INLINE subV #
dotV, (<.>) :: forall s f. (Zip f, Foldable f, Num s) => f s -> f s -> s
x <.> y = sum (zipWith (*) x y)
dotV = (<.>)
# INLINE dotV #
#if 1
normSqr :: forall s f. (Functor f, Foldable f, Num s) => f s -> s
normSqr = sum . fmap sqr
#else
normSqr :: forall s f. (Zip f, Foldable f, Num s) => f s -> s
normSqr u = u <.> u
#endif
# INLINE normSqr #
distSqr :: forall s f. (Zip f, Foldable f, Num s) => f s -> f s -> s
distSqr u v = normSqr (u ^-^ v)
# INLINE distSqr #
outerV, (>.<) :: (Num s, Functor f, Functor g) => g s -> f s -> g (f s)
x >.< y = (*^ y) <$> x
outerV = (>.<)
# INLINE outerV #
normalizeV :: (Functor f, Foldable f, Floating a) => f a -> f a
normalizeV xs = (/ sum xs) <$> xs
data SumV f a = SumV (f a)
instance HasRep (SumV f a) where
type Rep (SumV f a) = f a
abst as = SumV as
repr (SumV as) = as
# INLINE abst #
instance (Zeroable f, Zip f, Num a) => Semigroup (SumV f a) where
(<>) = inAbst2 (^+^)
instance (Zeroable f, Zip f, Num a) => Monoid (SumV f a) where
mempty = abst zeroV
mappend = (<>)
sumV :: (Functor m, Foldable m, Zeroable n, Zip n, Num a) => m (n a) -> n a
sumV = repr . fold . fmap SumV
# INLINE sumV #
type RepHasV s a = (HasRep a, HasV s (Rep a), V s a ~ V s (Rep a))
class HasV s a where
type V s a :: * -> *
toV :: a -> V s a s
unV :: V s a s -> a
type V s a = V s (Rep a)
default toV :: RepHasV s a => a -> V s a s
default unV :: RepHasV s a => V s a s -> a
toV = toV . repr
unV = abst . unV
# INLINE unV #
inV :: forall s a b. (HasV s a, HasV s b) => (a -> b) -> (V s a s -> V s b s)
inV = toV <~ unV
onV :: forall s a b. (HasV s a, HasV s b) => (V s a s -> V s b s) -> (a -> b)
onV = unV <~ toV
onV2 :: forall s a b c. (HasV s a, HasV s b, HasV s c) => (V s a s -> V s b s -> V s c s) -> (a -> b -> c)
onV2 = onV <~ toV
Can I replace my HasRep class with Newtype ?
instance HasV s s where
type IsScalar s = (HasV s s, V s s ~ Par1)
instance HasV s () where
type V s () = U1
toV () = U1
unV U1 = ()
instance HasV Float Float where
type V Float Float = Par1
toV = Par1
unV = unPar1
instance HasV Double Double where
type V Double Double = Par1
toV = Par1
unV = unPar1
instance (HasV s a, HasV s b) => HasV s (a :* b) where
type V s (a :* b) = V s a :*: V s b
toV (a,b) = toV a :*: toV b
unV (f :*: g) = (unV f,unV g)
# INLINE unV #
instance OpCon (:*) (Sat (HasV s)) where
inOp = Entail (Sub Dict)
# INLINE inOp #
instance (HasV s a, HasV s b) => HasV s (a :+ b) where
type V s (a :+ b) = V s a :+: V s b
toV (Left a) = L1 (toV a)
toV (Right b) = R1 (toV b)
unV (L1 fs) = Left (unV fs)
unV (R1 gs) = Right (unV gs)
# INLINE unV #
instance ( HasV s a , HasV s b , ( V s a ) , ( V s b ) , s )
instance (HasV s a, HasV s b, HasV s c) => HasV s (a,b,c)
instance (HasV s a, HasV s b, HasV s c, HasV s d) => HasV s (a,b,c,d)
instance HasV s a = > HasV s ( Pair a ) where
instance HasV s (U1 a)
instance HasV s a => HasV s (Par1 a)
instance (HasV s (f a), HasV s (g a)) => HasV s ((f :*: g) a)
instance (HasV s (g (f a))) => HasV s ((g :.: f) a)
instance HasV s (f a) => HasV s (SumV f a)
instance HasV s a => HasV s (Sum a)
instance HasV s a => HasV s (Product a)
TODO : More newtypes
instance HasV s a = > HasV s ( Pair a ) where
class VComp h where
vcomp :: forall s c. HasV s c :- (HasV s (h c), V s (h c) ~ (h :.: V s c))
#if 1
instance HasV s b => HasV s (a -> b) where
type V s (a -> b) = (->) a :.: V s b
toV = Comp1 . fmap toV
unV = fmap unV . unComp1
# INLINE unV #
#else
instance HasV s b => HasV s (a -> b) where
type V s (a -> b) = Map a :.: V s b
toV = Comp1 . ??
unV = ?? . unComp1
#endif
instance VComp ((->) a) where vcomp = Sub Dict
#ifdef VectorSized
#if 0
Until I work out HasL ( g : . : f ) or stop using it , restrict elements to s.
instance KnownNat n => HasV s (Vector n s) where
type V s (Vector n s) = Vector n
toV = id
unV = id
# INLINE toV #
# INLINE unV #
#else
instance (HasV s b, KnownNat n) => HasV s (Vector n b) where
type V s (Vector n b) = Vector n :.: V s b
toV = Comp1 . fmapC toV
unV = fmapC unV . unComp1
# INLINE toV #
# INLINE unV #
#endif
#else
instance (HasV s b) => HasV s (Vector n b) where
type V s (Vector n b) = Vector n :.: V s b
toV = Comp1 . fmapC toV
unV = fmapC unV . unComp1
# INLINE toV #
# INLINE unV #
#endif
KnownNat n = >
#ifndef VectorSized
instance VComp (Vector n) where vcomp = Sub Dict
#endif
#if 0
data Pickle a = Pickle a a a
instance HasRep (Pickle a) where
type Rep (Pickle a) = (a :* a) :* a
repr (Pickle a b c) = ((a,b),c)
abst ((a,b),c) = Pickle a b c
instance HasV s a => HasV s (Pickle a)
#endif
#if 0
zeroX : : forall s a. ( HasV s a , ( V s a ) ) = > a
vfun :: (HasV s a, HasV s b) => (a -> b) -> UT s (V s a) (V s b)
vfun = UT . inV
data VFun s
instance FunctorC (VFun s) (Constrained (HasV s) (->)) (UT s) where
type ( VFun s ) = HasV s
type ( VFun s ) a = HasV s a
type ( VFun s ) b a = ( HasV s a , HasV s b )
type VFun s % a = V s a
fmapC (Constrained f) = UT (inV f)
#endif
#if 0
As a second default , we can use coercible types .
coerceToV :: forall s a b. (Coercible a b, HasV s b) => a -> V s b s
coerceToV = toV . (coerce :: a -> b)
coerceUnV :: forall s a b. (Coercible a b, HasV s b) => V s b s -> a
coerceUnV = (coerce :: b -> a) . unV
#if 0
#define CoerceHasV(s,ty,rep) \
instance HasV s (rep) => HasV s (ty) where \
{ type V s (ty) = V s (rep) \
; toV = coerceToV @ s @ (ty) @ (rep) \
; unV = coerceUnV @ s @ (ty) @ (rep) }
newtype Two s = Two (s :* s)
instance HasV s s = > HasV s ( Two s ) where
type V s ( Two s ) = V s ( s :* s )
toV = coerceToV @ s @ ( Two s ) @ ( s :* s )
unV = coerceUnV @ s @ ( Two s ) @ ( s :* s )
CoerceHasV(s,Two s,s :* s)
#endif
#endif
|
7ff1923062f44d3cf97b3fc3ebb7ac5a1eda9dd7c587b39a9209e6285d78fe74 | kmi/irs | new.lisp | Mode : Lisp ; Package :
Author :
The Open University
(in-package "OCML")
(in-ontology medical-ontology)
(def-class generic-care-giver(person))
(def-class generic-care-giver-type ()?X
:iff-def (subclass-of ?X generic-care-giver))
(def-class medical-condition (medical-ontology-object)
((has-risk-factor :type medical-variable)))
(def-class patient (person)
((has-medical-condition :type medical-condition)))
(def-class healthcare-instrument (medical-ontology-object)
((associated-medical-condition-class :type medical-condition-type)))
(def-class clinical-instrument (healthcare-instrument)
"These are instruments that can only be used in a clinical setting")
(def-class generalized-healthcare-technique (medical-ontology-object))
(def-class generalized-clinical-technique (generalized-healthcare-technique)
"These are techniques that can only be applied in a clinical setting")
(def-class non-professional-care-giver (generic-care-giver)
((cares-for :type person)))
(def-class health-care-professional (generic-care-giver working-person))
(def-class lesion (medical-condition))
(def-class paramedic (health-care-professional))
(def-class health-care-technician (health-care-professional))
(def-class physician (health-care-professional))
(def-class nurse (health-care-professional))
(def-relation chair-bound (?x)
:constraint (person ?x))
(def-relation bed-ridden (?x)
:constraint (person ?x))
| null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/ontologies/domains/medical-ontology/new.lisp | lisp | Package : |
Author :
The Open University
(in-package "OCML")
(in-ontology medical-ontology)
(def-class generic-care-giver(person))
(def-class generic-care-giver-type ()?X
:iff-def (subclass-of ?X generic-care-giver))
(def-class medical-condition (medical-ontology-object)
((has-risk-factor :type medical-variable)))
(def-class patient (person)
((has-medical-condition :type medical-condition)))
(def-class healthcare-instrument (medical-ontology-object)
((associated-medical-condition-class :type medical-condition-type)))
(def-class clinical-instrument (healthcare-instrument)
"These are instruments that can only be used in a clinical setting")
(def-class generalized-healthcare-technique (medical-ontology-object))
(def-class generalized-clinical-technique (generalized-healthcare-technique)
"These are techniques that can only be applied in a clinical setting")
(def-class non-professional-care-giver (generic-care-giver)
((cares-for :type person)))
(def-class health-care-professional (generic-care-giver working-person))
(def-class lesion (medical-condition))
(def-class paramedic (health-care-professional))
(def-class health-care-technician (health-care-professional))
(def-class physician (health-care-professional))
(def-class nurse (health-care-professional))
(def-relation chair-bound (?x)
:constraint (person ?x))
(def-relation bed-ridden (?x)
:constraint (person ?x))
|
3e64bb88bf00225f8588416f271567f9da6860fb8eca908505ebbefb93a09818 | zkat/sheeple | properties.lisp | -*- Mode : Lisp ; Syntax : ANSI - Common - Lisp ; Base : 10 ; indent - tabs - mode : nil -*-
;;;; This file is part of Sheeple
;;;; properties.lisp
;;;;
;;;; Property access, inspection, and management stuff, for the most part.
;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package :sheeple)
(defun property-position (property-name object)
(position property-name (mold-properties (%object-mold object)) :test #'eq))
;;;
;;; Base Property API
;;;
(defun direct-property-value (object property-name)
"Returns the property-value set locally in OBJECT for PROPERTY-NAME. If the
property is not set locally, a condition of type `unbound-property' is signaled."
(if (std-object-p object)
(std-direct-property-value object property-name)
(funcall 'smop:direct-property-value (object-metaobject object) object property-name)))
(defun std-direct-property-value (object property-name)
(aif (property-position property-name object)
(svref (%object-property-values object) it)
(restart-case (error 'unbound-property :object object :property-name property-name)
(continue ()
:report "Try accessing the property again."
(direct-property-value object property-name))
(use-value (value)
:report "Return a value."
:interactive (lambda ()
(format *query-io* "~&Value to use: ")
(list (read *query-io*)))
value))))
(defun property-value (object property-name)
"Returns the property-value for PROPERTY-NAME found first in OBJECT's hierarchy list.
If the value does not exist in the hierarchy list, a condition of type `unbound-property'
is signaled."
(if (std-object-p object)
(std-property-value object property-name)
(funcall 'smop:property-value (object-metaobject object) object property-name)))
(defun std-property-value (object property-name)
(dolist (ancestor (object-precedence-list object)
(error 'unbound-property :object object :property-name property-name))
(handler-bind ((unbound-property (fun (go :next))))
(return (direct-property-value ancestor property-name)))
:next))
(defun (setf direct-property-value) (new-value object property-name &rest options)
"Tries to set a direct property value for OBJECT. If it succeeds, returns T, otherwise NIL."
(if (std-object-p object)
(apply #'(setf std-direct-property-value) new-value object property-name options)
(apply #'(setf smop:direct-property-value) new-value (object-metaobject object) object property-name options)))
(defun (setf std-direct-property-value) (new-value object property-name &rest options)
(declare (ignore options))
(awhen (property-position property-name object)
(setf (svref (%object-property-values object) it) new-value)
t))
(defun add-direct-property (object property-name &rest options)
"Adds a direct property to object, which involves any necessary allocation."
(if (std-object-p object)
(apply 'std-add-direct-property object property-name options)
(apply 'smop:add-direct-property (object-metaobject object) object property-name options)))
(defun std-add-direct-property (object property-name &rest options)
(declare (ignore options))
(change-mold object (ensure-transition (%object-mold object) property-name)))
(defun (setf property-value) (new-value object property-name &rest options
&key (reader nil readerp) (writer nil writerp) accessor)
"Sets NEW-VALUE as the value of a direct-property belonging to OBJECT, named
PROPERTY-NAME."
(flet ((maybe-set-prop () (apply #'(setf direct-property-value) new-value object property-name options)))
(or (maybe-set-prop) ; try to set it
(progn (apply 'add-direct-property object property-name options) ; couldn't set it, try adding it
(maybe-set-prop)) ; then try setting it again
(error "Could not set direct property value."))) ; bought the farm
(when options ; if we know there's no options, we may as well skip all of the checks..
(when reader (add-reader-to-object reader property-name object))
(when writer (add-writer-to-object writer property-name object))
(when accessor
(let ((accessor-name (if (eq t accessor) property-name accessor)))
(unless (and readerp (null reader))
(add-reader-to-object accessor-name property-name object))
(unless (and writerp (null writer))
(add-writer-to-object `(setf ,accessor-name) property-name object)))))
new-value)
(defun property-makunbound (object property-name)
"Removes OBJECT's direct property named PROPERTY-NAME. Signals an error if there is no such
direct property. Returns OBJECT."
(if (std-object-p object)
(std-property-makunbound object property-name)
(funcall 'smop:property-makunbound (object-metaobject object) object property-name)))
(defun std-property-makunbound (object property-name)
(if (direct-property-p object property-name)
(prog1 object
(change-mold object
(ensure-mold (%object-metaobject object) (%object-parents object)
(remove property-name
(mold-properties (%object-mold object))))))
(error 'unbound-property :object object :property-name property-name)))
(defun remove-property (object property-name)
"Removes OBJECT's direct property named PROPERTY-NAME. Signals an error if there is no such
direct property. Returns OBJECT."
(warn 'deprecated-feature :feature #'remove-property :version "3.0.2")
(property-makunbound object property-name))
(defun remove-all-direct-properties (object)
"Wipes out all direct properties and their values from OBJECT."
(if (std-object-p object)
(std-remove-all-direct-properties object)
(funcall 'smop:remove-all-direct-properties (object-metaobject object) object)))
(defun std-remove-all-direct-properties (object)
(change-mold object (ensure-mold (%object-metaobject object) (%object-parents object)))
object)
;;;
;;; Reflection API
;;;
(defun direct-property-p (object property-name)
"Returns T if OBJECT has a property called PROPERTY-NAME as a direct property.
NIL otherwise."
(if (std-object-p object)
(std-direct-property-p object property-name)
(funcall 'smop:direct-property-p (object-metaobject object) object property-name)))
(defun std-direct-property-p (object property-name)
(let ((has-property-p t))
(handler-case (direct-property-value object property-name)
(unbound-property () (setf has-property-p nil)))
has-property-p))
(defun property-owner (object property-name)
"Returns the object, if any, from which OBJECT would fetch the value for PROPERTY-NAME"
(if (std-object-p object)
(std-property-owner object property-name)
(funcall 'smop:property-owner (object-metaobject object) object property-name)))
(defun std-property-owner (object property-name)
(find-if (rcurry 'direct-property-p property-name) (object-precedence-list object)))
(defun direct-properties (object)
"Returns a list of the names of OBJECT's direct properties -- ie, only ones which have been
set directly in OBJECT using (setf property-value). The consequences of side-effecting this
returned list are undefined."
(if (std-object-p object)
(std-direct-properties object)
(funcall 'smop:direct-properties (object-metaobject object) object)))
(defun std-direct-properties (object)
(coerce (mold-properties (%object-mold object)) 'list))
(defun available-properties (object)
"Returns a list of the names of all properties available to OBJECT, including inherited ones."
(if (std-object-p object)
(std-available-properties object)
(funcall 'smop:available-properties (object-metaobject object) object)))
(defun std-available-properties (object)
(delete-duplicates (nconc (copy-list (direct-properties object))
(mapcan 'available-properties (object-parents object)))))
;;;
;;; Property-related convenience
;;;
;;; Nicknames
(defun object-nickname (object)
"Returns OBJECT's nickname"
(property-value object 'nickname))
(defun (setf object-nickname) (new-nickname object)
"Sets OBJECT's nickname to NEW-NICKNAME"
(handler-bind ((unbound-property 'continue))
(setf (property-value object 'nickname) new-nickname)))
;;; DOCUMENTATION
(defmethod documentation ((x object) (doc-type (eql 't)))
(property-value x 'documentation))
(defmethod (setf documentation) ((new-value string) (x object) (doc-type (eql 't)))
(handler-bind ((unbound-property 'continue))
(setf (property-value x 'documentation) new-value)))
DESCRIBE
(defmethod describe-object ((object object) stream)
(format stream
"~&Object: ~A~@
Parents: ~A~@
Properties: ~%~{~A~%~}"
object (object-parents object)
(mapcar (fun (format nil "~S: ~S~@[ (Delegated to: ~A)~]"
(car _) (second _)
(unless (eq object (third _))
(third _))))
(mapcar (fun (list _ (property-value object _) (property-owner object _)))
(available-properties object)))))
;;;
;;; Property symbol-macro
;;;
(defmacro with-properties (properties object &body body)
(let ((sh (gensym)))
`(let ((,sh ,object))
(symbol-macrolet ,(mapcar (lambda (property-entry)
(let ((var-name
(if (symbolp property-entry)
property-entry
(car property-entry)))
(property-name
(if (symbolp property-entry)
property-entry
(cadr property-entry))))
`(,var-name
(property-value ,sh ',property-name))))
properties)
,@body))))
| null | https://raw.githubusercontent.com/zkat/sheeple/5393c74737ccf22c3fd5f390076b75c38453cb04/src/properties.lisp | lisp | Syntax : ANSI - Common - Lisp ; Base : 10 ; indent - tabs - mode : nil -*-
This file is part of Sheeple
properties.lisp
Property access, inspection, and management stuff, for the most part.
Base Property API
try to set it
couldn't set it, try adding it
then try setting it again
bought the farm
if we know there's no options, we may as well skip all of the checks..
Reflection API
Property-related convenience
Nicknames
DOCUMENTATION
Property symbol-macro
|
(in-package :sheeple)
(defun property-position (property-name object)
(position property-name (mold-properties (%object-mold object)) :test #'eq))
(defun direct-property-value (object property-name)
"Returns the property-value set locally in OBJECT for PROPERTY-NAME. If the
property is not set locally, a condition of type `unbound-property' is signaled."
(if (std-object-p object)
(std-direct-property-value object property-name)
(funcall 'smop:direct-property-value (object-metaobject object) object property-name)))
(defun std-direct-property-value (object property-name)
(aif (property-position property-name object)
(svref (%object-property-values object) it)
(restart-case (error 'unbound-property :object object :property-name property-name)
(continue ()
:report "Try accessing the property again."
(direct-property-value object property-name))
(use-value (value)
:report "Return a value."
:interactive (lambda ()
(format *query-io* "~&Value to use: ")
(list (read *query-io*)))
value))))
(defun property-value (object property-name)
"Returns the property-value for PROPERTY-NAME found first in OBJECT's hierarchy list.
If the value does not exist in the hierarchy list, a condition of type `unbound-property'
is signaled."
(if (std-object-p object)
(std-property-value object property-name)
(funcall 'smop:property-value (object-metaobject object) object property-name)))
(defun std-property-value (object property-name)
(dolist (ancestor (object-precedence-list object)
(error 'unbound-property :object object :property-name property-name))
(handler-bind ((unbound-property (fun (go :next))))
(return (direct-property-value ancestor property-name)))
:next))
(defun (setf direct-property-value) (new-value object property-name &rest options)
"Tries to set a direct property value for OBJECT. If it succeeds, returns T, otherwise NIL."
(if (std-object-p object)
(apply #'(setf std-direct-property-value) new-value object property-name options)
(apply #'(setf smop:direct-property-value) new-value (object-metaobject object) object property-name options)))
(defun (setf std-direct-property-value) (new-value object property-name &rest options)
(declare (ignore options))
(awhen (property-position property-name object)
(setf (svref (%object-property-values object) it) new-value)
t))
(defun add-direct-property (object property-name &rest options)
"Adds a direct property to object, which involves any necessary allocation."
(if (std-object-p object)
(apply 'std-add-direct-property object property-name options)
(apply 'smop:add-direct-property (object-metaobject object) object property-name options)))
(defun std-add-direct-property (object property-name &rest options)
(declare (ignore options))
(change-mold object (ensure-transition (%object-mold object) property-name)))
(defun (setf property-value) (new-value object property-name &rest options
&key (reader nil readerp) (writer nil writerp) accessor)
"Sets NEW-VALUE as the value of a direct-property belonging to OBJECT, named
PROPERTY-NAME."
(flet ((maybe-set-prop () (apply #'(setf direct-property-value) new-value object property-name options)))
(when reader (add-reader-to-object reader property-name object))
(when writer (add-writer-to-object writer property-name object))
(when accessor
(let ((accessor-name (if (eq t accessor) property-name accessor)))
(unless (and readerp (null reader))
(add-reader-to-object accessor-name property-name object))
(unless (and writerp (null writer))
(add-writer-to-object `(setf ,accessor-name) property-name object)))))
new-value)
(defun property-makunbound (object property-name)
"Removes OBJECT's direct property named PROPERTY-NAME. Signals an error if there is no such
direct property. Returns OBJECT."
(if (std-object-p object)
(std-property-makunbound object property-name)
(funcall 'smop:property-makunbound (object-metaobject object) object property-name)))
(defun std-property-makunbound (object property-name)
(if (direct-property-p object property-name)
(prog1 object
(change-mold object
(ensure-mold (%object-metaobject object) (%object-parents object)
(remove property-name
(mold-properties (%object-mold object))))))
(error 'unbound-property :object object :property-name property-name)))
(defun remove-property (object property-name)
"Removes OBJECT's direct property named PROPERTY-NAME. Signals an error if there is no such
direct property. Returns OBJECT."
(warn 'deprecated-feature :feature #'remove-property :version "3.0.2")
(property-makunbound object property-name))
(defun remove-all-direct-properties (object)
"Wipes out all direct properties and their values from OBJECT."
(if (std-object-p object)
(std-remove-all-direct-properties object)
(funcall 'smop:remove-all-direct-properties (object-metaobject object) object)))
(defun std-remove-all-direct-properties (object)
(change-mold object (ensure-mold (%object-metaobject object) (%object-parents object)))
object)
(defun direct-property-p (object property-name)
"Returns T if OBJECT has a property called PROPERTY-NAME as a direct property.
NIL otherwise."
(if (std-object-p object)
(std-direct-property-p object property-name)
(funcall 'smop:direct-property-p (object-metaobject object) object property-name)))
(defun std-direct-property-p (object property-name)
(let ((has-property-p t))
(handler-case (direct-property-value object property-name)
(unbound-property () (setf has-property-p nil)))
has-property-p))
(defun property-owner (object property-name)
"Returns the object, if any, from which OBJECT would fetch the value for PROPERTY-NAME"
(if (std-object-p object)
(std-property-owner object property-name)
(funcall 'smop:property-owner (object-metaobject object) object property-name)))
(defun std-property-owner (object property-name)
(find-if (rcurry 'direct-property-p property-name) (object-precedence-list object)))
(defun direct-properties (object)
"Returns a list of the names of OBJECT's direct properties -- ie, only ones which have been
set directly in OBJECT using (setf property-value). The consequences of side-effecting this
returned list are undefined."
(if (std-object-p object)
(std-direct-properties object)
(funcall 'smop:direct-properties (object-metaobject object) object)))
(defun std-direct-properties (object)
(coerce (mold-properties (%object-mold object)) 'list))
(defun available-properties (object)
"Returns a list of the names of all properties available to OBJECT, including inherited ones."
(if (std-object-p object)
(std-available-properties object)
(funcall 'smop:available-properties (object-metaobject object) object)))
(defun std-available-properties (object)
(delete-duplicates (nconc (copy-list (direct-properties object))
(mapcan 'available-properties (object-parents object)))))
(defun object-nickname (object)
"Returns OBJECT's nickname"
(property-value object 'nickname))
(defun (setf object-nickname) (new-nickname object)
"Sets OBJECT's nickname to NEW-NICKNAME"
(handler-bind ((unbound-property 'continue))
(setf (property-value object 'nickname) new-nickname)))
(defmethod documentation ((x object) (doc-type (eql 't)))
(property-value x 'documentation))
(defmethod (setf documentation) ((new-value string) (x object) (doc-type (eql 't)))
(handler-bind ((unbound-property 'continue))
(setf (property-value x 'documentation) new-value)))
DESCRIBE
(defmethod describe-object ((object object) stream)
(format stream
"~&Object: ~A~@
Parents: ~A~@
Properties: ~%~{~A~%~}"
object (object-parents object)
(mapcar (fun (format nil "~S: ~S~@[ (Delegated to: ~A)~]"
(car _) (second _)
(unless (eq object (third _))
(third _))))
(mapcar (fun (list _ (property-value object _) (property-owner object _)))
(available-properties object)))))
(defmacro with-properties (properties object &body body)
(let ((sh (gensym)))
`(let ((,sh ,object))
(symbol-macrolet ,(mapcar (lambda (property-entry)
(let ((var-name
(if (symbolp property-entry)
property-entry
(car property-entry)))
(property-name
(if (symbolp property-entry)
property-entry
(cadr property-entry))))
`(,var-name
(property-value ,sh ',property-name))))
properties)
,@body))))
|
68e6323b6d4df91cec3d72b9fef492819f25f8a72b3bc1480acbfd635f58997f | kirkedal/rfun-interp | TypeCheck.hs | ---------------------------------------------------------------------------
--
Module :
Copyright : , 2017
-- License : AllRightsReserved
--
Maintainer : < >
-- Stability : none?
-- Portability : ?
--
-- |Simple type check for RFun17
--
-----------------------------------------------------------------------------
module TypeCheck (typecheck) where
import Ast
import PrettyPrinter
import qualified Data.Map as M
import Control.Monad.State
import Control.Monad.Reader
import Control.Monad.Except
import Data . List ( intersperse )
typecheck :: Program -> Maybe String
typecheck p = catchTCError $ hts >> cfd >> ltc
where
hts = mapError hasTypeSignature p
cfd = mapError checkFunctionDefinitions p
ltc = mapError (checkFunc (fenvFromProgram p)) p
-- Get function names and definitions
-- Check each function
Check first - match policy
-- Check that value had correct type
type TCError a = Either String a
noTypeError :: TCError ()
noTypeError = return ()
catchTCError :: TCError () -> Maybe String
catchTCError (Right _) = Nothing
catchTCError (Left l ) = return l
mapError :: (a -> TCError ()) -> [a] -> TCError ()
mapError f l =
case (mapM f l) of
(Right _) -> return ()
(Left e ) -> fail e
maybeError :: Maybe a -> a
maybeError Nothing = error "Cannot be nothing"
maybeError (Just x) = x
-- Check names
-- |Check Function types and definitions
checkFunctionDefinitions :: Func -> TCError ()
checkFunctionDefinitions func@(Func _ _ _) =
mapError checkFunctionClause $ funcClause func
where
checkFunctionClause clause | length (clauseParam clause) /= length typeDefList = fail $ errorDifferentNumberArgs (clauseIdent clause) (funcName func)
checkFunctionClause clause = mapError (\(x,y) -> checkTypeMatchLExpr (clauseIdent clause) x y) (zip typeDefList (clauseParam clause))
typeDefList = typeDefList_ $ funcTypesig func
typeDefList_ Nothing = []
typeDefList_ (Just (TypeSig ancT leftT _)) = ancT ++ [leftT]
checkFunctionDefinitions (DataType _ _) = noTypeError
[ BType ] BType
if all functions have type signatures
hasTypeSignature :: Func -> TCError ()
hasTypeSignature (Func i _ _) | (identifier i) == "eq" = fail $ "eq is a reserved function name."
hasTypeSignature (Func i _ _) | (identifier i) == "id" = fail $ "id is a reserved function name."
hasTypeSignature func@(Func _ _ _) =
case (funcTypesig func) of
(Just _) -> noTypeError
Nothing -> fail $ errorNoTypeSignature (funcName func)
hasTypeSignature (DataType i _) | (identifier i) == "EQ" = fail $ "EQ is a reserved datatype name."
hasTypeSignature (DataType _ _) = noTypeError
---------
checkTypeMatchLExpr :: Ident -> BType -> LExpr -> TCError ()
checkTypeMatchLExpr i t le =
case getLExprType le of
Nothing -> fail $ errorTypeMatch i t le
(Just leType) ->
case bTypeUnifies t leType of
True -> noTypeError
False -> fail $ errorTypeMatch i t le
typeEquality :: TypeSig -> TypeSig -> Bool
typeEquality (TypeSig ancT1 leftT1 rightT1) (TypeSig ancT2 leftT2 rightT2) | length ancT1 == length ancT2 =
and $ zipWith bTypeUnifies (leftT1:rightT1:ancT1) (leftT2:rightT2:ancT2)
typeEquality _ _ = False
bTypeUnifies :: BType -> BType -> Bool
bTypeUnifies NatT NatT = True
bTypeUnifies (DataT i1) (DataT i2) | identifier i1 == identifier i2 = True
bTypeUnifies (ListT t1) (ListT t2) = bTypeUnifies t1 t2
bTypeUnifies (ProdT t1) (ProdT t2) | length t1 == length t2 = and $ zipWith bTypeUnifies t1 t2
bTypeUnifies (SumT t1) (SumT t2) | length t1 == length t2 = and $ zipWith bTypeUnifies t1 t2
bTypeUnifies (FunT t1) (FunT t2) = typeEquality t1 t2
bTypeUnifies (VarT i1) (VarT i2) | identifier i1 == identifier i2 = True
bTypeUnifies AnyT _ = True
bTypeUnifies _ AnyT = True
bTypeUnifies _ _ = False
typeUnification :: TypeSig -> TypeSig -> Maybe TypeSig
typeUnification (TypeSig ancTs1 leftT1 rightT1) (TypeSig ancTs2 leftT2 rightT2) =
do ancT <- sequence $ zipWith bTypeUnification ancTs1 ancTs2
leftT <- bTypeUnification leftT1 leftT2
rightT <- bTypeUnification rightT1 rightT2
return $ TypeSig ancT leftT rightT
bTypeUnification :: BType -> BType -> Maybe BType
bTypeUnification t@(DataT i1) (DataT i2) | identifier i1 == identifier i2 = Just t
bTypeUnification (ListT t1) (ListT t2) =
case bTypeUnification t1 t2 of
Nothing -> Nothing
(Just t) -> Just $ ListT t
bTypeUnification (ProdT t1) (ProdT t2) | length t1 == length t2 =
case sequence $ zipWith bTypeUnification t1 t2 of
Nothing -> Nothing
(Just t) -> Just $ ProdT t
bTypeUnification ( SumT t1 ) ( SumT t2 ) | length t1 = = length t2 = and $ zipWith bTypeUnification t1 t2
bTypeUnification (FunT t1) (FunT t2) =
case typeUnification t1 t2 of
Nothing -> Nothing
(Just t) -> Just $ FunT t
bTypeUnification t@(VarT _) (VarT _) = Just t
bTypeUnification (VarT _) t = Just t
bTypeUnification t (VarT _) = Just t
-- bTypeUnification t@(VarT i1) (VarT i2) | identifier i1 == identifier i2 = Just t
bTypeUnification AnyT t = Just t
bTypeUnification t AnyT = Just t
bTypeUnification _ _ = Nothing
getLExprType :: LExpr -> Maybe BType
getLExprType (Var _) = Just AnyT -- Variable can be any type
getLExprType (Int _) = Just $ DataT $ makeIdent "Nat"
getLExprType ( Constr i [ ] ) | ( identifier i = = " Z " ) = Just NatT
getLExprType ( Constr i [ ] ) | ( identifier i = = " S " ) = ( ) > > = ( bTypeUnification NatT )
getLExprType (Constr _ _) = Just AnyT -- I need function Env
getLExprType (Tuple lExprs) = (sequence $ map getLExprType lExprs) >>= (\x -> Just $ ProdT x)
-- DataT Ident -- ^ Constructor term
getLExprType (List lExprList) = getListLExprType lExprList >>= (\t -> return $ ListT t)
where
getListLExprType (ListCons lExpr lExprL) =
do t1 <- getLExprType lExpr
t2 <- getListLExprType lExprL
bTypeUnification t1 t2
getListLExprType (ListEnd lExpr) = getLExprType lExpr
getListLExprType (ListNil) = Just AnyT
getLExprType (App _ _ _) = Just AnyT
data BType = NatT | AnyT Ident | ListT BType | [ BType ] | SumT [ BType ] | FunT TypeSig
deriving ( Eq , Show )
-- Check Linearity
-- Check Ancillae
type FunEnv = M.Map String Func
fenvFromProgram :: Program -> FunEnv
fenvFromProgram p = M.fromList $ (eqTD:(map f p))
where f func@(Func _ _ _) = ((identifier.funcName) func, func)
f dataT@(DataType _ _) = ((identifier.dataName) dataT, dataT)
eqTD = ("EQ", DataType (makeIdent "EQ") (M.fromList [
("Eq", (makeIdent "Eq", [])),
("Neq", (makeIdent "Neq", [AnyT]))]) )
data VarType = Ancillae BType | Live BType | Killed
deriving (Eq, Show)
type Vars = M.Map String VarType
newtype TC a = E { runE :: StateT Vars (ReaderT FunEnv (Except String)) a }
deriving (Applicative, Functor, Monad, MonadReader FunEnv, MonadState Vars, MonadError String)
runTC :: TC a -> Vars -> FunEnv -> (TCError (a, Vars))
runTC eval vars fenv = runExcept $ runReaderT (runStateT (runE eval) vars) fenv
addLive :: Ident -> BType -> TC BType
addLive i btype =
do b <- varExist i
when b $ throwError $ errorAddExistingVariable i --- Can check if it is alive of dead
modify (\x -> M.insert (identifier i) (Live btype) x)
return btype
addAncillae :: Ident -> BType -> TC BType
addAncillae i btype =
do b <- varExist i
when b $ throwError $ errorAddExistingVariable i --- Can check if it is alive of dead
modify (\x -> M.insert (identifier i) (Ancillae btype) x)
return btype
killLive :: Ident -> BType -> TC BType
killLive i btype =
do c <- get
case M.lookup (identifier i) c of
Nothing -> throwError $ errorUseOfNonExistingVariable i
(Just Killed) -> throwError $ errorUseKilledVariable i
(Just (Ancillae _)) -> throwError $ errorUseAncillaVariable i
(Just (Live t)) ->
case bTypeUnification btype t of
Nothing -> throwError $ errorDifferentTypes i t btype
(Just ut) -> return ut
checkAncillae :: Ident -> BType -> TC BType
checkAncillae i btype =
do c <- get
case M.lookup (identifier i) c of
Nothing ->
do b <- funExist i
unless b $ throwError $ errorUseOfNonExistingVariable i
-- t <- funTypeSig i
return btype
(Just Killed) -> throwError $ errorUseKilledVariable i
(Just (Ancillae t)) ->
case bTypeUnification btype t of
Nothing -> throwError $ errorDifferentTypes i t btype
(Just ut) -> return ut
(Just (Live t)) ->
case bTypeUnification btype t of
Nothing -> throwError $ errorDifferentTypes i t btype
(Just ut) -> return ut
varExist :: Ident -> TC Bool
varExist i =
do v <- get
return $ M.member (identifier i) v
funExist :: Ident -> TC Bool
funExist i =
do fenv <- ask
return $ M.member (identifier i) fenv
funTypeSig :: Ident -> TC TypeSig
funTypeSig i | (identifier i) == "eq" = return $ TypeSig [VarT i] (VarT i) (DataT $ makeIdent "EQ")
funTypeSig i =
do fenv <- ask
case M.lookup (identifier i) fenv of
Nothing ->
do v <- get
case M.lookup (identifier i) v of
Nothing -> throwError $ errorUseOfNonExistingFunction i
Just (Ancillae (FunT sig)) -> return sig
_ -> throwError $ errorUseOfNonExistingFunction i
Just (Func _ s _) -> return $ maybeError s
_ -> throwError $ errorUseOfNonExistingFunction i
dataTypeDef :: Ident -> Ident -> TC [BType]
dataTypeDef i c =
do fenv <- ask
case M.lookup (identifier i) fenv of
Nothing -> throwError $ errorUseOfNonExistingTypeDefinition i
Just (DataType _ s) ->
case M.lookup (identifier c) s of
Nothing -> throwError $ errorUseOfNonExistingDataConstructor c i
Just td -> return $ snd td
_ -> throwError $ errorUseOfNonExistingDataConstructor c i
checkFunc :: FunEnv -> Func -> TCError ()
checkFunc fe f@(Func _ _ _) = mapError (\x -> checkClause x (maybeError $ funcTypesig f) fe) $ funcClause f
checkFunc _ (DataType _ _) = noTypeError
-- We ignore Guards at the moment
checkClause :: Clause -> TypeSig -> FunEnv -> TCError ()
checkClause c (TypeSig ancT inT outT) fe =
case runTC (eval (clauseParam c) (ancT ++ [inT])) (M.empty) fe of
Left e -> fail e
Right _ -> noTypeError
where
eval [x] [y] = (checkLExpr addLive x y) >> (checkExpr (clauseBody c) outT)
eval (x:xs) (y:ys) = (checkLExpr addAncillae x y) >> (eval xs ys)
eval _ _ = error "...."
checkExpr :: Expr -> BType -> TC ()
checkExpr (LeftE lExpr) btype = checkLExpr killLive lExpr btype >> return ()
checkExpr (LetIn leftLE rightLE expr) btype =
do t <- checkLExpr killLive rightLE AnyT
checkLExpr addLive leftLE t
checkExpr expr btype
[ ( LExpr , Guard , ) ] -- ^ Case - of expression
do t <- checkLExpr killLive lExpr AnyT
mapM_ (testCase t) cases
where
testCase bt (lE, _, ex) =
do v <- get
checkLExpr addLive lE bt
checkExpr ex btype
put v
checkLExpr :: (Ident -> BType -> TC BType) -> LExpr -> BType -> TC BType
checkLExpr addFun (Var ident) btype = addFun ident btype -- Variable can be any type
-- Integers
checkLExpr _ (Int _) btype | bTypeUnifies btype (DataT $ makeIdent "Nat") = return $ DataT $ makeIdent "Nat"
checkLExpr _ lExpr@(Int _) t = throwError $ errorLExprUnification lExpr t
checkLExpr _ ( Constr i [ ] ) btype | ( identifier i = = " Z " ) , bTypeUnifies btype NatT = return NatT
checkLExpr addFun ( Constr i [ ] ) btype | ( identifier i = = " S " ) = checkLExpr addFun btype
checkLExpr addFun lExpr@(Constr i lExprs) t@(DataT typeName) =
do dd <- dataTypeDef typeName i
when ((length dd) /= length lExprs) $ throwError $ errorLExprUnification lExpr t
sequence $ zipWith (checkLExpr addFun) (lExprs) dd
return $ DataT typeName
checkLExpr addFun (Tuple lExprs) (ProdT btypes) | length lExprs == length btypes =
do types <- sequence $ zipWith (checkLExpr addFun) lExprs btypes
return $ ProdT types
checkLExpr _ lExpr@(Tuple _) t = throwError $ errorLExprUnification lExpr t
checkLExpr addFun le@(List lExprList) tp@(ListT btype) = getListLExprType lExprList
where
getListLExprType (ListCons lExpr lExprL) =
do t1 <- checkLExpr addFun lExpr btype
t2 <- getListLExprType lExprL
case bTypeUnification (ListT t1) t2 of
Nothing -> throwError $ errorLExprUnification le tp
Just t -> return t
getListLExprType (ListEnd lExpr) = checkLExpr addFun lExpr (ListT btype)
getListLExprType ListNil = return tp
checkLExpr _ lExpr@(List _) t = throwError $ errorLExprUnification lExpr t
checkLExpr addFun (App ident True lExprs) _ =
do (TypeSig ancTs updT retT) <- funTypeSig ident
when ( ( length ancTs ) + 1 /= length lExprs ) $ throwError $ errorDifferentNumberArgsApp ident ( TypeSig ancTs updT retT ) lExprs
sequence $ zipWith (checkLExpr checkAncillae) (init lExprs) ancTs
checkLExpr addFun (last lExprs) updT
return retT
checkLExpr addFun (App ident False lExprs) _ =
do (TypeSig ancTs updT retT) <- funTypeSig ident
when ( ( length ancTs ) + 1 /= length lExprs ) $ throwError $ errorDifferentNumberArgsApp ident ( TypeSig ancTs updT retT ) lExprs
sequence $ zipWith (checkLExpr checkAncillae) (init lExprs) ancTs
checkLExpr addFun (last lExprs) retT
return updT
checkLExpr _ lExpr t = throwError $ errorLExprUnification lExpr t
errorFirst :: Ident -> String
errorFirst i_def =
"In " ++ ppIdentFile i_def ++ ", " ++ ppIdentPos i_def ++ "\n "
errorUseKilledVariable :: Ident -> String
errorUseKilledVariable i =
errorFirst i ++ "the variable " ++ ppIdent i ++ " which is trying to be has already been used."
errorUseAncillaVariable :: Ident -> String
errorUseAncillaVariable i =
errorFirst i ++ "the variable " ++ ppIdent i ++ " which is trying to be has ancillae type."
errorAddExistingVariable :: Ident -> String
errorAddExistingVariable i =
errorFirst i ++ "the variable " ++ ppIdent i ++ " has already been defined."
errorUseOfNonExistingVariable :: Ident -> String
errorUseOfNonExistingVariable i =
errorFirst i ++ "the variable " ++ ppIdent i ++ " is undefined."
errorUseOfNonExistingFunction :: Ident -> String
errorUseOfNonExistingFunction i =
errorFirst i ++ "the function " ++ ppIdent i ++ " is undefined."
errorUseOfNonExistingDataConstructor :: Ident -> Ident -> String
errorUseOfNonExistingDataConstructor i t =
errorFirst i ++ "the constructor " ++ ppIdent i ++ " in type definition " ++ ppIdent t ++ " is undefined."
errorUseOfNonExistingTypeDefinition :: Ident -> String
errorUseOfNonExistingTypeDefinition i =
errorFirst i ++ "the type definition " ++ ppIdent i ++ " is undefined."
errorLExprUnification :: LExpr -> BType -> String
errorLExprUnification le a_type =
"The left-expression\n " ++ ppLExpr le ++ "\ncannot be unified with type\n " ++ ppBType a_type ++ "\n"
errorDifferentTypes :: Ident -> BType -> BType -> String
errorDifferentTypes i_def i_type a_type =
errorFirst i_def ++ "the variable " ++ ppIdent i_def ++ " of type\n " ++ ppBType i_type ++ "\n" ++
"does not have expected type\n " ++ ppBType a_type
errorDifferentNumberArgsApp :: Ident -> TypeSig -> [LExpr] -> String
errorDifferentNumberArgsApp i_def i_sig args =
errorFirst i_def ++ "the function \n " ++ ppIdent i_def ++ " :: " ++ ppTypeSig i_sig ++ "\n" ++
"is provided with " ++ (show $ length args) ++ " arguments.\n"
errorTypeMatch :: Ident -> BType -> LExpr -> String
errorTypeMatch i_def btype lExpr =
case getLExprType lExpr of
Nothing -> "errorTypeMatch"
(Just t) ->
"In " ++ ppIdentFile i_def ++ " function " ++ ppIdent i_def ++ " (" ++ ppIdentLine i_def ++ ") " ++
"the type of left-expression \n " ++ ppLExpr lExpr ++ "\nof type\n " ++ (ppBType t) ++
"\ndoes not match type signature \n " ++ ppBType btype ++ "\n"
errorDifferentNumberArgs :: Ident -> Ident -> String
errorDifferentNumberArgs i_def i_sig =
"In " ++ ppIdentFile i_def ++ " function " ++ ppIdent i_def ++ " (" ++ ppIdentLine i_def ++
") has different number of arguments than in type signature (" ++ ppIdentLine i_sig ++ ").\n"
errorNoTypeSignature :: Ident -> String
errorNoTypeSignature i =
"In " ++ ppIdentFile i ++ " function " ++ ppIdent i ++ " (" ++ ppIdentPos i ++ ") has not type signature.\n" ++
" Type inference is not supported yet."
| null | https://raw.githubusercontent.com/kirkedal/rfun-interp/c5297be7ab07c92e9d489c642cd987ed646e78c8/src/TypeCheck.hs | haskell | -------------------------------------------------------------------------
License : AllRightsReserved
Stability : none?
Portability : ?
|Simple type check for RFun17
---------------------------------------------------------------------------
Get function names and definitions
Check each function
Check that value had correct type
Check names
|Check Function types and definitions
-------
bTypeUnification t@(VarT i1) (VarT i2) | identifier i1 == identifier i2 = Just t
Variable can be any type
I need function Env
DataT Ident -- ^ Constructor term
Check Linearity
Check Ancillae
- Can check if it is alive of dead
- Can check if it is alive of dead
t <- funTypeSig i
We ignore Guards at the moment
^ Case - of expression
Variable can be any type
Integers | Module :
Copyright : , 2017
Maintainer : < >
module TypeCheck (typecheck) where
import Ast
import PrettyPrinter
import qualified Data.Map as M
import Control.Monad.State
import Control.Monad.Reader
import Control.Monad.Except
import Data . List ( intersperse )
typecheck :: Program -> Maybe String
typecheck p = catchTCError $ hts >> cfd >> ltc
where
hts = mapError hasTypeSignature p
cfd = mapError checkFunctionDefinitions p
ltc = mapError (checkFunc (fenvFromProgram p)) p
Check first - match policy
type TCError a = Either String a
noTypeError :: TCError ()
noTypeError = return ()
catchTCError :: TCError () -> Maybe String
catchTCError (Right _) = Nothing
catchTCError (Left l ) = return l
mapError :: (a -> TCError ()) -> [a] -> TCError ()
mapError f l =
case (mapM f l) of
(Right _) -> return ()
(Left e ) -> fail e
maybeError :: Maybe a -> a
maybeError Nothing = error "Cannot be nothing"
maybeError (Just x) = x
checkFunctionDefinitions :: Func -> TCError ()
checkFunctionDefinitions func@(Func _ _ _) =
mapError checkFunctionClause $ funcClause func
where
checkFunctionClause clause | length (clauseParam clause) /= length typeDefList = fail $ errorDifferentNumberArgs (clauseIdent clause) (funcName func)
checkFunctionClause clause = mapError (\(x,y) -> checkTypeMatchLExpr (clauseIdent clause) x y) (zip typeDefList (clauseParam clause))
typeDefList = typeDefList_ $ funcTypesig func
typeDefList_ Nothing = []
typeDefList_ (Just (TypeSig ancT leftT _)) = ancT ++ [leftT]
checkFunctionDefinitions (DataType _ _) = noTypeError
[ BType ] BType
if all functions have type signatures
hasTypeSignature :: Func -> TCError ()
hasTypeSignature (Func i _ _) | (identifier i) == "eq" = fail $ "eq is a reserved function name."
hasTypeSignature (Func i _ _) | (identifier i) == "id" = fail $ "id is a reserved function name."
hasTypeSignature func@(Func _ _ _) =
case (funcTypesig func) of
(Just _) -> noTypeError
Nothing -> fail $ errorNoTypeSignature (funcName func)
hasTypeSignature (DataType i _) | (identifier i) == "EQ" = fail $ "EQ is a reserved datatype name."
hasTypeSignature (DataType _ _) = noTypeError
checkTypeMatchLExpr :: Ident -> BType -> LExpr -> TCError ()
checkTypeMatchLExpr i t le =
case getLExprType le of
Nothing -> fail $ errorTypeMatch i t le
(Just leType) ->
case bTypeUnifies t leType of
True -> noTypeError
False -> fail $ errorTypeMatch i t le
typeEquality :: TypeSig -> TypeSig -> Bool
typeEquality (TypeSig ancT1 leftT1 rightT1) (TypeSig ancT2 leftT2 rightT2) | length ancT1 == length ancT2 =
and $ zipWith bTypeUnifies (leftT1:rightT1:ancT1) (leftT2:rightT2:ancT2)
typeEquality _ _ = False
bTypeUnifies :: BType -> BType -> Bool
bTypeUnifies NatT NatT = True
bTypeUnifies (DataT i1) (DataT i2) | identifier i1 == identifier i2 = True
bTypeUnifies (ListT t1) (ListT t2) = bTypeUnifies t1 t2
bTypeUnifies (ProdT t1) (ProdT t2) | length t1 == length t2 = and $ zipWith bTypeUnifies t1 t2
bTypeUnifies (SumT t1) (SumT t2) | length t1 == length t2 = and $ zipWith bTypeUnifies t1 t2
bTypeUnifies (FunT t1) (FunT t2) = typeEquality t1 t2
bTypeUnifies (VarT i1) (VarT i2) | identifier i1 == identifier i2 = True
bTypeUnifies AnyT _ = True
bTypeUnifies _ AnyT = True
bTypeUnifies _ _ = False
typeUnification :: TypeSig -> TypeSig -> Maybe TypeSig
typeUnification (TypeSig ancTs1 leftT1 rightT1) (TypeSig ancTs2 leftT2 rightT2) =
do ancT <- sequence $ zipWith bTypeUnification ancTs1 ancTs2
leftT <- bTypeUnification leftT1 leftT2
rightT <- bTypeUnification rightT1 rightT2
return $ TypeSig ancT leftT rightT
bTypeUnification :: BType -> BType -> Maybe BType
bTypeUnification t@(DataT i1) (DataT i2) | identifier i1 == identifier i2 = Just t
bTypeUnification (ListT t1) (ListT t2) =
case bTypeUnification t1 t2 of
Nothing -> Nothing
(Just t) -> Just $ ListT t
bTypeUnification (ProdT t1) (ProdT t2) | length t1 == length t2 =
case sequence $ zipWith bTypeUnification t1 t2 of
Nothing -> Nothing
(Just t) -> Just $ ProdT t
bTypeUnification ( SumT t1 ) ( SumT t2 ) | length t1 = = length t2 = and $ zipWith bTypeUnification t1 t2
bTypeUnification (FunT t1) (FunT t2) =
case typeUnification t1 t2 of
Nothing -> Nothing
(Just t) -> Just $ FunT t
bTypeUnification t@(VarT _) (VarT _) = Just t
bTypeUnification (VarT _) t = Just t
bTypeUnification t (VarT _) = Just t
bTypeUnification AnyT t = Just t
bTypeUnification t AnyT = Just t
bTypeUnification _ _ = Nothing
getLExprType :: LExpr -> Maybe BType
getLExprType (Int _) = Just $ DataT $ makeIdent "Nat"
getLExprType ( Constr i [ ] ) | ( identifier i = = " Z " ) = Just NatT
getLExprType ( Constr i [ ] ) | ( identifier i = = " S " ) = ( ) > > = ( bTypeUnification NatT )
getLExprType (Tuple lExprs) = (sequence $ map getLExprType lExprs) >>= (\x -> Just $ ProdT x)
getLExprType (List lExprList) = getListLExprType lExprList >>= (\t -> return $ ListT t)
where
getListLExprType (ListCons lExpr lExprL) =
do t1 <- getLExprType lExpr
t2 <- getListLExprType lExprL
bTypeUnification t1 t2
getListLExprType (ListEnd lExpr) = getLExprType lExpr
getListLExprType (ListNil) = Just AnyT
getLExprType (App _ _ _) = Just AnyT
data BType = NatT | AnyT Ident | ListT BType | [ BType ] | SumT [ BType ] | FunT TypeSig
deriving ( Eq , Show )
type FunEnv = M.Map String Func
fenvFromProgram :: Program -> FunEnv
fenvFromProgram p = M.fromList $ (eqTD:(map f p))
where f func@(Func _ _ _) = ((identifier.funcName) func, func)
f dataT@(DataType _ _) = ((identifier.dataName) dataT, dataT)
eqTD = ("EQ", DataType (makeIdent "EQ") (M.fromList [
("Eq", (makeIdent "Eq", [])),
("Neq", (makeIdent "Neq", [AnyT]))]) )
data VarType = Ancillae BType | Live BType | Killed
deriving (Eq, Show)
type Vars = M.Map String VarType
newtype TC a = E { runE :: StateT Vars (ReaderT FunEnv (Except String)) a }
deriving (Applicative, Functor, Monad, MonadReader FunEnv, MonadState Vars, MonadError String)
runTC :: TC a -> Vars -> FunEnv -> (TCError (a, Vars))
runTC eval vars fenv = runExcept $ runReaderT (runStateT (runE eval) vars) fenv
addLive :: Ident -> BType -> TC BType
addLive i btype =
do b <- varExist i
modify (\x -> M.insert (identifier i) (Live btype) x)
return btype
addAncillae :: Ident -> BType -> TC BType
addAncillae i btype =
do b <- varExist i
modify (\x -> M.insert (identifier i) (Ancillae btype) x)
return btype
killLive :: Ident -> BType -> TC BType
killLive i btype =
do c <- get
case M.lookup (identifier i) c of
Nothing -> throwError $ errorUseOfNonExistingVariable i
(Just Killed) -> throwError $ errorUseKilledVariable i
(Just (Ancillae _)) -> throwError $ errorUseAncillaVariable i
(Just (Live t)) ->
case bTypeUnification btype t of
Nothing -> throwError $ errorDifferentTypes i t btype
(Just ut) -> return ut
checkAncillae :: Ident -> BType -> TC BType
checkAncillae i btype =
do c <- get
case M.lookup (identifier i) c of
Nothing ->
do b <- funExist i
unless b $ throwError $ errorUseOfNonExistingVariable i
return btype
(Just Killed) -> throwError $ errorUseKilledVariable i
(Just (Ancillae t)) ->
case bTypeUnification btype t of
Nothing -> throwError $ errorDifferentTypes i t btype
(Just ut) -> return ut
(Just (Live t)) ->
case bTypeUnification btype t of
Nothing -> throwError $ errorDifferentTypes i t btype
(Just ut) -> return ut
varExist :: Ident -> TC Bool
varExist i =
do v <- get
return $ M.member (identifier i) v
funExist :: Ident -> TC Bool
funExist i =
do fenv <- ask
return $ M.member (identifier i) fenv
funTypeSig :: Ident -> TC TypeSig
funTypeSig i | (identifier i) == "eq" = return $ TypeSig [VarT i] (VarT i) (DataT $ makeIdent "EQ")
funTypeSig i =
do fenv <- ask
case M.lookup (identifier i) fenv of
Nothing ->
do v <- get
case M.lookup (identifier i) v of
Nothing -> throwError $ errorUseOfNonExistingFunction i
Just (Ancillae (FunT sig)) -> return sig
_ -> throwError $ errorUseOfNonExistingFunction i
Just (Func _ s _) -> return $ maybeError s
_ -> throwError $ errorUseOfNonExistingFunction i
dataTypeDef :: Ident -> Ident -> TC [BType]
dataTypeDef i c =
do fenv <- ask
case M.lookup (identifier i) fenv of
Nothing -> throwError $ errorUseOfNonExistingTypeDefinition i
Just (DataType _ s) ->
case M.lookup (identifier c) s of
Nothing -> throwError $ errorUseOfNonExistingDataConstructor c i
Just td -> return $ snd td
_ -> throwError $ errorUseOfNonExistingDataConstructor c i
checkFunc :: FunEnv -> Func -> TCError ()
checkFunc fe f@(Func _ _ _) = mapError (\x -> checkClause x (maybeError $ funcTypesig f) fe) $ funcClause f
checkFunc _ (DataType _ _) = noTypeError
checkClause :: Clause -> TypeSig -> FunEnv -> TCError ()
checkClause c (TypeSig ancT inT outT) fe =
case runTC (eval (clauseParam c) (ancT ++ [inT])) (M.empty) fe of
Left e -> fail e
Right _ -> noTypeError
where
eval [x] [y] = (checkLExpr addLive x y) >> (checkExpr (clauseBody c) outT)
eval (x:xs) (y:ys) = (checkLExpr addAncillae x y) >> (eval xs ys)
eval _ _ = error "...."
checkExpr :: Expr -> BType -> TC ()
checkExpr (LeftE lExpr) btype = checkLExpr killLive lExpr btype >> return ()
checkExpr (LetIn leftLE rightLE expr) btype =
do t <- checkLExpr killLive rightLE AnyT
checkLExpr addLive leftLE t
checkExpr expr btype
do t <- checkLExpr killLive lExpr AnyT
mapM_ (testCase t) cases
where
testCase bt (lE, _, ex) =
do v <- get
checkLExpr addLive lE bt
checkExpr ex btype
put v
checkLExpr :: (Ident -> BType -> TC BType) -> LExpr -> BType -> TC BType
checkLExpr _ (Int _) btype | bTypeUnifies btype (DataT $ makeIdent "Nat") = return $ DataT $ makeIdent "Nat"
checkLExpr _ lExpr@(Int _) t = throwError $ errorLExprUnification lExpr t
checkLExpr _ ( Constr i [ ] ) btype | ( identifier i = = " Z " ) , bTypeUnifies btype NatT = return NatT
checkLExpr addFun ( Constr i [ ] ) btype | ( identifier i = = " S " ) = checkLExpr addFun btype
checkLExpr addFun lExpr@(Constr i lExprs) t@(DataT typeName) =
do dd <- dataTypeDef typeName i
when ((length dd) /= length lExprs) $ throwError $ errorLExprUnification lExpr t
sequence $ zipWith (checkLExpr addFun) (lExprs) dd
return $ DataT typeName
checkLExpr addFun (Tuple lExprs) (ProdT btypes) | length lExprs == length btypes =
do types <- sequence $ zipWith (checkLExpr addFun) lExprs btypes
return $ ProdT types
checkLExpr _ lExpr@(Tuple _) t = throwError $ errorLExprUnification lExpr t
checkLExpr addFun le@(List lExprList) tp@(ListT btype) = getListLExprType lExprList
where
getListLExprType (ListCons lExpr lExprL) =
do t1 <- checkLExpr addFun lExpr btype
t2 <- getListLExprType lExprL
case bTypeUnification (ListT t1) t2 of
Nothing -> throwError $ errorLExprUnification le tp
Just t -> return t
getListLExprType (ListEnd lExpr) = checkLExpr addFun lExpr (ListT btype)
getListLExprType ListNil = return tp
checkLExpr _ lExpr@(List _) t = throwError $ errorLExprUnification lExpr t
checkLExpr addFun (App ident True lExprs) _ =
do (TypeSig ancTs updT retT) <- funTypeSig ident
when ( ( length ancTs ) + 1 /= length lExprs ) $ throwError $ errorDifferentNumberArgsApp ident ( TypeSig ancTs updT retT ) lExprs
sequence $ zipWith (checkLExpr checkAncillae) (init lExprs) ancTs
checkLExpr addFun (last lExprs) updT
return retT
checkLExpr addFun (App ident False lExprs) _ =
do (TypeSig ancTs updT retT) <- funTypeSig ident
when ( ( length ancTs ) + 1 /= length lExprs ) $ throwError $ errorDifferentNumberArgsApp ident ( TypeSig ancTs updT retT ) lExprs
sequence $ zipWith (checkLExpr checkAncillae) (init lExprs) ancTs
checkLExpr addFun (last lExprs) retT
return updT
checkLExpr _ lExpr t = throwError $ errorLExprUnification lExpr t
errorFirst :: Ident -> String
errorFirst i_def =
"In " ++ ppIdentFile i_def ++ ", " ++ ppIdentPos i_def ++ "\n "
errorUseKilledVariable :: Ident -> String
errorUseKilledVariable i =
errorFirst i ++ "the variable " ++ ppIdent i ++ " which is trying to be has already been used."
errorUseAncillaVariable :: Ident -> String
errorUseAncillaVariable i =
errorFirst i ++ "the variable " ++ ppIdent i ++ " which is trying to be has ancillae type."
errorAddExistingVariable :: Ident -> String
errorAddExistingVariable i =
errorFirst i ++ "the variable " ++ ppIdent i ++ " has already been defined."
errorUseOfNonExistingVariable :: Ident -> String
errorUseOfNonExistingVariable i =
errorFirst i ++ "the variable " ++ ppIdent i ++ " is undefined."
errorUseOfNonExistingFunction :: Ident -> String
errorUseOfNonExistingFunction i =
errorFirst i ++ "the function " ++ ppIdent i ++ " is undefined."
errorUseOfNonExistingDataConstructor :: Ident -> Ident -> String
errorUseOfNonExistingDataConstructor i t =
errorFirst i ++ "the constructor " ++ ppIdent i ++ " in type definition " ++ ppIdent t ++ " is undefined."
errorUseOfNonExistingTypeDefinition :: Ident -> String
errorUseOfNonExistingTypeDefinition i =
errorFirst i ++ "the type definition " ++ ppIdent i ++ " is undefined."
errorLExprUnification :: LExpr -> BType -> String
errorLExprUnification le a_type =
"The left-expression\n " ++ ppLExpr le ++ "\ncannot be unified with type\n " ++ ppBType a_type ++ "\n"
errorDifferentTypes :: Ident -> BType -> BType -> String
errorDifferentTypes i_def i_type a_type =
errorFirst i_def ++ "the variable " ++ ppIdent i_def ++ " of type\n " ++ ppBType i_type ++ "\n" ++
"does not have expected type\n " ++ ppBType a_type
errorDifferentNumberArgsApp :: Ident -> TypeSig -> [LExpr] -> String
errorDifferentNumberArgsApp i_def i_sig args =
errorFirst i_def ++ "the function \n " ++ ppIdent i_def ++ " :: " ++ ppTypeSig i_sig ++ "\n" ++
"is provided with " ++ (show $ length args) ++ " arguments.\n"
errorTypeMatch :: Ident -> BType -> LExpr -> String
errorTypeMatch i_def btype lExpr =
case getLExprType lExpr of
Nothing -> "errorTypeMatch"
(Just t) ->
"In " ++ ppIdentFile i_def ++ " function " ++ ppIdent i_def ++ " (" ++ ppIdentLine i_def ++ ") " ++
"the type of left-expression \n " ++ ppLExpr lExpr ++ "\nof type\n " ++ (ppBType t) ++
"\ndoes not match type signature \n " ++ ppBType btype ++ "\n"
errorDifferentNumberArgs :: Ident -> Ident -> String
errorDifferentNumberArgs i_def i_sig =
"In " ++ ppIdentFile i_def ++ " function " ++ ppIdent i_def ++ " (" ++ ppIdentLine i_def ++
") has different number of arguments than in type signature (" ++ ppIdentLine i_sig ++ ").\n"
errorNoTypeSignature :: Ident -> String
errorNoTypeSignature i =
"In " ++ ppIdentFile i ++ " function " ++ ppIdent i ++ " (" ++ ppIdentPos i ++ ") has not type signature.\n" ++
" Type inference is not supported yet."
|
da52e7cbb84b6301c4762510d1c45db7e37414ac18289de5a0408d94c3dea710 | BSeppke/vigracket | morphology.rkt | #lang racket
(require vigracket/config)
(require vigracket/helpers)
(require scheme/foreign)
(unsafe!)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Distance Transform # # # # # # # # # # # # # # # # # # # #
;; Raw FFI binding to the C function vigra_distancetransform_c from the
;; vigracket shared library.
;; Arguments: flat input pixel vector, flat output pixel vector, image
;; width and height, the pixel value treated as background, and the
;; distance-norm selector (0, 1 or 2).
;; Returns an int status code: 0 = success, 1 = transform failed,
;; 2 = invalid norm (see distancetransform-band).
(define vigra_distancetransform_c
  (get-ffi-obj 'vigra_distancetransform_c vigracket-dylib-path
               (_fun (img_vector1 img_vector2 width height background_label norm) :: [img_vector1 : _cvector]
                     [img_vector2 : _cvector]
                     [width : _int]
                     [height : _int]
                     [background_label : _float*]
                     [norm : _int]
                     -> (res : _int))))
;; Computes the distance transform of a single band.
;; Pixels equal to background_label are treated as background; norm selects
;; the distance metric (0, 1 or 2, as accepted by the vigra C library).
;; Returns a freshly allocated band of the same dimensions.
(define (distancetransform-band band background_label norm)
  (let* ((width (band-width band))
         (height (band-height band))
         (band2 (make-band width height))
         (status (vigra_distancetransform_c (band-data band) (band-data band2)
                                            width height background_label norm)))
    (case status
      ((0) band2)
      ;; Fixed: these messages previously claimed "vigracket.filters.…",
      ;; but this function lives in the morphology module (all sibling
      ;; messages use the "vigracket.morphology:" prefix).
      ((1) (error "Error in vigracket.morphology:distancetransform: Distance transformation failed!!"))
      ((2) (error "Error in vigracket.morphology:distancetransform: Norm must be in {0,1,2} !!"))
      ;; Defensive: surface unexpected status codes instead of silently
      ;; returning void.
      (else (error "Error in vigracket.morphology:distancetransform: Unknown error code!!")))))
;; Applies the distance transform to every band of the image
;; (an image is a list of bands).
(define (distancetransform image background_label norm)
  (for/list ([band (in-list image)])
    (distancetransform-band band background_label norm)))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Erode image # # # # # # # # # # # # # # # # # # # #
;; Raw FFI binding to vigra_discerosion_c: disc erosion of a single band.
;; Arguments: input pixel vector, output pixel vector, width, height and
;; the integer disc radius.  Returns an int status code (0 = success,
;; 1 = failure; see erodeimage-band).
(define vigra_discerosion_c
  (get-ffi-obj 'vigra_discerosion_c vigracket-dylib-path
               (_fun (img_vector1 img_vector2 width height radius) :: [img_vector1 : _cvector]
                     [img_vector2 : _cvector]
                     [width : _int]
                     [height : _int]
                     [radius : _int]
                     -> (res : _int))))
;; Performs a morphological erosion with a disc of the given radius on a
;; single band and returns the eroded result as a new band.
(define (erodeimage-band band radius)
  (define w (band-width band))
  (define h (band-height band))
  (define result (make-band w h))
  (define status
    (vigra_discerosion_c (band-data band) (band-data result) w h radius))
  (case status
    ((0) result)
    ((1) (error "Error in vigracket.morphology:erodeimage: Erosion of image failed!!"))))
;; Erodes every band of the image with a disc of the given radius.
(define (erodeimage image radius)
  (for/list ([band (in-list image)])
    (erodeimage-band band radius)))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Dilate image # # # # # # # # # # # # # # # # # # # #
;; Raw FFI binding to vigra_discdilation_c: disc dilation of a single band.
;; Arguments: input pixel vector, output pixel vector, width, height and
;; the integer disc radius.  Returns an int status code (0 = success,
;; 1 = failure; see dilateimage-band).
(define vigra_discdilation_c
  (get-ffi-obj 'vigra_discdilation_c vigracket-dylib-path
               (_fun (img_vector1 img_vector2 width height radius) :: [img_vector1 : _cvector]
                     [img_vector2 : _cvector]
                     [width : _int]
                     [height : _int]
                     [radius : _int]
                     -> (res : _int))))
;; Performs a morphological dilation with a disc of the given radius on a
;; single band and returns the dilated result as a new band.
(define (dilateimage-band band radius)
  (define w (band-width band))
  (define h (band-height band))
  (define result (make-band w h))
  (define status
    (vigra_discdilation_c (band-data band) (band-data result) w h radius))
  (case status
    ((0) result)
    ((1) (error "Error in vigracket.morphology:dilateimage: Dilation of image failed!!"))))
;; Dilates every band of the image with a disc of the given radius.
(define (dilateimage image radius)
  (for/list ([band (in-list image)])
    (dilateimage-band band radius)))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Opening image # # # # # # # # # # # # # # # # # # # #
;; Morphological opening of one band: erosion followed by dilation with the
;; same disc radius.
(define (openingimage-band band radius)
  (let ([eroded (erodeimage-band band radius)])
    (dilateimage-band eroded radius)))
;; Morphological opening of every band of the image: erosion followed by
;; dilation with the same disc radius.
(define (openingimage image radius)
  (let ([eroded (erodeimage image radius)])
    (dilateimage eroded radius)))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Closing image # # # # # # # # # # # # # # # # # # # #
;; Morphological closing of one band: dilation followed by erosion with the
;; same disc radius.
(define (closingimage-band band radius)
  (let ([dilated (dilateimage-band band radius)])
    (erodeimage-band dilated radius)))
;; Morphological closing of every band of the image: dilation followed by
;; erosion with the same disc radius.
(define (closingimage image radius)
  (let ([dilated (dilateimage image radius)])
    (erodeimage dilated radius)))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Upwind image # # # # # # # # # # # # # # # # # # # #
;; Raw FFI binding to vigra_upwindimage_c.
;; Arguments: input pixel vector, a second "signum" pixel vector
;; (presumably steering the upwind direction per pixel — confirm against
;; the vigra documentation), output pixel vector, width, height and a
;; float radius/weight.  Returns an int status code (0 = success,
;; 1 = failure; see upwindimage-band).
(define vigra_upwindimage_c
  (get-ffi-obj 'vigra_upwindimage_c vigracket-dylib-path
               (_fun (img_vector1 img_vector2 img_vector3 width height radius) :: [img_vector1 : _cvector]
                     [img_vector2 : _cvector]
                     [img_vector3 : _cvector]
                     [width : _int]
                     [height : _int]
                     [radius : _float*]
                     -> (res : _int))))
;; Applies the upwind operator to a single band, guided by signum_band, and
;; returns the result as a new band.  radius is passed to the C library as
;; a float weight.
(define (upwindimage-band band signum_band radius)
  (define w (band-width band))
  (define h (band-height band))
  (define result (make-band w h))
  (define status
    (vigra_upwindimage_c (band-data band) (band-data signum_band)
                         (band-data result) w h radius))
  (case status
    ((0) result)
    ((1) (error "Error in vigracket.morphology:upwindimage: Upwinding of image failed!!"))))
;; Applies the upwind operator band-wise: each band of image is paired with
;; the corresponding band of signum_image.  map is kept (rather than
;; for/list over two sequences) so that lists of unequal length still raise
;; an error, as before.
(define (upwindimage image signum_image radius)
  (map (λ (band signum_band) (upwindimage-band band signum_band radius))
       image
       signum_image))
;; Public interface of the morphology module: per-band workers together
;; with the corresponding whole-image (band-list) wrappers.
(provide
 distancetransform-band
 distancetransform
 erodeimage-band
 erodeimage
 dilateimage-band
 dilateimage
 openingimage-band
 openingimage
 closingimage-band
 closingimage
 upwindimage-band
 upwindimage
 )
(require vigracket/config)
(require vigracket/helpers)
(require scheme/foreign)
(unsafe!)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Distance Transform # # # # # # # # # # # # # # # # # # # #
;; Raw FFI binding to vigra_distancetransform_c (duplicate copy of the
;; module content).  Arguments: input pixel vector, output pixel vector,
;; width, height, background pixel value and the norm selector (0, 1 or 2).
;; Returns an int status code (0 = success, 1 = failure, 2 = invalid norm).
(define vigra_distancetransform_c
  (get-ffi-obj 'vigra_distancetransform_c vigracket-dylib-path
               (_fun (img_vector1 img_vector2 width height background_label norm) :: [img_vector1 : _cvector]
                     [img_vector2 : _cvector]
                     [width : _int]
                     [height : _int]
                     [background_label : _float*]
                     [norm : _int]
                     -> (res : _int))))
;; Computes the distance transform of a single band.
;; Pixels equal to background_label are treated as background; norm selects
;; the distance metric (0, 1 or 2, as accepted by the vigra C library).
;; Returns a freshly allocated band of the same dimensions.
(define (distancetransform-band band background_label norm)
  (let* ((width (band-width band))
         (height (band-height band))
         (band2 (make-band width height))
         (status (vigra_distancetransform_c (band-data band) (band-data band2)
                                            width height background_label norm)))
    (case status
      ((0) band2)
      ;; Fixed: these messages previously claimed "vigracket.filters.…",
      ;; but this function lives in the morphology module (all sibling
      ;; messages use the "vigracket.morphology:" prefix).
      ((1) (error "Error in vigracket.morphology:distancetransform: Distance transformation failed!!"))
      ((2) (error "Error in vigracket.morphology:distancetransform: Norm must be in {0,1,2} !!"))
      ;; Defensive: surface unexpected status codes instead of silently
      ;; returning void.
      (else (error "Error in vigracket.morphology:distancetransform: Unknown error code!!")))))
;; Applies the distance transform to every band of the image
;; (an image is a list of bands).
(define (distancetransform image background_label norm)
  (map (lambda (band) (distancetransform-band band background_label norm)) image))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Erode image # # # # # # # # # # # # # # # # # # # #
;; Raw FFI binding to vigra_discerosion_c: disc erosion of a single band.
;; Returns an int status code (0 = success, 1 = failure).
(define vigra_discerosion_c
  (get-ffi-obj 'vigra_discerosion_c vigracket-dylib-path
               (_fun (img_vector1 img_vector2 width height radius) :: [img_vector1 : _cvector]
                     [img_vector2 : _cvector]
                     [width : _int]
                     [height : _int]
                     [radius : _int]
                     -> (res : _int))))

;; Erodes a single band with a disc of the given radius; returns a new band.
(define (erodeimage-band band radius)
  (let* ((width (band-width band))
         (height (band-height band))
         (band2 (make-band width height))
         (foo (vigra_discerosion_c (band-data band) (band-data band2) width height radius)))
    (case foo
      ((0) band2)
      ((1) (error "Error in vigracket.morphology:erodeimage: Erosion of image failed!!")))))

;; Erodes every band of the image (a list of bands).
(define (erodeimage image radius)
  (map (lambda (band) (erodeimage-band band radius)) image))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Dilate image # # # # # # # # # # # # # # # # # # # #
(define vigra_discdilation_c
(get-ffi-obj 'vigra_discdilation_c vigracket-dylib-path
(_fun (img_vector1 img_vector2 width height radius) :: [img_vector1 : _cvector]
[img_vector2 : _cvector]
[width : _int]
[height : _int]
[radius : _int]
-> (res : _int))))
(define (dilateimage-band band radius)
(let* ((width (band-width band))
(height (band-height band))
(band2 (make-band width height))
(foo (vigra_discdilation_c (band-data band) (band-data band2) width height radius)))
(case foo
((0) band2)
((1) (error "Error in vigracket.morphology:dilateimage: Dilation of image failed!!")))))
(define (dilateimage image radius)
(map (lambda (band) (dilateimage-band band radius)) image))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Opening image # # # # # # # # # # # # # # # # # # # #
(define (openingimage-band band radius)
(dilateimage-band (erodeimage-band band radius) radius))
(define (openingimage image radius)
(dilateimage (erodeimage image radius) radius))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Closing image # # # # # # # # # # # # # # # # # # # #
(define (closingimage-band band radius)
(erodeimage-band (dilateimage-band band radius) radius))
(define (closingimage image radius)
(erodeimage (dilateimage image radius) radius))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # Upwind image # # # # # # # # # # # # # # # # # # # #
(define vigra_upwindimage_c
(get-ffi-obj 'vigra_upwindimage_c vigracket-dylib-path
(_fun (img_vector1 img_vector2 img_vector3 width height radius) :: [img_vector1 : _cvector]
[img_vector2 : _cvector]
[img_vector3 : _cvector]
[width : _int]
[height : _int]
[radius : _float*]
-> (res : _int))))
(define (upwindimage-band band signum_band radius)
(let* ((width (band-width band))
(height (band-height band))
(band2 (make-band width height))
(foo (vigra_upwindimage_c (band-data band) (band-data signum_band) (band-data band2) width height radius)))
(case foo
((0) band2)
((1) (error "Error in vigracket.morphology:upwindimage: Upwinding of image failed!!")))))
(define (upwindimage image signum_image radius)
(map (lambda (band signum_band) (upwindimage-band band signum_band radius)) image signum_image))
(provide
distancetransform-band
distancetransform
erodeimage-band
erodeimage
dilateimage-band
dilateimage
openingimage-band
openingimage
closingimage-band
closingimage
upwindimage-band
upwindimage
) | |
994476031e768d5d751b692d078f3179ae60a29fe0284be4e1f3109c0939e7e0 | relevance/mycroft | handlers.clj | (ns mycroft.handlers)
(defn wrap-logging [handler]
(fn [request]
(let [start (System/nanoTime)
response (handler request)
elapsed (/ (double (- (System/nanoTime) start)) 1000000.0)]
(when response
(println (str (:uri request) " [" (:request-method request) "] " elapsed " msec"
"\n\tParameters " (:params request)
"\n\tSession " (:session request)))
response))))
| null | https://raw.githubusercontent.com/relevance/mycroft/1bf86dfd1092408c9e9fc836be6dfb2f95099253/src/mycroft/handlers.clj | clojure | (ns mycroft.handlers)
(defn wrap-logging [handler]
(fn [request]
(let [start (System/nanoTime)
response (handler request)
elapsed (/ (double (- (System/nanoTime) start)) 1000000.0)]
(when response
(println (str (:uri request) " [" (:request-method request) "] " elapsed " msec"
"\n\tParameters " (:params request)
"\n\tSession " (:session request)))
response))))
| |
fc2f50853da24e0d410b52707680c2e375ab870b3b337bdd380302e4c3941385 | spechub/Hets | OpDecls.hascasl.hs |
types :
A__s : : ( * , data )
A__t : : ( * , data )
values :
: : ( A__s , A__s ) - > A__s
x1 : : : : : : A__s : : A__t
scope :
Prelude . > Prelude . A__s , Type [ A__s ] [ ]
Prelude . > Prelude . A__s , con of A__s
Prelude . A__t |- > Prelude . A__t , Type [ A__t ] [ ]
Prelude . A__t |- > Prelude . A__t , con of A__t
Prelude.a___2_P_2 |- > Prelude.a___2_P_2 , Value
Prelude.x1 |- > Prelude.x1 , Value
Prelude.x2 |- > Prelude.x2 , Value
Prelude.y |- > Prelude.y , Value
> Prelude . A__s , Type [ A__s ] [ ]
> Prelude . A__s , con of A__s
A__t |- > Prelude . A__t , Type [ A__t ] [ ]
A__t |- > Prelude . A__t , con of A__t
a___2_P_2 |- > Prelude.a___2_P_2 , Value
x1 |- > Prelude.x1 , Value
x2 |- > Prelude.x2 , Value
y |- > Prelude.y , Value
types:
A__s :: (*, data)
A__t :: (*, data)
values:
a___2_P_2 :: (A__s, A__s) -> A__s
x1 :: A__s
x2 :: A__s
y :: A__s
A__s :: A__s
A__t :: A__t
scope:
Prelude.A__s |-> Prelude.A__s, Type [A__s] []
Prelude.A__s |-> Prelude.A__s, con of A__s
Prelude.A__t |-> Prelude.A__t, Type [A__t] []
Prelude.A__t |-> Prelude.A__t, con of A__t
Prelude.a___2_P_2 |-> Prelude.a___2_P_2, Value
Prelude.x1 |-> Prelude.x1, Value
Prelude.x2 |-> Prelude.x2, Value
Prelude.y |-> Prelude.y, Value
A__s |-> Prelude.A__s, Type [A__s] []
A__s |-> Prelude.A__s, con of A__s
A__t |-> Prelude.A__t, Type [A__t] []
A__t |-> Prelude.A__t, con of A__t
a___2_P_2 |-> Prelude.a___2_P_2, Value
x1 |-> Prelude.x1, Value
x2 |-> Prelude.x2, Value
y |-> Prelude.y, Value
-}
module Dummy where
data A__s = A__s
data A__t = A__t
a___2_P_2 :: (A__s, A__s) -> A__s
a___2_P_2
( ( A__s , A__s ) - > A__s )
x1 :: A__s
A__s
x2 :: A__s
A__s
y :: A__s
y = a___2_P_2 (x2, x2)
| null | https://raw.githubusercontent.com/spechub/Hets/af7b628a75aab0d510b8ae7f067a5c9bc48d0f9e/ToHaskell/test/OpDecls.hascasl.hs | haskell |
types :
A__s : : ( * , data )
A__t : : ( * , data )
values :
: : ( A__s , A__s ) - > A__s
x1 : : : : : : A__s : : A__t
scope :
Prelude . > Prelude . A__s , Type [ A__s ] [ ]
Prelude . > Prelude . A__s , con of A__s
Prelude . A__t |- > Prelude . A__t , Type [ A__t ] [ ]
Prelude . A__t |- > Prelude . A__t , con of A__t
Prelude.a___2_P_2 |- > Prelude.a___2_P_2 , Value
Prelude.x1 |- > Prelude.x1 , Value
Prelude.x2 |- > Prelude.x2 , Value
Prelude.y |- > Prelude.y , Value
> Prelude . A__s , Type [ A__s ] [ ]
> Prelude . A__s , con of A__s
A__t |- > Prelude . A__t , Type [ A__t ] [ ]
A__t |- > Prelude . A__t , con of A__t
a___2_P_2 |- > Prelude.a___2_P_2 , Value
x1 |- > Prelude.x1 , Value
x2 |- > Prelude.x2 , Value
y |- > Prelude.y , Value
types:
A__s :: (*, data)
A__t :: (*, data)
values:
a___2_P_2 :: (A__s, A__s) -> A__s
x1 :: A__s
x2 :: A__s
y :: A__s
A__s :: A__s
A__t :: A__t
scope:
Prelude.A__s |-> Prelude.A__s, Type [A__s] []
Prelude.A__s |-> Prelude.A__s, con of A__s
Prelude.A__t |-> Prelude.A__t, Type [A__t] []
Prelude.A__t |-> Prelude.A__t, con of A__t
Prelude.a___2_P_2 |-> Prelude.a___2_P_2, Value
Prelude.x1 |-> Prelude.x1, Value
Prelude.x2 |-> Prelude.x2, Value
Prelude.y |-> Prelude.y, Value
A__s |-> Prelude.A__s, Type [A__s] []
A__s |-> Prelude.A__s, con of A__s
A__t |-> Prelude.A__t, Type [A__t] []
A__t |-> Prelude.A__t, con of A__t
a___2_P_2 |-> Prelude.a___2_P_2, Value
x1 |-> Prelude.x1, Value
x2 |-> Prelude.x2, Value
y |-> Prelude.y, Value
-}
module Dummy where
data A__s = A__s
data A__t = A__t
a___2_P_2 :: (A__s, A__s) -> A__s
a___2_P_2
( ( A__s , A__s ) - > A__s )
x1 :: A__s
A__s
x2 :: A__s
A__s
y :: A__s
y = a___2_P_2 (x2, x2)
| |
8756ae168ecdc5e9b32eb7874d62ebcd785e5a8c24d980dbce0999285c2812b5 | sbcl/sbcl | alloc.lisp | allocation VOPs for the Sparc port
This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB-VM")
;;;; LIST and LIST*
(define-vop (list)
(:args (things :more t :scs (any-reg descriptor-reg zero null control-stack)))
(:temporary (:scs (descriptor-reg)) ptr)
(:temporary (:scs (descriptor-reg)) temp)
(:temporary (:scs (descriptor-reg) :to (:result 0) :target result)
res)
(:temporary (:scs (non-descriptor-reg)) alloc-temp)
(:info star cons-cells)
(:results (result :scs (descriptor-reg)))
(:node-var node)
(:generator 0
(macrolet ((maybe-load (tn)
(once-only ((tn tn))
`(sc-case ,tn
((any-reg descriptor-reg zero null)
,tn)
(control-stack
(load-stack-tn temp ,tn)
temp)))))
(let ((dx-p (node-stack-allocate-p node))
(alloc (* (pad-data-block cons-size) cons-cells)))
(pseudo-atomic (temp)
(allocation 'list alloc list-pointer-lowtag res
:stack-p dx-p
:temp-tn alloc-temp)
(move ptr res)
(dotimes (i (1- cons-cells))
(storew (maybe-load (tn-ref-tn things)) ptr
cons-car-slot list-pointer-lowtag)
(setf things (tn-ref-across things))
(inst add ptr ptr (pad-data-block cons-size))
(storew ptr ptr
(- cons-cdr-slot cons-size)
list-pointer-lowtag))
(storew (maybe-load (tn-ref-tn things)) ptr
cons-car-slot list-pointer-lowtag)
(storew (if star
(maybe-load (tn-ref-tn (tn-ref-across things)))
null-tn)
ptr cons-cdr-slot list-pointer-lowtag))
(move result res)))))
;;;; Special purpose inline allocators.
(define-vop (make-fdefn)
(:args (name :scs (descriptor-reg) :to :eval))
(:temporary (:scs (non-descriptor-reg)) temp)
(:results (result :scs (descriptor-reg) :from :argument))
(:policy :fast-safe)
(:translate make-fdefn)
(:generator 37
(with-fixed-allocation (result temp fdefn-widetag fdefn-size)
(inst li temp (make-fixup 'undefined-tramp :assembly-routine))
(storew name result fdefn-name-slot other-pointer-lowtag)
(storew null-tn result fdefn-fun-slot other-pointer-lowtag)
(storew temp result fdefn-raw-addr-slot other-pointer-lowtag))))
(define-vop (make-closure)
(:args (function :to :save :scs (descriptor-reg)))
(:info label length stack-allocate-p)
(:ignore label)
(:temporary (:scs (non-descriptor-reg)) temp)
(:results (result :scs (descriptor-reg)))
(:generator 10
(let* ((size (+ length closure-info-offset))
(alloc-size (pad-data-block size)))
(pseudo-atomic (temp)
(allocation nil alloc-size fun-pointer-lowtag result
:stack-p stack-allocate-p
:temp-tn temp)
(inst li temp (logior (ash (1- size) n-widetag-bits) closure-widetag))
(storew temp result 0 fun-pointer-lowtag)
(storew function result closure-fun-slot fun-pointer-lowtag)))))
;;; The compiler likes to be able to directly make value cells.
(define-vop (make-value-cell)
(:args (value :to :save :scs (descriptor-reg any-reg)))
(:temporary (:scs (non-descriptor-reg)) temp)
(:info stack-allocate-p)
(:ignore stack-allocate-p)
(:results (result :scs (descriptor-reg)))
(:generator 10
(with-fixed-allocation
(result temp value-cell-widetag value-cell-size)
(storew value result value-cell-value-slot other-pointer-lowtag))))
;;;; Automatic allocators for primitive objects.
(define-vop (make-unbound-marker)
(:args)
(:results (result :scs (descriptor-reg any-reg)))
(:generator 1
(inst li result unbound-marker-widetag)))
(define-vop (make-funcallable-instance-tramp)
(:args)
(:results (result :scs (any-reg)))
(:generator 1
(inst li result (make-fixup 'funcallable-instance-tramp :assembly-routine))))
(define-vop (fixed-alloc)
(:args)
(:info name words type lowtag stack-allocate-p)
(:ignore name)
(:results (result :scs (descriptor-reg)))
(:temporary (:scs (non-descriptor-reg)) temp)
(:generator 4
(pseudo-atomic (temp)
(allocation nil (pad-data-block words) lowtag result :temp-tn temp
:stack-p stack-allocate-p)
(inst li temp (compute-object-header words type))
(storew temp result 0 lowtag))))
(define-vop (var-alloc)
(:args (extra :scs (any-reg)))
(:arg-types positive-fixnum)
(:info name words type lowtag stack-allocate-p)
(:ignore name stack-allocate-p)
(:results (result :scs (descriptor-reg)))
(:temporary (:scs (any-reg)) bytes)
(:temporary (:scs (non-descriptor-reg)) header)
(:temporary (:scs (non-descriptor-reg)) temp)
(:generator 6
(inst add bytes extra (* (1+ words) n-word-bytes))
(inst sll header bytes (- (length-field-shift type) 2))
;; The specified EXTRA value is the exact value placed in the header
;; as the word count when allocating code.
(cond ((= type code-header-widetag)
(inst add header header type))
(t
(inst add header header (+ (ash -2 (length-field-shift type)) type))
(inst and bytes (lognot lowtag-mask))))
(pseudo-atomic (temp)
(allocation nil bytes lowtag result :temp-tn temp)
(storew header result 0 lowtag))))
| null | https://raw.githubusercontent.com/sbcl/sbcl/5c84fb3414e323f5373a4c1a59b0716d84d5c916/src/compiler/sparc/alloc.lisp | lisp | more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
LIST and LIST*
Special purpose inline allocators.
The compiler likes to be able to directly make value cells.
Automatic allocators for primitive objects.
The specified EXTRA value is the exact value placed in the header
as the word count when allocating code. | allocation VOPs for the Sparc port
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB-VM")
(define-vop (list)
(:args (things :more t :scs (any-reg descriptor-reg zero null control-stack)))
(:temporary (:scs (descriptor-reg)) ptr)
(:temporary (:scs (descriptor-reg)) temp)
(:temporary (:scs (descriptor-reg) :to (:result 0) :target result)
res)
(:temporary (:scs (non-descriptor-reg)) alloc-temp)
(:info star cons-cells)
(:results (result :scs (descriptor-reg)))
(:node-var node)
(:generator 0
(macrolet ((maybe-load (tn)
(once-only ((tn tn))
`(sc-case ,tn
((any-reg descriptor-reg zero null)
,tn)
(control-stack
(load-stack-tn temp ,tn)
temp)))))
(let ((dx-p (node-stack-allocate-p node))
(alloc (* (pad-data-block cons-size) cons-cells)))
(pseudo-atomic (temp)
(allocation 'list alloc list-pointer-lowtag res
:stack-p dx-p
:temp-tn alloc-temp)
(move ptr res)
(dotimes (i (1- cons-cells))
(storew (maybe-load (tn-ref-tn things)) ptr
cons-car-slot list-pointer-lowtag)
(setf things (tn-ref-across things))
(inst add ptr ptr (pad-data-block cons-size))
(storew ptr ptr
(- cons-cdr-slot cons-size)
list-pointer-lowtag))
(storew (maybe-load (tn-ref-tn things)) ptr
cons-car-slot list-pointer-lowtag)
(storew (if star
(maybe-load (tn-ref-tn (tn-ref-across things)))
null-tn)
ptr cons-cdr-slot list-pointer-lowtag))
(move result res)))))
(define-vop (make-fdefn)
(:args (name :scs (descriptor-reg) :to :eval))
(:temporary (:scs (non-descriptor-reg)) temp)
(:results (result :scs (descriptor-reg) :from :argument))
(:policy :fast-safe)
(:translate make-fdefn)
(:generator 37
(with-fixed-allocation (result temp fdefn-widetag fdefn-size)
(inst li temp (make-fixup 'undefined-tramp :assembly-routine))
(storew name result fdefn-name-slot other-pointer-lowtag)
(storew null-tn result fdefn-fun-slot other-pointer-lowtag)
(storew temp result fdefn-raw-addr-slot other-pointer-lowtag))))
(define-vop (make-closure)
(:args (function :to :save :scs (descriptor-reg)))
(:info label length stack-allocate-p)
(:ignore label)
(:temporary (:scs (non-descriptor-reg)) temp)
(:results (result :scs (descriptor-reg)))
(:generator 10
(let* ((size (+ length closure-info-offset))
(alloc-size (pad-data-block size)))
(pseudo-atomic (temp)
(allocation nil alloc-size fun-pointer-lowtag result
:stack-p stack-allocate-p
:temp-tn temp)
(inst li temp (logior (ash (1- size) n-widetag-bits) closure-widetag))
(storew temp result 0 fun-pointer-lowtag)
(storew function result closure-fun-slot fun-pointer-lowtag)))))
(define-vop (make-value-cell)
(:args (value :to :save :scs (descriptor-reg any-reg)))
(:temporary (:scs (non-descriptor-reg)) temp)
(:info stack-allocate-p)
(:ignore stack-allocate-p)
(:results (result :scs (descriptor-reg)))
(:generator 10
(with-fixed-allocation
(result temp value-cell-widetag value-cell-size)
(storew value result value-cell-value-slot other-pointer-lowtag))))
(define-vop (make-unbound-marker)
(:args)
(:results (result :scs (descriptor-reg any-reg)))
(:generator 1
(inst li result unbound-marker-widetag)))
(define-vop (make-funcallable-instance-tramp)
(:args)
(:results (result :scs (any-reg)))
(:generator 1
(inst li result (make-fixup 'funcallable-instance-tramp :assembly-routine))))
(define-vop (fixed-alloc)
(:args)
(:info name words type lowtag stack-allocate-p)
(:ignore name)
(:results (result :scs (descriptor-reg)))
(:temporary (:scs (non-descriptor-reg)) temp)
(:generator 4
(pseudo-atomic (temp)
(allocation nil (pad-data-block words) lowtag result :temp-tn temp
:stack-p stack-allocate-p)
(inst li temp (compute-object-header words type))
(storew temp result 0 lowtag))))
(define-vop (var-alloc)
(:args (extra :scs (any-reg)))
(:arg-types positive-fixnum)
(:info name words type lowtag stack-allocate-p)
(:ignore name stack-allocate-p)
(:results (result :scs (descriptor-reg)))
(:temporary (:scs (any-reg)) bytes)
(:temporary (:scs (non-descriptor-reg)) header)
(:temporary (:scs (non-descriptor-reg)) temp)
(:generator 6
(inst add bytes extra (* (1+ words) n-word-bytes))
(inst sll header bytes (- (length-field-shift type) 2))
(cond ((= type code-header-widetag)
(inst add header header type))
(t
(inst add header header (+ (ash -2 (length-field-shift type)) type))
(inst and bytes (lognot lowtag-mask))))
(pseudo-atomic (temp)
(allocation nil bytes lowtag result :temp-tn temp)
(storew header result 0 lowtag))))
|
bd76714829c7abb74a637202f4c881c0f8f5631ae2201d4284064207260584cb | sunng87/diehard | timeout_test.clj | (ns diehard.timeout-test
(:require [clojure.test :refer :all]
[diehard.core :refer :all])
(:import [java.time Duration]
[dev.failsafe TimeoutExceededException]
[java.util.concurrent ExecutionException]
[clojure.lang ExceptionInfo]))
(def timeout-duration 50)
(deftest get-with-timeout-test
(testing "get"
(is (= "result" (with-timeout {:timeout-ms timeout-duration}
(Thread/sleep 25)
"result"))))
(testing "get exceeds timeout and throws timeout exception"
(is (thrown? TimeoutExceededException
(with-timeout {:timeout-ms timeout-duration}
(Thread/sleep 100)
"result"))))
(testing "get given interrupt flag set exceeds timeout and throws"
(let [start (System/currentTimeMillis)
timeout-ms 500]
(is (thrown? TimeoutExceededException
(with-timeout {:timeout-ms timeout-ms
:interrupt? true}
(Thread/sleep 5000)
"result")))
(let [end (System/currentTimeMillis)]
(is (< (- end start) (* 1.5 timeout-ms))))))
(testing "get on success callback"
(let [call-count (atom 0)]
(is (= "result" (with-timeout {:timeout-ms timeout-duration
:on-success (fn [_]
(swap! call-count inc))}
(Thread/sleep 25)
"result")))
(is (= 1 @call-count))))
(testing "get on failure callback"
(let [call-count (atom 0)]
(is (thrown? TimeoutExceededException
(with-timeout {:timeout-ms timeout-duration
:on-failure (fn [_]
(swap! call-count inc))}
(Thread/sleep 60)
"result")))
(is (= 1 @call-count)))))
(deftest timeout-test
(testing "should raise error on receiving unknown keys"
(is (thrown? ExceptionInfo
(with-timeout {:timeout-ms 5000
:on-success (fn [_])
:unknown-key 1}))))
(testing "should raise error on receiving unknown types"
(is (thrown? ExceptionInfo
(with-timeout {:timeout-ms 5000
:on-success "string instead of function"
:unknown-key 1})))
(is (thrown? ExceptionInfo
(with-timeout {:timeout-ms 5000
:on-failure "string instead of function"
:unknown-key 1})))))
| null | https://raw.githubusercontent.com/sunng87/diehard/636f3b0dbe6327147e727bf4a200ff1d157085a3/test/diehard/timeout_test.clj | clojure | (ns diehard.timeout-test
(:require [clojure.test :refer :all]
[diehard.core :refer :all])
(:import [java.time Duration]
[dev.failsafe TimeoutExceededException]
[java.util.concurrent ExecutionException]
[clojure.lang ExceptionInfo]))
(def timeout-duration 50)
(deftest get-with-timeout-test
(testing "get"
(is (= "result" (with-timeout {:timeout-ms timeout-duration}
(Thread/sleep 25)
"result"))))
(testing "get exceeds timeout and throws timeout exception"
(is (thrown? TimeoutExceededException
(with-timeout {:timeout-ms timeout-duration}
(Thread/sleep 100)
"result"))))
(testing "get given interrupt flag set exceeds timeout and throws"
(let [start (System/currentTimeMillis)
timeout-ms 500]
(is (thrown? TimeoutExceededException
(with-timeout {:timeout-ms timeout-ms
:interrupt? true}
(Thread/sleep 5000)
"result")))
(let [end (System/currentTimeMillis)]
(is (< (- end start) (* 1.5 timeout-ms))))))
(testing "get on success callback"
(let [call-count (atom 0)]
(is (= "result" (with-timeout {:timeout-ms timeout-duration
:on-success (fn [_]
(swap! call-count inc))}
(Thread/sleep 25)
"result")))
(is (= 1 @call-count))))
(testing "get on failure callback"
(let [call-count (atom 0)]
(is (thrown? TimeoutExceededException
(with-timeout {:timeout-ms timeout-duration
:on-failure (fn [_]
(swap! call-count inc))}
(Thread/sleep 60)
"result")))
(is (= 1 @call-count)))))
(deftest timeout-test
(testing "should raise error on receiving unknown keys"
(is (thrown? ExceptionInfo
(with-timeout {:timeout-ms 5000
:on-success (fn [_])
:unknown-key 1}))))
(testing "should raise error on receiving unknown types"
(is (thrown? ExceptionInfo
(with-timeout {:timeout-ms 5000
:on-success "string instead of function"
:unknown-key 1})))
(is (thrown? ExceptionInfo
(with-timeout {:timeout-ms 5000
:on-failure "string instead of function"
:unknown-key 1})))))
| |
5361cecd77243d150a85a8c7121eacc1216fc8d63602dfff2554013d278056df | jnoll/gantt | gantt.hs | {-# LANGUAGE DeriveDataTypeable #-}
import DateRange
import Parse
import FormatPGF (formatPGF)
import FormatMarkdown (formatMarkdown)
import Data.Maybe (fromMaybe)
import Control.Monad.Error
import Control.Monad.Reader
import Data.Data (constrFields, toConstr, gmapQ, cast)
import Data.List
import Data.String.Utils (replace)
import Data.Time.Clock (utctDay, getCurrentTime)
import Data.Time.Calendar (addDays, diffDays, addGregorianMonthsClip, addGregorianMonthsRollOver, addGregorianYearsRollOver, fromGregorian, toGregorian, gregorianMonthLength,)
import Paths_gantt (getDataFileName)
import System.Console.CmdArgs
import System.Directory (getCurrentDirectory, setCurrentDirectory)
import System.FilePath (takeExtension, takeBaseName, (</>), (<.>))
import System.IO
import System.IO.Temp (withSystemTempDirectory)
import System.Process (system)
import Text.StringTemplate as ST
-- Convert a record into (field name, rendered value) pairs, one per field.
showEm :: (Data a) => a -> [(String, String)]
showEm x = zip names values
  where
    names  = constrFields (toConstr x)
    values = gmapQ render x
    -- The explicit signature keeps 'render' rank-polymorphic so gmapQ can
    -- apply it to fields of differing types.
    render :: (Data d) => d -> String
    render = showIt [showPeriod, showBool, showDouble, showInt, showDate, showStr]
-- Using a list of "showing" functions, find a representation of d.
-- Thread d through a chain of "showing" probes.  The first probe that
-- recognises the value short-circuits with its rendering (Left); if no
-- probe matches, fall back to a sentinel string.
showIt :: (Data a) => [(a -> Either String a)] -> a -> String
showIt probes d =
  case foldl (>>=) (Right d) probes of
    Left rendered -> rendered
    Right _       -> "XXX Nope XXX"
-- Show various primitive types.  Each probe uses 'cast' to test one concrete
-- type: it renders a match as Left (a String), and passes a non-match through
-- unchanged as Right so the next probe in 'showIt' can try it.
showInt, showDouble, showPeriod, showBool, showDate, showStr, showDef :: (Data a) => a -> Either String a
showInt d = maybe (Right d) (Left . show) (cast d :: Maybe Int)
showDouble d = maybe (Right d) (Left . show) (cast d :: Maybe Double)
showPeriod d = maybe (Right d) (Left . show) (cast d :: Maybe Period)
showBool d = maybe (Right d) (Left . show) (cast d :: Maybe Bool)
showDate d = maybe (Right d) (Left . show) (cast d :: Maybe Day)
-- showStr passes Strings through verbatim (no quoting), unlike showDef.
showStr d = maybe (Right d) (Left) (cast d :: Maybe String)
-- NOTE(review): showDef appears unused in this file; kept for compatibility.
showDef d = maybe (Right d) (Left . show) (cast d :: Maybe String)
-- | Render the chart as PGF/LaTeX: populate the template with every
-- non-empty chart attribute plus the formatted chart body, then write
-- the result to the handle.
printPGF :: Gantt -> ST.StringTemplate String -> Day -> Handle -> IO ()
printPGF g tmpl end h = do
  -- Drop attributes whose rendering is empty so they don't shadow
  -- template-side defaults.
  let populated = [ kv | kv@(_, v) <- showEm g, not (null v) ]
  hPutStrLn h . ST.toString $ ST.setManyAttrib (populated ++ formatPGF g end) tmpl
-- | Render the chart as Markdown by injecting the formatted body into the
-- template's "body" attribute and writing the result to the handle.
printMarkdown :: Gantt -> ST.StringTemplate String -> Handle -> IO ()
printMarkdown g tmpl h =
  hPutStrLn h (ST.toString (ST.setAttribute "body" (formatMarkdown g) tmpl))
-- | Render the chart description to the given handle, dispatching on the
-- chart type: Markdown charts go through the markdown formatter, everything
-- else is rendered as PGF/LaTeX.
printGantt :: Gantt -> ST.StringTemplate String -> Handle -> IO ()
printGantt g tmpl h = do
  when (verbose g) $ do
    putStrLn "--- gantt ---"
    putStrLn $ show g
  -- Last day of the report window: windowDur periods (inclusive of the
  -- first) offset from the window start.
  let end_date = offsetToDay (windowStart g) (toInteger $ (windowDur g) - 1) (inSize g)
  case charttype g of
    Markdown -> printMarkdown g tmpl h
    -- Previously this arm bound the pattern variable 'otherwise', which
    -- shadowed Prelude.otherwise; a plain wildcard says what is meant.
    _ -> printPGF g tmpl end_date h
-- Command line parsing and processing --------------------------------------------------------------------------
-- help, summary, and program are for command line argument parsing.

-- | Raw command-line options as parsed by CmdArgs.  The first group of
-- fields mirrors settings that may also appear in the input chart file;
-- a field left at its 'def' value means "not given on the command line"
-- and defers to the chart file's value (see 'main').
data Options = Options {
      opt_start :: String          -- ^ Project start date (text, parsed later).
    , opt_dur :: Int               -- ^ Project duration, in periods.
    , opt_windowStart :: String    -- ^ Output report window start date.
    , opt_windowDur :: Int         -- ^ Output window duration, in periods.
    , opt_inSize :: Period         -- ^ Period size used in the input file.
    , opt_outSize :: Period        -- ^ Period size used for the report.
    -- Command line only options.
    , opt_font :: String           -- ^ Typeface for the printed chart.
    , opt_labelWidth :: Int        -- ^ Width of task, group, milestone labels.
    , opt_standalone :: Bool       -- ^ Emit a standalone LaTeX document.
    , opt_markToday :: Bool        -- ^ Draw a marker at today's date.
    , opt_outfile :: FilePath      -- ^ Output file; "stdout" writes to stdout.
    , opt_verbose :: Bool          -- ^ Print diagnostics while running.
    , opt_file :: FilePath         -- ^ Input chart description file.
    , opt_template :: FilePath     -- ^ Template file for standalone output.
    , opt_chartopts :: String      -- ^ Options passed to \pgfganttchart.
    , opt_charttype :: ChartType   -- ^ Output flavor: Gantt (PGF) or Markdown.
    } deriving (Data, Typeable, Show)
-- | Default values and CmdArgs annotations for every command-line option.
-- Options are given 'explicit' long names; 'def' marks "not supplied" so
-- 'main' can let values from the chart file win over untouched options.
defaultOptions :: Options
defaultOptions = Options {
      opt_start = def &= help "Project start date" &= explicit &= name "start"
    , opt_dur = def &= help "Project duration (periods)" &= explicit &= name "dur"
    , opt_windowStart = def &= help "Output 'window' start date (yyyy-mm-dd)" &= explicit &= name "winst"
    , opt_windowDur = def &= help "Output 'window' duration (periods)" &= explicit &= name "windur"
    , opt_inSize = def &= help "Input period size (default: monthly)" &= explicit &= name "insize"
    -- NOTE(review): cmdArgs 'enum' uses the first alternative (DefaultPeriod)
    -- when no period flag is given on the command line.
    , opt_outSize = enum [ DefaultPeriod
                         , Monthly &= help "Output report Monthly (default)"
                         , Daily &= help "Output report Daily"
                         , Weekly &= help "Output report Weekly"
                         , Quarterly &= help "Output report Quarterly"
                         , Yearly &= help "Output report Yearly"
                         ]
    -- Command line only options.
    , opt_font = def &= help "Typeface for printed chart" &= explicit &= name "font"
    , opt_labelWidth = def &= help "Width (in ems) of group, task, milestone label column" &= explicit &= name "labelwidth"
      -- Fixed help-text typo: "standlone" -> "standalone".
    , opt_standalone = True &= help "Generate standalone latex file" &= explicit &= name "standalone"
    , opt_markToday = False &= help "Show today's date as 'today'" &= explicit &= name "today"
    , opt_outfile = "stdout" &= help "Output file" &= name "outfile"
    , opt_verbose = False &= help "Print diagnostics as well" &= explicit &= name "verbose"
    , opt_file = "test.gantt" &= args &= typFile
    , opt_template = def &= help "Template for standalone output" &= explicit &= name "template"
    , opt_chartopts = def &= help "Options for \\pgfganttchart" &= explicit &= name "chartopts"
    , opt_charttype = def &= help "Chart type: Gantt (default) or Markdown" &= explicit &= name "type"
    }
    &= summary "Gantt chart v0.1, (C) 2016 John Noll"
    &= program "main"
-- | Render the chart to LaTeX in a temporary directory, run pdflatex there,
-- then restore the original working directory and copy the resulting PDF
-- to 'outfile' (so a relative 'outfile' resolves against the caller's cwd).
makePDF :: Gantt -> String -> FilePath -> IO ()
makePDF g tmpl outfile = do
  cwd <- getCurrentDirectory
  withSystemTempDirectory "ganttpdf" $ \tmpDir -> do
    setCurrentDirectory tmpDir
    let texFile = takeBaseName outfile <.> "tex"
    h <- openFile texFile WriteMode
    printGantt g (ST.newSTMP tmpl) h
    hClose h
    _ <- system $ "pdflatex " ++ texFile ++ " > /dev/null"
    setCurrentDirectory cwd
    _ <- system $ "cp " ++ (tmpDir </> (takeBaseName texFile) <.> "pdf") ++ " " ++ outfile
    return ()
-- | Choose the template text: a user-supplied template file when one was
-- named on the command line, otherwise the bundled default for the chart
-- type ("memo.st" for Markdown, "gantt.st" for everything else).
getTemplate :: Options -> IO String
getTemplate opts
  | opt_template opts /= "" = readFile (opt_template opts)
  | otherwise = getDataFileName ("templates" </> builtin) >>= readFile
  where
    builtin = if opt_charttype opts == Markdown then "memo.st" else "gantt.st"
-- Prefer x unless it was left at its default value, in which case use y.
-- (Used in 'main' to let chart-file settings win over untouched options.)
ifDef :: (Eq a, Default a) => a -> a -> a
ifDef x y
  | x == def  = y
  | otherwise = x
-- | Program entry point: parse command-line options, overlay them on the
-- settings parsed from the chart file (explicit command-line values win),
-- then render the chart to stdout or to the requested output file.
main :: IO ()
main = do
  args <- cmdArgs defaultOptions
  -- Current day, used when the user asks for a "today" marker on the chart.
  todays_date <- getCurrentTime >>= return . utctDay

  when (opt_verbose args) $ do
    putStrLn "--- args ---"
    putStrLn $ show $ args

  let cfg = defaultGantt
  c <- readFile (opt_file args)
  -- Seed the parser's configuration from the defaults, overridden by any
  -- option actually supplied on the command line (ifDef keeps the config
  -- value when the option was left at its default).
  case parseGantt cfg {
             start = if (opt_start args) /= def then parseDate (opt_start args)
                     else (start cfg)
           , dur = ifDef (opt_dur args) (dur cfg)
           -- The report window falls back to the project start date.
           , windowStart = if (opt_windowStart args) /= def then parseDate (opt_windowStart args)
                           else if (opt_start args) /= def then parseDate (opt_start args) else (start cfg)
           , windowDur = ifDef (opt_windowDur args) (windowDur cfg)
           , inSize = ifDef (opt_inSize args) (inSize cfg)
           , outSize = ifDef (opt_outSize args) (outSize cfg)
           , today = if (opt_markToday args) then todays_date else (today cfg)
           , font = opt_font args
           , labelWidth = ifDef (opt_labelWidth args) (labelWidth cfg)
           , standalone = opt_standalone args
           , verbose = opt_verbose args
           , file = opt_file args
           , template = opt_template args
           , chartopts = opt_chartopts args
           , charttype = ifDef (opt_charttype args) (charttype cfg)
           } c of
    -- Parse failure: report the parse error and stop.
    Left e -> putStrLn $ show $ e
    Right g' -> do
      t <- getTemplate args
      -- A window start equal to the epoch (1970-01-01) means "unset": fall
      -- back to the project start.  A zero window duration means "whole
      -- project".
      let g = g' { windowStart = if diffDays (windowStart g') (fromGregorian 1970 1 1) == 0 then start g' else windowStart g'
                 , windowDur = if windowDur g' == 0 then dur g' else windowDur g'
                 }
       -- Dispatch on the output file's extension: .pdf/.png run pdflatex
       -- (plus ImageMagick's convert for .png); any other name gets the
       -- rendered template text written to it directly.
       in if (opt_outfile args) == "stdout" then printGantt g (ST.newSTMP t) stdout else
            case takeExtension (opt_outfile args) of
              ".pdf" -> makePDF g t (opt_outfile args)
              ".png" -> let pdfFile = (takeBaseName (opt_outfile args)) <.> "pdf" in
                        (makePDF g t $ pdfFile) >>
                        -- the density is high so image can be resized without pixelating.
                        (system $ "convert -density 1200 -quality 100 " ++ pdfFile ++ " " ++ (opt_outfile args)) >>
                        return ()
              otherwise -> (openFile (opt_outfile args) WriteMode >>= (\h ->
                             printGantt g (ST.newSTMP t) h >> hClose h))
| null | https://raw.githubusercontent.com/jnoll/gantt/e7099e1786177580526d8da43d62e0182f00e681/gantt.hs | haskell | # LANGUAGE DeriveDataTypeable #
Convert a record to a list.
Show various primitive types.
Command line parsing and processing --------------------------------------------------------------------------
help, summary, and program are for command line argument parsing.
Command line only options.
with of task, group, milestone labels
Command line only options.
the density is high so image can be resized without pixelating. | import DateRange
import Parse
import FormatPGF (formatPGF)
import FormatMarkdown (formatMarkdown)
import Data.Maybe (fromMaybe)
import Control.Monad.Error
import Control.Monad.Reader
import Data.Data (constrFields, toConstr, gmapQ, cast)
import Data.List
import Data.String.Utils (replace)
import Data.Time.Clock (utctDay, getCurrentTime)
import Data.Time.Calendar (addDays, diffDays, addGregorianMonthsClip, addGregorianMonthsRollOver, addGregorianYearsRollOver, fromGregorian, toGregorian, gregorianMonthLength,)
import Paths_gantt (getDataFileName)
import System.Console.CmdArgs
import System.Directory (getCurrentDirectory, setCurrentDirectory)
import System.FilePath (takeExtension, takeBaseName, (</>), (<.>))
import System.IO
import System.IO.Temp (withSystemTempDirectory)
import System.Process (system)
import Text.StringTemplate as ST
showEm :: (Data a) => a -> [(String, String)]
showEm x = zip (constrFields $ toConstr x) (gmapQ (showIt [showPeriod, showBool, showDouble, showInt, showDate, showStr]) x)
Using a list of " showing " functions , find a representation of d.
showIt :: (Data a) => [(a -> Either String a)] -> a -> String
showIt ss d = either id (\_ -> "XXX Nope XXX") (foldl (>>=) (return d) ss)
showInt, showDouble, showPeriod, showBool, showStr, showDef :: (Data a) => a -> Either String a
showInt d = maybe (Right d) (Left . show) (cast d :: Maybe Int)
showDouble d = maybe (Right d) (Left . show) (cast d :: Maybe Double)
showPeriod d = maybe (Right d) (Left . show) (cast d :: Maybe Period)
showBool d = maybe (Right d) (Left . show) (cast d :: Maybe Bool)
showDate d = maybe (Right d) (Left . show) (cast d :: Maybe Day)
showStr d = maybe (Right d) (Left) (cast d :: Maybe String)
showDef d = maybe (Right d) (Left . show) (cast d :: Maybe String)
-- | Fill the PGF/LaTeX template with the chart's attributes plus the
-- formatted chart body, and write the result to the handle.
printPGF :: Gantt -> ST.StringTemplate String -> Day -> Handle -> IO ()
printPGF g tmpl end h = do
  -- Drop empty attributes so the template's conditional blocks see them
  -- as unset rather than as empty strings.
  let attr = filter (\(k, v) -> length v > 0) $ showEm g
  hPutStrLn h $ ST.toString $ ST.setManyAttrib (attr ++ formatPGF g end) tmpl
-- | Render the chart as Markdown: format the body and substitute it for
-- the template's @body@ attribute.
printMarkdown :: Gantt -> ST.StringTemplate String -> Handle -> IO ()
printMarkdown g tmpl h =
  hPutStrLn h (ST.toString (ST.setAttribute "body" (formatMarkdown g) tmpl))
-- | Render the chart description @g@ through template @tmpl@ onto handle
-- @h@, dispatching on the configured chart type (Markdown vs. PGF/LaTeX).
--
-- Fixes: removed the unused @end@/@st@ bindings, restored the dead
-- alternative @end_date@ computation as a comment (it referenced an
-- undefined @endToDay@ and duplicated the binding below), and replaced
-- the misleading @otherwise@ wildcard pattern with @_@.
printGantt :: Gantt -> ST.StringTemplate String -> Handle -> IO ()
printGantt g tmpl h = do
  -- Echo the parsed configuration when running verbosely.
  when (verbose g) $ do
    putStrLn "--- gantt ---"
    putStrLn $ show g
  -- Last calendar day covered by the output window; the PGF backend
  -- needs it to lay out the time axis.
  -- let end_date = runReader (endToDay $ addDays (windowDur g) (...)) g
  let end_date = offsetToDay (windowStart g) (toInteger $ (windowDur g) - 1) (inSize g)
  case charttype g of
    Markdown -> printMarkdown g tmpl h
    _        -> printPGF g tmpl end_date h
-- | Command-line options; cmdargs fills this record from argv.  The
-- fields mirror the 'Gantt' configuration so 'main' can overlay them on
-- the values parsed from the input file.
--
-- Fix: 'opt_labelWidth' was referenced by 'defaultOptions' and 'main'
-- but missing from this record declaration, which cannot compile.
data Options = Options {
              opt_start :: String
            , opt_dur :: Int
            , opt_windowStart :: String
            , opt_windowDur :: Int
            , opt_inSize :: Period
            , opt_outSize :: Period
            , opt_font :: String
              -- Width (in ems) of the label column.
              -- NOTE(review): type assumed Double to match an em
              -- measure -- confirm against Gantt's 'labelWidth' field.
            , opt_labelWidth :: Double
            , opt_standalone :: Bool
            , opt_markToday :: Bool
            , opt_outfile :: FilePath
            , opt_verbose :: Bool
            , opt_file :: FilePath
            , opt_template :: FilePath
            , opt_chartopts :: String
            , opt_charttype :: ChartType
            } deriving (Data, Typeable, Show)
-- | cmdargs mode: per-flag help text, explicit flag names, the enum of
-- output period sizes, and the positional input-file argument.
defaultOptions :: Options
defaultOptions = Options {
  opt_start = def &= help "Project start date" &= explicit &= name "start"
, opt_dur = def &= help "Project duration (periods)" &= explicit &= name "dur"
, opt_windowStart = def &= help "Output 'window' start date (yyyy-mm-dd)" &= explicit &= name "winst"
, opt_windowDur = def &= help "Output 'window' duration (periods)" &= explicit &= name "windur"
, opt_inSize = def &= help "Input period size (default: monthly)" &= explicit &= name "insize"
  -- 'enum' makes each period its own flag; DefaultPeriod means "not given".
, opt_outSize = enum [ DefaultPeriod
                     , Monthly &= help "Output report Monthly (default)"
                     , Daily &= help "Output report Daily"
                     , Weekly &= help "Output report Weekly"
                     , Quarterly &= help "Output report Quarterly"
                     , Yearly &= help "Output report Yearly"
                     ]
, opt_font = def &= help "Typeface for printed chart" &= explicit &= name "font"
, opt_labelWidth = def &= help "Width (in ems) of group, task, milestone label column" &= explicit &= name "labelwidth"
, opt_standalone = True &= help "Generate standlone latex file" &= explicit &= name "standalone"
, opt_markToday = False &= help "Show today's date as 'today'" &= explicit &= name "today"
, opt_outfile = "stdout" &= help "Output file" &= name "outfile"
, opt_verbose = False &= help "Print diagnostics as well" &= explicit &= name "verbose"
, opt_file = "test.gantt" &= args &= typFile
, opt_template = def &= help "Template for standalone output" &= explicit &= name "template"
, opt_chartopts = def &= help "Options for \\pgfganttchart" &= explicit &= name "chartopts"
, opt_charttype = def &= help "Chart type: Gantt (default) or Markdown" &= explicit &= name "type"
}
  &= summary "Gantt chart v0.1, (C) 2016 John Noll"
  &= program "main"
-- | Render @g@ to a PDF at @outfile@: write the LaTeX into a fresh
-- temporary directory, run pdflatex there, then copy the resulting PDF
-- back to the original working directory.
makePDF :: Gantt -> String -> FilePath -> IO ()
makePDF g tmpl outfile = getCurrentDirectory >>= (\cwd ->
    withSystemTempDirectory "ganttpdf" (\d ->
        -- cd into the temp dir so pdflatex's aux files land there.
        setCurrentDirectory d >>
        let texFile = (takeBaseName outfile) <.> "tex" in
        openFile texFile WriteMode >>= (\h ->
            printGantt g (ST.newSTMP tmpl) h >>
            hClose h >>
            -- NOTE(review): the pdflatex exit code is ignored; on failure
            -- the cp below fails with a missing-file error instead.
            (system $ "pdflatex " ++ texFile ++ " > /dev/null" ) >>
            -- Restore cwd before copying so a relative outfile resolves
            -- against the caller's directory.
            setCurrentDirectory cwd >>
            (system $ "cp " ++ (d </> (takeBaseName texFile) <.> "pdf") ++ " " ++ outfile) >>
            return () )))
-- | Pick the template text: the user-supplied file when one was given,
-- otherwise the bundled default matching the chart type.
getTemplate :: Options -> IO String
getTemplate opts
  | null (opt_template opts) =
      let bundled = if opt_charttype opts == Markdown then "memo.st" else "gantt.st"
      in getDataFileName ("templates" </> bundled) >>= readFile
  | otherwise = readFile (opt_template opts)
-- | Keep @x@ unless it still equals the 'Default' value, in which case
-- fall back to @y@.
ifDef :: (Eq a, Default a) => a -> a -> a
ifDef x y
  | x == def  = y
  | otherwise = x
-- | Entry point: parse the command line, read the gantt source file,
-- overlay the command-line options on the parsed configuration, then
-- emit the chart to stdout / .pdf / .png / a plain file depending on
-- the output file name.
main :: IO ()
main = do
  args <- cmdArgs defaultOptions
  -- Today's date, used when --today asks to mark the current day.
  todays_date <- getCurrentTime >>= return . utctDay
  when (opt_verbose args) $ do
    putStrLn "--- args ---"
    putStrLn $ show $ args
  let cfg = defaultGantt
  c <- readFile (opt_file args)
  -- Command-line values win over the file's values; 'ifDef' keeps the
  -- config value whenever the option was left at its default.
  case parseGantt cfg {
        start = if (opt_start args) /= def then parseDate (opt_start args)
                else (start cfg)
      , dur = ifDef (opt_dur args) (dur cfg)
      , windowStart = if (opt_windowStart args) /= def then parseDate (opt_windowStart args)
                      else if (opt_start args) /= def then parseDate (opt_start args) else (start cfg)
      , windowDur = ifDef (opt_windowDur args) (windowDur cfg)
      , inSize = ifDef (opt_inSize args) (inSize cfg)
      , outSize = ifDef (opt_outSize args) (outSize cfg)
      , today = if (opt_markToday args) then todays_date else (today cfg)
      , font = opt_font args
      , labelWidth = ifDef (opt_labelWidth args) (labelWidth cfg)
      , standalone = opt_standalone args
      , verbose = opt_verbose args
      , file = opt_file args
      , template = opt_template args
      , chartopts = opt_chartopts args
      , charttype = ifDef (opt_charttype args) (charttype cfg)
      } c of
    Left e -> putStrLn $ show $ e
    Right g' -> do
      t <- getTemplate args
      -- Fall back to project start/duration when no explicit window was
      -- given (epoch start / zero duration act as "unset" sentinels).
      let g = g' { windowStart = if diffDays (windowStart g') (fromGregorian 1970 1 1) == 0 then start g' else windowStart g'
                 , windowDur = if windowDur g' == 0 then dur g' else windowDur g'
                 }
      in if (opt_outfile args) == "stdout" then printGantt g (ST.newSTMP t) stdout else
         case takeExtension (opt_outfile args) of
           ".pdf" -> makePDF g t (opt_outfile args)
           -- High density so the rasterised PNG can be resized without
           -- pixelating.
           ".png" -> let pdfFile = (takeBaseName (opt_outfile args)) <.> "pdf" in
                     (makePDF g t $ pdfFile) >>
                     (system $ "convert -density 1200 -quality 100 " ++ pdfFile ++ " " ++ (opt_outfile args)) >>
                     return ()
           otherwise -> (openFile (opt_outfile args) WriteMode >>= (\h ->
                           printGantt g (ST.newSTMP t) h >> hClose h))
|
8fcb31a6cd9a3e011bb8902437b4b85595281bbdfbf3db25c3e2d08380ea0f4f | wenkokke/dep2con | Con2Bin.hs | module Language.Conversion.Con2Bin where
import Prelude hiding (Word)
import Control.Monad (msum)
import Data.List (delete, sortBy, minimumBy)
import Data.Maybe (fromMaybe)
import Data.Ord (comparing)
import Language.POS (toXP)
import qualified Language.Structure.Binary as Bin
import qualified Language.Structure.Constituency as Con
import qualified Language.Structure.Dependency as Dep
import Language.Word (Word (..))
import Debug.Trace (traceShow)
-- |Convert dependency structures to binary constituency structures,
-- ensuring that only the minimal number of projections are made.
toledo :: Dep.Tree -> Con.Tree -> Bin.Tree
toledo _ (Con.Leaf word) = Bin.Leaf word
toledo dep (Con.Node _ children) = let
gov :: Con.Tree
gov =
minimumBy (comparing (minimum . map (dependencyLevel dep) . Con.allWords)) children
deps :: [Con.Tree]
deps = delete gov children
x = Con.topMostPOS gov
xp = toXP x
sorted :: [Con.Tree]
sorted = sortBy (flip $ Con.nearestThenLeftMost (Con.leftMostIndex gov)) deps
asbin :: [Bin.Tree]
asbin = map (toledo dep) sorted
asfunc :: Bin.Tree -> Bin.Tree
asfunc = foldr ((.) . Bin.node xp) id asbin
in traceShow sorted $ asfunc (toledo dep gov)
-- |Compute the depth of a word in the dependency tree.
dependencyLevel :: Dep.Tree -> Word -> Int
dependencyLevel dep (Word _ _ i) = fromMaybe maxBound (go 0 dep)
where
go :: Int -> Dep.Tree -> Maybe Int
go n (Dep.Node (Word _ _ j) deps)
= if i == j then Just n else msum (map (go (n+1)) deps)
| null | https://raw.githubusercontent.com/wenkokke/dep2con/eec24bc8e1a4db5b1582f0fd933ab02f8a6ce041/src/Language/Conversion/Con2Bin.hs | haskell | |Convert dependency structures to binary constituency structures,
ensuring that only the minimal number of projections are made.
|Compute the depth of a word in the dependency tree. | module Language.Conversion.Con2Bin where
import Prelude hiding (Word)
import Control.Monad (msum)
import Data.List (delete, sortBy, minimumBy)
import Data.Maybe (fromMaybe)
import Data.Ord (comparing)
import Language.POS (toXP)
import qualified Language.Structure.Binary as Bin
import qualified Language.Structure.Constituency as Con
import qualified Language.Structure.Dependency as Dep
import Language.Word (Word (..))
import Debug.Trace (traceShow)
-- | Convert a constituency tree (guided by the dependency tree) into a
-- binary constituency tree: at every node, the child whose words sit
-- shallowest in the dependency tree is chosen as governor, and the
-- remaining children are folded around it, nearest-first.
--
-- Fix: removed a leftover @traceShow sorted $@ debug call
-- (Debug.Trace) that printed to stderr on every node.
toledo :: Dep.Tree -> Con.Tree -> Bin.Tree
toledo _   (Con.Leaf word)       = Bin.Leaf word
toledo dep (Con.Node _ children) = asfunc (toledo dep gov)
  where
    -- Child containing the word least deeply embedded in the dependency
    -- tree; it projects the phrase.
    gov :: Con.Tree
    gov = minimumBy (comparing (minimum . map (dependencyLevel dep) . Con.allWords)) children

    deps :: [Con.Tree]
    deps = delete gov children

    x  = Con.topMostPOS gov
    xp = toXP x

    -- Dependents ordered nearest-to-governor first.
    sorted :: [Con.Tree]
    sorted = sortBy (flip $ Con.nearestThenLeftMost (Con.leftMostIndex gov)) deps

    asbin :: [Bin.Tree]
    asbin = map (toledo dep) sorted

    -- Wrap the governor in one XP projection per dependent.
    asfunc :: Bin.Tree -> Bin.Tree
    asfunc = foldr ((.) . Bin.node xp) id asbin
-- | Depth of a word in the dependency tree ('maxBound' when absent).
dependencyLevel :: Dep.Tree -> Word -> Int
dependencyLevel dep (Word _ _ i) = fromMaybe maxBound (search 0 dep)
  where
    search :: Int -> Dep.Tree -> Maybe Int
    search depth (Dep.Node (Word _ _ j) children)
      | i == j    = Just depth
      | otherwise = msum [ search (depth + 1) c | c <- children ]
|
b0cdef55333187a9b2e0aa1cc8b3ebb21c1690f9cbafec27f98dd8597bcebad5 | ekmett/bifunctors | Functor.hs | # LANGUAGE CPP #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE TypeOperators #
{-# LANGUAGE Safe #-}
# LANGUAGE PolyKinds #
# LANGUAGE QuantifiedConstraints #
# LANGUAGE FlexibleInstances #
# LANGUAGE UndecidableInstances #
# LANGUAGE MonoLocalBinds #
module Data.Bifunctor.Functor
( (:->)
, BifunctorFunctor(..)
, BifunctorMonad(..)
, biliftM
, BifunctorComonad(..)
, biliftW
) where
#if __GLASGOW_HASKELL__ < 900
import Data.Bifunctor
#endif
import Data.Bifunctor.Classes
| Using parametricity as an approximation of a natural transformation in two arguments .
type (:->) p q = forall a b. p a b -> q a b
infixr 0 :->
class (forall a. Functor (f a)) => QFunctor f
instance (forall a. Functor (f a)) => QFunctor f
class
#if __GLASGOW_HASKELL__ < 900
( forall p. Bifunctor p => Bifunctor (t p)
, forall p. (Bifunctor p, QFunctor p) => QFunctor (t p)
#else
( forall p. Bifunctor' p => Bifunctor' (t p)
#endif
) => BifunctorFunctor t where
-- class (forall p. Bifunctor' p => Bifunctor' (t p)) => BifunctorFunctor t where
bifmap :: (p :-> q) -> t p :-> t q
-- | Monads in the category of bifunctors, with structure maps given by
-- natural transformations.
class BifunctorFunctor t => BifunctorMonad t where
  -- | 'return' analogue: lift a bifunctor into the monad.
  bireturn :: Bifunctor' p => p :-> t p

  -- | '(=<<)' analogue; definable from 'bijoin'.
  bibind :: Bifunctor' q => (p :-> t q) -> t p :-> t q
  bibind f = bijoin . bifmap f

  -- | 'Control.Monad.join' analogue; definable from 'bibind'.
  bijoin :: Bifunctor' p => t (t p) :-> t p
  bijoin = bibind id

  {-# MINIMAL bireturn, (bibind | bijoin) #-}

-- | 'Control.Monad.liftM' analogue for bifunctor monads.
biliftM :: (BifunctorMonad t, Bifunctor' q) => (p :-> q) -> t p :-> t q
biliftM f = bibind (bireturn . f)
{-# INLINE biliftM #-}

-- | Comonads in the category of bifunctors.
class BifunctorFunctor t => BifunctorComonad t where
  -- | 'extract' analogue.
  biextract :: Bifunctor' p => t p :-> p

  -- | 'extend' analogue; definable from 'biduplicate'.
  biextend :: Bifunctor' p => (t p :-> q) -> t p :-> t q
  biextend f = bifmap f . biduplicate

  -- | 'duplicate' analogue; definable from 'biextend'.
  biduplicate :: Bifunctor' p => t p :-> t (t p)
  biduplicate = biextend id

  {-# MINIMAL biextract, (biextend | biduplicate) #-}

-- | 'liftW' analogue for bifunctor comonads.
biliftW :: (BifunctorComonad t, Bifunctor' p) => (p :-> q) -> t p :-> t q
biliftW f = biextend (f . biextract)
{-# INLINE biliftW #-}
| null | https://raw.githubusercontent.com/ekmett/bifunctors/269d156f47ce9896d801572d1fc3a286fbf92c4e/src/Data/Bifunctor/Functor.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE Safe #
class (forall p. Bifunctor' p => Bifunctor' (t p)) => BifunctorFunctor t where
# MINIMAL biextract, (biextend | biduplicate) #
# INLINE biliftW # | # LANGUAGE CPP #
# LANGUAGE TypeOperators #
# LANGUAGE PolyKinds #
# LANGUAGE QuantifiedConstraints #
# LANGUAGE FlexibleInstances #
# LANGUAGE UndecidableInstances #
# LANGUAGE MonoLocalBinds #
module Data.Bifunctor.Functor
( (:->)
, BifunctorFunctor(..)
, BifunctorMonad(..)
, biliftM
, BifunctorComonad(..)
, biliftW
) where
#if __GLASGOW_HASKELL__ < 900
import Data.Bifunctor
#endif
import Data.Bifunctor.Classes
| Using parametricity as an approximation of a natural transformation in two arguments .
type (:->) p q = forall a b. p a b -> q a b
infixr 0 :->
class (forall a. Functor (f a)) => QFunctor f
instance (forall a. Functor (f a)) => QFunctor f
class
#if __GLASGOW_HASKELL__ < 900
( forall p. Bifunctor p => Bifunctor (t p)
, forall p. (Bifunctor p, QFunctor p) => QFunctor (t p)
#else
( forall p. Bifunctor' p => Bifunctor' (t p)
#endif
) => BifunctorFunctor t where
bifmap :: (p :-> q) -> t p :-> t q
class BifunctorFunctor t => BifunctorMonad t where
bireturn :: Bifunctor' p => p :-> t p
bibind :: Bifunctor' q => (p :-> t q) -> t p :-> t q
bibind f = bijoin . bifmap f
bijoin :: Bifunctor' p => t (t p) :-> t p
bijoin = bibind id
# MINIMAL bireturn , ( bibind | bijoin ) #
biliftM :: (BifunctorMonad t, Bifunctor' q) => (p :-> q) -> t p :-> t q
biliftM f = bibind (bireturn . f)
# INLINE biliftM #
class BifunctorFunctor t => BifunctorComonad t where
biextract :: Bifunctor' p => t p :-> p
biextend :: Bifunctor' p => (t p :-> q) -> t p :-> t q
biextend f = bifmap f . biduplicate
biduplicate :: Bifunctor' p => t p :-> t (t p)
biduplicate = biextend id
biliftW :: (BifunctorComonad t, Bifunctor' p) => (p :-> q) -> t p :-> t q
biliftW f = biextend (f . biextract)
|
0302f2e1364e0ed769bd20b320842ecc07e038d51f56d411b0b210bf8535654a | PLSysSec/lio | Common.hs | # LANGUAGE Trustworthy #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE StandaloneDeriving, DeriveGeneric #-}
module Memblog.Common where
import Prelude hiding (readFile, writeFile, appendFile, catch)
import LIO
import LIO.Error
import LIO.DCLabel
import LIO.Web.Simple
import LIO.Web.Simple.DCLabel
import Web.Simple.Templates
import Control.Applicative
import System.FilePath
import LIO.Web.Simple.TCB (lioGetTemplateTCB)
import GHC.Generics
import Data.Aeson
import LIO.Concurrent
data AppSettings = AppSettings { db :: LMVar DCLabel [Post]}
newAppSettings :: DC AppSettings
newAppSettings = do
mv <- newLMVar dcPublic [post0, post1]
return $ AppSettings { db = mv }
instance HasTemplates DC AppSettings where
viewDirectory = return $ "liofs" </> "views"
defaultLayout = Just <$> getTemplate ("liofs" </> "layouts" </> "main.html")
getTemplate = liftLIO . lioGetTemplateTCB
-- NOTE: We assume that "liofs" only contains public data, DO NOT
-- store any sensitive data in this directory
| Post I d 's are stringified
type PostId = String
-- | Data-type representing a blog post
data Post = Post { postId :: PostId
, postTitle :: String
, postBody :: String }
deriving (Show, Read)
deriving instance Generic Post
instance ToJSON Post
getAllPosts :: DCController AppSettings [Post]
getAllPosts = do
settings <- controllerState
liftLIO . withContext "getAllPosts" $ readLMVar $ db settings
getPostById :: PostId -> DCController AppSettings Post
getPostById idNr = do
posts <- getAllPosts
case filter ((== idNr) . postId) posts of
[post] -> return post
_ -> fail "No such post"
insertPost :: Post -> DCController AppSettings PostId
insertPost post = do
settings <- controllerState
liftLIO . withContext "insertPost" $ do
posts <- takeLMVar $ db settings
let pId = show $ length posts
post' = post { postId = pId }
putLMVar (db settings) $ post' : posts
return pId
--
-- Dummy posts
--
post0 :: Post
post0 = Post {
postId = "0"
, postTitle = "The Title of Your First Post on a Single Line"
, postBody = unlines
[ "Lorem ipsum dolor sit amet, consectetur adipiscing elit."
, "Etiam vitae interdum sapien. In congue..." ]
}
post1 :: Post
post1 = Post {
postId = "1"
, postTitle = "The Title of Your Second Post"
, postBody = unlines
[ "Aliquam tempor varius justo vitae bibendum! Duis vitae rutrum"
, "neque. Sed ut sed..." ]
}
| null | https://raw.githubusercontent.com/PLSysSec/lio/622a3e7bc86a3b42ab4ce8be954064a5f142247a/lio-simple/examples/memblog/Memblog/Common.hs | haskell | # LANGUAGE StandaloneDeriving, DeriveGeneric #
NOTE: We assume that "liofs" only contains public data, DO NOT
store any sensitive data in this directory
| Data-type representing a blog post
Dummy posts
| # LANGUAGE Trustworthy #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
module Memblog.Common where
import Prelude hiding (readFile, writeFile, appendFile, catch)
import LIO
import LIO.Error
import LIO.DCLabel
import LIO.Web.Simple
import LIO.Web.Simple.DCLabel
import Web.Simple.Templates
import Control.Applicative
import System.FilePath
import LIO.Web.Simple.TCB (lioGetTemplateTCB)
import GHC.Generics
import Data.Aeson
import LIO.Concurrent
data AppSettings = AppSettings { db :: LMVar DCLabel [Post]}
-- | Build the application state: a labelled MVar database seeded with
-- the two demo posts.
newAppSettings :: DC AppSettings
newAppSettings = AppSettings <$> newLMVar dcPublic [post0, post1]
instance HasTemplates DC AppSettings where
viewDirectory = return $ "liofs" </> "views"
defaultLayout = Just <$> getTemplate ("liofs" </> "layouts" </> "main.html")
getTemplate = liftLIO . lioGetTemplateTCB
| Post I d 's are stringified
type PostId = String
data Post = Post { postId :: PostId
, postTitle :: String
, postBody :: String }
deriving (Show, Read)
deriving instance Generic Post
instance ToJSON Post
-- | Read the full list of posts from the shared LMVar database.
getAllPosts :: DCController AppSettings [Post]
getAllPosts = do
  settings <- controllerState
  liftLIO . withContext "getAllPosts" $ readLMVar $ db settings
-- | Look up a post by its id; fails when no (unique) post matches.
getPostById :: PostId -> DCController AppSettings Post
getPostById idNr = do
  posts <- getAllPosts
  case [ p | p <- posts, postId p == idNr ] of
    [p] -> return p
    _   -> fail "No such post"
-- | Prepend a post to the database, assigning it a fresh id equal to the
-- current number of posts (the list only grows, so ids stay unique).
-- Returns the assigned id.
insertPost :: Post -> DCController AppSettings PostId
insertPost post = do
  settings <- controllerState
  liftLIO . withContext "insertPost" $ do
    -- take/put pair makes the read-modify-write atomic w.r.t. the LMVar.
    posts <- takeLMVar $ db settings
    let pId = show $ length posts
        post' = post { postId = pId }
    putLMVar (db settings) $ post' : posts
    return pId
post0 :: Post
post0 = Post {
postId = "0"
, postTitle = "The Title of Your First Post on a Single Line"
, postBody = unlines
[ "Lorem ipsum dolor sit amet, consectetur adipiscing elit."
, "Etiam vitae interdum sapien. In congue..." ]
}
post1 :: Post
post1 = Post {
postId = "1"
, postTitle = "The Title of Your Second Post"
, postBody = unlines
[ "Aliquam tempor varius justo vitae bibendum! Duis vitae rutrum"
, "neque. Sed ut sed..." ]
}
|
c6a3fcf00604865c0718ec07d2eb33d1808788f120963484998f1dd0ae9f19f3 | ucsd-progsys/nate | odoc_config.mli | (***********************************************************************)
(* OCamldoc *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ I d : odoc_config.mli , v 1.1.20.2 2007/03/07 08:50:05 xleroy Exp $
(** Ocamldoc configuration contants. *)
(** Default path to search for custom generators and to install them. *)
val custom_generators_path : string
(** A flag to indicate whether to print ocamldoc warnings or not. *)
val print_warnings : bool ref
| null | https://raw.githubusercontent.com/ucsd-progsys/nate/8b1267cd8b10283d8bc239d16a28c654a4cb8942/eval/sherrloc/easyocaml%2B%2B/ocamldoc/odoc_config.mli | ocaml | *********************************************************************
OCamldoc
*********************************************************************
* Ocamldoc configuration contants.
* Default path to search for custom generators and to install them.
* A flag to indicate whether to print ocamldoc warnings or not. | , projet Cristal , INRIA Rocquencourt
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : odoc_config.mli , v 1.1.20.2 2007/03/07 08:50:05 xleroy Exp $
val custom_generators_path : string
val print_warnings : bool ref
|
b06a7aeff87b5d9f5f9a975a1a01728fa1923c76ea9166fa30016d44d2afb71e | futurice/haskell-mega-repo | Class.hs | module Okta.Class where
import Futurice.Prelude
import Prelude ()
import Okta.Request
import Okta.Types
class Monad m => MonadOkta m where
oktaReq :: Req a -> m a
users :: MonadOkta m => m [User]
users = oktaReq ReqGetAllUsers
appUsers :: MonadOkta m => OktaAppId -> m [AppUser GithubProfile]
appUsers = oktaReq . ReqGetAppUsers
slackUsers :: MonadOkta m => OktaAppId -> m [AppUser SlackProfile]
slackUsers = oktaReq . ReqGetSlackUsers
updateUser :: MonadOkta m => OktaId -> Value -> m User
updateUser oktaid value = oktaReq $ ReqUpdateUser oktaid value
createUser :: MonadOkta m => NewUser -> m User
createUser = oktaReq . ReqCreateUser
groups :: MonadOkta m => m [Group]
groups = oktaReq ReqGetAllGroups
groupMembers :: MonadOkta m => OktaGroupId -> m [User]
groupMembers = oktaReq . ReqGetGroupUsers
addUserToGroup :: MonadOkta m => OktaGroupId -> OktaId -> m ()
addUserToGroup gid uid = oktaReq $ ReqAddUserToGroup gid uid
deleteUserFromGroup :: MonadOkta m => OktaGroupId -> OktaId -> m ()
deleteUserFromGroup gid uid = oktaReq $ ReqRemoveUserFromGroup gid uid
userApplications :: MonadOkta m => OktaId -> m [AppLink]
userApplications = oktaReq . ReqGetAppLinks
application :: MonadOkta m => OktaAppId -> m App
application = oktaReq . ReqGetApplication
user :: MonadOkta m => OktaId -> m User
user = oktaReq . ReqGetUser
| null | https://raw.githubusercontent.com/futurice/haskell-mega-repo/2647723f12f5435e2edc373f6738386a9668f603/okta-client/src/Okta/Class.hs | haskell | module Okta.Class where
import Futurice.Prelude
import Prelude ()
import Okta.Request
import Okta.Types
class Monad m => MonadOkta m where
oktaReq :: Req a -> m a
-- | Fetch every user in the Okta organisation.
users :: MonadOkta m => m [User]
users = oktaReq ReqGetAllUsers

-- | Users assigned to the given application.
appUsers :: MonadOkta m => OktaAppId -> m [AppUser GithubProfile]
appUsers appId = oktaReq (ReqGetAppUsers appId)

-- | Slack users assigned to the given application.
slackUsers :: MonadOkta m => OktaAppId -> m [AppUser SlackProfile]
slackUsers appId = oktaReq (ReqGetSlackUsers appId)

-- | Patch a user with the given JSON value; returns the updated user.
updateUser :: MonadOkta m => OktaId -> Value -> m User
updateUser oktaid = oktaReq . ReqUpdateUser oktaid

-- | Create a new user.
createUser :: MonadOkta m => NewUser -> m User
createUser newUser = oktaReq (ReqCreateUser newUser)

-- | Fetch all groups.
groups :: MonadOkta m => m [Group]
groups = oktaReq ReqGetAllGroups

-- | Members of the given group.
groupMembers :: MonadOkta m => OktaGroupId -> m [User]
groupMembers gid = oktaReq (ReqGetGroupUsers gid)

-- | Add a user to a group.
addUserToGroup :: MonadOkta m => OktaGroupId -> OktaId -> m ()
addUserToGroup gid = oktaReq . ReqAddUserToGroup gid

-- | Remove a user from a group.
deleteUserFromGroup :: MonadOkta m => OktaGroupId -> OktaId -> m ()
deleteUserFromGroup gid = oktaReq . ReqRemoveUserFromGroup gid

-- | Application links visible to the given user.
userApplications :: MonadOkta m => OktaId -> m [AppLink]
userApplications oid = oktaReq (ReqGetAppLinks oid)

-- | Fetch a single application.
application :: MonadOkta m => OktaAppId -> m App
application appId = oktaReq (ReqGetApplication appId)

-- | Fetch a single user.
user :: MonadOkta m => OktaId -> m User
user oid = oktaReq (ReqGetUser oid)
| |
4bb7e7a0d05195c8daccedbe1d6264ae0dba653b40fd9e84935018ebcd63d020 | silky/quipper | Binary.hs | This file is part of Quipper . Copyright ( C ) 2011 - 2016 . Please see the
-- file COPYRIGHT for a list of authors, copyright holders, licensing,
-- and other details. All rights reserved.
--
-- ======================================================================
-- ----------------------------------------------------------------------
-- | This tool decomposes a circuit into binary gates.
module Main where
import Quipper
import QuipperLib.QuipperASCIIParser
import QuipperLib.Decompose
-- | Main function: read from 'stdin', do the decomposition, and write
-- to 'stdout'.
main :: IO ()
main = do
(ins,circuit) <- parse_from_stdin
let decomposed_circuit = decompose_generic Binary circuit
print_generic ASCII decomposed_circuit ins
| null | https://raw.githubusercontent.com/silky/quipper/1ef6d031984923d8b7ded1c14f05db0995791633/Programs/Tools/Binary.hs | haskell | file COPYRIGHT for a list of authors, copyright holders, licensing,
and other details. All rights reserved.
======================================================================
----------------------------------------------------------------------
| This tool decomposes a circuit into binary gates.
| Main function: read from 'stdin', do the decomposition, and write
to 'stdout'. | This file is part of Quipper . Copyright ( C ) 2011 - 2016 . Please see the
module Main where
import Quipper
import QuipperLib.QuipperASCIIParser
import QuipperLib.Decompose
main :: IO ()
main = do
(ins,circuit) <- parse_from_stdin
let decomposed_circuit = decompose_generic Binary circuit
print_generic ASCII decomposed_circuit ins
|
c062ee51e2dac62b9c5cf8a25d5e388df2902ff2f8069f862e53449ef8780adc | UU-ComputerScience/uhc | MarshalError.hs | # EXCLUDE_IF_TARGET js #
{-# EXCLUDE_IF_TARGET cr #-}
module MarshalError (
module Foreign.Marshal.Error,
IOErrorType,
mkIOError,
alreadyExistsErrorType,
doesNotExistErrorType,
alreadyInUseErrorType,
fullErrorType,
eofErrorType,
illegalOperationErrorType,
permissionErrorType,
userErrorType,
annotateIOError
) where
import System.IO.Error
import Foreign.Marshal.Error
| null | https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/ehclib/haskell98/MarshalError.hs | haskell | # EXCLUDE_IF_TARGET cr # | # EXCLUDE_IF_TARGET js #
module MarshalError (
module Foreign.Marshal.Error,
IOErrorType,
mkIOError,
alreadyExistsErrorType,
doesNotExistErrorType,
alreadyInUseErrorType,
fullErrorType,
eofErrorType,
illegalOperationErrorType,
permissionErrorType,
userErrorType,
annotateIOError
) where
import System.IO.Error
import Foreign.Marshal.Error
|
44602a675471e94f039deed5b28fd586ba8a48f15572247ebcfcc742b2594046 | cyverse-archive/DiscoveryEnvironmentBackend | home.clj | (ns data-info.routes.home
(:use [common-swagger-api.schema]
[data-info.routes.domain.common]
[data-info.routes.domain.stats])
(:require [data-info.services.home :as home]
[data-info.util.service :as svc]))
(defroutes* home
(context* "/home" []
:tags ["home"]
(GET* "/" [:as {uri :uri}]
:query [params StandardUserQueryParams]
:return PathIdInfo
:summary "Get User's Home Dir"
:description (str
"This endpoint returns the ID and path of a user's home directory, creating it if it does not
already exist."
(get-error-code-block
"ERR_NOT_A_USER"))
(svc/trap uri home/do-homedir params))))
| null | https://raw.githubusercontent.com/cyverse-archive/DiscoveryEnvironmentBackend/7f6177078c1a1cb6d11e62f12cfe2e22d669635b/services/data-info/src/data_info/routes/home.clj | clojure | (ns data-info.routes.home
(:use [common-swagger-api.schema]
[data-info.routes.domain.common]
[data-info.routes.domain.stats])
(:require [data-info.services.home :as home]
[data-info.util.service :as svc]))
(defroutes* home
(context* "/home" []
:tags ["home"]
(GET* "/" [:as {uri :uri}]
:query [params StandardUserQueryParams]
:return PathIdInfo
:summary "Get User's Home Dir"
:description (str
"This endpoint returns the ID and path of a user's home directory, creating it if it does not
already exist."
(get-error-code-block
"ERR_NOT_A_USER"))
(svc/trap uri home/do-homedir params))))
| |
0b42bca5c434b80e3bde877f282f335305c655b51ab104ff45f2ab8ee1c8ba02 | avsm/hello-world-action-ocaml | world.ml | let () = print_endline Hello.hello
| null | https://raw.githubusercontent.com/avsm/hello-world-action-ocaml/d5d6c55d78c7155bdbb0a2aa9e3285f6848737ed/world.ml | ocaml | let () = print_endline Hello.hello
| |
2a936456eb950aa8cdeb27a630501754282cafe734bc950e52c381a767fb48f9 | 0x0f0f0f/gobba | testing.ml | (** Basic Testing Primitives *)
open Types
open Errors
open Typecheck
* Assert if two values are equal , otherwise fail
let assertp args =
let (x,y) = match args with
| [|x;y|] -> (x, y)
| _ -> iraise WrongPrimitiveArgs in
if (compare_evt x y) <> 0 then
iraise @@ InternalFailure ("Assertion Error: expected " ^ (show_evt y) ^ " but got " ^ (show_evt x))
else EvtUnit
let unit_assert args =
let (desc, x,y) = match args with
| [|desc; x;y|] -> (unpack_string desc, x, y)
| _ -> iraise WrongPrimitiveArgs in
if (compare_evt x y) <> 0 then
(print_message
~color:T.Red
~loc:Nowhere
"FAIL"
desc; EvtBool false)
else (print_message
~color:T.Green
~loc:Nowhere
"PASS"
desc; EvtBool true)
let table = [
("assert", Primitive (assertp, ("assert", [|"a"; "b"|], Pure)));
("unit_assert", Primitive (unit_assert, ("unit_assert", [|"description"; "a"; "b"|], Pure)));
]
| null | https://raw.githubusercontent.com/0x0f0f0f/gobba/61092207438fb102e36245c46c27a711b8f357cb/lib/primitives/testing.ml | ocaml | * Basic Testing Primitives |
open Types
open Errors
open Typecheck
(** Assert that two values are equal, otherwise fail with an internal
    error describing the mismatch. *)
let assertp args =
  match args with
  | [| x; y |] ->
    if compare_evt x y = 0 then EvtUnit
    else
      iraise
      @@ InternalFailure
           ("Assertion Error: expected " ^ show_evt y ^ " but got " ^ show_evt x)
  | _ -> iraise WrongPrimitiveArgs
(** Unit-test style assertion: compare two values, print a coloured
    PASS/FAIL line tagged with the given description, and return the
    outcome as a boolean value. *)
let unit_assert args =
  let desc, x, y =
    match args with
    | [| d; x; y |] -> (unpack_string d, x, y)
    | _ -> iraise WrongPrimitiveArgs
  in
  let passed = compare_evt x y = 0 in
  let color, tag = if passed then (T.Green, "PASS") else (T.Red, "FAIL") in
  print_message ~color ~loc:Nowhere tag desc;
  EvtBool passed
(* Primitive table exported by this module: each entry maps a source-level
   name to its implementation together with (name, parameter names,
   purity) metadata. *)
let table = [
  ("assert", Primitive (assertp, ("assert", [|"a"; "b"|], Pure)));
  ("unit_assert", Primitive (unit_assert, ("unit_assert", [|"description"; "a"; "b"|], Pure)));
]
|
c43a509aa74275a6b0d27c44ac2bd8e536b2a17acc5ec76ab4946b015367f131 | Ebanflo42/Persistence | Filtration.hs | |
Module : Persistence . Filtration
Copyright : ( c ) , 2018
License : BSD 3 Clause
Maintainer :
Stability : experimental
This module contains functions for V.constructing filtrations and computing persistent homology , persistence landscapes , and computing bottleneck distance between barcode diagrams .
A filtration is a finite sequence of simplicial complexes where each complex is a subset of the next . This means that a filtration can be thought of as a single simplicial complex where each of the simplices is labeled with a " filtration index " that represents the index in the sequence where that simplex enters the filtration .
One way to create a filtration , given a simplicial complex , a metric for the vertices , and a list of distances , is to loop through the distances from greatest to least : create a simplicial complex each iteration which excludes simplices that contain pairs of vertices which are further than the current distance apart . This method will produce a filtration of Vietoris - Rips complexes - each filtration index will correspond to a Rips complex whose scale is the corresponding distance . This filtration represents the topology of the data at each of the scales with which it was V.constructed .
NOTE : It 's important that , even though the smallest filtration index represents the smallest scale at which the data is being anaylzed , all functions in this library receive your list of scales sorted in * decreasing * order .
An essential thing to note in this library is the distinction between " fast " and " light " functions . Light functions call the metric every time distance between two points is required , which is a lot . Fast functions store the distances between points and access them in V.constant time , BUT this means they use O(n^2 ) memory with respect to the number of data points , so it 's a really bad idea to use this optimization on substantially large data if you do n't have a lot of RAM .
Persistent homology is the main event of topological data analysis . It allows one to identify clusters , tunnels , cavities , and higher dimensional holes that persist in the data throughout many scales . The output of the persistence algorithm is a barcode diagram . A single barcode represents the filtration index where a feature appears and the index where it disappears ( if it does ) . Alternatively , a barcode can represent the scale at which a feature and the scale at which it ends . Thus , short barcodes are typically interpretted as sampling irregularities and long barcodes are interpretted as actual features of whatever the underlying data set represents . In this context , what a feature * is * depends on which dimension the barcode diagram is ; 0 - dimensional features are connected components , 1 - dimensional features are loops or tunnels , 2 - dimensional features are hollow volumes , and higher dimensional features correspond to - dimensional cavities .
After you 've got the barcodes of a data set , you might want to compare it with that of a different data set . This is the purpose of bottleneck distance , which corresponds to the distance between barcode diagrams .
Another way to compare barcode diagrams is by using persistence landscapes . The peristence landscape of a barcode diagram is a finite sequence of piecewise - linear , real - valued functions . This means they can be used to take averages and compute distances between barcode diagrams . See " A Persistence Landscapes Toolbox For Topological Statistics " by Bubenik and Dlotko for more information .
WARNING : The persistence landscape functions have not been fully tested . Use them with caution . If you get any errors or unexpected output , please do n't hesitate to email me .
Module : Persistence.Filtration
Copyright : (c) Eben Kadile, 2018
License : BSD 3 Clause
Maintainer :
Stability : experimental
This module contains functions for constructing filtrations and computing persistent homology, persistence landscapes, and bottleneck distance between barcode diagrams.
A filtration is a finite sequence of simplicial complexes where each complex is a subset of the next. This means that a filtration can be thought of as a single simplicial complex where each of the simplices is labeled with a "filtration index" that represents the index in the sequence where that simplex enters the filtration.
One way to create a filtration, given a simplicial complex, a metric for the vertices, and a list of distances, is to loop through the distances from greatest to least: create a simplicial complex each iteration which excludes simplices that contain pairs of vertices which are further than the current distance apart. This method will produce a filtration of Vietoris-Rips complexes - each filtration index will correspond to a Rips complex whose scale is the corresponding distance. This filtration represents the topology of the data at each of the scales with which it was constructed.
NOTE: It's important that, even though the smallest filtration index represents the smallest scale at which the data is being analyzed, all functions in this library receive your list of scales sorted in *decreasing* order.
An essential thing to note in this library is the distinction between "fast" and "light" functions. Light functions call the metric every time the distance between two points is required, which is a lot. Fast functions store the distances between points and access them in constant time, BUT this means they use O(n^2) memory with respect to the number of data points, so it's a really bad idea to use this optimization on substantially large data if you don't have a lot of RAM.
Persistent homology is the main event of topological data analysis. It allows one to identify clusters, tunnels, cavities, and higher dimensional holes that persist in the data throughout many scales. The output of the persistence algorithm is a barcode diagram. A single barcode represents the filtration index where a feature appears and the index where it disappears (if it does). Alternatively, a barcode can represent the scale at which a feature appears and the scale at which it ends. Thus, short barcodes are typically interpreted as sampling irregularities and long barcodes are interpreted as actual features of whatever the underlying data set represents. In this context, what a feature *is* depends on which dimension the barcode diagram is; 0-dimensional features are connected components, 1-dimensional features are loops or tunnels, 2-dimensional features are hollow volumes, and higher dimensional features correspond to higher-dimensional cavities.
After you've got the barcodes of a data set, you might want to compare it with that of a different data set. This is the purpose of bottleneck distance, which corresponds to the Hausdorff distance between barcode diagrams.
Another way to compare barcode diagrams is by using persistence landscapes. The peristence landscape of a barcode diagram is a finite sequence of piecewise-linear, real-valued functions. This means they can be used to take averages and compute distances between barcode diagrams. See "A Persistence Landscapes Toolbox For Topological Statistics" by Bubenik and Dlotko for more information.
WARNING: The persistence landscape functions have not been fully tested. Use them with caution. If you get any errors or unexpected output, please don't hesitate to email me.
-}
module Persistence.Filtration (
-- * Types
FilterSimplex
, SimpleFiltration
, Filtration
, BarCode
, Landscape
-- * Utilities
, sim2String
, filtr2String
, getComplex
, getDimension
, simple2Filtr
-- * Construction
, filterByWeightsFast
, ripsFiltrationFast
, ripsFiltrationFastPar
, filterByWeightsLight
, ripsFiltrationLight
, ripsFiltrationLightPar
-- * Persistent homology
, indexBarCodes
, indexBarCodesSimple
, scaleBarCodes
, scaleBarCodesSimple
-- * Comparing barcode diagrams
, indexMetric
, bottleNeckDistance
, bottleNeckDistances
, calcLandscape
, evalLandscape
, evalLandscapeAll
, linearComboLandscapes
, avgLandscapes
, diffLandscapes
, normLp
, metricLp
) where
import Persistence.Util
import Persistence.Graph
import Persistence.SimplicialComplex
import Data.Maybe
import Data.List as L
import Data.Vector as V
import Data.ByteString as B
import Data.IntSet
import Data.Bits
import qualified Data.Vector.Unboxed as UV
import Data.Algorithm.MaximalCliques
import Control.Parallel.Strategies
-- * Types
{- |
This type synonym exists to make other synonyms more concise.
Each simplex in a filtration is represented as a triple: its filtration index,
the indices of its vertices in the original data, and the indices of its faces in the next lowest dimension.
Edges do not have reference to their faces, as it would be redundant with their vertices.
All simplices are sorted according to filtration index upon construction of the filtration.
In each dimension, all simplices are sorted in increasing order of filtration index,
and every simplices face indices are sorted in decreasing order;
both of these facts are critical to the computation of persistent homology.
-}
type FilterSimplex = (Int, Vector Int, Vector Int) --(filtration index, vertex indices, indices of faces one dimension down)
|
A type representing a filtration whose vertices all have filtration index 0.
Slightly faster and slightly less memory usage. The first component is simply the number of vertices.
The second component is a vector with an entry for each dimension of simplices, starting at dimension 1 for edges.
-}
type SimpleFiltration = (Int, Vector (Vector FilterSimplex)) --(number of vertices, simplices grouped by dimension, starting with edges)
|
Representation of a filtration which, unlike SimpleFiltration, can cope with vertices that have a non-zero filtration index. Vertices of the filtration are represented like all other simplices except that they don't themselves have vertices or faces.
Note that, since this library currently only deals with static pointcloud data, all of the filtration construction functions produce vertices whose filtration index is 0. Thus, if you want to use this type you will have to construct the instances yourself.
-}
type Filtration = Vector (Vector FilterSimplex) --head entry holds the vertices; each later entry holds the next dimension of simplices
| ( x , Finite y ) is a topological feature that appears at the index or scale x and disappears at the index or scale y. ( x , Infinity ) begins at x and does n't disappear .
type BarCode a = (a, Extended a) --(birth, death); death is Infinity for features that never disappear
{- |
A Persistence landscape is a certain type of piecewise linear function based on a barcode diagram.
It can be represented as a list of critical points paired with critical values.
Useful for taking averages and differences between barcode diagrams.
-}
type Landscape = Vector (Vector (Extended Double, Extended Double)) --each inner vector is the (critical point, critical value) pairs of one piecewise-linear function
-- * Utilities
-- | Shows all the information in a simplex.
sim2String :: FilterSimplex -> String
sim2String (index, vertices, faces) =
  --render the three components of the simplex on one newline-terminated line
  L.concat
    [ "Filtration index: ", show index
    , "; Vertex indices: ", show vertices
    , "; Boundary indices: ", show faces
    , "\n"
    ]
-- | Shows all the information in a filtration.
filtr2String :: Either SimpleFiltration Filtration -> String
filtr2String filtration =
      --render one dimension's simplices back-to-back
  let level  = L.concat . V.toList . V.map sim2String
      --dimensions are separated by newlines
      levels = L.intercalate "\n" . V.toList . V.map level
  in case filtration of
       Left sf -> "Simple filtration:\n" L.++ levels (snd sf)
       Right f -> levels f
{- |
Gets the simplicial complex specified by the filtration index.
This is O(n) with respect to the number of simplices.
-}
getComplex :: Int -> Either SimpleFiltration Filtration -> SimplicialComplex
--simple filtration: every vertex is present from index 0, so the vertex count carries over;
--in each dimension keep only simplices whose filtration index has been reached, convert their
--vectors to unboxed form, drop the filtration index (not1), and trim trailing empty dimensions
getComplex index (Left (n, simplices)) =
  (n, dropRightWhile V.null
    $ V.map (V.map not1 . V.filter (\(i, _, _) ->
      i <= index) . V.map (\(a, b, c) -> (a, UV.convert b, UV.convert c))) simplices)
--general filtration: vertices live in the head of the vector and may enter late,
--so count only those whose filtration index (`one`) is within range
getComplex index (Right simplices) =
  (V.length $ V.filter (\v ->
    one v <= index) (V.head simplices), dropRightWhile V.null
      $ V.map (V.map not1 . V.filter (\(i, _, _) -> i <= index)
        . V.map (\(a, b, c) -> (a, UV.convert b, UV.convert c))) (V.tail simplices))
-- | Return the dimension of the highest dimensional simplex in the filtration (constant time).
getDimension :: Either SimpleFiltration Filtration -> Int
getDimension filtration =
  case filtration of
    --the simple variant stores no explicit vertex level, so its length is already the top dimension
    Left (_, simplices) -> V.length simplices
    --the general variant's head entry is the vertices, so subtract that level off
    Right simplices     -> V.length simplices - 1
-- | Convert a simple filtration into an ordinary filtration.
simple2Filtr :: SimpleFiltration -> Filtration
simple2Filtr (n, x) =
  --edges don't store face indices; their faces are exactly their vertices (reversed)
  let x' = (V.map (\(i, v, _) -> (i, v, V.reverse v)) $ V.head x) `V.cons` (V.tail x)
  --prepend the n vertex simplices, each with filtration index 0 and its own index as vertex list
  in (mapWithIndex (\i (a,b,c) ->
    (a,i `V.cons` V.empty,c)) $ V.replicate n (0, V.empty, V.empty)) `V.cons` x'
-- * Construction
{- |
This function creates a filtration out of a simplicial complex by removing simplices
that contain edges that are too long for each scale in the list.
This is really a helper function to be called by makeRipsFiltrationFast,
but I decided to expose it in case you have a simplicial complex and weighted graph lying around.
The scales MUST be in decreasing order.
-}
filterByWeightsFast :: UV.Unbox a
                    => Ord a
                    => Either (Vector a) [a] -- ^Scales in decreasing order
                    -> (SimplicialComplex, Graph a) -- ^Simplicial complex and a graph encoding the distance between every data point as well as whether or not they are within the largest scale of each other.
                    -> SimpleFiltration
filterByWeightsFast scales' ((numVerts, simplices'), graph) =
  let simplices =
        V.map (V.map (\(b, c) -> (UV.convert b, UV.convert c))) simplices'
      scales = case scales' of Left v -> V.toList v; Right l -> l
      --an edge is in a simplex iff both of its endpoints are vertices of the simplex
      edgeInSimplex edge simplex = (V.any (\x -> V.head edge == x) simplex)
                                && (V.any (\x -> V.last edge == x) simplex)
      --distances are looked up in the precomputed graph in constant time
      --NOTE(review): `<=` (not `<`) means an edge whose length equals the scale is excluded -- confirm intended
      edgeTooLong scale edge = scale <= (fst $ graph `indexGraph` (edge ! 0, edge ! 1))
      maxIndex = (L.length scales) - 1

      --walk the scales from largest to smallest, assigning each simplex the first
      --(i.e. largest) filtration index at which one of its edges becomes too long
      calcIndices 0 [] sc = sc
      calcIndices i (scl:scls) sc =
        --find edges excluded by this scale
        let longEdges = V.filter (edgeTooLong scl) $ V.map (\(i, v, f) -> v) $ V.head sc
        in calcIndices (i - 1) scls $ V.map (V.map (\(j, v, f) ->
             --if the simplex has not yet been assigned a filtration index
             if j == 0 then
               --if a long edge is in the simplex, assign it the current index
               if V.any (\edge -> edgeInSimplex edge v) longEdges then (i, v, f)
               --otherwise wait until next iteration
               else (0, v, f)
             --otherwise leave it alone
             else (j, v, f))) sc

      --sort each dimension by filtration index and re-point every face index at the
      --position its face moved to in the (sorted) next-lower dimension
      sortFiltration simplices =
        let sortedSimplices =
              V.map (quickSort (\((i, _, _), _) ((j, _, _), _) -> i > j)) $
                V.map (mapWithIndex (\i s -> (s, i))) simplices
            newFaces dim (i, v, f) =
              let findNew j =
                    case V.findIndex (\x -> snd x == j) $ sortedSimplices ! (dim - 1) of
                      Just k  -> k
                      Nothing -> error "Persistence.Filtration.sortFiltration.newFaces.findNew. This is a bug. Please email the Persistence maintainers."
              in (i, v, (V.map findNew f))
        in
          if V.null simplices then simplices
          else mapWithIndex (\i ss -> V.map ((newFaces i) . fst) ss) sortedSimplices

      --sort boundary indices so that boundary chains can be acquired easily later
      sortBoundaries = V.map (V.map (\(i, v, f) -> (i, v, quickSort (\a b -> a <= b) f)))

  --sort the simplices by filtration index,
  --then sort boundaries so that the boundary chains can be acquired easily
  in (numVerts, sortBoundaries $ sortFiltration $
       calcIndices maxIndex (L.tail scales) $
         V.map (V.map (\(v, f) -> (0, v, f))) $ simplices)
|
This function constructs a filtration of the Vietoris-Rips complexes associated with the scales.
Note that this is a fast function, meaning it uses O(n^2) memory to quickly access distances where n is the number of data points.
-}
ripsFiltrationFast :: UV.Unbox a
                   => Ord a
                   => Eq b
                   => Either (Vector a) [a] -- ^Scales in decreasing order
                   -> (b -> b -> a) -- ^Metric
                   -> Either (Vector b) [b] -- ^Data set
                   -> SimpleFiltration
ripsFiltrationFast scales metric dataSet =
  --build the complex once at the coarsest (largest) scale, then thin it out per scale
  let largestScale = either V.head L.head scales
  in filterByWeightsFast scales $ ripsComplexFast largestScale metric dataSet
|
Same as above except it uses parallelism when computing the Vietoris-Rips complex of the largest scale.
-}
ripsFiltrationFastPar :: UV.Unbox a
                      => Ord a
                      => Eq b
                      => Either (Vector a) [a] -- ^Scales in decreasing order
                      -> (b -> b -> a) -- ^Metric
                      -> Either (Vector b) [b] -- ^Data set
                      -> SimpleFiltration
ripsFiltrationFastPar scales metric dataSet =
  --parallel variant: the top-scale complex is built in parallel, then filtered per scale
  let largestScale = either V.head L.head scales
  in filterByWeightsFast scales $ ripsComplexFastPar largestScale metric dataSet
{- |
The same as filterbyWeightsFast except it uses far less memory at the cost of speed.
Note that the scales must be in decreasing order.
-}
filterByWeightsLight :: Ord a
                     => Either (Vector a) [a] -- ^Scales in decreasing order
                     -> (b -> b -> a) -- ^Metric
                     -> Either (Vector b) [b] -- ^Data set
                     -> SimplicialComplex -- ^Vietoris-Rips complex of the data at the largest scale.
                     -> SimpleFiltration
filterByWeightsLight scales' metric dataSet (numVerts, simplices') =
  let simplices =
        V.map (V.map (\(b, c) -> (UV.convert b, UV.convert c))) simplices'
      scales = case scales' of Left v -> V.toList v; Right l -> l
      --an edge is in a simplex iff both of its endpoints are vertices of the simplex
      edgeInSimplex edge simplex = (V.any (\x -> V.head edge == x) simplex)
                                && (V.any (\x -> V.last edge == x) simplex)
      vector = case dataSet of Left v -> v; Right l -> V.fromList l
      --"light": the metric is re-evaluated on demand instead of being cached in a graph
      edgeTooLong scale edge = scale <= (metric (vector ! (edge ! 0)) (vector ! (edge ! 1)))
      maxIndex = (L.length scales) - 1

      --walk the scales from largest to smallest, assigning each simplex the first
      --(i.e. largest) filtration index at which one of its edges becomes too long
      calcIndices 0 [] sc = sc
      calcIndices i (scl:scls) sc =
        --find edges excluded by this scale
        let longEdges = V.filter (edgeTooLong scl) $ V.map (\(i, v, f) -> v) $ V.head sc
        in calcIndices (i - 1) scls $ V.map (V.map (\(j, v, f) ->
             --if the simplex has not yet been assigned a filtration index
             if j == 0 then
               --if a long edge is in the simplex, assign it the current index
               if V.any (\edge -> edgeInSimplex edge v) longEdges then (i, v, f)
               --otherwise wait until next iteration
               else (0, v, f)
             --otherwise leave it alone
             else (j, v, f))) sc

      --sortFiltration :: Vector (Int, Vector Int, Vector Int) -> SimpleFiltration
      --sort each dimension by filtration index and re-point every face index at the
      --position its face moved to in the (sorted) next-lower dimension
      sortFiltration simplxs =
        let
          --sortedSimplices :: Vector (Vector ((Int, Vector Int, Vector Int), Int))
          sortedSimplices =
            V.map (quickSort (\((i, _, _), _) ((j, _, _), _) -> i > j)) $
              V.map (mapWithIndex (\i s -> (s, i))) simplxs
          newFaces dim (i, v, f) =
            let findNew j =
                  case V.findIndex (\x -> snd x == j) $ sortedSimplices ! (dim - 1) of
                    Just k  -> k
                    Nothing -> error "Persistence.Filtration.filterByWeightsLight.sortFiltration.newFaces.findNew. This is a bug. Please email the Persistence maintainers."
            in (i, v, (V.map findNew f))
        in
          if V.null simplxs then simplxs
          else mapWithIndex (\i ss -> V.map ((newFaces i) . fst) ss) sortedSimplices

  in (numVerts, sortFiltration $ --sort the simplices by filtration index
       calcIndices maxIndex (L.tail scales) $
         V.map (V.map (\(v, f) -> (0, v, f))) $ simplices)
|
Constructs the filtration of Vietoris-Rips complexes corresponding to each of the scales.
-}
ripsFiltrationLight :: (Ord a, Eq b)
                    => Either (Vector a) [a] -- ^List of scales in decreasing order
                    -> (b -> b -> a) -- ^Metric
                    -> Either (Vector b) [b] -- ^Data set
                    -> SimpleFiltration
ripsFiltrationLight scales metric dataSet =
  --build the complex once at the coarsest (largest) scale, then thin it out per scale
  let largestScale = either V.head L.head scales
  in filterByWeightsLight scales metric dataSet
       $ ripsComplexLight largestScale metric dataSet
|
Same as above except it uses parallelism when computing the Vietoris-Rips complex of the largest scale.
-}
ripsFiltrationLightPar :: UV.Unbox a
                       => Ord a
                       => Eq b
                       => Either (Vector a) [a] -- ^List of scales in decreasing order
                       -> (b -> b -> a) -- ^Metric
                       -> Either (Vector b) [b] -- ^Data set
                       -> SimpleFiltration
ripsFiltrationLightPar scales metric dataSet =
  --parallel variant: the top-scale complex is built in parallel, then filtered per scale
  let largestScale = either V.head L.head scales
  in filterByWeightsLight scales metric dataSet
       $ ripsComplexLightPar largestScale metric dataSet
-- * Persistent Homology
indices of the simplices in the sum are 1
--a chain over Z/2 coefficients: bit i is set iff simplex i appears in the formal sum
type Chain = ByteString
--addition of chains is bitwise xor; B.pack builds the result in one pass,
--replacing the previous fold with B.snoc, which is O(n) per snoc on a strict
--ByteString and therefore O(n^2) overall
(+++) :: Chain -> Chain -> Chain
a +++ b = B.pack $ B.zipWith xor a b
--intersection
--intersection of chains is bitwise and; B.pack builds the result in one pass,
--replacing the previous O(n^2) fold with B.snoc
(-^-) :: Chain -> Chain -> Chain
a -^- b = B.pack $ B.zipWith (.&.) a b
--return the list of simplex indices.
chain2indxs :: Chain -> Vector Int
--keep every bit position that is set; `testBBit` and `range` come from Persistence.Util
--NOTE(review): if `range` is inclusive on both ends this also probes bit 8*length,
--one past the last byte -- confirm against Persistence.Util.range
chain2indxs bits = V.filter (testBBit bits) $ 0 `range` (8*(B.length bits))
first simplex in the chain
--index of the lowest-numbered simplex present in the chain (its "pivot");
--partial: errors on the empty chain, mirroring V.head
headChain :: Chain -> Int
headChain chain = V.head $ chain2indxs chain
given the number of simplices of a certain dimension rounded up to the nearest multiple of 8
create the zero chain for that number of simplices
makeEmpty :: Int -> Chain
--one zero byte per 8 simplices; `num` is expected to already be a multiple of 8
makeEmpty num = B.replicate (num `div` 8) 0
--convert indices of simplices to a chain
given total number of simplices of that dimension founded up to the nearest multiple of 8
indxs2chain :: Int -> Vector Int -> Chain
--start from the all-zeroes chain of the given (rounded-up) size and set one bit per listed index
indxs2chain num = V.foldl (\acc i -> setBBit acc i) (makeEmpty num)
--BROKEN!!!
{--}
|
The nth entry in the list will describe the n-dimensional topology of the filtration.
That is, the first list will represent clusters, the second list will represent tunnels or punctures, the third will represent hollow volumes,
and the nth index list will represent n-dimensional holes in the data.
Features are encoded by the filtration indices where they appear and disappear.
-}
indexBarCodes :: Filtration -> Vector (Vector (BarCode Int))
--Standard persistence algorithm over Z/2 chains stored as bit vectors (ByteStrings).
--NOTE(review): the comment above this region flags the bit-vector implementation as
--BROKEN; treat its output with suspicion until verified against the commented-out
--variant further below.
indexBarCodes filtration =
  let maxdim = getDimension (Right filtration)

      --given a chain of simplices which are marked
      --and a vector of boundary chains paired with the indices of their parent simplices
      --remove the unmarked simplices from the chain
      removeUnmarked :: Chain -> Vector (Int, Chain) -> Vector (Int, Chain)
      removeUnmarked marked = V.map (\(i, c) -> (i, marked -^- c))

      --eliminate monomials in the boundary chain until it is null
      --or there is a monomial which can't be eliminated
      removePivotRows :: Vector (Maybe Chain) -> Chain -> Chain
      removePivotRows slots chain =
        if B.null chain then B.empty
        else
          case slots ! (headChain chain) of
            Nothing -> chain
            Just c  -> removePivotRows slots (chain +++ c)

      --given the indices of the marked simplices from the last iteration,
      --slots from the last iteration, and boundary chains,
      --mark the appropriate simplices, fill in the appropriate slots, and identify bar codes
      --boundary chains are paired with the index of their corresponding simplex
      makeFiniteBarCodes :: Int
                         -> Chain
                         -> Vector (Maybe Chain)
                         -> Vector (Int, Chain)
                         -> Vector (BarCode Int)
                         -> (Chain, Vector (Maybe Chain), Vector (BarCode Int))
      makeFiniteBarCodes dim newMarked slots boundaries barcodes =
        if V.null boundaries then (newMarked, slots, barcodes)
        else
          let boundary = V.head boundaries
              reduced  = removePivotRows slots $ snd boundary
          in
            --mark the simplex if its boundary chain is reduced to null
            if B.null reduced then
              makeFiniteBarCodes dim
                (setBBit newMarked (fst boundary)) slots (V.tail boundaries) barcodes
            else
              let pivot = headChain reduced
              --put the pivot chain in the pivot's slot, add the new barcode to the list:
              --the bar is born at the pivot face's filtration index and dies at this simplex's
              in makeFiniteBarCodes dim newMarked
                   (replaceElem pivot (Just reduced) slots)
                     (V.tail boundaries) ((one $ filtration ! (dim - 1) ! pivot,
                       Finite $ one $ filtration ! dim ! (fst boundary)) `V.cons` barcodes)

      --get the finite bar codes for each dimension
      loopFiniteBarCodes :: Int
                         -> Vector Chain
                         -> Vector (Vector (Maybe Chain))
                         -> Vector (Vector (BarCode Int))
                         -> ( Vector Chain
                            , Vector (Vector (Maybe Chain))
                            , Vector (Vector (BarCode Int))
                            )
      loopFiniteBarCodes dim marked slots barcodes =
        --the slots vector made when looping over the vertices will be null
        if dim > maxdim
        then (marked, V.tail slots, (V.tail barcodes) V.++ (V.empty `V.cons` V.empty))
        else
          let numSlots = if dim == 0 then 0 else V.length $ filtration ! (dim - 1) --see above
              --round the slot count up to the next multiple of 8 for the bit vector
              numSlots8 = numSlots + 8 - (numSlots .&. 7)
              --NOTE(review): at dim 0 this takes `V.last marked` on an empty vector;
              --whether laziness saves it is unclear (the file marks this path BROKEN) -- confirm
              boundaries =
                removeUnmarked (V.last marked)
                  $ mapWithIndex (\i (_, _, f) -> (i, indxs2chain numSlots8 f)) $ filtration ! dim
              (newMarked, newSlots, newCodes) =
                makeFiniteBarCodes dim (makeEmpty numSlots8)
                  (V.replicate numSlots Nothing) boundaries V.empty
          in loopFiniteBarCodes (dim + 1) (marked `V.snoc` newMarked)
               (slots `V.snoc` newSlots) (barcodes V.++ (newCodes `V.cons` V.empty))

      --if a simplex isn't marked and has an empty slot,
      --an infinite bar code begins at it's filtration index
      makeInfiniteBarCodes :: Int -> Chain -> Vector (Maybe Chain) -> Vector (BarCode Int)
      makeInfiniteBarCodes dim marked slots =
        V.map (\i -> (one $ filtration ! dim ! i, Infinity))
          $ V.filter (\i -> slots ! i == Nothing) $ chain2indxs marked

      --add the infinite bar codes to the list of bar codes in each dimension
      loopInfiniteBarCodes :: Int
                           -> ( Vector Chain, Vector (Vector (Maybe Chain))
                              , Vector (Vector (BarCode Int)))
                           -> Vector (Vector (BarCode Int))
      loopInfiniteBarCodes dim (marked, slots, barcodes) =
        if dim > maxdim then barcodes
        else
          loopInfiniteBarCodes (dim + 1) (marked, slots, replaceElem dim ((barcodes ! dim)
            V.++ (makeInfiniteBarCodes dim (marked ! dim) (slots ! dim))) barcodes)

      finiteBCs = loopFiniteBarCodes 0 V.empty V.empty V.empty

  --drop zero-length bars (born and dead at the same index)
  in V.map (V.filter (\(a, b) -> b /= Finite a)) $ loopInfiniteBarCodes 0 finiteBCs
| Same as above except this function acts on filtrations whose vertices all have filtration index zero ( for a very slight speedup ) .
indexBarCodesSimple :: SimpleFiltration -> Vector (Vector (BarCode Int))
--Variant of the persistence algorithm specialized to filtrations whose vertices
--all enter at index 0 (so dimension 0 gets a dedicated edge pass).
--NOTE(review): this bit-vector implementation is in the region flagged BROKEN above;
--several size computations below look suspect -- see the inline notes.
indexBarCodesSimple (numVerts, allSimplices) =
  let maxdim = getDimension (Right allSimplices)
      --sizes rounded up to the next multiple of 8 for the bit vectors
      verts8 = numVerts + 8 - (numVerts .&. 7)
      edges = V.map (\(i, v, f) -> (i, v, (V.reverse v))) $ V.head allSimplices
      numEdges = V.length edges
      numEdges8 = numEdges + 8 - (numEdges .&. 7)
      numSimplices = V.map V.length $ V.tail allSimplices
      --NOTE(review): rounds using numEdges's remainder, not x's own; this looks like a
      --bug (should presumably be `x .&. 7`) -- confirm
      numSimplices8 = V.map (\x -> x + 8 - (numEdges .&. 7)) numSimplices

      --remove marked simplices from the given chain
      removeUnmarked :: Chain -> Chain -> Chain
      removeUnmarked marked chain = marked -^- chain

      --eliminate monomials in the boundary chain until it is null
      --or there is a monomial which can't be eliminated
      removePivotRows :: Vector (Maybe Chain) -> Chain -> Chain
      removePivotRows slots chain =
        if B.null chain then B.empty
        else
          case slots ! (headChain chain) of
            Nothing -> chain
            Just c  -> removePivotRows slots (chain +++ c)

      --dimension-0 pass: reduce each edge's boundary (its two vertices) against the
      --vertex slots, marking edges whose boundary vanishes and emitting (0, death) bars
      makeEdgeCodes :: Int
                    -> Vector (Maybe Chain)
                    -> Vector (Int, Vector Int, Vector Int)
                    -> (Vector (BarCode Int), Chain)
                    -> (Vector (BarCode Int), Chain, Vector Int)
      makeEdgeCodes index reduced edges (codes, marked)
        | V.null edges = (codes, marked, V.findIndices (\x -> x == Nothing) reduced)
        | B.null d =
          makeEdgeCodes (index + 1) reduced (V.tail edges) (codes, setBBit marked index)
        | otherwise =
          makeEdgeCodes (index + 1) (replaceElem (headChain d)
            (Just d) reduced) (V.tail edges) ((0, Finite i) `V.cons` codes, marked)
        where (i, v, f) = V.head edges
              --NOTE(review): the chain is indexed by vertices but sized by numEdges,
              --and `v` is used where the face list seems intended --should be f?
              d = removePivotRows reduced $ indxs2chain numEdges v --should be f?

      --general-dimension pass: reduce each simplex's boundary against the slots of the
      --next-lower dimension, marking cycles and emitting (birth, death) bars otherwise
      makeBarCodesAndMark :: Int
                          -> Int
                          -> Chain
                          -> Vector (Maybe Chain)
                          -> Vector (Int, Vector Int, Vector Int)
                          -> (Vector (BarCode Int), Chain)
                          -> (Vector (BarCode Int), Chain, Vector Int)
      makeBarCodesAndMark dim index marked reduced simplices (codes, newMarked)
        | V.null simplices = (codes, newMarked, V.findIndices (\x -> x == Nothing) reduced)
        | B.null d =
          makeBarCodesAndMark dim (index + 1) marked reduced
            (V.tail simplices) (codes, setBBit newMarked index)
        | otherwise =
          let maxindex = headChain d
              begin = one $ allSimplices ! (dim - 1) ! maxindex
          in makeBarCodesAndMark dim (index + 1) marked
               (replaceElem maxindex (Just d) reduced) (V.tail simplices)
                 ((begin, Finite i) `V.cons` codes, newMarked)
        where (i, v, f) = V.head simplices
              d = removePivotRows reduced
                    $ removeUnmarked marked $ indxs2chain (numSimplices8 ! (dim - 2)) f

      --run the general pass for every dimension above the edges, accumulating
      --per-dimension bar codes, marked chains, and unused slots
      makeFiniteBarCodes :: Int
                         -> Int
                         -> Vector (Vector (BarCode Int))
                         -> Vector Chain
                         -> Vector (Vector Int)
                         -> ( Vector (Vector (BarCode Int))
                            , Vector Chain
                            , Vector (Vector Int)
                            )
      makeFiniteBarCodes dim maxdim barcodes marked slots =
        if dim == maxdim then (barcodes, marked, slots)
        else
          let (newCodes, newMarked, unusedSlots) =
                makeBarCodesAndMark dim 0 (V.last marked)
                  (V.replicate (V.length $ allSimplices ! (dim - 1)) Nothing)
                    (allSimplices ! dim) (V.empty, makeEmpty $ numSimplices8 ! (dim - 2))
          in makeFiniteBarCodes (dim + 1) maxdim
               (barcodes V.++ (newCodes `V.cons` V.empty))
                 (marked `V.snoc` newMarked) (slots `V.snoc` unusedSlots)

      --every marked simplex whose slot was never filled starts a bar that never dies
      makeInfiniteBarCodes :: ( Vector (Vector (BarCode Int))
                              , Vector Chain
                              , Vector (Vector Int)
                              )
                           -> Vector (Vector (BarCode Int))
      makeInfiniteBarCodes (barcodes, marked', unusedSlots) =
        let marked = V.map chain2indxs marked'
            makeCodes :: Int -> Vector (BarCode Int) -> Vector (BarCode Int)
            makeCodes i codes =
              let slots = unusedSlots ! i; marks = marked ! i
              in codes V.++ (V.map (\j -> (one
                   $ allSimplices ! (i - 1) ! j, Infinity)) $ slots |^| marks)
            loop :: Int -> Vector (Vector (BarCode Int)) -> Vector (Vector (BarCode Int))
            loop i v
              | V.null v = V.empty
              --dimension 0 is special: vertices are all born at index 0
              | i == 0 =
                ((V.head v) V.++ (V.map (\j -> (0, Infinity))
                  $ (unusedSlots ! 0) |^| (marked ! 0))) `V.cons` (loop 1 $ V.tail v)
              | otherwise = (makeCodes i $ V.head v) `V.cons` (loop (i + 1) $ V.tail v)
        in loop 0 barcodes

      (fstCodes, fstMarked, fstSlots) = makeEdgeCodes 0
        (V.replicate numVerts Nothing)
          edges (V.empty, makeEmpty numEdges)

      verts = 0 `range` (numVerts - 1)

  --drop zero-length bars (born and dead at the same index)
  in
    V.map (V.filter (\(a, b) ->
      b /= Finite a)) $ makeInfiniteBarCodes
        $ makeFiniteBarCodes 1 (V.length allSimplices)
          (fstCodes `V.cons` V.empty) ((indxs2chain verts8 verts)
            `V.cons` (fstMarked `V.cons` V.empty)) (fstSlots `V.cons` V.empty)
--}
-
translate : : F.Extended a - > Extended a
translate F.Infinity = Infinity
translate ( F.Finite x ) = Finite x
translate F.MinusInfty = MinusInfty
indexBarCodes =
( V.map ( V.map ( \(i , j ) - > ( i , translate j ) ) ) ) . F.indexBarCodes
indexBarCodesSimple =
( V.map ( V.map ( \(i , j ) - > ( i , translate j ) ) ) ) .
translate :: F.Extended a -> Extended a
translate F.Infinity = Infinity
translate (F.Finite x) = Finite x
translate F.MinusInfty = MinusInfty
indexBarCodes =
(V.map (V.map (\(i, j) -> (i, translate j)))) . F.indexBarCodes
indexBarCodesSimple =
(V.map (V.map (\(i, j) -> (i, translate j)))) . F.indexBarCodesSimple
--}
{- |
The nth entry in the list will describe the n-dimensional topology of the filtration.
However, features are encoded by the scales where they appear and disappear. For consistency,
scales must be in decreasing order.
-}
scaleBarCodes :: Either (Vector a) [a] -> Filtration -> Vector (Vector (BarCode a))
scaleBarCodes scales filtration =
  --scales arrive in decreasing order, so reverse them to index by filtration index
  let s = V.reverse $ either id V.fromList scales
      translate (i, Infinity) = (s ! i, Infinity)
      translate (i, Finite j) = (s ! i, Finite $ s ! j)
  in V.map (V.map translate) $ indexBarCodes filtration
{- |
Same as above except acts only on filtrations whose vertices all have filtration index 0.
Note that scales must be in decreasing order.
-}
scaleBarCodesSimple :: Either (Vector a) [a] -> SimpleFiltration -> Vector (Vector (BarCode a))
scaleBarCodesSimple scales filtration =
  --scales arrive in decreasing order, so reverse them to index by filtration index
  let s = V.reverse $ either id V.fromList scales
      translate (i, Infinity) = (s ! i, Infinity)
      translate (i, Finite j) = (s ! i, Finite $ s ! j)
  in V.map (V.map translate) $ indexBarCodesSimple filtration
-- * Comparing barcode diagrams
|
The standard (Euclidean) metric between index barcodes.
The distance between infinite and finite barcodes is infinite,
and the distance between two infinite barcodes is the absolute value of the
difference of their fst component.
-}
indexMetric :: BarCode Int -> BarCode Int -> Extended Double
indexMetric (i, x) (j, y) =
  case (x, y) of
    --a finite and an infinite bar are infinitely far apart
    (Finite _, Infinity) -> Infinity
    (Infinity, Finite _) -> Infinity
    --two infinite bars differ only in their birth index
    (Infinity, Infinity) -> Finite $ fromIntegral $ abs $ i - j
    --two finite bars: Euclidean distance between (birth, death) points
    (Finite a, Finite b) ->
      let dx = i - j
          dy = a - b
      in Finite $ sqrt $ fromIntegral $ dx*dx + dy*dy
|
Given a metric, return the Hausdorff distance
(referred to as bottleneck distance in TDA) between the two sets.
Returns nothing if either list of barcodes is empty.
-}
bottleNeckDistance :: Ord b
                   => (BarCode a -> BarCode a -> Extended b)
                   -> Vector (BarCode a)
                   -> Vector (BarCode a)
                   -> Maybe (Extended b)
bottleNeckDistance metric diagram1 diagram2
  | V.null diagram1 || V.null diagram2 = Nothing
  | otherwise =
    --directed Hausdorff distance from one diagram to the other; the result is
    --the larger of the two directions
    let oneWay from to = V.maximum $ V.map (\p -> V.minimum $ V.map (metric p) to) from
    in Just $ max (oneWay diagram1 diagram2) (oneWay diagram2 diagram1)
|
Gets all the bottleneck distances;
a good way to determine the similarity of the topology of two filtrations.
-}
bottleNeckDistances :: Ord b => (BarCode a -> BarCode a -> Extended b)
                             -> Vector (Vector (BarCode a))
                             -> Vector (Vector (BarCode a))
                             -> Vector (Maybe (Extended b))
bottleNeckDistances metric diagrams1 diagrams2 =
  --zipWith truncates to the shorter argument, so pad the result with Nothing
  --for the dimensions present in only one of the two diagram lists
  let paired  = V.zipWith (bottleNeckDistance metric) diagrams1 diagrams2
      missing = abs $ V.length diagrams1 - V.length diagrams2
  in paired V.++ V.replicate missing Nothing
-- | Compute the persistence landscape of the barcodes for a single dimension.
calcLandscape :: Vector (BarCode Int) -> Landscape
--Sweep-based construction of the persistence landscape from a barcode diagram;
--see Bubenik & Dlotko, "A Persistence Landscapes Toolbox For Topological Statistics".
--NOTE(review): the module header marks the landscape functions as not fully tested;
--the evaluation order here is intricate, so this body is documented but left untouched.
calcLandscape brcds =
  let half = Finite 0.5
      --ordering on (birth, death) pairs used for the sweep: later birth, or
      --(for equal/earlier births) earlier-or-equal death, comes "later"
      (i,j) `leq` (k,l) = i > k || j <= l

      --extend the landscape function currently under construction (head of result)
      --while bars overlapping the current bar (b, d) remain
      innerLoop :: (Extended Double, Extended Double)
                -> Vector (Extended Double, Extended Double)
                -> Landscape
                -> Landscape
      innerLoop (b, d) barcodes result =
        case V.findIndex (\(b', d') -> d' > d) barcodes of
          --no bar outlives the current one: close off this landscape function
          Nothing ->
            outerLoop barcodes (((V.fromList [(0, b), (Infinity, 0)])
              V.++ (V.head result)) `V.cons` (V.tail result))
          Just i -> let (b', d') = barcodes ! i in
            if b' >= d then
              if b == d then
                let new = [(Finite 0.0, b')]
                in
                  if d' == Infinity then
                    outerLoop (rmIndex i barcodes) (((V.fromList ((Infinity, Infinity):new))
                      V.++ (V.head result)) `V.cons` (V.tail result))
                  else
                    innerLoop (b', d') (rmIndex i barcodes)
                      ((V.fromList ((half*(b' + d'), half*(d' - b')):new)
                        V.++ (V.head result)) `V.cons` (V.tail result))
              else
                --disjoint bars: the function touches zero at d and again at b'
                let new = [(Finite 0.0, d), (Finite 0.0, b')]
                in
                  if d' == Infinity then
                    outerLoop (rmIndex i barcodes) (((V.fromList ((Infinity, Infinity):new))
                      V.++ (V.head result)) `V.cons` (V.tail result))
                  else
                    innerLoop (b', d') (rmIndex i barcodes)
                      (((V.fromList ((half*(b' + d'), half*(d' - b')):new))
                        V.++ (V.head result)) `V.cons` (V.tail result))
            else
              --overlapping bars: record the crossing point and requeue the remainder (b', d)
              let newbr = (half*(b' + d), half*(d - b'))
              in
                if d' == Infinity then
                  outerLoop (orderedInsert leq newbr barcodes)
                    (((V.fromList [(Infinity, Infinity), newbr])
                      V.++ (V.head result)) `V.cons` (V.tail result))
                else
                  innerLoop (b', d') (orderedInsert leq newbr barcodes)
                    (((V.fromList [(half*(b' + d'), half*(d' - b')), newbr])
                      V.++ (V.head result)) `V.cons` (V.tail result))

      --start a new landscape function from the first (outermost) remaining bar
      outerLoop :: Vector (Extended Double, Extended Double) -> Landscape -> Landscape
      outerLoop barcodes result =
        if not $ V.null barcodes then
          let (b, d) = V.head barcodes
          in
            if (b, d) == (MinusInfty, Infinity)
            then
              --a bar spanning the whole line contributes the constant-infinity function
              outerLoop (V.tail barcodes)
                ((V.fromList [(MinusInfty, Infinity),
                  (Infinity, Infinity)]) `V.cons` result)
            else if d == Infinity
            then
              --a bar that never dies ramps up from its birth forever
              outerLoop (V.tail barcodes) ((V.fromList
                [(MinusInfty, Finite 0.0),(b, Finite 0.0),(Infinity,Infinity)]) `V.cons` result)
            else
              let newCritPoints =
                    if b == Infinity
                    then [(MinusInfty, Infinity)]
                    else [(MinusInfty, Finite 0.0), (half*(b + d), half*(d - b))]
              in innerLoop (b, d) (V.tail barcodes) ((V.fromList newCritPoints) `V.cons` result)
        else result

  --sort each function's critical points by their x coordinate before returning
  in V.map (quickSort (\(x1, _) (x2, _) -> x1 > x2))
       $ outerLoop (quickSort leq $ V.map (\(i, j) ->
           (fromInt $ Finite i, fromInt j)) brcds) V.empty
-- | Evaluate the nth function in the landscape for the given point.
evalLandscape :: Landscape -> Int -> Extended Double -> Extended Double
evalLandscape landscape i arg =
  let fcn = landscape ! i
      --binary search for the indices of the two critical points whose
      --x-coordinates bracket x; helper carries the offset of the current
      --slice within the original vector
      findPointNeighbors :: Ord a => Int -> a -> Vector a -> (Int, Int)
      findPointNeighbors helper x vector =
        let len = V.length vector
            i = len `div` 2
            y = vector ! i
        in
          if x == y
          then (helper + i, helper + i)
          else if x > y
          then
            case vector !? (i + 1) of
              Nothing -> (helper + i, helper + i)
              Just z ->
                if x < z
                then (helper + i, helper + i + 1)
                else findPointNeighbors (helper + i) x $ V.drop i vector
          else
            case vector !? (i - 1) of
              Nothing -> (helper + i, helper + i)
              Just z ->
                if x > z
                then (helper + i - 1, helper + i)
                else findPointNeighbors helper x $ V.take i vector
      --bracketing critical x-coordinates and their corresponding y-values
      (i1, i2) = findPointNeighbors 0 arg $ V.map fst fcn
      (x1, x2) = (fst $ fcn ! i1, fst $ fcn ! i2)
      (y1, y2) = (snd $ fromMaybe (error "Persistence.Filtration.evalLandscape. This is a bug. Please email the Persistence mainstainers.") $ V.find (\a -> x1 == fst a) fcn, snd $ fromMaybe (error "Persistence.Filtration.evalLandscape. This is a bug. Please email the Persistence mainstainers.") $ V.find (\a -> x2 == fst a) fcn)
  in
    --arg hit a critical point exactly
    if x1 == x2
    then y1
    else
      --linear interpolation between the bracketing critical points, with
      --special cases for infinite endpoints
      case (x1, x2) of
        (MinusInfty, Infinity) -> arg
        (MinusInfty, Finite _) -> y1
        (Finite a, Finite b) ->
          case arg of
            Finite c ->
              --t is the interpolation parameter in [0, 1]
              let t = Finite $ (c - a)/(b - a)
              in t*y2 + ((Finite 1.0) - t)*y1
            _ -> error "Persistence.Filtration.evalLandscape.findPointNeighbors. This is a bug. Please email the Persistence maintainers."
        (Finite a, Infinity) ->
          case arg of
            Infinity -> y2
            Finite c ->
              case y2 of
                --unbounded ramp: presumably slope 1 starting at (a, 0) -- confirm
                Infinity -> Finite $ c - a
                Finite 0.0 -> Finite 0.0
                _ -> error $ "Persistence.Filtration.evalLandscape: y2 = " L.++ (show y2) L.++ ". This is a bug. Please email the Persistence maintainers."
            _ -> error "Persistence.Filtration.evalLandscape.findPointNeighbors: bad argument. This is a bug. Please email the Persistence maintainers."
        anything -> error $ "Persistence.Filtration.evalLandscape.findPointNeighbors: " L.++ (show anything) L.++ ". This is a bug. Please email the Persistence maintainers."
-- | Evaluate all the real-valued functions in the landscape.
evalLandscapeAll :: Landscape -> Extended Double -> Vector (Extended Double)
evalLandscapeAll landscape arg =
  --evaluate each landscape function at the argument, in order
  if V.null landscape
  then V.empty
  else V.map (\i -> evalLandscape landscape i arg) $ 0 `range` (V.length landscape - 1)
{- |
Compute a linear combination of the landscapes.
If the coefficient list is too short, the rest of the coefficients are assumed to be zero.
If it is too long, the extra coefficients are discarded.
-}
linearComboLandscapes :: [Double] -> [Landscape] -> Landscape
linearComboLandscapes coeffs landscapes =
  let maxlen = L.maximum $ L.map V.length landscapes
      --identically-zero layer used to pad shorter landscapes up to maxlen
      emptylayer = V.fromList [(MinusInfty, Finite 0.0), (Infinity, Finite 0.0)]
      landscapes' = L.map (\l -> l V.++ (V.replicate (maxlen - V.length l) emptylayer)) landscapes
      --layer-wise concatenation of two landscapes' critical-point vectors
      myconcat v1 v2
        | V.null v1 = v2
        | V.null v2 = v1
        | otherwise = ((V.head v1) V.++ (V.head v2)) `V.cons` (myconcat (V.tail v1) (V.tail v2))
      xs = L.map (V.map (V.map fst)) landscapes'
      concatted = L.foldl myconcat V.empty xs
      --per layer: the union of all critical x-coordinates, deduplicated and sorted
      unionXs = V.map ((quickSort (>)) . V.fromList . L.nub . V.toList) concatted
      --evaluate every padded landscape at every union x-coordinate
      yVals = L.map (\landscape ->
        mapWithIndex (\i v -> V.map (evalLandscape landscape i) v) unionXs) landscapes'
      --scale each landscape's values by its coefficient; zipWith implements
      --the documented truncation/zero-fill of a mismatched coefficient list
      yVals' = L.zipWith (\coeff yvals ->
        V.map (V.map ((Finite coeff)*)) yvals) coeffs yVals
      finalY = L.foldl1 (\acc new -> V.zipWith (V.zipWith (+)) acc new) yVals'
  in V.zipWith V.zip unionXs finalY
-- | Average the persistence landscapes.
avgLandscapes :: [Landscape] -> Landscape
avgLandscapes landscapes =
  --equal-weight linear combination, each coefficient 1/n
  let count  = L.length landscapes
      weight = 1.0/(fromIntegral count)
  in linearComboLandscapes (L.replicate count weight) landscapes
-- | Subtract the second landscape from the first.
diffLandscapes :: Landscape -> Landscape -> Landscape
--coefficients 1 and -1 give the pointwise difference minuend - subtrahend
diffLandscapes minuend subtrahend = linearComboLandscapes [1, -1] [minuend, subtrahend]
{- |
If p>=1 then it will compute the L^p norm on the given interval.
Uses trapezoidal approximation.
You should ensure that the stepsize partitions the interval evenly.
-}
normLp :: Extended Double -- ^p, the power of the norm (Infinity gives the sup norm)
       -> (Double, Double) -- ^Interval to compute the integral over
       -> Double -- ^Step size
       -> Landscape -- ^Persistence landscape whose norm is to be computed
       -> Maybe Double
normLp p interval step landscape =
  let len = V.length landscape
      a = fst interval
      b = snd interval
      --pointwise p-norm of the vector of landscape-function values at x
      fcn x =
        let vals = V.map (\n ->
              abs $ unExtend $ evalLandscape landscape n (Finite x)) $ 0 `range` (len - 1)
        in
          case p of
            Infinity -> V.maximum vals
            Finite 1.0 -> V.sum vals
            Finite 2.0 -> sqrt $ V.sum $ V.map (\a -> a*a) vals
            Finite p' -> (**(1.0/p')) $ V.sum $ V.map (**p') vals
      --trapezoid rule accumulator: f a + 2*f(a+step) + ... + 2*f(b-step) + f b.
      --The previous version stepped past the interval (adding 2*f b and
      --f (b+step)); the recursion now terminates exactly at the right endpoint.
      computeSum :: Double -> Double -> Double
      computeSum currentX result =
        let nextX = currentX + step
        in
          if nextX >= b then result + (fcn b)
          else computeSum nextX (result + 2.0*(fcn nextX))
  in
    --the L^p norm is only defined for p >= 1
    if p < (Finite 1.0) then Nothing
    else Just $ 0.5*step*(computeSum a $ fcn a)
{- |
Given the same information as above, computes the L^p distance between the two landscapes.
One way to compare the topologies of two filtrations.
-}
metricLp :: Extended Double -- ^p, power of the metric (Infinity gives the sup metric)
         -> (Double, Double) -- ^Interval on which the integral will be computed
         -> Double -- ^Step size
         -> Landscape -- ^First landscape
         -> Landscape -- ^Second landscape
         -> Maybe Double
--the L^p distance between two landscapes is the L^p norm of their difference
metricLp p interval step scape1 scape2 = normLp p interval step $ diffLandscapes scape1 scape2
* Utilities
* Construction
* Persistent homology
* Comparing barcode diagrams
* Types
|
This type synonym exists to make other synonyms more concise.
Each simplex in a filtration is represented as a triple: its filtration index,
the indices of its vertices in the original data, and the indices of its faces in the next lowest dimension.
Edges do not have reference to their faces, as it would be redundant with their vertices.
All simplices are sorted according to filtration index upon V.construction of the filtration.
In each dimension, all simplices are sorted in increasing order of filtration index,
and every simplices face indices are sorted in decreasing order;
both of these facts are critical to the computation of persistent homology.
|
A Persistence landscape is a certain type of piecewise linear function based on a barcode diagram.
It can be represented as a list of critical points paired with critical values.
Useful for taking averages and differences between barcode diagrams.
* Utilities
| Shows all the information in a simplex.
| Shows all the information in a filtration.
|
Gets the simplicial complex specified by the filtration index.
This is O(n) with respect to the number of simplices.
| Return the dimension of the highest dimensional simplex in the filtration (V.constant time).
| Convert a simple filtration into an ordinary filtration.
* Construction
|
This function creates a filtration out of a simplicial complex by removing simplices
that contain edges that are too long for each scale in the list.
This is really a helper function to be called by makeRipsFiltrationFast,
but I decided to expose it in case you have a simplicial complex and weighted graph lying around.
The scales MUST be in decreasing order.
^Scales in decreasing order
^Simplicial complex and a graph encoding the distance between every data point as well as whether or not they are within the largest scale of each other.
find edges excluded by this scale
if the simplex has not yet been assigned a fitration index
if a long edge is in the simplex, assign it the current index
otherwise wait until next iteration
otherwise leave it alone
sorted in reverse order
sort the simplices by filtration index,
then sort boundaries so that the boundary chains can be acquired easily
^Scales in decreasing order
^Metric
^Data set
^Scales in decreasing order
^Metric
^Data set
|
The same as filterbyWeightsFast except it uses far less memory at the cost of speed.
Note that the scales must be in decreasing order.
^Scales in decreasing order
^Metric
^Data set
^Vietoris-Rips complex of the data at the largest scale.
find edges excluded by this scale
if the simplex has not yet been assigned a fitration index
if a long edge is in the simplex, assign it the current index
otherwise wait until next iteration
otherwise leave it alone
sortFiltration :: Vector (Int, Vector Int, Vector Int) -> SimpleFiltration
sorted in increasing order
sort the simplices by filtration index
^List of scales in decreasing order
^Metric
^Data set
^List of scales in decreasing order
^Metric
^Data set
* Persistent Homology
addition of chains
intersection
return the list of simplex indices.
convert indices of simplices to a chain
BROKEN!!!
given a chain of simplices which are marked
and a vector of boundary chains paired with the indices of their parent simplices
remove the unmarked simplices from the chain
eliminate monomials in the boundary chain until it is no longer
or there is a monomial which can't be eliminated
given the indices of the marked simplices from the last iteration,
slots from the last iteration,and boundary chains
mark the appropriate simplices, fill in the appropriate slots, and identify bar codes
boundary chains are paired with the index of their coresponding simplex
mark the simplex if its boundary chain is reduced to null
put the pivot chain in the pivot's slot, add the new barcode to the list
get the finite bar codes for each dimension
the slots vector made when looping over the vertices will be null
see above
if a simplex isn't marked and has an empty slot,
an infinite bar code begins at it's filtration index
add the infinite bar codes to the list of bar codes in each dimension
remove marked simplices from the given chain
eliminate monomials in the boundary chain until it is no longer
or there is a monomial which can't be eliminated
should be f?
}
}
|
The nth entry in the list will describe the n-dimensional topology of the filtration.
However, features are encoded by the scales where they appear and disappear. For V.consistency,
scales must be in decreasing order.
|
Same as above except acts only on filtrations whose vertices all have filtration index 0.
Note that scales must be in decreasing order.
* Comparing barcode diagrams
| Compute the persistence landscape of the barcodes for a single dimension.
| Evaluate the nth function in the landscape for the given point.
| Evaluate all the real-valued functions in the landscape.
| Average the persistence landscapes.
^Interval to compute the integral over
^Step size
^Persistence landscape whose norm is to be computed
^Interval on which the integral will be computed
^Step size
^First landscape
^Second landscape | |
{- |
Module : Persistence.Filtration
Copyright : (c) Eben Kadile, 2018
License : BSD 3 Clause
Maintainer :
Stability : experimental
This module contains functions for V.constructing filtrations and computing persistent homology, persistence landscapes, and computing bottleneck distance between barcode diagrams.
A filtration is a finite sequence of simplicial complexes where each complex is a subset of the next. This means that a filtration can be thought of as a single simplicial complex where each of the simplices is labeled with a "filtration index" that represents the index in the sequence where that simplex enters the filtration.
One way to create a filtration, given a simplicial complex, a metric for the vertices, and a list of distances, is to loop through the distances from greatest to least: create a simplicial complex each iteration which excludes simplices that contain pairs of vertices which are further than the current distance apart. This method will produce a filtration of Vietoris-Rips complexes - each filtration index will correspond to a Rips complex whose scale is the corresponding distance. This filtration represents the topology of the data at each of the scales with which it was V.constructed.
NOTE: It's important that, even though the smallest filtration index represents the smallest scale at which the data is being anaylzed, all functions in this library receive your list of scales sorted in *decreasing* order.
An essential thing to note in this library is the distinction between "fast" and "light" functions. Light functions call the metric every time distance between two points is required, which is a lot. Fast functions store the distances between points and access them in V.constant time, BUT this means they use O(n^2) memory with respect to the number of data points, so it's a really bad idea to use this optimization on substantially large data if you don't have a lot of RAM.
Persistent homology is the main event of topological data analysis. It allows one to identify clusters, tunnels, cavities, and higher dimensional holes that persist in the data throughout many scales. The output of the persistence algorithm is a barcode diagram. A single barcode represents the filtration index where a feature appears and the index where it disappears (if it does). Alternatively, a barcode can represent the scale at which a feature and the scale at which it ends. Thus, short barcodes are typically interpretted as sampling irregularities and long barcodes are interpretted as actual features of whatever the underlying data set represents. In this context, what a feature *is* depends on which dimension the barcode diagram is; 0-dimensional features are connected components, 1-dimensional features are loops or tunnels, 2-dimensional features are hollow volumes, and higher dimensional features correspond to heigher-dimensional cavities.
After you've got the barcodes of a data set, you might want to compare it with that of a different data set. This is the purpose of bottleneck distance, which corresponds to the Hausdorff distance between barcode diagrams.
Another way to compare barcode diagrams is by using persistence landscapes. The peristence landscape of a barcode diagram is a finite sequence of piecewise-linear, real-valued functions. This means they can be used to take averages and compute distances between barcode diagrams. See "A Persistence Landscapes Toolbox For Topological Statistics" by Bubenik and Dlotko for more information.
WARNING: The persistence landscape functions have not been fully tested. Use them with caution. If you get any errors or unexpected output, please don't hesitate to email me.
-}
module Persistence.Filtration (
FilterSimplex
, SimpleFiltration
, Filtration
, BarCode
, Landscape
, sim2String
, filtr2String
, getComplex
, getDimension
, simple2Filtr
, filterByWeightsFast
, ripsFiltrationFast
, ripsFiltrationFastPar
, filterByWeightsLight
, ripsFiltrationLight
, ripsFiltrationLightPar
, indexBarCodes
, indexBarCodesSimple
, scaleBarCodes
, scaleBarCodesSimple
, indexMetric
, bottleNeckDistance
, bottleNeckDistances
, calcLandscape
, evalLandscape
, evalLandscapeAll
, linearComboLandscapes
, avgLandscapes
, diffLandscapes
, normLp
, metricLp
) where
import Persistence.Util
import Persistence.Graph
import Persistence.SimplicialComplex
import Data.Maybe
import Data.List as L
import Data.Vector as V
import Data.ByteString as B
import Data.IntSet
import Data.Bits
import qualified Data.Vector.Unboxed as UV
import Data.Algorithm.MaximalCliques
import Control.Parallel.Strategies
type FilterSimplex = (Int, Vector Int, Vector Int)
{- |
A type representing a filtration whose vertices all have filtration index 0.
Slightly faster and slightly less memory usage. The first component is simply the number of vertices.
The second component is a vector with an entry for each dimension of simplices, starting at dimension 1 for edges.
-}
type SimpleFiltration = (Int, Vector (Vector FilterSimplex))
{- |
Representation of a filtration which, unlike SimpleFiltration, can cope with vertices that have a non-zero filtration index. Vertices of the filtration are represented like all other simplices except that they don't their own have vertices or faces.
Note that, since this library currently only deals with static pointcloud data, all of the filtration V.construction functions produce vertices whose filtration index is 0. Thus, if you want to use this type you will have to V.construct the instances yourself.
-}
type Filtration = Vector (Vector FilterSimplex)
-- | @(x, Finite y)@ is a topological feature that appears at the index or scale @x@ and disappears at the index or scale @y@. @(x, Infinity)@ begins at @x@ and doesn't disappear.
type BarCode a = (a, Extended a)
type Landscape = Vector (Vector (Extended Double, Extended Double))
-- | Render one filtration simplex as a single human-readable line.
sim2String :: FilterSimplex -> String
sim2String (index, vertices, faces) =
  L.concat
    [ "Filtration index: ", show index
    , "; Vertex indices: ", show vertices
    , "; Boundary indices: ", show faces
    , "\n"
    ]
-- | Render a filtration, one dimension of simplices per line.
filtr2String :: Either SimpleFiltration Filtration -> String
filtr2String filtration =
  let render sims =
        L.intercalate "\n" $ V.toList $ V.map (L.concat . V.toList . V.map sim2String) sims
  in case filtration of
       Left f  -> "Simple filtration:\n" L.++ render (snd f)
       Right f -> render f
-- | Get the simplicial complex consisting of all simplices whose filtration
-- index is at most the given index. O(n) in the number of simplices.
getComplex :: Int -> Either SimpleFiltration Filtration -> SimplicialComplex
getComplex index (Left (n, simplices)) =
  --keep simplices entering at or before the index, strip the filtration
  --index (not1), convert to unboxed vectors, and trim empty top dimensions
  (n, dropRightWhile V.null
    $ V.map (V.map not1 . V.filter (\(i, _, _) ->
      i <= index) . V.map (\(a, b, c) -> (a, UV.convert b, UV.convert c))) simplices)
getComplex index (Right simplices) =
  --vertices are stored explicitly here, so the vertex count is the number
  --of vertices whose filtration index is at most the given index
  (V.length $ V.filter (\v ->
    one v <= index) (V.head simplices), dropRightWhile V.null
      $ V.map (V.map not1 . V.filter (\(i, _, _) -> i <= index)
        . V.map (\(a, b, c) -> (a, UV.convert b, UV.convert c))) (V.tail simplices))
-- | Dimension of the highest-dimensional simplex in the filtration (constant time).
getDimension :: Either SimpleFiltration Filtration -> Int
getDimension filtration =
  case filtration of
    --simple filtrations store only edges and up, so the vector length is the dimension
    Left (_, simplices) -> V.length simplices
    --full filtrations store vertices too, hence the off-by-one
    Right allSimplices  -> V.length allSimplices - 1
-- | Convert a simple filtration into an ordinary filtration.
simple2Filtr :: SimpleFiltration -> Filtration
simple2Filtr (n, x) =
  --edges get their (reversed) vertex pair recorded as their boundary;
  --assumes at least one non-vertex dimension is present (V.head)
  let x' = (V.map (\(i, v, _) -> (i, v, V.reverse v)) $ V.head x) `V.cons` (V.tail x)
  --synthesize explicit vertex simplices, all with filtration index 0
  in (mapWithIndex (\i (a,b,c) ->
       (a,i `V.cons` V.empty,c)) $ V.replicate n (0, V.empty, V.empty)) `V.cons` x'
-- | Create a filtration from a simplicial complex by assigning each simplex
-- the first (largest) scale at which all of its edges fit. The scales MUST
-- be in decreasing order. Uses the precomputed distance graph for
-- constant-time edge lengths. The two parameter lines of this signature had
-- been lost; they are reconstructed from the body's usage.
filterByWeightsFast :: UV.Unbox a
                    => Ord a
                    => Either (Vector a) [a] -- ^Scales in decreasing order
                    -> (SimplicialComplex, Graph a) -- ^Simplicial complex and a graph encoding the distance between every pair of data points
                    -> SimpleFiltration
filterByWeightsFast scales' ((numVerts, simplices'), graph) =
  let simplices =
        V.map (V.map (\(b, c) -> (UV.convert b, UV.convert c))) simplices'
      scales = case scales' of Left v -> V.toList v; Right l -> l

      edgeInSimplex edge simplex = (V.any (\x -> V.head edge == x) simplex)
        && (V.any (\x -> V.last edge == x) simplex)
      edgeTooLong scale edge = scale <= (fst $ graph `indexGraph` (edge ! 0, edge ! 1))
      maxIndex = (L.length scales) - 1

      calcIndices 0 [] sc = sc
      calcIndices i (scl:scls) sc =
        let longEdges = V.filter (edgeTooLong scl) $ V.map (\(i, v, f) -> v) $ V.head sc --edges excluded by this scale
        in calcIndices (i - 1) scls $ V.map (V.map (\(j, v, f) ->
          if j == 0 then --simplex not yet assigned a filtration index
            if V.any (\edge -> edgeInSimplex edge v) longEdges then (i, v, f) --a long edge is in the simplex: assign the current index
            else (0, v, f) --otherwise wait until the next iteration
          else (j, v, f))) sc --otherwise leave it alone

      --sort the simplices of each dimension by filtration index and remap
      --face indices accordingly
      sortFiltration simplices =
        let sortedSimplices =
              V.map (quickSort (\((i, _, _), _) ((j, _, _), _) -> i > j)) $
                V.map (mapWithIndex (\i s -> (s, i))) simplices
            newFaces dim (i, v, f) =
              let findNew j =
                    case V.findIndex (\x -> snd x == j) $ sortedSimplices ! (dim - 1) of
                      Just k -> k
                      Nothing -> error "Persistence.Filtration.sortFiltration.newFaces.findNew. This is a bug. Please email the Persistence maintainers."
              in (i, v, (V.map findNew f))
        in
          if V.null simplices then simplices
          else mapWithIndex (\i ss -> V.map ((newFaces i) . fst) ss) sortedSimplices

      --boundary indices are sorted so boundary chains can be read off easily
      sortBoundaries = V.map (V.map (\(i, v, f) -> (i, v, quickSort (\a b -> a <= b) f)))

  in (numVerts, sortBoundaries $ sortFiltration $
       calcIndices maxIndex (L.tail scales) $
         V.map (V.map (\(v, f) -> (0, v, f))) $ simplices)
{- |
This function V.constructs a filtration of the Vietoris-Rips complexes associated with the scales.
Note that this a fast function, meaning it uses O(n^2) memory to quickly access distances where n is the number of data points.
-}
-- | Construct the filtration of Vietoris-Rips complexes for the given
-- scales (decreasing order). Fast variant: O(n^2) memory for distances.
-- The three parameter lines of this signature had been lost; they are
-- reconstructed from the body's usage of ripsComplexFast.
ripsFiltrationFast :: UV.Unbox a
                   => Ord a
                   => Eq b
                   => Either (Vector a) [a] -- ^Scales in decreasing order
                   -> (b -> b -> a) -- ^Metric
                   -> Either (Vector b) [b] -- ^Data set
                   -> SimpleFiltration
ripsFiltrationFast scales metric =
  let scale = case scales of Left v -> V.head v; Right l -> L.head l
  in (filterByWeightsFast scales) . (ripsComplexFast scale metric)
{- |
Same as above except it uses parallelism when computing the Vietoris-Rips complex of the largest scale.
-}
-- | Same as ripsFiltrationFast except the Vietoris-Rips complex at the
-- largest scale is computed in parallel. The three parameter lines of this
-- signature had been lost; they are reconstructed from the body's usage.
ripsFiltrationFastPar :: UV.Unbox a
                      => Ord a
                      => Eq b
                      => Either (Vector a) [a] -- ^Scales in decreasing order
                      -> (b -> b -> a) -- ^Metric
                      -> Either (Vector b) [b] -- ^Data set
                      -> SimpleFiltration
ripsFiltrationFastPar scales metric =
  let scale = case scales of Left v -> V.head v; Right l -> L.head l
  in (filterByWeightsFast scales) . (ripsComplexFastPar scale metric)
-- | Same as filterByWeightsFast except distances are recomputed via the
-- metric on demand, trading speed for far less memory. Scales MUST be in
-- decreasing order. The parameter lines of this signature and the
-- "in (numVerts, ..." line had been lost; both are reconstructed from the
-- body's usage and from the Fast variant.
filterByWeightsLight :: Ord a
                     => Either (Vector a) [a] -- ^Scales in decreasing order
                     -> (b -> b -> a) -- ^Metric
                     -> Either (Vector b) [b] -- ^Data set
                     -> SimplicialComplex
                     -> SimpleFiltration
filterByWeightsLight scales' metric dataSet (numVerts, simplices') =
  let simplices =
        V.map (V.map (\(b, c) -> (UV.convert b, UV.convert c))) simplices'
      scales = case scales' of Left v -> V.toList v; Right l -> l

      edgeInSimplex edge simplex = (V.any (\x -> V.head edge == x) simplex)
        && (V.any (\x -> V.last edge == x) simplex)
      vector = case dataSet of Left v -> v; Right l -> V.fromList l
      edgeTooLong scale edge = scale <= (metric (vector ! (edge ! 0)) (vector ! (edge ! 1)))
      maxIndex = (L.length scales) - 1

      calcIndices 0 [] sc = sc
      calcIndices i (scl:scls) sc =
        let longEdges = V.filter (edgeTooLong scl) $ V.map (\(i, v, f) -> v) $ V.head sc --edges excluded by this scale
        in calcIndices (i - 1) scls $ V.map (V.map (\(j, v, f) ->
          if j == 0 then --simplex not yet assigned a filtration index
            if V.any (\edge -> edgeInSimplex edge v) longEdges then (i, v, f) --a long edge is in the simplex: assign the current index
            else (0, v, f) --otherwise wait until the next iteration
          else (j, v, f))) sc --otherwise leave it alone

      --sort the simplices of each dimension by filtration index and remap
      --face indices accordingly
      sortFiltration simplxs =
        let
          --sortedSimplices :: Vector (Vector ((Int, Vector Int, Vector Int), Int))
          sortedSimplices =
            V.map (quickSort (\((i, _, _), _) ((j, _, _), _) -> i > j)) $
              V.map (mapWithIndex (\i s -> (s, i))) simplxs
          newFaces dim (i, v, f) =
            let findNew j =
                  case V.findIndex (\x -> snd x == j) $ sortedSimplices ! (dim - 1) of
                    Just k -> k
                    Nothing -> error "Persistence.Filtration.filterByWeightsLight.sortFiltration.newFaces.findNew. This is a bug. Please email the Persistence maintainers."
            in (i, v, (V.map findNew f))
        in
          if V.null simplxs then simplxs
          else mapWithIndex (\i ss -> V.map ((newFaces i) . fst) ss) sortedSimplices

  in (numVerts, sortFiltration $ --sort the simplices by filtration index
       calcIndices maxIndex (L.tail scales) $
         V.map (V.map (\(v, f) -> (0, v, f))) $ simplices)
{- |
Constructs the filtration of Vietoris-Rips complexes corresponding to each of the scales.
-}
-- | Construct the filtration of Vietoris-Rips complexes for the given
-- scales (decreasing order), recomputing distances on demand. The three
-- parameter lines of this signature had been lost; they are reconstructed
-- from the body's usage of ripsComplexLight.
ripsFiltrationLight :: (Ord a, Eq b)
                    => Either (Vector a) [a] -- ^Scales in decreasing order
                    -> (b -> b -> a) -- ^Metric
                    -> Either (Vector b) [b] -- ^Data set
                    -> SimpleFiltration
ripsFiltrationLight scales metric dataSet =
  let scale = case scales of Left v -> V.head v; Right l -> L.head l
  in filterByWeightsLight scales metric dataSet $ ripsComplexLight scale metric dataSet
{- |
Same as above except it uses parallelism when computing the Vietoris-Rips complex of the largest scale.
-}
-- | Same as ripsFiltrationLight except the Vietoris-Rips complex at the
-- largest scale is computed in parallel. The three parameter lines of this
-- signature had been lost; they are reconstructed from the body's usage.
ripsFiltrationLightPar :: UV.Unbox a
                       => Ord a
                       => Eq b
                       => Either (Vector a) [a] -- ^Scales in decreasing order
                       -> (b -> b -> a) -- ^Metric
                       -> Either (Vector b) [b] -- ^Data set
                       -> SimpleFiltration
ripsFiltrationLightPar scales metric dataSet =
  let scale = case scales of Left v -> V.head v; Right l -> L.head l
  in filterByWeightsLight scales metric dataSet $ ripsComplexLightPar scale metric dataSet
--indices of the simplices in the sum are 1
type Chain = ByteString
--addition of Z2 chains: bytewise xor, truncated to the shorter operand
(+++) :: Chain -> Chain -> Chain
lhs +++ rhs = B.pack $ B.zipWith xor lhs rhs
--intersection of Z2 chains: bytewise and, truncated to the shorter operand
(-^-) :: Chain -> Chain -> Chain
lhs -^- rhs = B.pack $ B.zipWith (.&.) lhs rhs
--ascending indices of the simplices present in the chain
--NOTE(review): the inclusive upper bound 8*(B.length bits) is one past the
--last valid bit index; testBBit is assumed to tolerate it -- confirm
chain2indxs :: Chain -> Vector Int
chain2indxs bits = V.filter (testBBit bits) $ 0 `range` (8*(B.length bits))
--first simplex in the chain
--index of the lowest simplex appearing in the chain (partial: chain must be non-empty)
headChain :: Chain -> Int
headChain chain = V.head $ chain2indxs chain
--given the number of simplices of a certain dimension, rounded up to the nearest multiple of 8,
--create the zero chain for that number of simplices
--the zero chain; the argument is a simplex count already rounded up to a multiple of 8
makeEmpty :: Int -> Chain
makeEmpty numSimplices = B.replicate (numSimplices `shiftR` 3) 0
--given the total number of simplices of that dimension, rounded up to the nearest multiple of 8
--build a chain from a vector of simplex indices; the first argument is the
--total simplex count of that dimension, rounded up to a multiple of 8
indxs2chain :: Int -> Vector Int -> Chain
indxs2chain num = V.foldl (\acc i -> setBBit acc i) (makeEmpty num)
{- |
The nth entry in the list will describe the n-dimensional topology of the filtration.
That is, the first list will represent clusters, the second list will represent tunnels or punctures, the third will represent hollow volumes,
and the nth index list will represent n-dimensional holes in the data.
Features are encoded by the filtration indices where they appear and disappear.
-}
indexBarCodes :: Filtration -> Vector (Vector (BarCode Int))
indexBarCodes filtration =
let maxdim = getDimension (Right filtration)
removeUnmarked :: Chain -> Vector (Int, Chain) -> Vector (Int, Chain)
removeUnmarked marked = V.map (\(i, c) -> (i, marked -^- c))
removePivotRows :: Vector (Maybe Chain) -> Chain -> Chain
removePivotRows slots chain =
if B.null chain then B.empty
else
case slots ! (headChain chain) of
Nothing -> chain
Just c -> removePivotRows slots (chain +++ c)
makeFiniteBarCodes :: Int
-> Chain
-> Vector (Maybe Chain)
-> Vector (Int, Chain)
-> Vector (BarCode Int)
-> (Chain, Vector (Maybe Chain), Vector (BarCode Int))
makeFiniteBarCodes dim newMarked slots boundaries barcodes =
if V.null boundaries then (newMarked, slots, barcodes)
else
let boundary = V.head boundaries
reduced = removePivotRows slots $ snd boundary
in
if B.null reduced then
makeFiniteBarCodes dim
(setBBit newMarked (fst boundary)) slots (V.tail boundaries) barcodes
else
let pivot = headChain reduced
in makeFiniteBarCodes dim newMarked
(replaceElem pivot (Just reduced) slots)
(V.tail boundaries) ((one $ filtration ! (dim - 1) ! pivot,
Finite $ one $ filtration ! dim ! (fst boundary)) `V.cons` barcodes)
loopFiniteBarCodes :: Int
-> Vector Chain
-> Vector (Vector (Maybe Chain))
-> Vector (Vector (BarCode Int))
-> ( Vector Chain
, Vector (Vector (Maybe Chain))
, Vector (Vector (BarCode Int))
)
loopFiniteBarCodes dim marked slots barcodes =
if dim > maxdim
then (marked, V.tail slots, (V.tail barcodes) V.++ (V.empty `V.cons` V.empty))
else
numSlots8 = numSlots + 8 - (numSlots .&. 7)
boundaries =
removeUnmarked (V.last marked)
$ mapWithIndex (\i (_, _, f) -> (i, indxs2chain numSlots8 f)) $ filtration ! dim
(newMarked, newSlots, newCodes) =
makeFiniteBarCodes dim (makeEmpty numSlots8)
(V.replicate numSlots Nothing) boundaries V.empty
in loopFiniteBarCodes (dim + 1) (marked `V.snoc` newMarked)
(slots `V.snoc` newSlots) (barcodes V.++ (newCodes `V.cons` V.empty))
makeInfiniteBarCodes :: Int -> Chain -> Vector (Maybe Chain) -> Vector (BarCode Int)
makeInfiniteBarCodes dim marked slots =
V.map (\i -> (one $ filtration ! dim ! i, Infinity))
$ V.filter (\i -> slots ! i == Nothing) $ chain2indxs marked
loopInfiniteBarCodes :: Int
-> ( Vector Chain, Vector (Vector (Maybe Chain))
, Vector (Vector (BarCode Int)))
-> Vector (Vector (BarCode Int))
loopInfiniteBarCodes dim (marked, slots, barcodes) =
if dim > maxdim then barcodes
else
loopInfiniteBarCodes (dim + 1) (marked, slots, replaceElem dim ((barcodes ! dim)
V.++ (makeInfiniteBarCodes dim (marked ! dim) (slots ! dim))) barcodes)
finiteBCs = loopFiniteBarCodes 0 V.empty V.empty V.empty
in V.map (V.filter (\(a, b) -> b /= Finite a)) $ loopInfiniteBarCodes 0 finiteBCs
| Same as above except this function acts on filtrations whose vertices all have filtration index zero ( for a very slight speedup ) .
indexBarCodesSimple :: SimpleFiltration -> Vector (Vector (BarCode Int))
indexBarCodesSimple (numVerts, allSimplices) =
let maxdim = getDimension (Right allSimplices)
verts8 = numVerts + 8 - (numVerts .&. 7)
edges = V.map (\(i, v, f) -> (i, v, (V.reverse v))) $ V.head allSimplices
numEdges = V.length edges
numEdges8 = numEdges + 8 - (numEdges .&. 7)
numSimplices = V.map V.length $ V.tail allSimplices
numSimplices8 = V.map (\x -> x + 8 - (numEdges .&. 7)) numSimplices
removeUnmarked :: Chain -> Chain -> Chain
removeUnmarked marked chain = marked -^- chain
removePivotRows :: Vector (Maybe Chain) -> Chain -> Chain
removePivotRows slots chain =
if B.null chain then B.empty
else
case slots ! (headChain chain) of
Nothing -> chain
Just c -> removePivotRows slots (chain +++ c)
makeEdgeCodes :: Int
-> Vector (Maybe Chain)
-> Vector (Int, Vector Int, Vector Int)
-> (Vector (BarCode Int), Chain)
-> (Vector (BarCode Int), Chain, Vector Int)
makeEdgeCodes index reduced edges (codes, marked)
| V.null edges = (codes, marked, V.findIndices (\x -> x == Nothing) reduced)
| B.null d =
makeEdgeCodes (index + 1) reduced (V.tail edges) (codes, setBBit marked index)
| otherwise =
makeEdgeCodes (index + 1) (replaceElem (headChain d)
(Just d) reduced) (V.tail edges) ((0, Finite i) `V.cons` codes, marked)
where (i, v, f) = V.head edges
makeBarCodesAndMark :: Int
-> Int
-> Chain
-> Vector (Maybe Chain)
-> Vector (Int, Vector Int, Vector Int)
-> (Vector (BarCode Int), Chain)
-> (Vector (BarCode Int), Chain, Vector Int)
makeBarCodesAndMark dim index marked reduced simplices (codes, newMarked)
| V.null simplices = (codes, newMarked, V.findIndices (\x -> x == Nothing) reduced)
| B.null d =
makeBarCodesAndMark dim (index + 1) marked reduced
(V.tail simplices) (codes, setBBit newMarked index)
| otherwise =
let maxindex = headChain d
begin = one $ allSimplices ! (dim - 1) ! maxindex
in makeBarCodesAndMark dim (index + 1) marked
(replaceElem maxindex (Just d) reduced) (V.tail simplices)
((begin, Finite i) `V.cons` codes, newMarked)
where (i, v, f) = V.head simplices
d = removePivotRows reduced
$ removeUnmarked marked $ indxs2chain (numSimplices8 ! (dim - 2)) f
makeFiniteBarCodes :: Int
-> Int
-> Vector (Vector (BarCode Int))
-> Vector Chain
-> Vector (Vector Int)
-> ( Vector (Vector (BarCode Int))
, Vector Chain
, Vector (Vector Int)
)
makeFiniteBarCodes dim maxdim barcodes marked slots =
if dim == maxdim then (barcodes, marked, slots)
else
let (newCodes, newMarked, unusedSlots) =
makeBarCodesAndMark dim 0 (V.last marked)
(V.replicate (V.length $ allSimplices ! (dim - 1)) Nothing)
(allSimplices ! dim) (V.empty, makeEmpty $ numSimplices8 ! (dim - 2))
in makeFiniteBarCodes (dim + 1) maxdim
(barcodes V.++ (newCodes `V.cons` V.empty))
(marked `V.snoc` newMarked) (slots `V.snoc` unusedSlots)
makeInfiniteBarCodes :: ( Vector (Vector (BarCode Int))
, Vector Chain
, Vector (Vector Int)
)
-> Vector (Vector (BarCode Int))
makeInfiniteBarCodes (barcodes, marked', unusedSlots) =
let marked = V.map chain2indxs marked'
makeCodes :: Int -> Vector (BarCode Int) -> Vector (BarCode Int)
makeCodes i codes =
let slots = unusedSlots ! i; marks = marked ! i
in codes V.++ (V.map (\j -> (one
$ allSimplices ! (i - 1) ! j, Infinity)) $ slots |^| marks)
loop :: Int -> Vector (Vector (BarCode Int)) -> Vector (Vector (BarCode Int))
loop i v
| V.null v = V.empty
| i == 0 =
((V.head v) V.++ (V.map (\j -> (0, Infinity))
$ (unusedSlots ! 0) |^| (marked ! 0))) `V.cons` (loop 1 $ V.tail v)
| otherwise = (makeCodes i $ V.head v) `V.cons` (loop (i + 1) $ V.tail v)
in loop 0 barcodes
(fstCodes, fstMarked, fstSlots) = makeEdgeCodes 0
(V.replicate numVerts Nothing)
edges (V.empty, makeEmpty numEdges)
verts = 0 `range` (numVerts - 1)
in
V.map (V.filter (\(a, b) ->
b /= Finite a)) $ makeInfiniteBarCodes
$ makeFiniteBarCodes 1 (V.length allSimplices)
(fstCodes `V.cons` V.empty) ((indxs2chain verts8 verts)
`V.cons` (fstMarked `V.cons` V.empty)) (fstSlots `V.cons` V.empty)
-
translate : : F.Extended a - > Extended a
translate F.Infinity = Infinity
translate ( F.Finite x ) = Finite x
translate F.MinusInfty = MinusInfty
indexBarCodes =
( V.map ( V.map ( \(i , j ) - > ( i , translate j ) ) ) ) . F.indexBarCodes
indexBarCodesSimple =
( V.map ( V.map ( \(i , j ) - > ( i , translate j ) ) ) ) .
translate :: F.Extended a -> Extended a
translate F.Infinity = Infinity
translate (F.Finite x) = Finite x
translate F.MinusInfty = MinusInfty
indexBarCodes =
(V.map (V.map (\(i, j) -> (i, translate j)))) . F.indexBarCodes
indexBarCodesSimple =
(V.map (V.map (\(i, j) -> (i, translate j)))) . F.indexBarCodesSimple
-- Translate index bar codes into scale bar codes by looking each
-- filtration index up in the (reversed) vector of scales.
scaleBarCodes :: Either (Vector a) [a] -> Filtration -> Vector (Vector (BarCode a))
scaleBarCodes scales filtration =
  let scaleVec = V.reverse (either id V.fromList scales)
      translate (i, Infinity) = (scaleVec ! i, Infinity)
      translate (i, Finite j) = (scaleVec ! i, Finite (scaleVec ! j))
  in V.map (V.map translate) (indexBarCodes filtration)
-- Translate the index bar codes of a simple filtration into scale bar
-- codes via the (reversed) vector of scales.
scaleBarCodesSimple :: Either (Vector a) [a] -> SimpleFiltration -> Vector (Vector (BarCode a))
scaleBarCodesSimple scales filtration =
  let scaleVec = V.reverse (either id V.fromList scales)
      translate (i, Infinity) = (scaleVec ! i, Infinity)
      translate (i, Finite j) = (scaleVec ! i, Finite (scaleVec ! j))
  in V.map (V.map translate) (indexBarCodesSimple filtration)
|
The standard ( Euclidean ) metric between index barcodes .
The distance between infinite and finite barcodes is infinite ,
and the distance between two infinite barcodes is the absolute value of the
difference of their fst component .
The standard (Euclidean) metric between index barcodes.
The distance between infinite and finite barcodes is infinite,
and the distance between two infinite barcodes is the absolute value of the
difference of their fst component.
-}
-- Euclidean distance between two index bar codes. A finite and an
-- infinite code are infinitely far apart; two infinite codes differ by
-- the absolute difference of their birth indices.
indexMetric :: BarCode Int -> BarCode Int -> Extended Double
indexMetric (_, Finite _) (_, Infinity) = Infinity
indexMetric (_, Infinity) (_, Finite _) = Infinity
indexMetric (i, Infinity) (j, Infinity) = Finite (fromIntegral (abs (i - j)))
indexMetric (i, Finite j) (k, Finite l) =
  let dx = i - k
      dy = j - l
  in Finite (sqrt (fromIntegral (dx*dx + dy*dy)))
|
Given a metric , return the distance
( referred to as bottleneck distance in TDA ) between the two sets .
Returns nothing if either list of barcodes is empty .
Given a metric, return the Hausdorff distance
(referred to as bottleneck distance in TDA) between the two sets.
Returns nothing if either list of barcodes is empty.
-}
-- Hausdorff (bottleneck) distance between two bar-code diagrams under
-- the given metric; Nothing when either diagram is empty.
bottleNeckDistance :: Ord b
                   => (BarCode a -> BarCode a -> Extended b)
                   -> Vector (BarCode a)
                   -> Vector (BarCode a)
                   -> Maybe (Extended b)
bottleNeckDistance metric diagram1 diagram2 =
  if V.null diagram1 || V.null diagram2
    then Nothing
    else
      -- Directed Hausdorff distance from one diagram to the other.
      let directed from to =
            V.maximum (V.map (\p -> V.minimum (V.map (metric p) to)) from)
      in Just (max (directed diagram1 diagram2) (directed diagram2 diagram1))
|
Get 's all the bottleneck distances ;
a good way to determine the similarity of the topology of two filtrations .
Get's all the bottleneck distances;
a good way to determine the similarity of the topology of two filtrations.
-}
-- Bottleneck distance per dimension; when the two sequences of diagrams
-- have different lengths, the unmatched tail is padded with Nothing.
bottleNeckDistances :: Ord b => (BarCode a -> BarCode a -> Extended b)
                    -> Vector (Vector (BarCode a))
                    -> Vector (Vector (BarCode a))
                    -> Vector (Maybe (Extended b))
bottleNeckDistances metric diagrams1 diagrams2 =
  let padding = abs (V.length diagrams1 - V.length diagrams2)
  in (V.zipWith (bottleNeckDistance metric) diagrams1 diagrams2)
       V.++ V.replicate padding Nothing
calcLandscape :: Vector (BarCode Int) -> Landscape
calcLandscape brcds =
let half = Finite 0.5
(i,j) `leq` (k,l) = i > k || j <= l
innerLoop :: (Extended Double, Extended Double)
-> Vector (Extended Double, Extended Double)
-> Landscape
-> Landscape
innerLoop (b, d) barcodes result =
case V.findIndex (\(b', d') -> d' > d) barcodes of
Nothing ->
outerLoop barcodes (((V.fromList [(0, b), (Infinity, 0)])
V.++ (V.head result)) `V.cons` (V.tail result))
Just i -> let (b', d') = barcodes ! i in
if b' >= d then
if b == d then
let new = [(Finite 0.0, b')]
in
if d' == Infinity then
outerLoop (rmIndex i barcodes) (((V.fromList ((Infinity, Infinity):new))
V.++ (V.head result)) `V.cons` (V.tail result))
else
innerLoop (b', d') (rmIndex i barcodes)
((V.fromList ((half*(b' + d'), half*(d' - b')):new)
V.++ (V.head result)) `V.cons` (V.tail result))
else
let new = [(Finite 0.0, d), (Finite 0.0, b')]
in
if d' == Infinity then
outerLoop (rmIndex i barcodes) (((V.fromList ((Infinity, Infinity):new))
V.++ (V.head result)) `V.cons` (V.tail result))
else
innerLoop (b', d') (rmIndex i barcodes)
(((V.fromList ((half*(b' + d'), half*(d' - b')):new))
V.++ (V.head result)) `V.cons` (V.tail result))
else
let newbr = (half*(b' + d), half*(d - b'))
in
if d' == Infinity then
outerLoop (orderedInsert leq newbr barcodes)
(((V.fromList [(Infinity, Infinity), newbr])
V.++ (V.head result)) `V.cons` (V.tail result))
else
innerLoop (b', d') (orderedInsert leq newbr barcodes)
(((V.fromList [(half*(b' + d'), half*(d' - b')), newbr])
V.++ (V.head result)) `V.cons` (V.tail result))
outerLoop :: Vector (Extended Double, Extended Double) -> Landscape -> Landscape
outerLoop barcodes result =
if not $ V.null barcodes then
let (b, d) = V.head barcodes
in
if (b, d) == (MinusInfty, Infinity)
then
outerLoop (V.tail barcodes)
((V.fromList [(MinusInfty, Infinity),
(Infinity, Infinity)]) `V.cons` result)
else if d == Infinity
then
outerLoop (V.tail barcodes) ((V.fromList
[(MinusInfty, Finite 0.0),(b, Finite 0.0),(Infinity,Infinity)]) `V.cons` result)
else
let newCritPoints =
if b == Infinity
then [(MinusInfty, Infinity)]
else [(MinusInfty, Finite 0.0), (half*(b + d), half*(d - b))]
in innerLoop (b, d) (V.tail barcodes) ((V.fromList newCritPoints) `V.cons` result)
else result
in V.map (quickSort (\(x1, _) (x2, _) -> x1 > x2))
$ outerLoop (quickSort leq $ V.map (\(i, j) ->
(fromInt $ Finite i, fromInt j)) brcds) V.empty
evalLandscape :: Landscape -> Int -> Extended Double -> Extended Double
evalLandscape landscape i arg =
let fcn = landscape ! i
findPointNeighbors :: Ord a => Int -> a -> Vector a -> (Int, Int)
findPointNeighbors helper x vector =
let len = V.length vector
i = len `div` 2
y = vector ! i
in
if x == y
then (helper + i, helper + i)
else if x > y
then
case vector !? (i + 1) of
Nothing -> (helper + i, helper + i)
Just z ->
if x < z
then (helper + i, helper + i + 1)
else findPointNeighbors (helper + i) x $ V.drop i vector
else
case vector !? (i - 1) of
Nothing -> (helper + i, helper + i)
Just z ->
if x > z
then (helper + i - 1, helper + i)
else findPointNeighbors helper x $ V.take i vector
(i1, i2) = findPointNeighbors 0 arg $ V.map fst fcn
(x1, x2) = (fst $ fcn ! i1, fst $ fcn ! i2)
(y1, y2) = (snd $ fromMaybe (error "Persistence.Filtration.evalLandscape. This is a bug. Please email the Persistence mainstainers.") $ V.find (\a -> x1 == fst a) fcn, snd $ fromMaybe (error "Persistence.Filtration.evalLandscape. This is a bug. Please email the Persistence mainstainers.") $ V.find (\a -> x2 == fst a) fcn)
in
if x1 == x2
then y1
else
case (x1, x2) of
(MinusInfty, Infinity) -> arg
(MinusInfty, Finite _) -> y1
(Finite a, Finite b) ->
case arg of
Finite c ->
let t = Finite $ (c - a)/(b - a)
in t*y2 + ((Finite 1.0) - t)*y1
_ -> error "Persistence.Filtration.evalLandscape.findPointNeighbors. This is a bug. Please email the Persistence maintainers."
(Finite a, Infinity) ->
case arg of
Infinity -> y2
Finite c ->
case y2 of
Infinity -> Finite $ c - a
Finite 0.0 -> Finite 0.0
_ -> error $ "Persistence.Filtration.evalLandscape: y2 = " L.++ (show y2) L.++ ". This is a bug. Please email the Persistence maintainers."
_ -> error "Persistence.Filtration.evalLandscape.findPointNeighbors: bad argument. This is a bug. Please email the Persistence maintainers."
anything -> error $ "Persistence.Filtration.evalLandscape.findPointNeighbors: " L.++ (show anything) L.++ ". This is a bug. Please email the Persistence maintainers."
-- Evaluate every layer of the landscape at the given argument, in
-- layer order.
evalLandscapeAll :: Landscape -> Extended Double -> Vector (Extended Double)
evalLandscapeAll landscape arg
  | V.null landscape = V.empty
  | otherwise =
      evalLandscape landscape 0 arg
        `V.cons` evalLandscapeAll (V.tail landscape) arg
|
Compute a linear combination of the landscapes .
If the coefficient list is too short , the rest of the coefficients are assumed to be zero .
If it is too long , the extra coefficients are discarded .
Compute a linear combination of the landscapes.
If the coefficient list is too short, the rest of the coefficients are assumed to be zero.
If it is too long, the extra coefficients are discarded.
-}
linearComboLandscapes :: [Double] -> [Landscape] -> Landscape
linearComboLandscapes coeffs landscapes =
let maxlen = L.maximum $ L.map V.length landscapes
emptylayer = V.fromList [(MinusInfty, Finite 0.0), (Infinity, Finite 0.0)]
landscapes' = L.map (\l -> l V.++ (V.replicate (maxlen - V.length l) emptylayer)) landscapes
myconcat v1 v2
| V.null v1 = v2
| V.null v2 = v1
| otherwise = ((V.head v1) V.++ (V.head v2)) `V.cons` (myconcat (V.tail v1) (V.tail v2))
xs = L.map (V.map (V.map fst)) landscapes'
concatted = L.foldl myconcat V.empty xs
unionXs = V.map ((quickSort (>)) . V.fromList . L.nub . V.toList) concatted
yVals = L.map (\landscape ->
mapWithIndex (\i v -> V.map (evalLandscape landscape i) v) unionXs) landscapes'
yVals' = L.zipWith (\coeff yvals ->
V.map (V.map ((Finite coeff)*)) yvals) coeffs yVals
finalY = L.foldl1 (\acc new -> V.zipWith (V.zipWith (+)) acc new) yVals'
in V.zipWith V.zip unionXs finalY
-- Pointwise average of the landscapes: a linear combination with equal
-- weights 1/n.
avgLandscapes :: [Landscape] -> Landscape
avgLandscapes landscapes =
  let count = L.length landscapes
      weight = 1.0 / fromIntegral count
  in linearComboLandscapes (L.replicate count weight) landscapes
| Subtract the second landscape from the first .
-- | Pointwise difference of two landscapes: scape1 - scape2.
diffLandscapes :: Landscape -> Landscape -> Landscape
diffLandscapes scape1 scape2 = linearComboLandscapes [1, -1] [scape1, scape2]
|
If p>=1 then it will compute the L^p norm on the given interval .
Uses trapezoidal approximation .
You should ensure that the stepsize partitions the interval evenly .
If p>=1 then it will compute the L^p norm on the given interval.
Uses trapezoidal approximation.
You should ensure that the stepsize partitions the interval evenly.
-}
^p , the power of the norm
-> Maybe Double
normLp p interval step landscape =
let len = V.length landscape
a = fst interval
b = snd interval
fcn x =
let vals = V.map (\n ->
abs $ unExtend $ evalLandscape landscape n (Finite x)) $ 0 `range` (len - 1)
in
case p of
Infinity -> V.maximum vals
Finite 1.0 -> V.sum vals
Finite 2.0 -> sqrt $ V.sum $ V.map (\a -> a*a) vals
Finite p' -> (**(1.0/p')) $ V.sum $ V.map (**p') vals
computeSum :: Double -> Double -> Double
computeSum currentX result =
let nextX = currentX + step
in
if nextX > b then result + (fcn nextX)
else computeSum nextX (result + 2.0*(fcn nextX))
in
if p < (Finite 1.0) then Nothing
else Just $ 0.5*step*(computeSum a $ fcn a)
|
Given the same information as above , computes the L^p distance between the two landscapes .
One way to compare the topologies of two filtrations .
Given the same information as above, computes the L^p distance between the two landscapes.
One way to compare the topologies of two filtrations.
-}
^p , power of the metric
-> Maybe Double
metricLp p interval step scape1 scape2 = normLp p interval step $ diffLandscapes scape1 scape2 |
3270eacc729ae963a612d1998274fce6f26ef2c2880729d61ce71873b1a48e28 | CDSoft/pp | blob.hs | #!/usr/bin/env stack
{- stack
script
--package bytestring
--package filepath
--package split
-}
PP
Copyright ( C ) 2015 - 2023
This file is part of PP .
PP is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
PP is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with PP . If not , see < / > .
Copyright (C) 2015-2023 Christophe Delord
This file is part of PP.
PP is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PP is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PP. If not, see </>.
-}
This script to remove a dependency .
- It creates a C array with the content of the input file .
- It creates a C array with the content of the input file.
-}
import Data.Char
import Data.List.Split
import System.Environment
import System.Exit
import System.FilePath
import qualified Data.ByteString as B
-- Entry point: read the blob file named on the command line and write
--   * <Name>_c.c - a C byte array holding the blob's contents, and
--   * <Name>.hs  - a Haskell FFI module exposing the array and length.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [blobName] -> do
      -- Names of the generated identifiers and output files.
      let cvar  = cVarname blobName
          hsvar = hsVarname blobName
          cmod  = cFilename blobName
          hsmod = hsFilename blobName
      putStrLn $ "Blob file: " ++ blobName
      blob <- B.readFile blobName
      let blobLen = B.length blob
      putStrLn $ "size : " ++ show blobLen ++ " bytes"
      -- C module: the raw bytes plus an explicit length variable.
      putStrLn $ "output : " ++ cmod
      writeFile cmod $ unlines
        [ "/* generated from " ++ takeFileName blobName ++ ". Do not modify. */"
        , ""
        , "unsigned char " ++ cvar ++ "[] = {"
        , mkBlob blob
        , "};"
        , ""
        , "unsigned int " ++ cvar ++ "_len = " ++ show blobLen ++ ";"
        ]
      -- Haskell module: FFI bindings to the two C symbols above.
      putStrLn $ "output : " ++ hsmod
      writeFile hsmod $ unlines
        [ "{- generated from " ++ takeFileName blobName ++ ". Do not modify. -}"
        , ""
        , "module " ++ dropExtension (takeFileName hsmod)
        , "where"
        , ""
        , "import Foreign.C.Types"
        , "import Foreign.Ptr"
        , ""
        , "foreign import ccall \"&" ++ cvar ++ "\" _" ++ cvar ++ " :: Ptr CChar"
        , "foreign import ccall \"&" ++ cvar ++ "_len\" _" ++ cvar ++ "_len :: Ptr CInt"
        , ""
        , hsvar ++ " :: (Ptr CChar, Ptr CInt)"
        , hsvar ++ " = (_" ++ cvar ++ ", _" ++ cvar ++ "_len)"
        ]
    _ -> putStrLn "usage: hsblob <blob filename>" >> exitFailure
-- Derive a C identifier from the blob's file name: alphanumerics are
-- lower-cased, every other character becomes an underscore.
cVarname :: FilePath -> String
cVarname path =
  [ if isAlphaNum c then toLower c else '_' | c <- takeFileName path ]
-- Derive a lowerCamelCase Haskell identifier from the blob's file name.
hsVarname :: FilePath -> String
hsVarname = lowerCamelCase . takeFileName
-- Module path for the generated files: keep the directory, turn the
-- base name into UpperCamelCase.
filename :: FilePath -> FilePath
filename name =
  let (dirname, basename) = splitFileName name
  in dirname </> upperCamelCase basename
-- C output file: the module path with a "_c" suffix and a ".c" extension.
cFilename :: FilePath -> FilePath
cFilename name = (filename name ++ "_c") <.> "c"
-- Haskell output file: the module path with a ".hs" extension.
hsFilename :: FilePath -> FilePath
hsFilename name = filename name <.> "hs"
-- lowerCamelCase: drop leading non-letters, then camel-case starting in
-- lower case.
lowerCamelCase :: String -> String
lowerCamelCase s = lower (dropNonLetters s)
-- UpperCamelCase: drop leading non-letters, then camel-case starting in
-- upper case.
upperCamelCase :: String -> String
upperCamelCase = capitalize . dropNonLetters
-- Drop the leading run of non-letter characters.
dropNonLetters :: String -> String
dropNonLetters s = dropWhile (\c -> not (isLetter c)) s
-- Upper-case the first character, then continue camel-casing the rest.
capitalize :: String -> String
capitalize s = case s of
  []     -> []
  c : cs -> toUpper c : lower cs
-- camelCase worker: copy alphanumerics in lower case; at the first
-- non-alphanumeric, restart with `capitalize` on the next letter run.
lower :: String -> String
lower [] = []
lower (c : cs)
  | isAlphaNum c = toLower c : lower cs
  | otherwise    = capitalize (dropNonLetters cs)
-- Render the blob as C array initializer lines: 32 decimal bytes per
-- line, each byte followed by a comma (the trailing comma is legal in a
-- C initializer list).
mkBlob :: B.ByteString -> String
mkBlob blob =
  let render sep = map ((++ sep) . show)
      rows = map (render ",") (chunksOf 32 (B.unpack blob))
  in unlines (map concat rows)
| null | https://raw.githubusercontent.com/CDSoft/pp/625e6a2ce449ffb905fb91a5f37090c417a0e82a/tools/blob.hs | haskell | stack
script
--package bytestring
--package filepath
--package split
| #!/usr/bin/env stack
PP
Copyright ( C ) 2015 - 2023
This file is part of PP .
PP is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
PP is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with PP . If not , see < / > .
Copyright (C) 2015-2023 Christophe Delord
This file is part of PP.
PP is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PP is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PP. If not, see </>.
-}
This script to remove a dependency .
- It creates a C array with the content of the input file .
- It creates a C array with the content of the input file.
-}
import Data.Char
import Data.List.Split
import System.Environment
import System.Exit
import System.FilePath
import qualified Data.ByteString as B
main :: IO ()
main = do
args <- getArgs
case args of
[blobName] -> do
let cvar = cVarname blobName
let hsvar = hsVarname blobName
let cmod = cFilename blobName
let hsmod = hsFilename blobName
putStrLn $ "Blob file: " ++ blobName
blob <- B.readFile blobName
let blobLen = B.length blob
putStrLn $ "size : " ++ show blobLen ++ " bytes"
putStrLn $ "output : " ++ cmod
writeFile cmod $ unlines
[ "/* generated from " ++ takeFileName blobName ++ ". Do not modify. */"
, ""
, "unsigned char " ++ cvar ++ "[] = {"
, mkBlob blob
, "};"
, ""
, "unsigned int " ++ cvar ++ "_len = " ++ show blobLen ++ ";"
]
putStrLn $ "output : " ++ hsmod
writeFile hsmod $ unlines
[ "{- generated from " ++ takeFileName blobName ++ ". Do not modify. -}"
, ""
, "module " ++ dropExtension (takeFileName hsmod)
, "where"
, ""
, "import Foreign.C.Types"
, "import Foreign.Ptr"
, ""
, "foreign import ccall \"&" ++ cvar ++ "\" _" ++ cvar ++ " :: Ptr CChar"
, "foreign import ccall \"&" ++ cvar ++ "_len\" _" ++ cvar ++ "_len :: Ptr CInt"
, ""
, hsvar ++ " :: (Ptr CChar, Ptr CInt)"
, hsvar ++ " = (_" ++ cvar ++ ", _" ++ cvar ++ "_len)"
]
_ -> putStrLn "usage: hsblob <blob filename>" >> exitFailure
cVarname :: FilePath -> String
cVarname = map tr . takeFileName
where
tr c | isAlphaNum c = toLower c
| otherwise = '_'
hsVarname :: FilePath -> String
hsVarname = lowerCamelCase . takeFileName
filename :: FilePath -> FilePath
filename name = dirname </> upperCamelCase basename
where
(dirname, basename) = splitFileName name
cFilename :: FilePath -> FilePath
cFilename = (<.> "c") . (++ "_c") . filename
hsFilename :: FilePath -> FilePath
hsFilename = (<.> "hs") . filename
lowerCamelCase :: String -> String
lowerCamelCase = lower . dropNonLetters
upperCamelCase :: String -> String
upperCamelCase = capitalize . dropNonLetters
dropNonLetters :: String -> String
dropNonLetters = dropWhile (not . isLetter)
capitalize :: String -> String
capitalize (c:cs) = toUpper c : lower cs
capitalize [] = []
lower :: String -> String
lower (c:cs) | isAlphaNum c = toLower c : lower cs
| otherwise = capitalize $ dropNonLetters cs
lower [] = []
mkBlob :: B.ByteString -> String
mkBlob blob = unlines $ map concat bytes
where
bytes = map (join ",") $ chunksOf 32 $ B.unpack blob
join sep = map $ (++ sep) . show
|
2dbd969fa7cf5756e9a2ed14e67dab05e51ff9637ee7591bb682496398298be3 | nd/bird | 4.4.2.hs | unzip . zipp = id
proof by induction on x:
x is undefined:
unzip . zipp (undefined, y) =
{case exhaustion in zipp}
unzip undefined =
{case exhaustion in unzip (map)}
undefined
x is []:
unzip . zipp ([], y) =
{def of zip}
unzip [] =
{def of unzip}
[]
x is (x:xs):
unzip . zipp ((x:xs), (y:ys)) =
{def of zip}
unzip (x, y):zipp(xs, ys) = *
we have to prove that
unzip (x, y):tail = ((x:(map fst tail)), (y:(map snd tail)))
by induction on tail:
tail is undefined:
left side:
unzip (x, y):undefined =
{def of unzip}
pair (map fst, map snd) ((x, y):undefined)=
{def of map}
(undefined, undefined)
right side:
((x:(map fst tail)), (y:(map snd tail))) =
{def of map}
((x:undefined), (y:undefined)) =
{def of :}
(undefined, undefined)
tail is []:
left side:
unzip (x, y):[] =
{def of unzip}
pair (map fst, map snd) [(x, y)] =
{def of map, fst, snd}
([x], [y])
right side:
((x:(map fst [])), (y:(map snd []))) =
{def of map}
([x], [y])
tail is ((x', y'):tail):
left side:
unzip (x, y):(x', y'):tail =
{def of unzip}
pair (map fst, map snd) (x, y):(x', y'):tail =
{def of map, fst, snd}
(x:x':(map fst tail), y:y':(map snd tail))
right side:
((x:(map fst ((x', y'):tail))), (y:(map snd ((x', y'):tail)))) =
{def of map, fst, snd}
(x:x':(map fst tail), y:y':(map snd tail))
so
* =
unzip (x, y):zipp(xs, ys) =
((x:(map fst (zipp (xs, ys)))), (y:(map snd (zipp (xs, ys))))) =
{induction hypothesis: unzip (zipp (xs, ys)) = (xs, ys),
 so map fst (zipp (xs, ys)) = xs and map snd (zipp (xs, ys)) = ys}
((x:xs), (y:ys)) =
right side
proof by induction on x:
x is undefined:
unzip . zipp (undefined, y) =
{case exhaustion in zipp}
unzip undefined =
{case exhaustion in unzip (map)}
undefined
x is []:
unzip . zipp ([], y) =
{def of zip}
unzip [] =
{def of unzip}
[]
x is (x:xs):
unzip . zipp ((x:xs), (y:ys)) =
{def of zip}
unzip (x, y):zipp(xs, ys) = *
we have to proof that
unzip (x, y):tail = ((x:(map fst tail)), (y:(map snd tail)))
by induction on tail:
tail is undefined:
left side:
unzip (x, y):undefined =
{def of unzip}
pair (map fst, map snd) ((x, y):undefined)=
{def of map}
(undefined, undefined)
right side:
((x:(map fst tail)), (y:(map snd tail))) =
{def of map}
((x:undefined), (y:undefined)) =
{def of :}
(undefined, undefined)
tail is []:
left side:
unzip (x, y):[] =
{def of unzip}
pair (map fst, map snd) [(x, y)] =
{def of map, fst, snd}
([x], [y])
right side:
((x:(map fst [])), (y:(map snd []))) =
{def of map}
([x], [y])
tail is ((x', y'):tail):
left side:
unzip (x, y):(x', y'):tail =
{def of unzip}
pair (map fst, map snd) (x, y):(x', y'):tail =
{def of map, fst, snd}
(x:x':(map fst tail), y:y':(map snd tail))
right side:
((x:(map fst ((x', y'):tail))), (y:(map snd ((x', y'):tail)))) =
{def of map, fst, snd}
(x:x':(map fst tail), y:y':(map snd tail))
so
* =
unzip (x, y):zipp(xs, ys) =
((x:(map fst zipp(xs, ys))), (y:(map snd zipp(xs, ys)))) =
= ((x:(unzip zipp (xs, ys))), (y:(unzip zipp (xs, ys))))
= ((x:xs), (y:ys)) =
right side | |
ad3752aea9f6a32fe546804cfa654d17d40ddcc6c728a7039f01769960a21a59 | icicle-lang/icicle-ambiata | Program.hs | {-# LANGUAGE ConstraintKinds #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternGuards #
module Icicle.Test.Gen.Core.Program where
import qualified Icicle.Core.Exp.Simp as Simp
import qualified Data.Functor.Identity as Identity
import qualified Icicle.Common.FixT as Fix
import Icicle.Core.Exp.Prim
import Icicle.Common.Base
import Icicle.Common.Type
import Icicle.Common.Exp
import Disorder.Corpus
import Icicle.Test.Arbitrary.Data ()
import Icicle.Core.Exp.Combinators
import Icicle.Core.Stream
import Icicle.Core.Program.Program
import Icicle.Test.Gen.Core.Prim
import Icicle.Test.Gen.Core.Type
import Icicle.Test.Gen.Core.Value
import qualified Icicle.Common.Exp.Prim.Minimal as PM
import Icicle.Test.Arbitrary.Data
import Hedgehog hiding (Var)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import qualified Hedgehog.Gen.QuickCheck as Qc
import P
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Prelude
import qualified Control.Monad.Reader as Reader
import qualified Control.Monad.State as State
newtype Priority = Priority { getPriority :: Int }
type GenT' = State.StateT Int (Reader.ReaderT (Env Var (ValType,Priority), Map.Map ValType [Prim]) Gen)
type C m = (Reader.MonadReader (Env Var (ValType,Priority), Map.Map ValType [Prim]) m, State.MonadState Int m, MonadGen m, MonadPlus m)
-- Generate a fresh name, using a random word from the "cats" corpus as
-- its text part; the numeric suffix comes from the monadic counter.
freshName :: C m => m (Name Var)
freshName = Gen.element cats >>= freshName'
-- Allocate a fresh name with the given text prefix: read and bump the
-- monadic counter so successive names are distinct.
freshName' :: C m => Text -> m (Name Var)
freshName' n = do
  counter <- State.get
  State.put (counter + 1)
  return (nameOf (NameBase (Var n counter)))
-- Run the continuation with a fresh variable bound in the type
-- environment at the given type and priority; the primitive map is
-- passed through untouched.
freshBind :: C m => ValType -> Priority -> (Name Var -> m a) -> m a
freshBind t pri m =
  freshName >>= \v ->
    Reader.local (\(env, prims) -> (Map.insert v (t, pri) env, prims)) (m v)
-- Run the core-generation monad: shrink the size parameter to its
-- square root, build the primitive lookup map for the input type, and
-- start with an empty environment and a zeroed name counter.
runCoreGen :: ValType -> GenT' a -> Gen a
runCoreGen t m =
  Gen.sized $ \size -> Gen.resize (isqrt size) $ do
    primmap <- Gen.lift (genPrimLookupList (genDerivedTypeTop t))
    Reader.runReaderT (State.evalStateT m 0) (Map.empty, primmap)
  where
    isqrt = truncate . (sqrt :: Double -> Double) . fromIntegral
-- Generate an arbitrary well-typed expression together with its type:
-- pick an input type, then run the expression generator under it.
genExpTop :: Gen (Exp () Var Prim, ValType)
genExpTop = genInputType >>= \t -> runCoreGen t genExp
-- Generate an expression of exactly the given (zero-argument) type.
genExpForTypeTop :: ValType -> Gen (Exp () Var Prim)
genExpForTypeTop t = runCoreGen t (genExpForType (FunT [] t))
TODO : we should include the outputType in runCoreGen 's primitive map generation
-- | Top-level generator: run the core-generation monad to produce a
--   whole program consuming facts of `streamType` and returning a value
--   of `outputType`.
programForStreamType :: ValType -> ValType -> Gen (Program () Var)
programForStreamType streamType outputType = runCoreGen streamType (programForStreamType' streamType outputType)
programForStreamType' :: C m => ValType -> ValType -> m (Program () Var)
programForStreamType' streamType outputType = do
nmapsz <- freshName' "maxMapSize"
ntime <- freshName' "factTime"
ndate <- freshName' "snapshotTime"
ninput <- freshName' "input"
let env0 = Map.fromList [ ( ndate, (TimeT, Priority 20))
, ( nmapsz, (IntT, Priority 1)) ]
-- Generate a few precomputation expressions
npres <- genCount
(envP',pres)<- genExps env0 npres
let envS0 = Map.fromList [ ( ninput, (PairT streamType TimeT, Priority 40))
, ( ntime, (TimeT, Priority 20)) ]
nstrs <- genCount
(envS',strs)<- genStreams envP' envS0 nstrs
Postcomputations with access to the reduction values
nposts <- genCount
(envE', posts) <- genExps envS' nposts
Finally , everything is wrapped up into one return value
retName <- Qc.arbitrary
ret <- genExpForTypeEnv (FunT [] outputType) envE'
return Program
{ inputType = streamType
, factValName = ninput
, factTimeName = ntime
, snaptimeName = ndate
, maxMapSize = nmapsz
, precomps = pres
, streams = strs
, postcomps = posts
, returns = [(retName, ret)]
}
where
genCount = Gen.integral $ Range.linear 1 (10 :: Int)
stuffEnv e m = Reader.local (\(_,p) -> (e,p)) m
genExpEnv e = stuffEnv e $ genExp
genExpForTypeEnv t e = stuffEnv e $ genExpForType t
-- Generate a bunch of expressions, collecting up the environment
genExps env 0
= return (env, [])
genExps env n
= do (x,t) <- genExpEnv env
nm <- freshName
let env' = Map.insert nm (t, Priority 20) env
(env'', xs) <- genExps env' (n-1)
return (env'', (nm, x) : xs)
-- Generate some streams
genStreams zE _ 0
= return (zE, [])
genStreams zE kE n
= do (zE',s') <- Gen.recursive Gen.choice [genFold zE kE] [genFilter zE kE (n-1)]
(zE'', ss) <- genStreams zE' kE (n-1)
return (zE'', s' : ss)
genFold zE kE
= do n <- freshName
(z,t) <- genExpEnv zE
let zE' = Map.insert n (t, Priority 50) zE
k <- genExpForTypeEnv (FunT [] t) (Map.union zE' kE)
return (zE', SFold n t z k)
genFilter zE kE num
= do num' <- Gen.integral $ Range.linear 1 num
pred <- genExpForTypeEnv (FunT [] BoolT) (Map.union zE kE)
(zE', ss') <- genStreams zE kE num'
return (zE', SFilter pred ss')
genExpForValType :: C m => ValType -> m (Exp () Var Prim)
genExpForValType ty = shrink $ Gen.recursive Gen.choice
[ genContextForType ty
, genPrimConstructor ty ]
-- When we can generate primitives, prefer them
[ genPrimitiveForType ty
, genPrimitiveForType ty
, genLetForType ty ]
-- | Generate a well typed expression that has given type.
--
genExpForType :: C m => Type -> m (Exp () Var Prim)
genExpForType ty
= case ty of
FunT (FunT [] t : ts) ret
-> freshBind t (Priority 30) $ \n -> xLam n t <$> genExpForType (FunT ts ret)
FunT (_:_) _
-> Prelude.error "genExpForType: invalid higher order function type. We cannot generate these, so type generator should not include these."
FunT [] ret
-> genExpForValType ret
where
genPrimConstructor :: C m => ValType -> m (Exp () Var Prim)
genPrimConstructor t
= case t of
IntT -> genContextOrValue
DoubleT -> genContextOrValue
UnitT -> genContextOrValue
ErrorT -> genContextOrValue
BoolT -> genContextOrValue
TimeT -> genContextOrValue
StringT -> genContextOrValue
StructT _ -> genContextOrValue
PairT a b -> valueChoice
[ (xPrim' $ PrimMinimal $ PM.PrimConst $ PM.PrimConstPair a b) `pApp` genExpForValType a `pApp` genExpForValType b ]
SumT a b -> valueChoice
[ (xPrim' $ PrimMinimal $ PM.PrimConst $ PM.PrimConstLeft a b) `pApp` genExpForValType a
, (xPrim' $ PrimMinimal $ PM.PrimConst $ PM.PrimConstRight a b) `pApp` genExpForValType b ]
OptionT a -> valueChoice
[ (xPrim' $ PrimMinimal $ PM.PrimConst $ PM.PrimConstSome a) `pApp` genExpForValType a ]
ArrayT a
| isOrdValType a -> valueChoice
[ genArrayOfBuf a
, genArrayOfMap PM.PrimBuiltinVals IntT a
, genArrayOfMap PM.PrimBuiltinKeys a IntT ]
| otherwise -> valueChoice
[ genArrayOfBuf a
, genArrayOfMap PM.PrimBuiltinVals IntT a ]
BufT n a -> genrec1
(xValue t $ VBuf [])
(\x' -> (xPrim' $ PrimLatest $ PrimLatestPush n a) `pApp` pure x' `pApp` genExpForValType a)
MapT k v -> genrec1
(xValue t (VMap Map.empty) )
(\x' -> (xPrim' $ PrimMap $ PrimMapInsertOrUpdate k v) `pApp` xid v `pApp` genExpForValType v `pApp` genExpForValType k `pApp` pure x' )
where
-- Make values very rare, but allow us to shrink to them
valueChoice vs = Gen.frequency ((1, genValue) : fmap ((,) 10) vs)
genValue = xValue t <$> baseValueForType t
genrec1 nonrec rec =
Gen.recursive Gen.choice
[ pure nonrec, rec nonrec ]
[ genContextForType t >>= rec ]
xPrim' = pure . xPrim
xid t' = genExpForType (FunT [FunT [] t'] t')
pApp l r = do
lx <- l
rx <- r
return (xApp lx rx)
genContextOrValue = do
c <- tryGenContextForType t
case c of
Nothing -> genValue
Just c' -> return c'
genArrayOfBuf a = do
n <- Gen.lift genBufLength
(xPrim' $ PrimLatest $ PrimLatestRead n a) `pApp` genExpForValType (BufT n a)
genArrayOfMap p tk tv =
(xPrim' $ PrimMinimal $ PM.PrimBuiltinFun $ PM.PrimBuiltinMap $ p tk tv) `pApp` genExpForValType (MapT tk tv)
-- | Generate an expression for an arbitrary value type
genExp :: C m => m (Exp () Var Prim, ValType)
genExp = do
(env,prims) <- Reader.asks id
let pts = Map.keys prims
let ets = fmap fst $ Map.elems env
Gen.choice [ genXT ets <|> genXT pts
, genXT pts ]
where
genXT ts
| null ts = Gen.discard
| otherwise = do
t <- Gen.element ts
(,) <$> genExpForType (FunT [] t) <*> pure t
-- | Try to generate an expression with a given type from the context.
tryGenContextForType :: C m => ValType -> m (Maybe (Exp () Var Prim))
tryGenContextForType r = do
env <- Reader.asks fst
let m' = catMaybes $ fmap gen $ Map.toList env
case List.null m' of
True -> return Nothing
False -> Just <$> Gen.frequency m'
where
gen (v,(t,Priority p)) = do
x <- project (xVar v) t
return (p, return x)
project x t
| t == r
= Just x
| PairT a b <- t
= project (prim (PrimMinimal $ PM.PrimPair $ PM.PrimPairFst a b) x) a
<|> project (prim (PrimMinimal $ PM.PrimPair $ PM.PrimPairSnd a b) x) b
| StructT st@(StructType fs) <- t
= msum
$ fmap (\(fn,ft) -> project (prim (PrimMinimal $ PM.PrimStruct $ PM.PrimStructGet fn ft st) x) ft)
$ Map.toList fs
| otherwise
= Nothing
prim p x = xPrim p `xApp` x
-- | Generate context lookup, falling back to constructors / values
genContextForType :: C m => ValType -> m (Exp () Var Prim)
genContextForType r = do
e <- tryGenContextForType r
case e of
Nothing -> genPrimConstructor r
Just e' -> return e'
-- | Try to generate a primitive of a given type
tryGenPrimitiveForType :: C m => ValType -> m (Maybe (Exp () Var Prim))
tryGenPrimitiveForType r = do
primmap <- Reader.asks snd
case Map.lookup r primmap of
Nothing -> return Nothing
Just ps -> do
p <- Gen.element ps
args <- mapM (genArg p) (functionArguments $ typeOfPrim p)
return $ Just $ foldl xApp (xPrim p) args
where
-- Only ever minus by small constant amounts for times.
-- Bit over a year.
genArg (PrimMinimal (PM.PrimTime p)) (FunT [] IntT)
| PM.PrimTimeMinusSeconds <- p
= genVInt (370 * 60 * 60 * 24)
| PM.PrimTimeMinusDays <- p
= genVInt 370
| PM.PrimTimeMinusMonths <- p
= genVInt 13
-- Otherwise make an expression
genArg _ t
= genExpForType t
genVInt upper
= xValue IntT . VInt <$> Gen.integral (Range.linear 0 upper)
-- | Generate primitive application, falling back to context
genPrimitiveForType :: C m => ValType -> m (Exp () Var Prim)
genPrimitiveForType r = do
p <- tryGenPrimitiveForType r
case p of
Nothing -> do
genContextForType r
Just p' -> return p'
-- | Generate a let binding with given return type.
genLetForType :: C m => ValType -> m (Exp () Var Prim)
genLetForType r = shrink $ do
(x, t) <- genExp
freshBind t (Priority 50) $ \n -> xLet n x <$> genExpForType (FunT [] r)
-- Shrink the
shrink :: C m => m (Exp () Var Prim) -> m (Exp () Var Prim)
shrink = Gen.shrink go
where
go x = runFixT (Simp.simpX ()) (Simp.deadX x)
runFixT f a = case Identity.runIdentity $ Fix.runFixT $ f a of
(a', Fix.RunAgain) -> [Identity.runIdentity $ Fix.fixpoint f a']
_ -> []
| null | https://raw.githubusercontent.com/icicle-lang/icicle-ambiata/9b9cc45a75f66603007e4db7e5f3ba908cae2df2/icicle-compiler/test/Icicle/Test/Gen/Core/Program.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE OverloadedStrings #
Generate a few precomputation expressions
Generate a bunch of expressions, collecting up the environment
Generate some streams
When we can generate primitives, prefer them
| Generate a well typed expression that has given type.
Make values very rare, but allow us to shrink to them
| Generate an expression for an arbitrary value type
| Try to generate an expression with a given type from the context.
| Generate context lookup, falling back to constructors / values
| Try to generate a primitive of a given type
Only ever minus by small constant amounts for times.
Bit over a year.
Otherwise make an expression
| Generate primitive application, falling back to context
| Generate a let binding with given return type.
Shrink the | # LANGUAGE FlexibleContexts #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE PatternGuards #
module Icicle.Test.Gen.Core.Program where
import qualified Icicle.Core.Exp.Simp as Simp
import qualified Data.Functor.Identity as Identity
import qualified Icicle.Common.FixT as Fix
import Icicle.Core.Exp.Prim
import Icicle.Common.Base
import Icicle.Common.Type
import Icicle.Common.Exp
import Disorder.Corpus
import Icicle.Test.Arbitrary.Data ()
import Icicle.Core.Exp.Combinators
import Icicle.Core.Stream
import Icicle.Core.Program.Program
import Icicle.Test.Gen.Core.Prim
import Icicle.Test.Gen.Core.Type
import Icicle.Test.Gen.Core.Value
import qualified Icicle.Common.Exp.Prim.Minimal as PM
import Icicle.Test.Arbitrary.Data
import Hedgehog hiding (Var)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import qualified Hedgehog.Gen.QuickCheck as Qc
import P
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Prelude
import qualified Control.Monad.Reader as Reader
import qualified Control.Monad.State as State
newtype Priority = Priority { getPriority :: Int }
type GenT' = State.StateT Int (Reader.ReaderT (Env Var (ValType,Priority), Map.Map ValType [Prim]) Gen)
type C m = (Reader.MonadReader (Env Var (ValType,Priority), Map.Map ValType [Prim]) m, State.MonadState Int m, MonadGen m, MonadPlus m)
freshName :: C m => m (Name Var)
freshName = Gen.element cats >>= freshName'
freshName' :: C m => Text -> m (Name Var)
freshName' n = do
i <- State.get
State.put (i + 1)
return $ nameOf $ NameBase $ Var n i
freshBind :: C m => ValType -> Priority -> (Name Var -> m a) -> m a
freshBind t pri m = do
v <- freshName
Reader.local (\(e,p) -> (Map.insert v (t,pri) e, p)) (m v)
runCoreGen :: ValType -> GenT' a -> Gen a
runCoreGen t m = Gen.sized $ \s -> Gen.resize (sqrt' s) $ do
primmap <- Gen.lift $ genPrimLookupList $ genDerivedTypeTop t
Reader.runReaderT (State.evalStateT m 0) (Map.empty, primmap)
where
sqrt' = truncate . (sqrt :: Double -> Double) . fromIntegral
genExpTop :: Gen (Exp () Var Prim, ValType)
genExpTop = do
t <- genInputType
runCoreGen t genExp
genExpForTypeTop :: ValType -> Gen (Exp () Var Prim)
genExpForTypeTop t = do
runCoreGen t $ genExpForType (FunT [] t)
TODO : we should include the outputType in runCoreGen 's primitive map generation
programForStreamType :: ValType -> ValType -> Gen (Program () Var)
programForStreamType streamType outputType = runCoreGen streamType (programForStreamType' streamType outputType)
programForStreamType' :: C m => ValType -> ValType -> m (Program () Var)
programForStreamType' streamType outputType = do
nmapsz <- freshName' "maxMapSize"
ntime <- freshName' "factTime"
ndate <- freshName' "snapshotTime"
ninput <- freshName' "input"
let env0 = Map.fromList [ ( ndate, (TimeT, Priority 20))
, ( nmapsz, (IntT, Priority 1)) ]
npres <- genCount
(envP',pres)<- genExps env0 npres
let envS0 = Map.fromList [ ( ninput, (PairT streamType TimeT, Priority 40))
, ( ntime, (TimeT, Priority 20)) ]
nstrs <- genCount
(envS',strs)<- genStreams envP' envS0 nstrs
Postcomputations with access to the reduction values
nposts <- genCount
(envE', posts) <- genExps envS' nposts
Finally , everything is wrapped up into one return value
retName <- Qc.arbitrary
ret <- genExpForTypeEnv (FunT [] outputType) envE'
return Program
{ inputType = streamType
, factValName = ninput
, factTimeName = ntime
, snaptimeName = ndate
, maxMapSize = nmapsz
, precomps = pres
, streams = strs
, postcomps = posts
, returns = [(retName, ret)]
}
where
genCount = Gen.integral $ Range.linear 1 (10 :: Int)
stuffEnv e m = Reader.local (\(_,p) -> (e,p)) m
genExpEnv e = stuffEnv e $ genExp
genExpForTypeEnv t e = stuffEnv e $ genExpForType t
genExps env 0
= return (env, [])
genExps env n
= do (x,t) <- genExpEnv env
nm <- freshName
let env' = Map.insert nm (t, Priority 20) env
(env'', xs) <- genExps env' (n-1)
return (env'', (nm, x) : xs)
genStreams zE _ 0
= return (zE, [])
genStreams zE kE n
= do (zE',s') <- Gen.recursive Gen.choice [genFold zE kE] [genFilter zE kE (n-1)]
(zE'', ss) <- genStreams zE' kE (n-1)
return (zE'', s' : ss)
genFold zE kE
= do n <- freshName
(z,t) <- genExpEnv zE
let zE' = Map.insert n (t, Priority 50) zE
k <- genExpForTypeEnv (FunT [] t) (Map.union zE' kE)
return (zE', SFold n t z k)
genFilter zE kE num
= do num' <- Gen.integral $ Range.linear 1 num
pred <- genExpForTypeEnv (FunT [] BoolT) (Map.union zE kE)
(zE', ss') <- genStreams zE kE num'
return (zE', SFilter pred ss')
genExpForValType :: C m => ValType -> m (Exp () Var Prim)
genExpForValType ty = shrink $ Gen.recursive Gen.choice
[ genContextForType ty
, genPrimConstructor ty ]
[ genPrimitiveForType ty
, genPrimitiveForType ty
, genLetForType ty ]
genExpForType :: C m => Type -> m (Exp () Var Prim)
genExpForType ty
= case ty of
FunT (FunT [] t : ts) ret
-> freshBind t (Priority 30) $ \n -> xLam n t <$> genExpForType (FunT ts ret)
FunT (_:_) _
-> Prelude.error "genExpForType: invalid higher order function type. We cannot generate these, so type generator should not include these."
FunT [] ret
-> genExpForValType ret
where
genPrimConstructor :: C m => ValType -> m (Exp () Var Prim)
genPrimConstructor t
= case t of
IntT -> genContextOrValue
DoubleT -> genContextOrValue
UnitT -> genContextOrValue
ErrorT -> genContextOrValue
BoolT -> genContextOrValue
TimeT -> genContextOrValue
StringT -> genContextOrValue
StructT _ -> genContextOrValue
PairT a b -> valueChoice
[ (xPrim' $ PrimMinimal $ PM.PrimConst $ PM.PrimConstPair a b) `pApp` genExpForValType a `pApp` genExpForValType b ]
SumT a b -> valueChoice
[ (xPrim' $ PrimMinimal $ PM.PrimConst $ PM.PrimConstLeft a b) `pApp` genExpForValType a
, (xPrim' $ PrimMinimal $ PM.PrimConst $ PM.PrimConstRight a b) `pApp` genExpForValType b ]
OptionT a -> valueChoice
[ (xPrim' $ PrimMinimal $ PM.PrimConst $ PM.PrimConstSome a) `pApp` genExpForValType a ]
ArrayT a
| isOrdValType a -> valueChoice
[ genArrayOfBuf a
, genArrayOfMap PM.PrimBuiltinVals IntT a
, genArrayOfMap PM.PrimBuiltinKeys a IntT ]
| otherwise -> valueChoice
[ genArrayOfBuf a
, genArrayOfMap PM.PrimBuiltinVals IntT a ]
BufT n a -> genrec1
(xValue t $ VBuf [])
(\x' -> (xPrim' $ PrimLatest $ PrimLatestPush n a) `pApp` pure x' `pApp` genExpForValType a)
MapT k v -> genrec1
(xValue t (VMap Map.empty) )
(\x' -> (xPrim' $ PrimMap $ PrimMapInsertOrUpdate k v) `pApp` xid v `pApp` genExpForValType v `pApp` genExpForValType k `pApp` pure x' )
where
valueChoice vs = Gen.frequency ((1, genValue) : fmap ((,) 10) vs)
genValue = xValue t <$> baseValueForType t
genrec1 nonrec rec =
Gen.recursive Gen.choice
[ pure nonrec, rec nonrec ]
[ genContextForType t >>= rec ]
xPrim' = pure . xPrim
xid t' = genExpForType (FunT [FunT [] t'] t')
pApp l r = do
lx <- l
rx <- r
return (xApp lx rx)
genContextOrValue = do
c <- tryGenContextForType t
case c of
Nothing -> genValue
Just c' -> return c'
genArrayOfBuf a = do
n <- Gen.lift genBufLength
(xPrim' $ PrimLatest $ PrimLatestRead n a) `pApp` genExpForValType (BufT n a)
genArrayOfMap p tk tv =
(xPrim' $ PrimMinimal $ PM.PrimBuiltinFun $ PM.PrimBuiltinMap $ p tk tv) `pApp` genExpForValType (MapT tk tv)
genExp :: C m => m (Exp () Var Prim, ValType)
genExp = do
(env,prims) <- Reader.asks id
let pts = Map.keys prims
let ets = fmap fst $ Map.elems env
Gen.choice [ genXT ets <|> genXT pts
, genXT pts ]
where
genXT ts
| null ts = Gen.discard
| otherwise = do
t <- Gen.element ts
(,) <$> genExpForType (FunT [] t) <*> pure t
tryGenContextForType :: C m => ValType -> m (Maybe (Exp () Var Prim))
tryGenContextForType r = do
env <- Reader.asks fst
let m' = catMaybes $ fmap gen $ Map.toList env
case List.null m' of
True -> return Nothing
False -> Just <$> Gen.frequency m'
where
gen (v,(t,Priority p)) = do
x <- project (xVar v) t
return (p, return x)
project x t
| t == r
= Just x
| PairT a b <- t
= project (prim (PrimMinimal $ PM.PrimPair $ PM.PrimPairFst a b) x) a
<|> project (prim (PrimMinimal $ PM.PrimPair $ PM.PrimPairSnd a b) x) b
| StructT st@(StructType fs) <- t
= msum
$ fmap (\(fn,ft) -> project (prim (PrimMinimal $ PM.PrimStruct $ PM.PrimStructGet fn ft st) x) ft)
$ Map.toList fs
| otherwise
= Nothing
prim p x = xPrim p `xApp` x
genContextForType :: C m => ValType -> m (Exp () Var Prim)
genContextForType r = do
e <- tryGenContextForType r
case e of
Nothing -> genPrimConstructor r
Just e' -> return e'
tryGenPrimitiveForType :: C m => ValType -> m (Maybe (Exp () Var Prim))
tryGenPrimitiveForType r = do
primmap <- Reader.asks snd
case Map.lookup r primmap of
Nothing -> return Nothing
Just ps -> do
p <- Gen.element ps
args <- mapM (genArg p) (functionArguments $ typeOfPrim p)
return $ Just $ foldl xApp (xPrim p) args
where
genArg (PrimMinimal (PM.PrimTime p)) (FunT [] IntT)
| PM.PrimTimeMinusSeconds <- p
= genVInt (370 * 60 * 60 * 24)
| PM.PrimTimeMinusDays <- p
= genVInt 370
| PM.PrimTimeMinusMonths <- p
= genVInt 13
genArg _ t
= genExpForType t
genVInt upper
= xValue IntT . VInt <$> Gen.integral (Range.linear 0 upper)
genPrimitiveForType :: C m => ValType -> m (Exp () Var Prim)
genPrimitiveForType r = do
p <- tryGenPrimitiveForType r
case p of
Nothing -> do
genContextForType r
Just p' -> return p'
genLetForType :: C m => ValType -> m (Exp () Var Prim)
genLetForType r = shrink $ do
(x, t) <- genExp
freshBind t (Priority 50) $ \n -> xLet n x <$> genExpForType (FunT [] r)
shrink :: C m => m (Exp () Var Prim) -> m (Exp () Var Prim)
shrink = Gen.shrink go
where
go x = runFixT (Simp.simpX ()) (Simp.deadX x)
runFixT f a = case Identity.runIdentity $ Fix.runFixT $ f a of
(a', Fix.RunAgain) -> [Identity.runIdentity $ Fix.fixpoint f a']
_ -> []
|
4c1332d0cea5d84933b4a36dd4b542882bce0a579a2c40674afca04a224cb11e | ocaml-gospel/gospel | t24.mli | (**************************************************************************)
(* *)
GOSPEL -- A Specification Language for OCaml
(* *)
Copyright ( c ) 2018- The VOCaL Project
(* *)
This software is free software , distributed under the MIT license
(* (as described in file LICENSE enclosed). *)
(**************************************************************************)
val f : ('a -> 'b -> 'c) -> 'a -> 'b -> 'c
(*@ r = f x y z w *)
ERROR :
Line 12
too many parameters in function header
add one parameter less in line 12
Line 12
too many parameters in function header
add one parameter less in line 12 *)
{ gospel_expected|
[ 125 ] File " t24.mli " , line 12 , characters 16 - 17 :
12 | ( * @ r = f x y z w
[125] File "t24.mli", line 12, characters 16-17:
12 | (*@ r = f x y z w *)
^
Error: Type checking error: parameter do not match with val type.
|gospel_expected} *)
| null | https://raw.githubusercontent.com/ocaml-gospel/gospel/79841c510baeb396d9a695ae33b290899188380b/test/negative/t24.mli | ocaml | ************************************************************************
(as described in file LICENSE enclosed).
************************************************************************
@ r = f x y z w
@ r = f x y z w | GOSPEL -- A Specification Language for OCaml
Copyright ( c ) 2018- The VOCaL Project
This software is free software , distributed under the MIT license
val f : ('a -> 'b -> 'c) -> 'a -> 'b -> 'c
ERROR :
Line 12
too many parameters in function header
add one parameter less in line 12
Line 12
too many parameters in function header
add one parameter less in line 12 *)
{ gospel_expected|
[ 125 ] File " t24.mli " , line 12 , characters 16 - 17 :
12 | ( * @ r = f x y z w
[125] File "t24.mli", line 12, characters 16-17:
^
Error: Type checking error: parameter do not match with val type.
|gospel_expected} *)
|
b4c2ab625e050b5961a81a79fa990e888241c1d2878f859396935daa5518ec5a | yesodweb/yesod | non-th.hs | {-# LANGUAGE OverloadedStrings #-}
import Yesod.Routes.Dispatch
import Data.Text (Text, words)
import Prelude hiding (words)
import Web.PathPieces
import Criterion.Main
import Control.DeepSeq
import Control.Monad (forM_, unless)
data TestRoute = Foo | Bar !Int | Baz
deriving Eq
instance NFData TestRoute
samples = take 10000 $ cycle
[ words "foo"
, words "foo bar"
, words ""
, words "bar baz"
, words "bar 4"
, words "bar 1234566789"
, words "baz"
, words "baz 4"
, words "something else"
]
simple :: [Text] -> Maybe TestRoute
simple ["foo"] = Just Foo
simple ["bar", x] = fmap Bar (fromPathPiece x)
simple ["baz"] = Just Baz
simple ["FOO"] = Just Foo
simple ["BAR", x] = fmap Bar (fromPathPiece x)
simple ["BAZ"] = Just Baz
simple ["Foo"] = Just Foo
simple ["Bar", x] = fmap Bar (fromPathPiece x)
simple ["Baz"] = Just Baz
simple ["Xfoo"] = Just Foo
simple ["Xbar", x] = fmap Bar (fromPathPiece x)
simple ["Xbaz"] = Just Baz
simple ["XFOO"] = Just Foo
simple ["XBAR", x] = fmap Bar (fromPathPiece x)
simple ["XBAZ"] = Just Baz
simple ["XFoo"] = Just Foo
simple ["XBar", x] = fmap Bar (fromPathPiece x)
simple ["XBaz"] = Just Baz
simple _ = Nothing
dispatch :: [Text] -> Maybe TestRoute
dispatch = toDispatch
[ Route [Static "foo"] False (const (Just Foo))
, Route [Static "bar", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "baz"] False (const (Just Baz))
, Route [Static "FOO"] False (const (Just Foo))
, Route [Static "BAR", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "BAZ"] False (const (Just Baz))
, Route [Static "Foo"] False (const (Just Foo))
, Route [Static "Bar", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "Baz"] False (const (Just Baz))
, Route [Static "Xfoo"] False (const (Just Foo))
, Route [Static "Xbar", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "Xbaz"] False (const (Just Baz))
, Route [Static "XFOO"] False (const (Just Foo))
, Route [Static "XBAR", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "XBAZ"] False (const (Just Baz))
, Route [Static "XFoo"] False (const (Just Foo))
, Route [Static "XBar", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "XBaz"] False (const (Just Baz))
]
main :: IO ()
main = do
forM_ samples $ \sample -> unless (simple sample == dispatch sample) (error $ show sample)
defaultMain
[ bench "simple" $ nf (map simple) samples
, bench "dispatch" $ nf (map dispatch) samples
]
| null | https://raw.githubusercontent.com/yesodweb/yesod/c59993ff287b880abbf768f1e3f56ae9b19df51e/yesod-core/bench/non-th.hs | haskell | # LANGUAGE OverloadedStrings # | import Yesod.Routes.Dispatch
import Data.Text (Text, words)
import Prelude hiding (words)
import Web.PathPieces
import Criterion.Main
import Control.DeepSeq
import Control.Monad (forM_, unless)
data TestRoute = Foo | Bar !Int | Baz
deriving Eq
instance NFData TestRoute
samples = take 10000 $ cycle
[ words "foo"
, words "foo bar"
, words ""
, words "bar baz"
, words "bar 4"
, words "bar 1234566789"
, words "baz"
, words "baz 4"
, words "something else"
]
simple :: [Text] -> Maybe TestRoute
simple ["foo"] = Just Foo
simple ["bar", x] = fmap Bar (fromPathPiece x)
simple ["baz"] = Just Baz
simple ["FOO"] = Just Foo
simple ["BAR", x] = fmap Bar (fromPathPiece x)
simple ["BAZ"] = Just Baz
simple ["Foo"] = Just Foo
simple ["Bar", x] = fmap Bar (fromPathPiece x)
simple ["Baz"] = Just Baz
simple ["Xfoo"] = Just Foo
simple ["Xbar", x] = fmap Bar (fromPathPiece x)
simple ["Xbaz"] = Just Baz
simple ["XFOO"] = Just Foo
simple ["XBAR", x] = fmap Bar (fromPathPiece x)
simple ["XBAZ"] = Just Baz
simple ["XFoo"] = Just Foo
simple ["XBar", x] = fmap Bar (fromPathPiece x)
simple ["XBaz"] = Just Baz
simple _ = Nothing
dispatch :: [Text] -> Maybe TestRoute
dispatch = toDispatch
[ Route [Static "foo"] False (const (Just Foo))
, Route [Static "bar", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "baz"] False (const (Just Baz))
, Route [Static "FOO"] False (const (Just Foo))
, Route [Static "BAR", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "BAZ"] False (const (Just Baz))
, Route [Static "Foo"] False (const (Just Foo))
, Route [Static "Bar", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "Baz"] False (const (Just Baz))
, Route [Static "Xfoo"] False (const (Just Foo))
, Route [Static "Xbar", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "Xbaz"] False (const (Just Baz))
, Route [Static "XFOO"] False (const (Just Foo))
, Route [Static "XBAR", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "XBAZ"] False (const (Just Baz))
, Route [Static "XFoo"] False (const (Just Foo))
, Route [Static "XBar", Dynamic] False (\[_, x] -> (fmap Bar (fromPathPiece x)))
, Route [Static "XBaz"] False (const (Just Baz))
]
main :: IO ()
main = do
forM_ samples $ \sample -> unless (simple sample == dispatch sample) (error $ show sample)
defaultMain
[ bench "simple" $ nf (map simple) samples
, bench "dispatch" $ nf (map dispatch) samples
]
|
c1e764718e6b20457620e355a3bc196a9129cd5ac4050450288fbe5c00931383 | mzp/coq-ruby | tactics.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
$ I d : tactics.ml 12956 2010 - 04 - 20 08:49:15Z herbelin $
open Pp
open Util
open Names
open Nameops
open Sign
open Term
open Termops
open Declarations
open Inductive
open Inductiveops
open Reductionops
open Environ
open Libnames
open Evd
open Pfedit
open Tacred
open Rawterm
open Tacmach
open Proof_trees
open Proof_type
open Logic
open Evar_refiner
open Clenv
open Clenvtac
open Refiner
open Tacticals
open Hipattern
open Coqlib
open Nametab
open Genarg
open Tacexpr
open Decl_kinds
open Evarutil
open Indrec
open Pretype_errors
open Unification
exception Bound
let rec nb_prod x =
let rec count n c =
match kind_of_term c with
Prod(_,_,t) -> count (n+1) t
| LetIn(_,a,_,t) -> count n (subst1 a t)
| Cast(c,_,_) -> count n c
| _ -> n
in count 0 x
let inj_with_occurrences e = (all_occurrences_expr,e)
let inj_open c = (Evd.empty,c)
let inj_occ (occ,c) = (occ,inj_open c)
let inj_red_expr = function
| Simpl lo -> Simpl (Option.map inj_occ lo)
| Fold l -> Fold (List.map inj_open l)
| Pattern l -> Pattern (List.map inj_occ l)
| (ExtraRedExpr _ | CbvVm | Red _ | Hnf | Cbv _ | Lazy _ | Unfold _ as c)
-> c
let inj_ebindings = function
| NoBindings -> NoBindings
| ImplicitBindings l -> ImplicitBindings (List.map inj_open l)
| ExplicitBindings l ->
ExplicitBindings (List.map (fun (l,id,c) -> (l,id,inj_open c)) l)
let dloc = dummy_loc
(*********************************************)
(* Tactics *)
(*********************************************)
(****************************************)
(* General functions *)
(****************************************)
let string_of_inductive c =
try match kind_of_term c with
| Ind ind_sp ->
let (mib,mip) = Global.lookup_inductive ind_sp in
string_of_id mip.mind_typename
| _ -> raise Bound
with Bound -> error "Bound head variable."
let rec head_constr_bound t =
let t = strip_outer_cast t in
let _,ccl = decompose_prod_assum t in
let hd,args = decompose_app ccl in
match kind_of_term hd with
| Const _ | Ind _ | Construct _ | Var _ -> (hd,args)
| _ -> raise Bound
let head_constr c =
try head_constr_bound c with Bound -> error "Bound head variable."
(******************************************)
(* Primitive tactics *)
(******************************************)
let introduction = Tacmach.introduction
let refine = Tacmach.refine
let convert_concl = Tacmach.convert_concl
let convert_hyp = Tacmach.convert_hyp
let thin_body = Tacmach.thin_body
let error_clear_dependency env id = function
| Evarutil.OccurHypInSimpleClause None ->
errorlabstrm "" (pr_id id ++ str " is used in conclusion.")
| Evarutil.OccurHypInSimpleClause (Some id') ->
errorlabstrm ""
(pr_id id ++ strbrk " is used in hypothesis " ++ pr_id id' ++ str".")
| Evarutil.EvarTypingBreak ev ->
errorlabstrm ""
(str "Cannot remove " ++ pr_id id ++
strbrk " without breaking the typing of " ++
Printer.pr_existential env ev ++ str".")
let thin l gl =
try thin l gl
with Evarutil.ClearDependencyError (id,err) ->
error_clear_dependency (pf_env gl) id err
let internal_cut_gen b d t gl =
try internal_cut b d t gl
with Evarutil.ClearDependencyError (id,err) ->
error_clear_dependency (pf_env gl) id err
let internal_cut = internal_cut_gen false
let internal_cut_replace = internal_cut_gen true
let internal_cut_rev_gen b d t gl =
try internal_cut_rev b d t gl
with Evarutil.ClearDependencyError (id,err) ->
error_clear_dependency (pf_env gl) id err
let internal_cut_rev = internal_cut_rev_gen false
let internal_cut_rev_replace = internal_cut_rev_gen true
(* Moving hypotheses *)
let move_hyp = Tacmach.move_hyp
let order_hyps = Tacmach.order_hyps
(* Renaming hypotheses *)
let rename_hyp = Tacmach.rename_hyp
(**************************************************************)
(* Fresh names *)
(**************************************************************)
let fresh_id_avoid avoid id =
next_global_ident_away true id avoid
let fresh_id avoid id gl =
fresh_id_avoid (avoid@(pf_ids_of_hyps gl)) id
(**************************************************************)
Fixpoints and CoFixpoints
(**************************************************************)
(* Refine as a fixpoint *)
let mutual_fix = Tacmach.mutual_fix
let fix ido n gl = match ido with
| None ->
mutual_fix (fresh_id [] (Pfedit.get_current_proof_name ()) gl) n [] gl
| Some id ->
mutual_fix id n [] gl
(* Refine as a cofixpoint *)
let mutual_cofix = Tacmach.mutual_cofix
let cofix ido gl = match ido with
| None ->
mutual_cofix (fresh_id [] (Pfedit.get_current_proof_name ()) gl) [] gl
| Some id ->
mutual_cofix id [] gl
(**************************************************************)
(* Reduction and conversion tactics *)
(**************************************************************)
type tactic_reduction = env -> evar_map -> constr -> constr
let pf_reduce_decl redfun where (id,c,ty) gl =
let redfun' = pf_reduce redfun gl in
match c with
| None ->
if where = InHypValueOnly then
errorlabstrm "" (pr_id id ++ str "has no value.");
(id,None,redfun' ty)
| Some b ->
let b' = if where <> InHypTypeOnly then redfun' b else b in
let ty' = if where <> InHypValueOnly then redfun' ty else ty in
(id,Some b',ty')
The following two tactics apply an arbitrary
reduction function either to the conclusion or to a
certain hypothesis
reduction function either to the conclusion or to a
certain hypothesis *)
let reduct_in_concl (redfun,sty) gl =
convert_concl_no_check (pf_reduce redfun gl (pf_concl gl)) sty gl
let reduct_in_hyp redfun ((_,id),where) gl =
convert_hyp_no_check
(pf_reduce_decl redfun where (pf_get_hyp gl id) gl) gl
let reduct_option redfun = function
| Some id -> reduct_in_hyp (fst redfun) id
| None -> reduct_in_concl redfun
(* The following tactic determines whether the reduction
function has to be applied to the conclusion or
to the hypotheses. *)
let redin_combinator redfun =
onClauses (reduct_option redfun)
(* Now we introduce different instances of the previous tacticals *)
(* Replace [c] by [t] after checking convertibility w.r.t. [cv_pb]. *)
let change_and_check cv_pb t env sigma c =
if is_fconv cv_pb env sigma t c then
t
else
errorlabstrm "convert-check-hyp" (str "Not convertible.")
(* Use cumulativity only if changing the conclusion, not a subterm *)
let change_on_subterm cv_pb t = function
| None -> change_and_check cv_pb t
| Some occl -> contextually false occl (change_and_check Reduction.CONV t)
(* Change (occurrences of) the conclusion into [t]. *)
let change_in_concl occl t =
reduct_in_concl ((change_on_subterm Reduction.CUMUL t occl),DEFAULTcast)
(* Change (occurrences of) the hypothesis [id] into [t], with re-typing. *)
let change_in_hyp occl t id =
with_check (reduct_in_hyp (change_on_subterm Reduction.CONV t occl) id)
(* Dispatch [change] to a hypothesis or to the conclusion. *)
let change_option occl t = function
Some id -> change_in_hyp occl t id
| None -> change_in_concl occl t
(* Project the concrete value out of an [or_var]; by this point any
   variable argument must already have been interpreted away. *)
let out_arg a =
  match a with
  | ArgArg x -> x
  | ArgVar _ -> anomaly "Unevaluated or_var variable"
(* Reconcile an optional occurrence selector [occl] with a clause
   [cls]: move the "at" information from the clause into the selector
   when only the goal or a single hypothesis is targeted, and warn
   about occurrence information that cannot be honoured. *)
let adjust_clause occl cls =
(* warn as much as possible on loss of occurrence information *)
(match cls, occl with
({onhyps=(Some(_::_::_)|None)}
|{onhyps=Some(_::_);concl_occs=((false,_)|(true,_::_))}),
Some _ ->
error "No occurrences expected when changing several hypotheses."
| _ -> ());
(* get the "at" clause from cls if only the goal or one hyp is specified *)
let occl,cls = match occl with
| None -> None,cls
| Some (occs,c) ->
if cls.onhyps=Some[] && occs=all_occurrences then
Some (on_snd (List.map out_arg) cls.concl_occs,c),
{cls with concl_occs=all_occurrences_expr}
else
match cls.onhyps with
| Some[(occs',id),l] when
cls.concl_occs=no_occurrences_expr && occs=all_occurrences ->
Some (on_snd (List.map out_arg) occs',c),
{cls with onhyps=Some[(all_occurrences_expr,id),l]}
| _ ->
occl,cls in
(* check if cls has still specified occs *)
if cls.onhyps <> None &&
List.exists (fun ((occs,_),_) -> occs <> all_occurrences_expr)
(Option.get cls.onhyps)
|| cls.concl_occs <> all_occurrences_expr &&
cls.concl_occs <> no_occurrences_expr
then
Flags.if_verbose Pp.msg_warning
(if cls.onhyps=Some[] then
str "Trailing \"at\" modifier not taken into account."
else
str "\"at\" modifier in clause \"in\" not taken into account.");
(* Anticipate on onClauses which removes concl if not at all occs *)
if cls.concl_occs=no_occurrences_expr then cls
else {cls with concl_occs=all_occurrences_expr}
(* User-level change tactic: change [c] at the places selected by
   [occl] within the clause [cls]. *)
let change occl c cls =
onClauses (change_option occl c) (adjust_clause occl cls)
(* For internal use (the User level is handled by [reduce]) *)
(* Specialized instances of the generic reduction tacticals above,
   one triple (concl / hyp / option) per reduction strategy. *)
let red_in_concl = reduct_in_concl (red_product,DEFAULTcast)
let red_in_hyp = reduct_in_hyp red_product
let red_option = reduct_option (red_product,DEFAULTcast)
let hnf_in_concl = reduct_in_concl (hnf_constr,DEFAULTcast)
let hnf_in_hyp = reduct_in_hyp hnf_constr
let hnf_option = reduct_option (hnf_constr,DEFAULTcast)
let simpl_in_concl = reduct_in_concl (simpl,DEFAULTcast)
let simpl_in_hyp = reduct_in_hyp simpl
let simpl_option = reduct_option (simpl,DEFAULTcast)
let normalise_in_concl = reduct_in_concl (compute,DEFAULTcast)
let normalise_in_hyp = reduct_in_hyp compute
let normalise_option = reduct_option (compute,DEFAULTcast)
(* Full normalisation by the virtual machine; uses a VM cast. *)
let normalise_vm_in_concl = reduct_in_concl (Redexpr.cbv_vm,VMcast)
let unfold_in_concl loccname = reduct_in_concl (unfoldn loccname,DEFAULTcast)
let unfold_in_hyp loccname = reduct_in_hyp (unfoldn loccname)
let unfold_option loccname = reduct_option (unfoldn loccname,DEFAULTcast)
let pattern_option l = reduct_option (pattern_occs l,DEFAULTcast)
(* A function which reduces accordingly to a reduction expression,
   as the command Eval does. *)
(* Decide whether the result of a reduction expression needs to be
   re-type-checked before being installed in the goal. *)
let checking_fun = function
(* Expansion is not necessarily well-typed: e.g. expansion of t into x is
   not well-typed in [H:(P t); x:=t |- G] because x is defined after H *)
| Fold _ -> with_check
| Pattern _ -> with_check
| _ -> (fun x -> x)
(* Interpret the user-level reduction expression [redexp] and apply it
   to the clause [cl]; Fold and Pattern results are re-type-checked. *)
let reduce redexp cl goal =
let red = Redexpr.reduction_of_red_expr redexp in
match redexp with
(Fold _|Pattern _) -> with_check (redin_combinator red cl) goal
| _ -> redin_combinator red cl goal
(* Unfolding occurrences of a constant *)
(* Unfold all occurrences of a global reference in the conclusion;
   only constants and section variables are unfoldable. *)
let unfold_constr r =
  match r with
  | ConstRef sp ->
      unfold_in_concl [all_occurrences,EvalConstRef sp]
  | VarRef id ->
      unfold_in_concl [all_occurrences,EvalVarRef id]
  | _ ->
      errorlabstrm "unfold_constr" (str "Cannot unfold a non-constant.")
(*******************************************)
(* Introduction tactics *)
(*******************************************)
(* Use [id] when the name is anonymous, otherwise keep the given name. *)
let id_of_name_with_default id = function
| Anonymous -> id
| Name id -> id
let hid = id_of_string "H"
let xid = id_of_string "X"
(* Default base name for an introduced hypothesis: "H" in Prop, "X" in Type. *)
let default_id_of_sort = function Prop _ -> hid | Type _ -> xid
(* Default identifier for introducing the given declaration: for a
   plain assumption, derive it from the sort of its type; for a
   let-binding, derive it from the head of the body. *)
let default_id env sigma = function
| (name,None,t) ->
let dft = default_id_of_sort (Typing.sort_of env sigma t) in
id_of_name_with_default dft name
| (name,Some b,_) -> id_of_name_using_hdchar env b name
(* Non primitive introduction tactics are treated here.
   There is possibly renaming, with possibly names to avoid and
   possibly a move to do after the introduction *)
(* How the name of an introduced hypothesis is chosen:
   avoid a list of names, base it on a given name, or require it exactly. *)
type intro_name_flag =
| IntroAvoid of identifier list
| IntroBasedOn of identifier * identifier list
| IntroMustBe of identifier
(* Compute the actual identifier for introducing [decl] according to
   the naming flag; fails if a mandatory name is already in use. *)
let find_name loc decl gl = function
| IntroAvoid idl ->
(* this case must be compatible with [find_intro_names] below. *)
let id = fresh_id idl (default_id (pf_env gl) gl.sigma decl) gl in id
| IntroBasedOn (id,idl) -> fresh_id idl id gl
| IntroMustBe id ->
let id' = fresh_id [] id gl in
if id'<>id then user_err_loc (loc,"",pr_id id ++ str" is already used.");
id'
(* Returns the names that would be created by intros, without doing
   intros.  This function is supposed to be compatible with an
   iteration of [find_name] above.  As [default_id] checks the sort of
   the type to build hyp names, we maintain an environment to be able
   to type dependent hyps. *)
(* Fold over [ctxt] right-to-left, generating one fresh name per
   declaration while extending the environment so dependent types
   remain typeable; returns the names in introduction order. *)
let find_intro_names ctxt gl =
let _, res = List.fold_right
(fun decl acc ->
let wantedname,x,typdecl = decl in
let env,idl = acc in
let name = fresh_id idl (default_id env gl.sigma decl) gl in
let newenv = push_rel (wantedname,x,typdecl) env in
(newenv,(name::idl)))
ctxt (pf_env gl , []) in
List.rev res
(* Introduce [id] and move it to the requested destination (no move
   needed when the destination is the default end position). *)
let build_intro_tac id = function
| MoveToEnd true -> introduction id
| dest -> tclTHEN (introduction id) (move_hyp true id dest)
(* Core introduction tactic: introduce the head product or let-in of
   the conclusion under the name dictated by [name_flag], moving it
   per [move_flag].  When [force_flag] is set and the conclusion is
   not a product, head-reduce it and retry. *)
let rec intro_gen loc name_flag move_flag force_flag gl =
match kind_of_term (pf_concl gl) with
| Prod (name,t,_) ->
build_intro_tac (find_name loc (name,None,t) gl name_flag) move_flag gl
| LetIn (name,b,t,_) ->
build_intro_tac (find_name loc (name,Some b,t) gl name_flag) move_flag
gl
| _ ->
if not force_flag then raise (RefinerError IntroNeedsProduct);
try
tclTHEN
(reduce (Red true) onConcl)
(intro_gen loc name_flag move_flag force_flag) gl
with Redelimination ->
user_err_loc(loc,"Intro",str "No product even after head-reduction.")
(* Common specializations of [intro_gen]. *)
let intro_mustbe_force id = intro_gen dloc (IntroMustBe id) no_move true
let intro_using id = intro_gen dloc (IntroBasedOn (id,[])) no_move false
let intro_force force_flag = intro_gen dloc (IntroAvoid []) no_move force_flag
let intro = intro_force false
let introf = intro_force true
let intro_avoiding l = intro_gen dloc (IntroAvoid l) no_move false
let introf_move_name destopt = intro_gen dloc (IntroAvoid []) destopt true
(**** Multiple introduction tactics ****)
(* Introduce one hypothesis per given name, in order. *)
let rec intros_using = function
| [] -> tclIDTAC
| str::l -> tclTHEN (intro_using str) (intros_using l)
(* Introduce as many hypotheses as possible (never forcing reduction). *)
let intros = tclREPEAT (intro_force false)
(* Reintroduce [id] after clearing its previous occurrence. *)
let intro_erasing id = tclTHEN (thin [id]) (introduction id)
(* Position just after [id] in the hypothesis list: before its
   successor, or at the end when [id] is last. *)
let rec get_next_hyp_position id = function
| [] -> error ("No such hypothesis: " ^ string_of_id id)
| (hyp,_,_) :: right ->
if hyp = id then
match right with (id,_,_)::_ -> MoveBefore id | [] -> MoveToEnd true
else
get_next_hyp_position id right
(* Like [Tacmach.thin] but translates clear-dependency failures into
   user-level "Cannot change ..." error messages. *)
let thin_for_replacing l gl =
try Tacmach.thin l gl
with Evarutil.ClearDependencyError (id,err) -> match err with
| Evarutil.OccurHypInSimpleClause None ->
errorlabstrm ""
(str "Cannot change " ++ pr_id id ++ str ", it is used in conclusion.")
| Evarutil.OccurHypInSimpleClause (Some id') ->
errorlabstrm ""
(str "Cannot change " ++ pr_id id ++
strbrk ", it is used in hypothesis " ++ pr_id id' ++ str".")
| Evarutil.EvarTypingBreak ev ->
errorlabstrm ""
(str "Cannot change " ++ pr_id id ++
strbrk " without breaking the typing of " ++
Printer.pr_existential (pf_env gl) ev ++ str".")
(* Clear [id], introduce it anew, and move it back to its old slot. *)
let intro_replacing id gl =
let next_hyp = get_next_hyp_position id (pf_hyps gl) in
tclTHENLIST
[thin_for_replacing [id]; introduction id; move_hyp true id next_hyp] gl
(* Replace each hypothesis in [ids], falling back to a plain
   introduction when replacement is impossible. *)
let intros_replacing ids gl =
let rec introrec = function
| [] -> tclIDTAC
| id::tl ->
tclTHEN (tclORELSE (intro_replacing id) (intro_using id))
(introrec tl)
in
introrec ids gl
(* User-level introduction tactics *)
(* User-level intro with an optional name and a destination. *)
let intro_move idopt hto = match idopt with
| None -> intro_gen dloc (IntroAvoid []) hto true
| Some id -> intro_gen dloc (IntroMustBe id) hto true
(* Look up a quantified hypothesis of the conclusion, either by index
   (non-dependent position) or by name. *)
let pf_lookup_hypothesis_as_renamed env ccl = function
| AnonHyp n -> pf_lookup_index_as_renamed env ccl n
| NamedHyp id -> pf_lookup_name_as_renamed env ccl id
(* Same lookup, but when [red] holds keep head-reducing the
   conclusion until the hypothesis is found (or reduction is stuck). *)
let pf_lookup_hypothesis_as_renamed_gen red h gl =
let env = pf_env gl in
let rec aux ccl =
match pf_lookup_hypothesis_as_renamed env ccl h with
| None when red ->
aux
((fst (Redexpr.reduction_of_red_expr (Red true)))
env (project gl) ccl)
| x -> x
in
try aux (pf_concl gl)
with Redelimination -> None
(* Does the conclusion quantify over a hypothesis named [id]? *)
let is_quantified_hypothesis id g =
match pf_lookup_hypothesis_as_renamed_gen true (NamedHyp id) g with
| Some _ -> true
| None -> false
(* Pretty description of a quantified-hypothesis reference, for errors. *)
let msg_quantified_hypothesis = function
| NamedHyp id ->
str "quantified hypothesis named " ++ pr_id id
| AnonHyp n ->
int n ++ str (match n with 1 -> "st" | 2 -> "nd" | _ -> "th") ++
str " non dependent hypothesis"
(* Number of introductions needed to bring the quantified hypothesis
   [h] to the head of the context; fails with a descriptive message. *)
let depth_of_quantified_hypothesis red h gl =
match pf_lookup_hypothesis_as_renamed_gen red h gl with
| Some depth -> depth
| None ->
errorlabstrm "lookup_quantified_hypothesis"
(str "No " ++ msg_quantified_hypothesis h ++
strbrk " in current goal" ++
(if red then strbrk " even after head-reduction" else mt ()) ++
str".")
(* Introduce until the quantified hypothesis [h] has been introduced. *)
let intros_until_gen red h g =
tclDO (depth_of_quantified_hypothesis red h g) intro g
let intros_until_id id = intros_until_gen true (NamedHyp id)
let intros_until_n_gen red n = intros_until_gen red (AnonHyp n)
let intros_until = intros_until_gen true
let intros_until_n = intros_until_n_gen true
let intros_until_n_wored = intros_until_n_gen false
(* Run [tac] on a hypothesis designated by name or by position,
   introducing it first if still quantified. *)
let try_intros_until tac = function
| NamedHyp id -> tclTHEN (tclTRY (intros_until_id id)) (tac id)
| AnonHyp n -> tclTHEN (intros_until_n n) (onLastHyp tac)
(* Introduce a list of named hypotheses, each at its destination. *)
let rec intros_move = function
| [] -> tclIDTAC
| (hyp,destopt) :: rest ->
tclTHEN (intro_gen dloc (IntroMustBe hyp) destopt false)
(intros_move rest)
(* Does [a] occur in the type or (if any) the body of a declaration? *)
let dependent_in_decl a (_,c,t) =
match c with
| None -> dependent a t
| Some body -> dependent a body || dependent a t
(* Apply a tactic on a quantified hypothesis, an hypothesis in context
or a term with bindings *)
(* Run [tac] on an induction argument: a term with bindings, an
   anonymous hypothesis position, or an identifier (which may still be
   quantified in the goal and is introduced first when possible). *)
let onInductionArg tac = function
| ElimOnConstr (c,lbindc as cbl) ->
if isVar c & lbindc = NoBindings then
tclTHEN (tclTRY (intros_until_id (destVar c))) (tac cbl)
else
tac cbl
| ElimOnAnonHyp n ->
tclTHEN (intros_until_n n) (tclLAST_HYP (fun c -> tac (c,NoBindings)))
| ElimOnIdent (_,id) ->
(*Identifier apart because id can be quantified in goal and not typable*)
tclTHEN (tclTRY (intros_until_id id)) (tac (mkVar id,NoBindings))
(**************************)
(* Refinement tactics *)
(**************************)
(* Refine the goal with a fresh meta cast to type [hdcty] applied to
   the arguments [argl]. *)
let apply_type hdcty argl gl =
refine (applist (mkCast (Evarutil.mk_new_meta(),DEFAULTcast, hdcty),argl)) gl
(* Refine the goal with the application of [hdc] to [argl]. *)
let apply_term hdc argl gl =
refine (applist (hdc,argl)) gl
(* Generalize the goal over the named hypotheses [hyps], then apply
   the result back to those hypotheses. *)
let bring_hyps hyps =
if hyps = [] then Refiner.tclIDTAC
else
(fun gl ->
let newcl = List.fold_right mkNamedProd_or_LetIn hyps (pf_concl gl) in
let f = mkCast (Evarutil.mk_new_meta(),DEFAULTcast, newcl) in
refine_no_check (mkApp (f, instance_from_named_context hyps)) gl)
(* Trigger typeclass resolution on the goal's pending evars, then
   normalize evars in the goal. *)
let resolve_classes gl =
let env = pf_env gl and evd = project gl in
if evd = Evd.empty then tclIDTAC gl
else
let evd' = Typeclasses.resolve_typeclasses env (Evd.create_evar_defs evd) in
(tclTHEN (tclEVARS (Evd.evars_of evd')) tclNORMEVAR) gl
(**************************)
(* Cut tactics *)
(**************************)
(* The cut tactic: split the goal into proving [c] and proving the
   goal under hypothesis [c].  Fails unless [c] is a proposition or
   a type (its type head-normalizes to a sort). *)
let cut c gl =
match kind_of_term (hnf_type_of gl c) with
| Sort _ ->
let id=next_name_away_with_default "H" Anonymous (pf_ids_of_hyps gl) in
let t = mkProd (Anonymous, c, pf_concl gl) in
tclTHENFIRST
(internal_cut_rev id c)
(tclTHEN (apply_type t [mkVar id]) (thin [id]))
gl
| _ -> error "Not a proposition or a type."
(* Cut then introduce the new hypothesis in the main subgoal. *)
let cut_intro t = tclTHENFIRST (cut t) intro
(* cut_replacing fails if the hypothesis to replace appears in the
   goal or in another hypothesis *)
let cut_replacing id t tac =
tclTHENLAST (internal_cut_rev_replace id t)
(tac (refine_no_check (mkVar id)))
(* Cut each statement of [l] in turn, producing one subgoal per cut
   plus the main goal. *)
let cut_in_parallel l =
let rec prec = function
| [] -> tclIDTAC
| h::t -> tclTHENFIRST (cut h) (prec t)
in
prec (List.rev l)
(* Report the first remaining meta of [t] as an uninstantiable
   variable error. *)
let error_uninstantiated_metas t clenv =
let na = meta_name clenv.evd (List.hd (Metaset.elements (metavars_of t))) in
let id = match na with Name id -> id | _ -> anomaly "unnamed dependent meta"
in errorlabstrm "" (str "Cannot find an instance for " ++ pr_id id ++ str".")
(* Replace hypothesis [id] with the type proved by [clenv], posing
   dependent metas as evars (and optionally resolving typeclasses).
   Without [with_evars], any remaining meta in the new type is an error. *)
let clenv_refine_in with_evars ?(with_classes=true) id clenv gl =
let clenv = clenv_pose_dependent_evars with_evars clenv in
let clenv =
if with_classes then
{ clenv with evd = Typeclasses.resolve_typeclasses ~fail:(not with_evars) clenv.env clenv.evd }
else clenv
in
let new_hyp_typ = clenv_type clenv in
if not with_evars & occur_meta new_hyp_typ then
error_uninstantiated_metas new_hyp_typ clenv;
let new_hyp_prf = clenv_value clenv in
tclTHEN
(tclEVARS (evars_of clenv.evd))
(cut_replacing id new_hyp_typ
(fun x gl -> refine_no_check new_hyp_prf gl)) gl
(********************************************)
(* Elimination tactics *)
(********************************************)
(* Last argument of an application; anomaly on any other term. *)
let last_arg c = match kind_of_term c with
| App (f,cl) ->
array_last cl
| _ -> anomaly "last_arg"
(* Unification flags used by elimination: convert on closed terms,
   use metas eagerly, no delta. *)
let elim_flags = {
modulo_conv_on_closed_terms = Some full_transparent_state;
use_metas_eagerly = true;
modulo_delta = empty_transparent_state;
}
(* Chain the induction clause into the scrutinee meta (the last
   argument) of the elimination clause, then resolve the goal. *)
let elimination_clause_scheme with_evars allow_K elimclause indclause gl =
let indmv =
(match kind_of_term (last_arg elimclause.templval.rebus) with
| Meta mv -> mv
| _ -> errorlabstrm "elimination_clause"
(str "The type of elimination clause is not well-formed."))
in
let elimclause' = clenv_fchain indmv elimclause indclause in
res_pf elimclause' ~with_evars:with_evars ~allow_K:allow_K ~flags:elim_flags
gl
(* cast added otherwise tactics Case (n1,n2) generates (?f x y) and
* refine fails *)
let type_clenv_binding wc (c,t) lbind =
clenv_type (make_clenv_binding wc (c,t) lbind)
(* Elimination tactic with bindings and using an arbitrary
 * elimination constant called elimc.  This constant should end
 * with a clause (x:I)(P .. ), where P is a bound variable.
 * The term c is of type t, which is a product ending with a type
 * matching I, lbindc are the expected terms for c arguments
 *)
(* Build the induction and elimination clauses from the scrutinee
   [(c,lbindc)] and the eliminator [(elimc,lbindelimc)], then hand
   them to [elimtac]. *)
let general_elim_clause elimtac (c,lbindc) (elimc,lbindelimc) gl =
let ct = pf_type_of gl c in
let t = try snd (pf_reduce_to_quantified_ind gl ct) with UserError _ -> ct in
let indclause = make_clenv_binding gl (c,t) lbindc in
let elimt = pf_type_of gl elimc in
let elimclause = make_clenv_binding gl (elimc,elimt) lbindelimc in
elimtac elimclause indclause gl
let general_elim with_evars c e ?(allow_K=true) =
general_elim_clause (elimination_clause_scheme with_evars allow_K) c e
(* Elimination tactic with bindings but using the default elimination
* constant associated with the type. *)
let find_eliminator c gl =
let (ind,t) = pf_reduce_to_quantified_ind gl (pf_type_of gl c) in
lookup_eliminator ind (elimination_sort_of_goal gl)
let default_elim with_evars (c,_ as cx) gl =
general_elim with_evars cx (find_eliminator c gl,NoBindings) gl
(* Eliminate with a user-given eliminator, or the default one. *)
let elim_in_context with_evars c = function
| Some elim -> general_elim with_evars c elim ~allow_K:true
| None -> default_elim with_evars c
(* User-level elim; a bare variable still quantified in the goal is
   introduced first. *)
let elim with_evars (c,lbindc as cx) elim =
match kind_of_term c with
| Var id when lbindc = NoBindings ->
tclTHEN (tclTRY (intros_until_id id))
(elim_in_context with_evars cx elim)
| _ -> elim_in_context with_evars cx elim
(* The simplest elimination tactic, with no substitutions at all. *)
let simplest_elim c = default_elim false (c,NoBindings)
(* Elimination in hypothesis *)
(* Typically, elimclause := (eq_ind ?x ?P ?H ?y ?Heq : ?P ?y)
   indclause : forall ..., hyps -> a=b    (to take place of ?Heq)
   id : phi(a)                            (to take place of ?H)
   and the result is to overwrite id with the proof of phi(b)

   but this generalizes to any elimination scheme with one constructor
   (e.g. it could replace id:A->B->C by id:C, knowing A/\B)
*)
(* Like [clenv_fchain] but decorates "no occurrence" failures with the
   hypothesis name for a better message. *)
let clenv_fchain_in id elim_flags mv elimclause hypclause =
try clenv_fchain ~allow_K:false ~flags:elim_flags mv elimclause hypclause
with PretypeError (env,NoOccurrenceFound (op,_)) ->
(* Set the hypothesis name in the message *)
raise (PretypeError (env,NoOccurrenceFound (op,Some id)))
(* Elimination in a hypothesis: chain the induction clause and the
   hypothesis clause into the eliminator's two independent metas,
   then replace [id] with the resulting statement. *)
let elimination_in_clause_scheme with_evars id elimclause indclause gl =
let (hypmv,indmv) =
match clenv_independent elimclause with
[k1;k2] -> (k1,k2)
| _ -> errorlabstrm "elimination_clause"
(str "The type of elimination clause is not well-formed.") in
let elimclause' = clenv_fchain indmv elimclause indclause in
let hyp = mkVar id in
let hyp_typ = pf_type_of gl hyp in
let hypclause = mk_clenv_from_n gl (Some 0) (hyp, hyp_typ) in
let elimclause'' =
clenv_fchain_in id elim_flags hypmv elimclause' hypclause in
let new_hyp_typ = clenv_type elimclause'' in
if eq_constr hyp_typ new_hyp_typ then
errorlabstrm "general_rewrite_in"
(str "Nothing to rewrite in " ++ pr_id id ++ str".");
clenv_refine_in with_evars id elimclause'' gl
let general_elim_in with_evars id =
general_elim_clause (elimination_in_clause_scheme with_evars id)
(* Case analysis tactics *)
(* Case analysis: build a (dependent when the scrutinee occurs in the
   conclusion) case scheme for the inductive type of [c] and eliminate
   with it. *)
let general_case_analysis_in_context with_evars (c,lbindc) gl =
let (mind,_) = pf_reduce_to_quantified_ind gl (pf_type_of gl c) in
let sort = elimination_sort_of_goal gl in
let case =
if occur_term c (pf_concl gl) then make_case_dep else make_case_gen in
let elim = pf_apply case gl mind sort in
general_elim with_evars (c,lbindc) (elim,NoBindings) gl
(* User-level case analysis; a bare variable still quantified in the
   goal is introduced first. *)
let general_case_analysis with_evars (c,lbindc as cx) =
match kind_of_term c with
| Var id when lbindc = NoBindings ->
tclTHEN (tclTRY (intros_until_id id))
(general_case_analysis_in_context with_evars cx)
| _ ->
general_case_analysis_in_context with_evars cx
let simplest_case c = general_case_analysis false (c,NoBindings)
(* Apply a tactic below the products of the conclusion of a lemma *)
(* When [c]'s type is a record-like (single-constructor) inductive,
   destruct it and try [tac] on each projected component, clearing the
   components afterwards; call [exit] on failure. *)
let descend_in_conjunctions with_evars tac exit c gl =
try
let (mind,t) = pf_reduce_to_quantified_ind gl (pf_type_of gl c) in
match match_with_record (snd (decompose_prod t)) with
| Some _ ->
let n = (mis_constr_nargs mind).(0) in
let sort = elimination_sort_of_goal gl in
let elim = pf_apply make_case_gen gl mind sort in
tclTHENLAST
(general_elim with_evars (c,NoBindings) (elim,NoBindings))
(tclTHENLIST [
tclDO n intro;
tclLAST_NHYPS n (fun l ->
tclFIRST
(List.map (fun id -> tclTHEN (tac (mkVar id)) (thin l)) l))])
gl
| None ->
raise Exit
with RefinerError _|UserError _|Exit -> exit ()
(****************************************************)
(* Resolution tactics *)
(****************************************************)
(* Resolution with missing arguments *)
(* Fail if [evm] contains evars that were not in the original goal's
   sigma and are still undefined in [sigma]. *)
let check_evars sigma evm gl =
let origsigma = gl.sigma in
let rest =
Evd.fold (fun ev evi acc ->
if not (Evd.mem origsigma ev) && not (Evd.is_defined sigma ev)
then Evd.add acc ev evi else acc)
evm Evd.empty
in
if rest <> Evd.empty then
errorlabstrm "apply" (str"Uninstantiated existential variables: " ++
fnl () ++ pr_evar_map rest)
(* Core apply tactic: unify the theorem's conclusion with the goal,
   head-reducing the theorem's type step by step on failure; as a
   last resort try second-order unification, and optionally descend
   into conjunction-like components ([with_destruct]). *)
let general_apply with_delta with_destruct with_evars (c,lbind) gl0 =
let flags =
if with_delta then default_unify_flags else default_no_delta_unify_flags in
(* The actual type of the theorem. It will be matched against the
goal. If this fails, then the head constant will be unfolded step by
step. *)
let concl_nprod = nb_prod (pf_concl gl0) in
let evm, c = c in
let rec try_main_apply c gl =
let thm_ty0 = nf_betaiota (project gl) (pf_type_of gl c) in
let try_apply thm_ty nprod =
let n = nb_prod thm_ty - nprod in
if n<0 then error "Applied theorem has not enough premisses.";
let clause = make_clenv_binding_apply gl (Some n) (c,thm_ty) lbind in
let res = Clenvtac.res_pf clause ~with_evars:with_evars ~flags:flags gl in
if not with_evars then check_evars (fst res).sigma evm gl0;
res
in
try try_apply thm_ty0 concl_nprod
with PretypeError _|RefinerError _|UserError _|Failure _ as exn ->
let rec try_red_apply thm_ty =
try
(* Try to head-reduce the conclusion of the theorem *)
let red_thm = try_red_product (pf_env gl) (project gl) thm_ty in
try try_apply red_thm concl_nprod
with PretypeError _|RefinerError _|UserError _|Failure _ ->
try_red_apply red_thm
with Redelimination ->
(* Last chance: if the head is a variable, apply may try
second order unification *)
try if concl_nprod <> 0 then try_apply thm_ty 0 else raise Exit
with PretypeError _|RefinerError _|UserError _|Failure _|Exit ->
if with_destruct then
descend_in_conjunctions with_evars
try_main_apply (fun _ -> raise exn) c gl
else
raise exn
in try_red_apply thm_ty0
in
if evm = Evd.empty then try_main_apply c gl0
else
tclTHEN (tclEVARS (Evd.merge gl0.sigma evm)) (try_main_apply c) gl0
(* Apply a sequence of terms-with-bindings, chaining each application
   onto the last subgoal of the previous one. *)
let rec apply_with_ebindings_gen b e = function
| [] ->
tclIDTAC
| [cb] ->
general_apply b b e cb
| cb::cbl ->
tclTHENLAST (general_apply b b e cb) (apply_with_ebindings_gen b e cbl)
(* Common apply entry points (with/without evars and bindings). *)
let apply_with_ebindings cb = apply_with_ebindings_gen false false [cb]
let eapply_with_ebindings cb = apply_with_ebindings_gen false true [cb]
let apply_with_bindings (c,bl) =
apply_with_ebindings (inj_open c,inj_ebindings bl)
let eapply_with_bindings (c,bl) =
apply_with_ebindings_gen false true [inj_open c,inj_ebindings bl]
let apply c =
apply_with_ebindings (inj_open c,NoBindings)
(* Apply head term with the remaining terms as implicit bindings. *)
let apply_list = function
| c::l -> apply_with_bindings (c,ImplicitBindings l)
| _ -> assert false
(* Resolution with no reduction on the type (used ?) *)
let apply_without_reduce c gl =
let clause = mk_clenv_type_of gl c in
res_pf clause gl
(* [apply_in hyp c] replaces

   hyp : forall y1, ti -> t             hyp : rho(u)
   ========================    with     ============  and the  =======
   goal                                 goal                   rho(ti)

   assuming that [c] has type [forall x1..xn -> t' -> u] for some [t]
   unifiable with [t'] with unifier [rho]
*)
(* Keep extending [clause] with fresh product premises until [unifier]
   succeeds on it; fail once the clause cannot be extended further. *)
let find_matching_clause unifier clause =
let rec find clause =
try unifier clause
with exn when catchable_exception exn ->
try find (clenv_push_prod clause)
with NotExtensibleClause -> failwith "Cannot apply"
in find clause
(* Try to chain [innerclause] into each independent meta of [clause]
   (last ones first) until one succeeds. *)
let progress_with_clause flags innerclause clause =
let ordered_metas = List.rev (clenv_independent clause) in
if ordered_metas = [] then error "Statement without assumptions.";
let f mv =
find_matching_clause (clenv_fchain mv ~flags clause) innerclause in
try list_try_find f ordered_metas
with Failure _ -> error "Unable to unify."
(* Apply [d] (with bindings) inside [innerclause], extending [d]'s
   clause with extra premises on failure. *)
let apply_in_once_main flags innerclause (d,lbind) gl =
let thm = nf_betaiota gl.sigma (pf_type_of gl d) in
let rec aux clause =
try progress_with_clause flags innerclause clause
with err ->
try aux (clenv_push_prod clause)
with NotExtensibleClause -> raise err in
aux (make_clenv_binding gl (d,thm) lbind)
(* Apply [d] in hypothesis [id], replacing it with the conclusion;
   optionally destructing conjunction-like components on failure. *)
let apply_in_once with_delta with_destruct with_evars id ((sigma,d),lbind) gl0 =
let flags =
if with_delta then default_unify_flags else default_no_delta_unify_flags in
let t' = pf_get_hyp_typ gl0 id in
let innerclause = mk_clenv_from_n gl0 (Some 0) (mkVar id,t') in
let rec aux c gl =
try
let clause = apply_in_once_main flags innerclause (c,lbind) gl in
let res = clenv_refine_in with_evars id clause gl in
if not with_evars then check_evars (fst res).sigma sigma gl0;
res
with exn when with_destruct ->
descend_in_conjunctions true aux (fun _ -> raise exn) c gl
in
if sigma = Evd.empty then aux d gl0
else
tclTHEN (tclEVARS (Evd.merge gl0.sigma sigma)) (aux d) gl0
(* A useful resolution tactic which, if c:A->B, transforms |- C into
   |- B -> C and |- A

   -------------------
   Gamma |- c : A -> B       Gamma |- ?2 : A
   ----------------------------------------
           Gamma |- B            Gamma |- ?1 : B -> C
   -----------------------------------------------------
                   Gamma |- ? : C

   Ltac lapply c :=
     let ty := check c in
     match eval hnf in ty with
     | ?A -> ?B => cut B; [ idtac | apply c ]
     end.
*)
(* The lapply tactic (see diagram above): requires [c] to have a
   non-dependent product type A -> B; yields subgoals B -> C and A. *)
let cut_and_apply c gl =
let goal_constr = pf_concl gl in
match kind_of_term (pf_hnf_constr gl (pf_type_of gl c)) with
| Prod (_,c1,c2) when not (dependent (mkRel 1) c2) ->
tclTHENLAST
(apply_type (mkProd (Anonymous,c2,goal_constr)) [mkMeta(new_meta())])
(apply_term c [mkMeta (new_meta())]) gl
| _ -> error "lapply needs a non-dependent product."
(********************************************************************)
(* Exact tactics *)
(********************************************************************)
(* Close the goal with [c] after checking that its type is
   (cumulatively) convertible to the conclusion. *)
let exact_check c gl =
let concl = (pf_concl gl) in
let ct = pf_type_of gl c in
if pf_conv_x_leq gl ct concl then
refine_no_check c gl
else
error "Not an exact proof."
let exact_no_check = refine_no_check
(* Close the goal with [c] under a VM cast (checked by the virtual
   machine at Qed time). *)
let vm_cast_no_check c gl =
let concl = pf_concl gl in
refine_no_check (Term.mkCast(c,Term.VMcast,concl)) gl
(* Interpret a raw term against the conclusion, then refine with it. *)
let exact_proof c gl =
(* we experiment with synthesis of existentials in exact *)
let c = Constrintern.interp_casted_constr (project gl) (pf_env gl) c (pf_concl gl)
in refine_no_check c gl
(* Close the goal with a hypothesis: first pass looks for a
   syntactically equal type (cheap), second pass for a convertible one. *)
let (assumption : tactic) = fun gl ->
let concl = pf_concl gl in
let hyps = pf_hyps gl in
let rec arec only_eq = function
| [] ->
if only_eq then arec false hyps else error "No such assumption."
| (id,c,t)::rest ->
if (only_eq & eq_constr t concl)
or (not only_eq & pf_conv_x_leq gl t concl)
then refine_no_check (mkVar id) gl
else arec only_eq rest
in
arec true hyps
(*****************************************************************)
(* Modification of a local context *)
(*****************************************************************)
(* This tactic enables the user to remove hypotheses from the signature.
* Some care is taken to prevent him from removing variables that are
* subsequently used in other hypotheses or in the conclusion of the
* goal. *)
(* Remove a list of hypotheses from the context; the empty list is a
   no-op.  (Historical note: formerly, dyn_clear alone did not fail
   on the empty list.) *)
let clear ids = if ids=[] then tclIDTAC else thin ids

let clear_body = thin_body
(* Clear the located wildcard hypotheses [ids], re-raising dependency
   failures at the wildcard's source location with a "_" name. *)
let clear_wildcards ids =
tclMAP (fun (loc,id) gl ->
try with_check (Tacmach.thin_no_check [id]) gl
with ClearDependencyError (id,err) ->
(* Intercept standard [thin] error message *)
Stdpp.raise_with_loc loc
(error_clear_dependency (pf_env gl) (id_of_string "_") err))
ids
(* Takes a list of booleans, and introduces all the variables
* quantified in the goal which are associated with a value
* true in the boolean list. *)
(* Introduce one quantified variable per boolean in the list; a [true]
   entry additionally clears the freshly introduced hypothesis. *)
let rec intros_clearing flags =
  match flags with
  | [] -> tclIDTAC
  | b :: rest ->
      if b then
        tclTHENLIST
          [ intro; onLastHyp (fun id -> clear [id]); intros_clearing rest]
      else
        tclTHEN intro (intros_clearing rest)
(* Modifying/Adding an hypothesis *)
(* Specialize a term (usually a hypothesis) with the given bindings,
   keeping at most [mopt] arguments (or all meta-free leading ones).
   If the head is a hypothesis, its statement is replaced in place;
   otherwise the specialized statement is cut into the context. *)
let specialize mopt (c,lbind) g =
let evars, term =
if lbind = NoBindings then None, c
else
let clause = make_clenv_binding g (c,pf_type_of g c) lbind in
let clause = clenv_unify_meta_types clause in
let (thd,tstack) =
whd_stack (evars_of clause.evd) (clenv_value clause) in
let nargs = List.length tstack in
let tstack = match mopt with
| Some m ->
if m < nargs then list_firstn m tstack else tstack
| None ->
(* keep the longest meta-free prefix of the arguments *)
let rec chk = function
| [] -> []
| t::l -> if occur_meta t then [] else t :: chk l
in chk tstack
in
let term = applist(thd,tstack) in
if occur_meta term then
errorlabstrm "" (str "Cannot infer an instance for " ++
pr_name (meta_name clause.evd (List.hd (collect_metas term))) ++
str ".");
Some (evars_of clause.evd), term
in
tclTHEN
(match evars with Some e -> tclEVARS e | _ -> tclIDTAC)
(match kind_of_term (fst(decompose_app (snd(decompose_lam_assum c)))) with
| Var id when List.mem id (pf_ids_of_hyps g) ->
tclTHENFIRST
(fun g -> internal_cut_replace id (pf_type_of g term) g)
(exact_no_check term)
| _ -> tclTHENLAST
(fun g -> cut (pf_type_of g term) g)
(exact_no_check term))
g
(* Keeping only a few hypotheses *)
(* Clear every hypothesis except those in [hyps], those the kept ones
   depend on, and those occurring in the conclusion. *)
let keep hyps gl =
let env = Global.env() in
let ccl = pf_concl gl in
let cl,_ =
fold_named_context_reverse (fun (clear,keep) (hyp,_,_ as decl) ->
if List.mem hyp hyps
or List.exists (occur_var_in_decl env hyp) keep
or occur_var env hyp ccl
then (clear,decl::keep)
else (hyp::clear,keep))
~init:([],[]) (pf_env gl)
in thin cl gl
(************************)
(* Introduction tactics *)
(************************)
(* Validate a 1-based constructor index [i] against the actual number
   of constructors, and an optional expected count. *)
let check_number_of_constructors expctdnumopt i nconstr =
if i=0 then error "The constructors are numbered starting from 1.";
begin match expctdnumopt with
| Some n when n <> nconstr ->
error ("Not an inductive goal with "^
string_of_int n^plural n " constructor"^".")
| _ -> ()
end;
if i > nconstr then error "Not enough constructors."
(* Apply the [i]-th constructor of the goal's inductive type, with
   bindings [lbind], after converting the conclusion and introducing. *)
let constructor_tac with_evars expctdnumopt i lbind gl =
let cl = pf_concl gl in
let (mind,redcl) = pf_reduce_to_quantified_ind gl cl in
let nconstr =
Array.length (snd (Global.lookup_inductive mind)).mind_consnames in
check_number_of_constructors expctdnumopt i nconstr;
let cons = mkConstruct (ith_constructor_of_inductive mind i) in
let apply_tac = general_apply true false with_evars (inj_open cons,lbind) in
(tclTHENLIST
[convert_concl_no_check redcl DEFAULTcast; intros; apply_tac]) gl
let one_constructor i = constructor_tac false None i
(* Try to apply the constructor of the inductive definition followed by
   a tactic t given as an argument.
   Should be generalize in Constructor (Fun c : I -> tactic)
*)
(* Try each constructor of the goal's inductive type in turn, followed
   by the optional continuation tactic; succeed with the first one. *)
let any_constructor with_evars tacopt gl =
let t = match tacopt with None -> tclIDTAC | Some t -> t in
let mind = fst (pf_reduce_to_quantified_ind gl (pf_concl gl)) in
let nconstr =
Array.length (snd (Global.lookup_inductive mind)).mind_consnames in
if nconstr = 0 then error "The type has no constructors.";
tclFIRST
(List.map
(fun i -> tclTHEN (constructor_tac with_evars None i NoBindings) t)
(interval 1 nconstr)) gl
(* left/right expect a 2-constructor type; split a 1-constructor type. *)
let left_with_ebindings with_evars = constructor_tac with_evars (Some 2) 1
let right_with_ebindings with_evars = constructor_tac with_evars (Some 2) 2
let split_with_ebindings with_evars = constructor_tac with_evars (Some 1) 1
let left l = left_with_ebindings false (inj_ebindings l)
let simplest_left = left NoBindings
let right l = right_with_ebindings false (inj_ebindings l)
let simplest_right = right NoBindings
let split l = split_with_ebindings false (inj_ebindings l)
let simplest_split = split NoBindings
(*****************************)
(* Decomposing introductions *)
(*****************************)
(* Forward reference to the rewrite tactic, filled in later (by the
   equality module) to break a dependency cycle. *)
let forward_general_multi_rewrite =
ref (fun _ -> failwith "general_multi_rewrite undefined")
let register_general_multi_rewrite f =
forward_general_multi_rewrite := f
(* Error for a branch of an intro pattern that received more
   sub-patterns than the constructor has arguments ([nb]). *)
let error_unexpected_extra_pattern loc nb pat =
let s1,s2,s3 = match pat with
| IntroIdentifier _ -> "name", (plural nb " introduction pattern"), "no"
| _ -> "introduction pattern", "", "none" in
user_err_loc (loc,"",str "Unexpected " ++ str s1 ++ str " (" ++
(if nb = 0 then (str s3 ++ str s2) else
(str "at most " ++ int nb ++ str s2)) ++ spc () ++
str (if nb = 1 then "was" else "were") ++
strbrk " expected in the branch).")
(* Interpret a disjunctive/conjunctive intro-pattern [ll] against
   hypothesis [id]: case-split on [id], then run [tac] in each branch on
   that branch's name list, padded/truncated to the constructor arity.
   [b] tells whether the pattern was bracketed; [l'] is the remainder of
   the enclosing pattern list, appended to every branch. *)
let intro_or_and_pattern loc b ll l' tac id gl =
  let c = mkVar id in
  let ind,_ = pf_reduce_to_quantified_ind gl (pf_type_of gl c) in
  let nv = mis_constr_nargs ind in
  let bracketed = b or not (l'=[]) in
  (* Pad with anonymous patterns, or reject extra ones if bracketed *)
  let rec adjust_names_length nb n = function
    | [] when n = 0 or not bracketed -> []
    | [] -> (dloc,IntroAnonymous) :: adjust_names_length nb (n-1) []
    | (loc',pat) :: _ as l when n = 0 ->
	if bracketed then error_unexpected_extra_pattern loc' nb pat;
	l
    | ip :: l -> ip :: adjust_names_length nb (n-1) l in
  let ll = fix_empty_or_and_pattern (Array.length nv) ll in
  check_or_and_pattern_size loc ll (Array.length nv);
  tclTHENLASTn
    (tclTHEN (simplest_case c) (clear [id]))
    (array_map2 (fun n l -> tac ((adjust_names_length n n l)@l'))
      nv (Array.of_list ll))
    gl
(* Rewrite with the equality hypothesis [id] in direction [l2r], then
   clear [id] (and the rewritten variable when it was substituted away).
   Fix: the TODO comment below had lost its (* *) delimiters, leaving the
   bare words in code position. *)
let rewrite_hyp l2r id gl =
  let rew_on l2r =
    !forward_general_multi_rewrite l2r false (inj_open (mkVar id),NoBindings) in
  let clear_var_and_eq c =
    tclTRY (tclTHEN (clear [id]) (tclTRY (clear [destVar c]))) in
  let t = pf_whd_betadeltaiota gl (pf_type_of gl (mkVar id)) in
  (* TODO: detect equality? better detect the different equalities *)
  match match_with_equality_type t with
  | Some (hdcncl,[_;lhs;rhs]) ->
      (* Binary equality: substitute everywhere when one side is a
         variable not occurring in the other side *)
      if l2r & isVar lhs & not (occur_var (pf_env gl) (destVar lhs) rhs) then
        tclTHEN (rew_on l2r allClauses) (clear_var_and_eq lhs) gl
      else if not l2r & isVar rhs & not (occur_var (pf_env gl) (destVar rhs) lhs) then
        tclTHEN (rew_on l2r allClauses) (clear_var_and_eq rhs) gl
      else
        tclTHEN (rew_on l2r onConcl) (tclTRY (clear [id])) gl
  | Some (hdcncl,[c]) ->
      let l2r = not l2r in (* equality of the form eq_true *)
      if isVar c then
        tclTHEN (rew_on l2r allClauses) (clear_var_and_eq c) gl
      else
        tclTHEN (rew_on l2r onConcl) (tclTRY (clear [id])) gl
  | _ ->
      error "Cannot find a known equation."
(* Collect the identifiers explicitly bound by an intro-pattern list
   (used to avoid clashes when generating fresh names). *)
let rec explicit_intro_names = function
  | (_, IntroIdentifier id) :: l ->
      id :: explicit_intro_names l
  | (_, (IntroWildcard | IntroAnonymous | IntroFresh _ | IntroRewrite _)) :: l ->
      explicit_intro_names l
  | (_, IntroOrAndPattern ll) :: l' ->
      (* Each branch is followed by the continuation patterns [l'] *)
      List.flatten (List.map (fun l -> explicit_intro_names (l@l')) ll)
  | [] ->
      []
(* We delay thinning until the completion of the whole intros tactic
to ensure that dependent hypotheses are cleared in the right
dependency order (see bug #1000); we use fresh names, not used in
the tactic, for the hyps to clear *)
(* Interpret the intro-pattern list in order.  [b] is threaded to nested
   or/and patterns, [avoid] accumulates names to avoid, [thin] the
   wildcard-bound hypotheses to clear at the very end (so dependent hyps
   are removed in the right order), and [destopt] the position where
   introduced hypotheses go. *)
let rec intros_patterns b avoid thin destopt = function
  | (loc, IntroWildcard) :: l ->
      tclTHEN
        (intro_gen loc (IntroAvoid(avoid@explicit_intro_names l)) no_move true)
        (onLastHyp (fun id ->
          (* Try to clear immediately; otherwise record for later *)
          tclORELSE
            (tclTHEN (clear [id]) (intros_patterns b avoid thin destopt l))
            (intros_patterns b avoid ((loc,id)::thin) destopt l)))
  | (loc, IntroIdentifier id) :: l ->
      tclTHEN
        (intro_gen loc (IntroMustBe id) destopt true)
        (intros_patterns b avoid thin destopt l)
  | (loc, IntroAnonymous) :: l ->
      tclTHEN
        (intro_gen loc (IntroAvoid (avoid@explicit_intro_names l))
          destopt true)
        (intros_patterns b avoid thin destopt l)
  | (loc, IntroFresh id) :: l ->
      tclTHEN
        (intro_gen loc (IntroBasedOn (id, avoid@explicit_intro_names l))
          destopt true)
        (intros_patterns b avoid thin destopt l)
  | (loc, IntroOrAndPattern ll) :: l' ->
      tclTHEN
        introf
        (onLastHyp
          (intro_or_and_pattern loc b ll l'
            (intros_patterns b avoid thin destopt)))
  | (loc, IntroRewrite l2r) :: l ->
      tclTHEN
        (intro_gen loc (IntroAvoid(avoid@explicit_intro_names l)) no_move true)
        (onLastHyp (fun id ->
          tclTHEN
            (rewrite_hyp l2r id)
            (intros_patterns b avoid thin destopt l)))
  | [] -> clear_wildcards thin
(* Entry points for interpreting introduction patterns. *)
let intros_pattern = intros_patterns false [] []

let intro_pattern destopt pat = intros_patterns false [] [] destopt [dloc,pat]

(* An empty pattern list means "intro everything". *)
let intro_patterns = function
  | [] -> tclREPEAT intro
  | l -> intros_pattern no_move l
(**************************)
(* Other cut tactics *)
(**************************)
(* Fresh hypothesis name derived from the sort [s] of the statement. *)
let make_id s = fresh_id [] (default_id_of_sort s)

(* From an optional intro-pattern, compute the name under which to assert
   a statement of sort [s], together with the tactic that applies the
   rest of the pattern to that hypothesis afterwards. *)
let prepare_intros s ipat gl = match ipat with
  | None -> make_id s gl, tclIDTAC
  | Some (loc,ipat) -> match ipat with
    | IntroIdentifier id -> id, tclIDTAC
    | IntroAnonymous -> make_id s gl, tclIDTAC
    | IntroFresh id -> fresh_id [] id gl, tclIDTAC
    | IntroWildcard -> let id = make_id s gl in id, clear_wildcards [dloc,id]
    | IntroRewrite l2r ->
	let id = make_id s gl in
	id, !forward_general_multi_rewrite l2r false (inj_open (mkVar id),NoBindings) allClauses
    | IntroOrAndPattern ll -> make_id s gl,
	onLastHyp
	  (intro_or_and_pattern loc true ll []
	    (intros_patterns true [] [] no_move))
(* Turn a binder name into an optional introduction pattern. *)
let ipat_of_name = function
  | Anonymous -> None
  | Name id -> Some (dloc, IntroIdentifier id)

(* Whether the asserted term may replace the hypothesis it names. *)
let allow_replace c gl = function (* A rather arbitrary condition... *)
  | Some (_, IntroIdentifier id) ->
      fst (decompose_app (snd (decompose_lam_assum c))) = mkVar id
  | _ ->
      false
(* Assert [c] (which must be a proposition or a type) and introduce it
   according to [ipat]; [first] chooses whether the subgoal proving [c]
   comes first or last. *)
let assert_as first ipat c gl =
  match kind_of_term (hnf_type_of gl c) with
  | Sort s ->
      let id,tac = prepare_intros s ipat gl in
      let repl = allow_replace c gl ipat in
      tclTHENS
	((if first then internal_cut_gen else internal_cut_rev_gen) repl id c)
	(if first then [tclIDTAC; tac] else [tac; tclIDTAC]) gl
  | _ -> error "Not a proposition or a type."

let assert_tac na = assert_as true (ipat_of_name na)
(* apply in as *)
(* Apply the post-"as" intro-pattern [ipat] to hypothesis [id]; only
   rewrite and disjunctive/conjunctive patterns are meaningful here. *)
let as_tac id ipat = match ipat with
  | Some (loc,IntroRewrite l2r) ->
      !forward_general_multi_rewrite l2r false (inj_open (mkVar id),NoBindings) allClauses
  | Some (loc,IntroOrAndPattern ll) ->
      intro_or_and_pattern loc true ll [] (intros_patterns true [] [] no_move)
	id
  | Some (loc,
      (IntroIdentifier _ | IntroAnonymous | IntroFresh _ | IntroWildcard)) ->
      user_err_loc (loc,"", str "Disjunctive/conjunctive pattern expected")
  | None -> tclIDTAC

(* "apply ... in id as ipat": apply each lemma in [id] in turn, then
   interpret the pattern on the result. *)
let general_apply_in with_delta with_destruct with_evars id lemmas ipat gl =
  tclTHEN
    (tclMAP (apply_in_once with_delta with_destruct with_evars id) lemmas)
    (as_tac id ipat)
    gl

let apply_in simple with_evars = general_apply_in simple simple with_evars
(**************************)
(*   Generalize tactics   *)
(**************************)
(* Choose the binder name for a generalized term [c] of type [t]: a
   user-given name must not already be in [ids]; otherwise keep a
   variable's own name, or derive one from the head of [t].
   Fix: the explanatory comment below had lost its (* *) delimiters
   (and was duplicated), leaving bare words in code position. *)
let generalized_name c t ids cl = function
  | Name id as na ->
      if List.mem id ids then
	errorlabstrm "" (pr_id id ++ str " is already used");
      na
  | Anonymous ->
      match kind_of_term c with
      | Var id ->
	  (* Keep the name even if not occurring: may be used by intros later *)
	  Name id
      | _ ->
	  if noccurn 1 cl then Anonymous else
	    (* We used not to bother: the variable was simply named after
	       the first letter of its type, even when [c] was a constant
	       whose name could have been used directly *)
	    named_hd (Global.env()) t Anonymous
(* Build the product generalizing the [i]-th term [c] (at occurrences
   [occs], under name [na]) over the partially generalized conclusion
   [cl], whose first [i] binders come from previous generalizations. *)
let generalize_goal gl i ((occs,c),na) cl =
  let t = pf_type_of gl c in
  let decls,cl = decompose_prod_n_assum i cl in
  (* Abstract [c] inside the [i] binders already produced *)
  let dummy_prod = it_mkProd_or_LetIn mkProp decls in
  let newdecls,_ = decompose_prod_n_assum i (subst_term c dummy_prod) in
  let cl' = subst_term_occ occs c (it_mkProd_or_LetIn cl newdecls) in
  let na = generalized_name c t (pf_ids_of_hyps gl) cl' na in
  mkProd (na,t,cl')
(* Generalize [c] together with every hypothesis depending on it
   (directly or transitively), clearing the generalized hypotheses
   afterwards — except those coming from the global section context. *)
let generalize_dep c gl =
  let env = pf_env gl in
  let sign = pf_hyps gl in
  let init_ids = ids_of_named_context (Global.named_context()) in
  (* Gather the declarations to quantify, newest first *)
  let rec seek d toquant =
    if List.exists (fun (id,_,_) -> occur_var_in_decl env id d) toquant
      or dependent_in_decl c d then
      d::toquant
    else
      toquant in
  let to_quantify = Sign.fold_named_context seek sign ~init:[] in
  let to_quantify_rev = List.rev to_quantify in
  let qhyps = List.map (fun (id,_,_) -> id) to_quantify_rev in
  (* Never clear section hypotheses *)
  let tothin = List.filter (fun id -> not (List.mem id init_ids)) qhyps in
  let tothin' =
    match kind_of_term c with
    | Var id when mem_named_context id sign & not (List.mem id init_ids)
	-> id::tothin
    | _ -> tothin
  in
  let cl' = it_mkNamedProd_or_LetIn (pf_concl gl) to_quantify in
  let cl'' = generalize_goal gl 0 ((all_occurrences,c),Anonymous) cl' in
  let args = Array.to_list (instance_from_named_context to_quantify_rev) in
  tclTHEN
    (apply_type cl'' (c::args))
    (thin (List.rev tothin'))
    gl
(* Generalize the goal over each [((occurrences,term),name)] of
   [lconstr], then re-apply the generalized statement to the terms. *)
let generalize_gen lconstr gl =
  let newcl =
    list_fold_right_i (generalize_goal gl) 0 lconstr (pf_concl gl) in
  apply_type newcl (List.map (fun ((_,c),_) -> c) lconstr) gl
(* Generalize the goal over each term of [l], at all occurrences and
   with anonymous binders. *)
let generalize l =
  let as_arg c = ((all_occurrences,c),Anonymous) in
  generalize_gen (List.map as_arg l)

(* Generalize the goal over [hyps], then remove them from the context. *)
let revert hyps gl =
  let terms = List.map mkVar hyps in
  tclTHEN (generalize terms) (clear hyps) gl
(* Faudra-t-il une version avec plusieurs args de generalize_dep ?
Cela peut-être troublant de faire "Generalize Dependent H n" dans
"n:nat; H:n=n |- P(n)" et d'échouer parce que H a disparu après la
généralisation dépendante par n.
let quantify lconstr =
List.fold_right
(fun com tac -> tclTHEN tac (tactic_com generalize_dep c))
lconstr
tclIDTAC
*)
(* A dependent cut rule à la sequent calculus
   ------------------------------------------
Sera simplifiable le jour où il y aura un let in primitif dans constr
[letin_tac b na c (occ_hyp,occ_ccl) gl] transforms
[...x1:T1(c),...,x2:T2(c),... |- G(c)] into
[...x:T;Heqx:(x=c);x1:T1(x),...,x2:T2(x),... |- G(x)] if [b] is false or
[...x:=c:T;x1:T1(x),...,x2:T2(x),... |- G(x)] if [b] is true
[occ_hyp,occ_ccl] tells which occurrences of [c] have to be substituted;
if [occ_hyp = []] and [occ_ccl = None] then [c] is substituted
wherever it occurs, otherwise [c] is substituted only in hyps
present in [occ_hyps] at the specified occurrences (everywhere if
the list of occurrences is empty), and in the goal at the specified
occurrences if [occ_goal] is not [None];
if name = Anonymous, the name is build from the first letter of the type;
The tactic first quantify the goal over x1, x2,... then substitute then
re-intro x1, x2,... at their initial place ([marks] is internally
used to remember the place of x1, x2, ...: it is the list of hypotheses on
the left of each x1, ...).
*)
(* Which occurrences of hypothesis [id] does clause [cls] select?
   [None] in [cls.onhyps] means "all hypotheses, all occurrences". *)
let occurrences_of_hyp id cls =
  let rec hyp_occ = function
      [] -> None
    | (((b,occs),id'),hl)::_ when id=id' -> Some ((b,List.map out_arg occs),hl)
    | _::l -> hyp_occ l in
  match cls.onhyps with
      None -> Some (all_occurrences,InHyp)
    | Some l -> hyp_occ l

(* Which occurrences of the conclusion does clause [cls] select? *)
let occurrences_of_goal cls =
  if cls.concl_occs = no_occurrences_expr then None
  else Some (on_snd (List.map out_arg) cls.concl_occs)

(* Whether [cls] ranges over every hypothesis (no explicit list). *)
let in_every_hyp cls = (cls.onhyps=None)
(*
(* Implementation with generalisation then re-intro: introduces noise *)
(* in proofs *)
let letin_abstract id c occs gl =
let env = pf_env gl in
let compute_dependency _ (hyp,_,_ as d) ctxt =
let d' =
try
match occurrences_of_hyp hyp occs with
| None -> raise Not_found
| Some occ ->
let newdecl = subst_term_occ_decl occ c d in
if occ = [] & d = newdecl then
if not (in_every_hyp occs)
then raise (RefinerError (DoesNotOccurIn (c,hyp)))
else raise Not_found
else
(subst1_named_decl (mkVar id) newdecl, true)
with Not_found ->
(d,List.exists
(fun ((id,_,_),dep) -> dep && occur_var_in_decl env id d) ctxt)
in d'::ctxt
in
let ctxt' = fold_named_context compute_dependency env ~init:[] in
let compute_marks ((depdecls,marks as accu),lhyp) ((hyp,_,_) as d,b) =
if b then ((d::depdecls,(hyp,lhyp)::marks), lhyp)
else (accu, Some hyp) in
let (depdecls,marks),_ = List.fold_left compute_marks (([],[]),None) ctxt' in
let ccl = match occurrences_of_goal occs with
| None -> pf_concl gl
| Some occ -> subst1 (mkVar id) (subst_term_occ occ c (pf_concl gl))
in
(depdecls,marks,ccl)
let letin_tac with_eq name c occs gl =
let x = id_of_name_using_hdchar (Global.env()) (pf_type_of gl c) name in
let id =
if name = Anonymous then fresh_id [] x gl else
if not (mem_named_context x (pf_hyps gl)) then x else
error ("The variable "^(string_of_id x)^" is already declared") in
let (depdecls,marks,ccl)= letin_abstract id c occs gl in
let t = pf_type_of gl c in
let tmpcl = List.fold_right mkNamedProd_or_LetIn depdecls ccl in
let args = Array.to_list (instance_from_named_context depdecls) in
let newcl = mkNamedLetIn id c t tmpcl in
let lastlhyp = if marks=[] then None else snd (List.hd marks) in
tclTHENLIST
[ apply_type newcl args;
thin (List.map (fun (id,_,_) -> id) depdecls);
intro_gen (IntroMustBe id) lastlhyp false;
if with_eq then tclIDTAC else thin_body [id];
intros_move marks ] gl
*)
(* Implementation without generalisation: abbrev will be lost in hyps in *)
(* in the extracted proof *)
(* Abstract [c] as the variable [id] at the occurrences selected by
   [occs], in the hypotheses and the conclusion.  Returns the modified
   dependent declarations, the position after which [id] must be
   introduced, and the new conclusion.  [check_occs] controls whether a
   selected hypothesis in which [c] does not occur is an error. *)
let letin_abstract id c (occs,check_occs) gl =
  let env = pf_env gl in
  let compute_dependency _ (hyp,_,_ as d) depdecls =
    match occurrences_of_hyp hyp occs with
      | None -> depdecls
      | Some occ ->
          let newdecl = subst_term_occ_decl occ c d in
          (* Unchanged declaration: either an error or nothing to do *)
          if occ = (all_occurrences,InHyp) & d = newdecl then
	    if check_occs & not (in_every_hyp occs)
	    then raise (RefinerError (DoesNotOccurIn (c,hyp)))
	    else depdecls
          else
	    (subst1_named_decl (mkVar id) newdecl)::depdecls in
  let depdecls = fold_named_context compute_dependency env ~init:[] in
  let ccl = match occurrences_of_goal occs with
    | None -> pf_concl gl
    | Some occ -> subst1 (mkVar id) (subst_term_occ occ c (pf_concl gl)) in
  let lastlhyp =
    if depdecls = [] then no_move else MoveAfter(pi1(list_last depdecls)) in
  (depdecls,lastlhyp,ccl)
(* Core of "set"/"remember": let-bind [c] (of type [ty], or its inferred
   type) as a fresh variable named after [name], at the occurrences
   [occs].  [with_eq] optionally adds an equation between the variable
   and [c]: its boolean gives the orientation, its intro-pattern the
   equation's name. *)
let letin_tac_gen with_eq name c ty occs gl =
  let id =
    let x = id_of_name_using_hdchar (Global.env()) (pf_type_of gl c) name in
    if name = Anonymous then fresh_id [] x gl else
      if not (mem_named_context x (pf_hyps gl)) then x else
	error ("The variable "^(string_of_id x)^" is already declared.") in
  let (depdecls,lastlhyp,ccl)= letin_abstract id c occs gl in
  let t = match ty with Some t -> t | None -> pf_type_of gl c in
  let newcl,eq_tac = match with_eq with
    | Some (lr,(loc,ido)) ->
	let heq = match ido with
	  | IntroAnonymous -> fresh_id [id] (add_prefix "Heq" id) gl
	  | IntroFresh heq_base -> fresh_id [id] heq_base gl
	  | IntroIdentifier id -> id
	  | _ -> error"Expect an introduction pattern naming one hypothesis." in
	let eqdata = build_coq_eq_data () in
	(* [lr] orients the equation: id = c or c = id *)
	let args = if lr then [t;mkVar id;c] else [t;c;mkVar id]in
	let eq = applist (eqdata.eq,args) in
	let refl = applist (eqdata.refl, [t;mkVar id]) in
	mkNamedLetIn id c t (mkLetIn (Name heq, refl, eq, ccl)),
	tclTHEN
	  (intro_gen loc (IntroMustBe heq) lastlhyp true)
	  (thin_body [heq;id])
    | None ->
	mkNamedLetIn id c t ccl, tclIDTAC in
  tclTHENLIST
    [ convert_concl_no_check newcl DEFAULTcast;
      intro_gen dloc (IntroMustBe id) lastlhyp true;
      eq_tac;
      tclMAP convert_hyp_no_check depdecls ] gl

let letin_tac with_eq name c ty occs =
  letin_tac_gen with_eq name c ty (occs,true)
(* Tactics "pose proof" (usetac=None) and "assert" (otherwise) *)
(* With no tactic, [c] is a proof term: assert its type and close the
   side goal by [exact_no_check].  With a tactic, [c] is a statement
   proved by that tactic in the first subgoal. *)
let forward usetac ipat c gl =
  match usetac with
  | None ->
      let t = pf_type_of gl c in
      tclTHENFIRST (assert_as true ipat t) (exact_no_check c) gl
  | Some tac ->
      tclTHENFIRST (assert_as true ipat c) tac gl

let pose_proof na c = forward None (ipat_of_name na) c
let assert_by na t tac = forward (Some tac) (ipat_of_name na) t
(*****************************)
(* Ad hoc unfold *)
(*****************************)
(* The two following functions should already exist, but found nowhere *)
(* Unfolds x by its definition everywhere *)
(* Replace every occurrence of the let-bound hypothesis [x] by its body,
   both in the hypotheses introduced after [x] and in the conclusion. *)
let unfold_body x gl =
  let hyps = pf_hyps gl in
  let xval =
    match Sign.lookup_named x hyps with
        (_,Some xval,_) -> xval
      | _ -> errorlabstrm "unfold_body"
          (pr_id x ++ str" is not a defined hypothesis.") in
  (* Only hypotheses after [x] can mention [x] *)
  let aft = afterHyp x gl in
  let hl = List.fold_right (fun (y,yval,_) cl -> (([],y),InHyp) :: cl) aft [] in
  let xvar = mkVar x in
  let rfun _ _ c = replace_term xvar xval c in
  tclTHENLIST
    [tclMAP (fun h -> reduct_in_hyp rfun h) hl;
     reduct_in_concl (rfun,DEFAULTcast)] gl
(* Unfolds x by its definition everywhere and clear x. This may raise
   an error if x is not defined. *)
let unfold_all x gl =
  let (_,xval,_) = pf_get_hyp gl x in
  match xval with
  | Some _ ->
      (* x has a body: replace x with it everywhere, then drop x *)
      tclTHEN (unfold_body x) (clear [x]) gl
  | None -> tclIDTAC gl
(*****************************)
(* High-level induction *)
(*****************************)
* A " natural " induction tactic
*
- [ H0 : T0 , ... , Hi : Ti , hyp0 : ) , Hi+1 : Ti+1 , ... , Hn : Tn |-G ] is the goal
- [ hyp0 ] is the induction hypothesis
- we extract from [ args ] the variables which are not rigid parameters
of the inductive type , this is [ indvars ] ( other terms are forgotten ) ;
[ indhyps ] are the ones which actually are declared in context
( done in [ find_atomic_param_of_ind ] )
- we look for all hyps depending of [ hyp0 ] or one of [ indvars ] :
this is [ dephyps ] of types [ deptyps ] respectively
- [ statuslist ] tells for each hyps in [ dephyps ] after which other hyp
fixed in the context they must be moved ( when induction is done )
- [ hyp0succ ] is the name of the hyp fixed in the context after which to
move the subterms of [ hyp0succ ] in the i - th branch where it is supposed
to be the i - th constructor of the inductive type .
Strategy : ( cf in [ induction_from_context ] )
- requantify and clear all [ dephyps ]
- apply induction on [ hyp0 ]
- clear [ indhyps ] and [ hyp0 ]
- in the i - th subgoal , intro the arguments of the i - th constructor
of the inductive type after [ hyp0succ ] ( done in
[ induct_discharge ] ) let the induction hypotheses on top of the
hyps because they may depend on variables between [ hyp0 ] and the
top . A counterpart is that the dep hyps programmed to be intro - ed
on top must now be intro - ed after the induction hypotheses
- move each of [ dephyps ] at the right place following the
[ statuslist ]
* A "natural" induction tactic
*
- [H0:T0, ..., Hi:Ti, hyp0:P->I(args), Hi+1:Ti+1, ..., Hn:Tn |-G] is the goal
- [hyp0] is the induction hypothesis
- we extract from [args] the variables which are not rigid parameters
of the inductive type, this is [indvars] (other terms are forgotten);
[indhyps] are the ones which actually are declared in context
(done in [find_atomic_param_of_ind])
- we look for all hyps depending of [hyp0] or one of [indvars]:
this is [dephyps] of types [deptyps] respectively
- [statuslist] tells for each hyps in [dephyps] after which other hyp
fixed in the context they must be moved (when induction is done)
- [hyp0succ] is the name of the hyp fixed in the context after which to
move the subterms of [hyp0succ] in the i-th branch where it is supposed
to be the i-th constructor of the inductive type.
Strategy: (cf in [induction_from_context])
- requantify and clear all [dephyps]
- apply induction on [hyp0]
- clear [indhyps] and [hyp0]
- in the i-th subgoal, intro the arguments of the i-th constructor
of the inductive type after [hyp0succ] (done in
[induct_discharge]) let the induction hypotheses on top of the
hyps because they may depend on variables between [hyp0] and the
top. A counterpart is that the dep hyps programmed to be intro-ed
on top must now be intro-ed after the induction hypotheses
- move each of [dephyps] at the right place following the
[statuslist]
*)
(* Warn (in verbose mode) about intro-patterns left over after all
   branches of an induction have been introduced. *)
let check_unused_names names =
  if names <> [] & Flags.is_verbose () then
    msg_warning
      (str"Unused introduction " ++ str (plural (List.length names) "pattern")
      ++ str": " ++ prlist_with_sep spc pr_intro_pattern names)
(* First identifier bound by an intro-pattern, as a move location; known
   to be wrong for anonymous/fresh patterns (hence the name). *)
let rec first_name_buggy avoid gl (loc,pat) = match pat with
  | IntroOrAndPattern [] -> no_move
  | IntroOrAndPattern ([]::l) ->
      first_name_buggy avoid gl (loc,IntroOrAndPattern l)
  | IntroOrAndPattern ((p::_)::_) -> first_name_buggy avoid gl p
  | IntroWildcard -> no_move
  | IntroRewrite _ -> no_move
  | IntroIdentifier id -> MoveAfter id
  | IntroAnonymous | IntroFresh _ -> (* buggy *) no_move
(* Pop the next intro-pattern, generating a fresh identifier (based on
   [id]) for missing/anonymous/fresh patterns; returns the pattern and
   the remaining list. *)
let consume_pattern avoid id gl = function
  | [] -> ((dloc, IntroIdentifier (fresh_id avoid id gl)), [])
  | (loc,IntroAnonymous)::names ->
      let avoid = avoid@explicit_intro_names names in
      ((loc,IntroIdentifier (fresh_id avoid id gl)), names)
  | (loc,IntroFresh id')::names ->
      let avoid = avoid@explicit_intro_names names in
      ((loc,IntroIdentifier (fresh_id avoid id' gl)), names)
  | pat::names -> (pat,names)
(* Re-introduce, at their recorded positions, the hypotheses that were
   generalized before induction.  Positions recorded as [MoveToEnd true]
   are redirected to [tophyp] (where an induction hypothesis landed).
   Fix: the binding [let newlstatus =] had been lost (only its trailing
   comment remained), leaving [newlstatus] used below but never bound. *)
let re_intro_dependent_hypotheses tophyp (lstatus,rstatus) =
  let newlstatus = (* if some IH has taken place at the top of hyps *)
    List.map (function (hyp,MoveToEnd true) -> (hyp,tophyp) | x -> x) lstatus
  in
  tclTHEN
    (intros_move rstatus)
    (intros_move newlstatus)
(* Prefer an explicit destination [destopt] over the computed [tophyp]. *)
let update destopt tophyp = if destopt = no_move then tophyp else destopt

(* Role of an argument in a branch of an elimination scheme. *)
type elim_arg_kind = RecArg | IndArg | OtherArg
(* Introduce, in one branch of an induction, the constructor arguments
   and induction hypotheses described by [ra], naming them after the
   user-supplied patterns [names], then re-introduce the generalized
   dependent hypotheses according to [statuslists].
   Fix: several comments below had lost their (* *) delimiters, leaving
   bare words in code position. *)
let induct_discharge statuslists destopt avoid' (avoid,ra) names gl =
  let avoid = avoid @ avoid' in
  let rec peel_tac ra names tophyp gl =
    match ra with
    | (RecArg,recvarname) ::
        (IndArg,hyprecname) :: ra' ->
        (* Recursive argument immediately followed by its induction
           hypothesis: a single name given by the user names the
           argument, and "IH<name>" names the hypothesis *)
        let recpat,names = match names with
          | [loc,IntroIdentifier id as pat] ->
              let id' = next_ident_away (add_prefix "IH" id) avoid in
              (pat, [dloc, IntroIdentifier id'])
          | _ -> consume_pattern avoid recvarname gl names in
        let hyprec,names = consume_pattern avoid hyprecname gl names in
        (* IH stays at top: we need to update tophyp *)
        (* This is buggy for intro-or-patterns with different first hypnames *)
        (* Would need to pass peel_tac as a continuation of intros_patterns *)
        (* (or to have hypotheses classified by blocks...) *)
        let newtophyp =
          if tophyp=no_move then first_name_buggy avoid gl hyprec else tophyp
        in
        tclTHENLIST
          [ intros_patterns true avoid [] (update destopt tophyp) [recpat];
            intros_patterns true avoid [] no_move [hyprec];
            peel_tac ra' names newtophyp] gl
    | (IndArg,hyprecname) :: ra' ->
        (* Rem: does not happen in Coq schemes, only in user-defined schemes *)
        let pat,names = consume_pattern avoid hyprecname gl names in
        tclTHEN (intros_patterns true avoid [] (update destopt tophyp) [pat])
          (peel_tac ra' names tophyp) gl
    | (RecArg,recvarname) :: ra' ->
        let pat,names = consume_pattern avoid recvarname gl names in
        tclTHEN (intros_patterns true avoid [] (update destopt tophyp) [pat])
          (peel_tac ra' names tophyp) gl
    | (OtherArg,_) :: ra' ->
        let pat,names = match names with
          | [] -> (dloc, IntroAnonymous), []
          | pat::names -> pat,names in
        tclTHEN (intros_patterns true avoid [] (update destopt tophyp) [pat])
          (peel_tac ra' names tophyp) gl
    | [] ->
        check_unused_names names;
        re_intro_dependent_hypotheses tophyp statuslists gl
  in
  peel_tac ra names no_move gl
(* - le recalcul de indtyp à chaque itération de atomize_one est pour ne pas
s'embêter à regarder si un letin_tac ne fait pas des
substitutions aussi sur l'argument voisin *)
(* Marche pas... faut prendre en compte l'occurrence précise... *)
(* In the main hypothesis [hyp0] (of inductive type [indref] with
   [nparams] parameters), replace every real argument that is not a
   fresh variable by a let-bound fresh variable, so induction can
   abstract over it.
   Fix: the inline comment before [atomize_one] had lost its (* *)
   delimiters, leaving bare words in code position. *)
let atomize_param_of_ind (indref,nparams) hyp0 gl =
  let tmptyp0 = pf_get_hyp_typ gl hyp0 in
  let typ0 = pf_apply reduce_to_quantified_ref gl indref tmptyp0 in
  let prods, indtyp = decompose_prod typ0 in
  let argl = snd (decompose_app indtyp) in
  let params = list_firstn nparams argl in
  (* le gl est important pour ne pas préévaluer *)
  let rec atomize_one i avoid gl =
    if i<>nparams then
      let tmptyp0 = pf_get_hyp_typ gl hyp0 in
      (* If argl <> [], we expect typ0 not to be quantified, in order to
         avoid bound parameters... then we call pf_reduce_to_atomic_ind *)
      let indtyp = pf_apply reduce_to_atomic_ref gl indref tmptyp0 in
      let argl = snd (decompose_app indtyp) in
      let c = List.nth argl (i-1) in
      match kind_of_term c with
        | Var id when not (List.exists (occur_var (pf_env gl) id) avoid) ->
            (* Already a fresh variable: keep it *)
            atomize_one (i-1) ((mkVar id)::avoid) gl
        | Var id ->
            (* Duplicated variable: alias it under a fresh name *)
            let x = fresh_id [] id gl in
            tclTHEN
              (letin_tac None (Name x) (mkVar id) None allClauses)
              (atomize_one (i-1) ((mkVar x)::avoid)) gl
        | _ ->
            (* Compound argument: bind it to a fresh variable *)
            let id = id_of_name_using_hdchar (Global.env()) (pf_type_of gl c)
                       Anonymous in
            let x = fresh_id [] id gl in
            tclTHEN
              (letin_tac None (Name x) c None allClauses)
              (atomize_one (i-1) ((mkVar x)::avoid)) gl
    else
      tclIDTAC gl
  in
  atomize_one (List.length argl) params gl
(* Collect the variables occurring as real (non-parameter) arguments of
   the atomic inductive type [indtyp]; [nparams] is the number of
   leading parameters to skip.
   Fix: dropped the stray trailing semicolon after the final expression,
   which cannot parse before the following top-level declarations. *)
let find_atomic_param_of_ind nparams indtyp =
  let argl = snd (decompose_app indtyp) in
  let argv = Array.of_list argl in
  let params = list_firstn nparams argl in
  let indvars = ref Idset.empty in
  for i = nparams to (Array.length argv)-1 do
    match kind_of_term argv.(i) with
    | Var id
        when not (List.exists (occur_var (Global.env()) id) params) ->
          indvars := Idset.add id !indvars
    | _ -> ()
  done;
  Idset.elements !indvars
(* [cook_sign] builds the lists [indhyps] of hyps that must be
erased, the lists of hyps to be generalize [(hdeps,tdeps)] on the
goal together with the places [(lstatus,rstatus)] where to re-intro
them after induction. To know where to re-intro the dep hyp, we
remember the name of the hypothesis [lhyp] after which (if the dep
hyp is more recent than [hyp0]) or [rhyp] before which (if older
than [hyp0]) its equivalent must be moved when the induction has
been applied. Since computation of dependencies and [rhyp] is from
more ancient (on the right) to more recent hyp (on the left) but
the computation of [lhyp] progresses from the other way, [cook_hyp]
is in two passes (an alternative would have been to write an
higher-order algorithm). We use references to reduce
the accumulation of arguments.
To summarize, the situation looks like this
Goal(n,x) -| H6:(Q n); x:A; H5:True; H4:(le O n); H3:(P n); H2:True; n:nat
Left Right
Induction hypothesis is H4 ([hyp0])
Variable parameters of (le O n) is the singleton list with "n" ([indvars])
Part of [indvars] really in context is the same ([indhyps])
The dependent hyps are H3 and H6 ([dephyps])
For H3 the memorized places are H5 ([lhyp]) and H2 ([rhyp])
because these names are among the hyp which are fixed through the induction
For H6 the neighbours are None ([lhyp]) and H5 ([rhyp])
For H3, because on the right of H4, we remember rhyp (here H2)
For H6, because on the left of H4, we remember lhyp (here None)
For H4, we remember lhyp (here H5)
The right neighbour is then translated into the left neighbour
because move_hyp tactic needs the name of the hyp _after_ which we
move the hyp to move.
But, say in the 2nd subgoal of the hypotheses, the goal will be
(m:nat)((P m)->(Q m)->(Goal m)) -> (P Sm)-> (Q Sm)-> (Goal Sm)
^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^
both go where H4 was goes where goes where
H3 was H6 was
We have to intro and move m and the recursive hyp first, but then
where to move H3 ??? Only the hyp on its right is relevant, but we
have to translate it into the name of the hyp on the left
Note: this case where some hyp(s) in [dephyps] has(have) the same
left neighbour as [hyp0] is the only problematic case with right
neighbours. For the other cases (e.g. an hyp H1:(R n) between n and H2
would have posed no problem. But for uniformity, we decided to use
the right hyp for all hyps on the right of H4.
Others solutions are welcome
PC 9 fev 06: Adapted to accept multi argument principle with no
main arg hyp. hyp0 is now optional, meaning that it is possible
that there is no main induction hypotheses. In this case, we
consider the last "parameter" (in [indvars]) as the limit between
"left" and "right", BUT it must be included in indhyps.
Other solutions are still welcome
*)
(* Raised to short-circuit the second context traversal of [cook_sign]. *)
exception Shunt of identifier move_location

(* [cook_sign hyp0_opt indvars env] computes, relative to the optional
   main induction hypothesis [hyp0_opt] and the inductive variables
   [indvars]: the status lists telling where to re-introduce the
   dependent hypotheses, the hypothesis left of [hyp0], the hypotheses
   to erase ([indhyps]) and the dependent declarations to generalize
   ([decldeps]).  See the long explanatory comment above.
   Fixes: restored the lost assignment [ldeps := hyp::!ldeps;] ([ldeps]
   was created and read but never written, which broke the 2nd phase)
   and the (* *) delimiters of several comments. *)
let cook_sign hyp0_opt indvars env =
  let hyp0,inhyps =
  match hyp0_opt with
  | None -> List.hd (List.rev indvars), []
  | Some (hyp0,at_least_in_hyps) -> hyp0, at_least_in_hyps in
  (* First phase from L to R: get [indhyps], [decldeps] and [statuslist]
     for the hypotheses before (= more ancient than) hyp0 (see above) *)
  let allindhyps = hyp0::indvars in
  let indhyps = ref [] in
  let decldeps = ref [] in
  let ldeps = ref [] in
  let rstatus = ref [] in
  let lstatus = ref [] in
  let before = ref true in
  let seek_deps env (hyp,_,_ as decl) rhyp =
    if hyp = hyp0 then begin
      before:=false;
      (* If there was no main induction hypotheses, then hyp is one of
         indvars too, so add it to indhyps. *)
      (if hyp0_opt=None then indhyps := hyp::!indhyps);
      MoveToEnd false (* fake value *)
    end else if List.mem hyp indvars then begin
      (* warning: hyp can still occur after induction *)
      (* e.g. if the goal (t hyp hyp0) with other occs of hyp in t *)
      indhyps := hyp::!indhyps;
      rhyp
    end else
      if inhyps <> [] && List.mem hyp inhyps || inhyps = [] &&
        (List.exists (fun id -> occur_var_in_decl env id decl) allindhyps ||
         List.exists (fun (id,_,_) -> occur_var_in_decl env id decl) !decldeps)
      then begin
        decldeps := decl::!decldeps;
        if !before then
          rstatus := (hyp,rhyp)::!rstatus
        else
          ldeps := hyp::!ldeps; (* status computed in 2nd phase *)
        MoveBefore hyp end
      else
        MoveBefore hyp
  in
  let _ = fold_named_context seek_deps env ~init:(MoveToEnd false) in
  (* 2nd phase from R to L: get left hyp of [hyp0] and [lhyps] *)
  let compute_lstatus lhyp (hyp,_,_) =
    if hyp = hyp0 then raise (Shunt lhyp);
    if List.mem hyp !ldeps then begin
      lstatus := (hyp,lhyp)::!lstatus;
      lhyp
    end else
      if List.mem hyp !indhyps then lhyp else MoveAfter hyp
  in
  try
    let _ =
      fold_named_context_reverse compute_lstatus ~init:(MoveToEnd true) env in
    raise (Shunt (MoveToEnd true)) (* ?? FIXME *)
  with Shunt lhyp0 ->
    let statuslists = (!lstatus,List.rev !rstatus) in
    (statuslists, (if hyp0_opt=None then MoveToEnd true else lhyp0),
     !indhyps, !decldeps)
The general form of an induction principle is the following :
forall prm1 prm2 ... prmp , ( induction parameters )
forall Q1 ... ,(Qi : Ti_1 - > Ti_2 -> ... - > ... Qq , ( predicates )
, branch2 , ... , branchr , ( branches of the principle )
forall ( x1 : Ti_1 ) ( x2 : Ti_2 ) ... ( xni : Ti_ni ) , ( induction arguments )
( HI : I prm1 .. ... xni ) ( optional main induction arg )
- > ( Qi x1 ... xni HI ( f prm1 ... ... xni)).(conclusion )
^^ ^^^^^^^^^^^^^^^^^^^^^^^^
optional optional argument added if
even if HI principle generated by functional
present above induction , only if HI does not exist
[ indarg ] [ farg ]
HI is not present when the induction principle does not come directly from an
inductive type ( like when it is generated by functional induction for
example ) . HI is present otherwise BUT may not appear in the conclusion
( dependent principle ) . HI and ( f ... ) can not be both present .
Principles taken from functional induction have the final ( f ... ) .
The general form of an induction principle is the following:
forall prm1 prm2 ... prmp, (induction parameters)
forall Q1...,(Qi:Ti_1 -> Ti_2 ->...-> Ti_ni),...Qq, (predicates)
branch1, branch2, ... , branchr, (branches of the principle)
forall (x1:Ti_1) (x2:Ti_2) ... (xni:Ti_ni), (induction arguments)
(HI: I prm1..prmp x1...xni) (optional main induction arg)
-> (Qi x1...xni HI (f prm1...prmp x1...xni)).(conclusion)
^^ ^^^^^^^^^^^^^^^^^^^^^^^^
optional optional argument added if
even if HI principle generated by functional
present above induction, only if HI does not exist
[indarg] [farg]
HI is not present when the induction principle does not come directly from an
inductive type (like when it is generated by functional induction for
example). HI is present otherwise BUT may not appear in the conclusion
(dependent principle). HI and (f...) cannot be both present.
Principles taken from functional induction have the final (f...).*)
(* [rel_contexts] and [rel_declaration] actually contain triples, and
lists are actually in reverse order to fit [compose_prod]. *)
(* Record describing the decomposed shape of an induction/elimination
   principle; see the long comment above for the naming of its pieces.
   NOTE(review): the [args] and [indarg] field declarations appear garbled
   below (only their comment text remains) — restore them from the pristine
   source before compiling. *)
type elim_scheme = {
elimc: constr with_ebindings option;
elimt: types;
indref: global_reference option;
params: rel_context; (* (prm1,tprm1);(prm2,tprm2)...(prmp,tprmp) *)
nparams: int; (* number of parameters *)
predicates: rel_context; (* (Qq, (Tq_1 -> Tq_2 ->...-> Tq_nq)), (Q1,...) *)
npredicates: int; (* Number of predicates *)
branches: rel_context; (* branchr,...,branch1 *)
nbranches: int; (* Number of branches *)
( xni , Ti_ni ) ... ( x1 , Ti_1 )
nargs: int; (* number of arguments *)
Some ( H , I prm1 .. ... xni )
if HI is in premisses , None otherwise
if HI is in premisses, None otherwise *)
concl: types; (* Qi x1...xni HI (f...), HI and (f...)
are optional and mutually exclusive *)
indarg_in_concl: bool; (* true if HI appears at the end of conclusion *)
farg_in_concl: bool; (* true if (f...) appears at the end of conclusion *)
}
(* Neutral [elim_scheme] value, used as a base for record-update syntax
   (see [compute_elim_sig]). *)
let empty_scheme =
{
elimc = None;
elimt = mkProp;
indref = None;
params = [];
nparams = 0;
predicates = [];
npredicates = 0;
branches = [];
nbranches = 0;
args = [];
nargs = 0;
indarg = None;
concl = mkProp;
indarg_in_concl = false;
farg_in_concl = false;
}
(* Unification between ((elimc:elimt) ?i ?j ?k ?l ... ?m) and the
hypothesis on which the induction is made *)
(* [induction_tac with_evars (varname,lbind) typ scheme gl] performs the
   elimination of hypothesis [varname] (of type [typ]) with the principle of
   [scheme].  Fails with an error when the scheme has no [elimc]. *)
let induction_tac with_evars (varname,lbind) typ scheme gl =
let elimc,lbindelimc =
match scheme.elimc with | Some x -> x | None -> error "No definition of the principle." in
let elimt = scheme.elimt in
let indclause = make_clenv_binding gl (mkVar varname,typ) lbind in
let elimclause =
make_clenv_binding gl
(mkCast (elimc,DEFAULTcast, elimt),elimt) lbindelimc in
elimination_clause_scheme with_evars true elimclause indclause gl
(* [make_base n id] returns the base identifier to use when [n] hypothesis
   names must be generated from [id]: [id] itself when at most one name is
   needed, otherwise a version of [id] normalized so numeric suffixes can be
   appended safely. *)
let make_base n id =
  (* was [n=0 or n=1]: [or] is a deprecated alias of [||] *)
  if n = 0 || n = 1 then id
  else
    (* This extends the name to accept new digits if it already ends with *)
    (* digits *)
    id_of_string (atompart_of_id (make_ident (string_of_id id) (Some 0)))
Builds two different names from an optional inductive type and a
number , also deals with a list of names to avoid . If the inductive
type is None , then is where i is a number .
number, also deals with a list of names to avoid. If the inductive
type is None, then hyprecname is IHi where i is a number. *)
(* [make_up_names n ind_opt cname] returns (base name for generated vars,
   base name for recursive hypotheses, list of identifiers to avoid).
   NOTE(review): the conditional producing [avoid] is garbled below (the
   [if ...] guard before "else" lost its code) — restore from the pristine
   source before compiling. *)
let make_up_names n ind_opt cname =
let is_hyp = atompart_of_id cname = "H" in
let base = string_of_id (make_base n cname) in
let ind_prefix = "IH" in
let base_ind =
if is_hyp then
match ind_opt with
| None -> id_of_string ind_prefix
| Some ind_id -> add_prefix ind_prefix (Nametab.id_of_global ind_id)
else add_prefix ind_prefix cname in
let hyprecname = make_base n base_ind in
let avoid =
Only one recursive argument
else
Forbid to use cname , cname0 , and hyprecname0
in order to get names such as f1 , f2 , ...
let avoid =
(make_ident (string_of_id hyprecname) None) ::
(make_ident (string_of_id hyprecname) (Some 0)) :: [] in
if atompart_of_id cname <> "H" then
(make_ident base (Some 0)) :: (make_ident base None) :: avoid
else avoid in
id_of_string base, hyprecname, avoid
(* [is_indhyp p n t] tells whether the head of [t], once its products are
   stripped, is a de Bruijn index in the interval ]p'; p'+n] where p' is [p]
   shifted by the number of stripped products — i.e. whether [t] refers to
   one of the [n] predicates bound above position [p]. *)
let is_indhyp p n t =
  let l, c = decompose_prod t in
  let c,_ = decompose_app c in
  let p = p + List.length l in
  match kind_of_term c with
  (* was [p < k & k <= p + n]: [&] is a deprecated alias of [&&] *)
  | Rel k when p < k && k <= p + n -> true
  | _ -> false
(* [chop_context n l] splits context [l] after [n] declarations, where
   declarations carrying a body (the [Some _] case — local definitions) are
   collected without decrementing [n].  Returns (prefix in original order,
   remainder); raises an anomaly when [l] runs out before [n] assumptions
   have been seen.  The first pattern's parsing is subtle — do not
   "simplify" it without checking. *)
let chop_context n l =
let rec chop_aux acc = function
| n, (_,Some _,_ as h :: t) -> chop_aux (h::acc) (n, t)
| 0, l2 -> (List.rev acc, l2)
| n, (h::t) -> chop_aux (h::acc) (n-1, t)
| _, [] -> anomaly "chop_context"
in
chop_aux [] (n,l)
(* Fail with "Cannot recognize <s> an induction scheme."; [s] (e.g.
   "the branches of") may be empty, in which case no extra word is added. *)
let error_ind_scheme s =
  let prefix = if s = "" then "" else s ^ " " in
  error ("Cannot recognize " ^ prefix ^ "an induction scheme.")
(* [mkEq t x y] builds the homogeneous equality term [@eq t x y]. *)
let mkEq t x y =
mkApp (build_coq_eq (), [| t; x; y |])
(* [mkRefl t x] builds the reflexivity proof [@eq_refl t x]. *)
let mkRefl t x =
mkApp ((build_coq_eq_data ()).refl, [| t; x |])
(* [mkHEq t x u y] builds the heterogeneous equality [JMeq t x u y]. *)
let mkHEq t x u y =
mkApp (coq_constant "mkHEq" ["Logic";"JMeq"] "JMeq",
[| t; x; u; y |])
(* [mkHRefl t x] builds the proof [JMeq_refl t x]. *)
let mkHRefl t x =
mkApp (coq_constant "mkHEq" ["Logic";"JMeq"] "JMeq_refl",
[| t; x |])
(* let id = lazy (coq_constant "mkHEq" ["Init";"Datatypes"] "id") *)
let mkHEq t x u y =
let ty = new_Type ( ) in
(* mkApp (coq_constant "mkHEq" ["Logic";"EqdepFacts"] "eq_dep", *)
[ | ty ; mkApp ( Lazy.force i d , [ |ty| ] ) ; t ; x ; u ; y | ] )
(* let mkHRefl t x = *)
let ty = new_Type ( ) in
mkApp ( coq_constant " mkHEq " [ " Logic";"EqdepFacts " ] " " ,
[ | ty ; mkApp ( Lazy.force i d , [ |ty| ] ) ; t ; x | ] )
(* [mkCoe a x p px y eq] transports [px : p x] along [eq : x = y] via the
   eq recursor, yielding a term of type [p y]. *)
let mkCoe a x p px y eq =
mkApp (Option.get (build_coq_eq_data ()).rect, [| a; x; p; px; y; eq |])
(* [lift_togethern n l] lifts the elements of [l] so that the LAST element
   is lifted by [n], the one before it by [n+1], and so on — i.e. element i
   (0-based) of a k-element list is lifted by [n + k - 1 - i]. *)
let lift_togethern n l =
  let rec go = function
    | [] -> ([], n)
    | x :: rest ->
        let lifted, k = go rest in
        (lift k x :: lifted, succ k)
  in
  fst (go l)
(* [lift_together l] is [lift_togethern] starting from 0. *)
let lift_together l = lift_togethern 0 l
(* [lift_list l] lifts every term of [l] by 1. *)
let lift_list l = List.map (lift 1) l
(* [ids_of_constr vars c] accumulates onto [vars] the named variables
   occurring in [c].  For applications headed by an inductive type or a
   constructor, the inductive's parameters are skipped: only the remaining
   (real) arguments are traversed. *)
let ids_of_constr vars c =
let rec aux vars c =
match kind_of_term c with
| Var id -> if List.mem id vars then vars else id :: vars
| App (f, args) ->
(match kind_of_term f with
| Construct (ind,_)
| Ind ind ->
let (mib,mip) = Global.lookup_inductive ind in
array_fold_left_from mib.Declarations.mind_nparams
aux vars args
| _ -> fold_constr aux vars c)
| _ -> fold_constr aux vars c
in aux vars c
(* [make_abstract_generalize gl id concl dep ctx c eqs args refls] builds
   the refinement term used by [abstract_generalize]: the goal abstracted
   over a generalized copy of hypothesis [id], the equations [eqs] relating
   new and old arguments, and the extended context [ctx]; it is then applied
   back to the original arguments and reflexivity proofs [refls].
   NOTE(review): two comment delimiters were lost in extraction below
   ("Apply the old arguments ..." and "Then apply ..." were comments). *)
let make_abstract_generalize gl id concl dep ctx c eqs args refls =
let meta = Evarutil.new_meta() in
let term, typ = mkVar id, pf_get_hyp_typ gl id in
let eqslen = List.length eqs in
(* Abstract by the "generalized" hypothesis equality proof if necessary. *)
let abshypeq =
if dep then
mkProd (Anonymous, mkHEq (lift 1 c) (mkRel 1) typ term, lift 1 concl)
else concl
in
(* Abstract by equalitites *)
let eqs = lift_togethern 1 eqs in (* lift together and past genarg *)
let abseqs = it_mkProd_or_LetIn ~init:(lift eqslen abshypeq) (List.map (fun x -> (Anonymous, None, x)) eqs) in
(* Abstract by the "generalized" hypothesis. *)
let genarg = mkProd (Name id, c, abseqs) in
(* Abstract by the extension of the context *)
let genctyp = it_mkProd_or_LetIn ~init:genarg ctx in
(* The goal will become this product. *)
let genc = mkCast (mkMeta meta, DEFAULTcast, genctyp) in
Apply the old arguments giving the proper instantiation of the hyp
let instc = mkApp (genc, Array.of_list args) in
Then apply to the original instanciated hyp .
let instc = mkApp (instc, [| mkVar id |]) in
(* Apply the reflexivity proofs on the indices. *)
let appeqs = mkApp (instc, Array.of_list refls) in
(* Finaly, apply the reflexivity proof for the original hyp, to get a term of type gl again. *)
let newc = if dep then mkApp (appeqs, [| mkHRefl typ term |]) else appeqs in
newc
(* [abstract_args gl id] generalizes the goal over the arguments of the
   applicative type of hypothesis [id], producing fresh variables and the
   equations (homogeneous or heterogeneous depending on convertibility)
   relating them to the original arguments.  Returns
   [Some (refinement term, dep, number of intros, vars to generalize)], or
   [None] when the hypothesis type is not an application.
   NOTE(review): the block of prose lines after [App (f, args) ->] was
   originally a comment whose delimiters were lost in extraction. *)
let abstract_args gl id =
let c = pf_get_hyp_typ gl id in
let sigma = project gl in
let env = pf_env gl in
let concl = pf_concl gl in
let dep = dependent (mkVar id) concl in
let avoid = ref [] in
let get_id name =
let id = fresh_id !avoid (match name with Name n -> n | Anonymous -> id_of_string "gen_x") gl in
avoid := id :: !avoid; id
in
match kind_of_term c with
App (f, args) ->
Build application generalized w.r.t . the argument plus the necessary eqs .
From env |- c : forall G , T and args : G we build
( T[G ' ] , G ' : ctx , env ; G ' |- args ' : G , eqs : = G'_i = G_i , refls : G ' = G , vars to generalize )
eqs are not lifted w.r.t . each other yet . ( * will be needed when going to dependent indexes
From env |- c : forall G, T and args : G we build
(T[G'], G' : ctx, env ; G' |- args' : G, eqs := G'_i = G_i, refls : G' = G, vars to generalize)
eqs are not lifted w.r.t. each other yet. (* will be needed when going to dependent indexes *)
*)
let aux (prod, ctx, ctxenv, c, args, eqs, refls, vars, env) arg =
let (name, _, ty), arity =
let rel, c = Reductionops.decomp_n_prod env sigma 1 prod in
List.hd rel, c
in
let argty = pf_type_of gl arg in
let liftargty = lift (List.length ctx) argty in
let convertible = Reductionops.is_conv_leq ctxenv sigma liftargty ty in
match kind_of_term arg with
| Var _ | Rel _ | Ind _ when convertible ->
(subst1 arg arity, ctx, ctxenv, mkApp (c, [|arg|]), args, eqs, refls, vars, env)
| _ ->
let name = get_id name in
let decl = (Name name, None, ty) in
let ctx = decl :: ctx in
let c' = mkApp (lift 1 c, [|mkRel 1|]) in
let args = arg :: args in
let liftarg = lift (List.length ctx) arg in
let eq, refl =
if convertible then
mkEq (lift 1 ty) (mkRel 1) liftarg, mkRefl argty arg
else
mkHEq (lift 1 ty) (mkRel 1) liftargty liftarg, mkHRefl argty arg
in
let eqs = eq :: lift_list eqs in
let refls = refl :: refls in
let vars = ids_of_constr vars arg in
(arity, ctx, push_rel decl ctxenv, c', args, eqs, refls, vars, env)
in
let f, args =
match kind_of_term f with
| Construct (ind,_)
| Ind ind ->
let (mib,mip) = Global.lookup_inductive ind in
let first = mib.Declarations.mind_nparams in
let pars, args = array_chop first args in
mkApp (f, pars), args
| _ -> f, args
in
let arity, ctx, ctxenv, c', args, eqs, refls, vars, env =
Array.fold_left aux (pf_type_of gl f,[],env,f,[],[],[],[],env) args
in
let args, refls = List.rev args, List.rev refls in
Some (make_abstract_generalize gl id concl dep ctx c' eqs args refls,
dep, succ (List.length ctx), vars)
| _ -> None
(* [abstract_generalize id ?generalize_vars gl] generalizes the goal over
   the indices of hypothesis [id] (via [abstract_args]); requires the JMeq
   library.  When [generalize_vars] (default true), the variables occurring
   in the abstracted arguments are generalized as well. *)
let abstract_generalize id ?(generalize_vars=true) gl =
Coqlib.check_required_library ["Coq";"Logic";"JMeq"];
let oldid = pf_get_new_id id gl in
let newc = abstract_args gl id in
match newc with
| None -> tclIDTAC gl
| Some (newc, dep, n, vars) ->
let tac =
if dep then
tclTHENLIST [refine newc; rename_hyp [(id, oldid)]; tclDO n intro;
generalize_dep (mkVar oldid)]
else
tclTHENLIST [refine newc; clear [id]; tclDO n intro]
in
if generalize_vars then tclTHEN tac
(tclFIRST [revert (List.rev vars) ;
tclMAP (fun id -> tclTRY (generalize_dep (mkVar id))) vars]) gl
else tac gl
(* [dependent_pattern c gl] rewrites the conclusion as an application of
   lambda-abstractions over [c] and the arguments of its (applicative) type,
   so that the goal is expressed as a pattern applied to those terms.  The
   conversion is performed without re-checking (convert_concl_no_check). *)
let dependent_pattern c gl =
let cty = pf_type_of gl c in
let deps =
match kind_of_term cty with
| App (f, args) -> Array.to_list args
| _ -> []
in
let varname c = match kind_of_term c with
| Var id -> id
| _ -> id_of_string (hdchar (pf_env gl) c)
in
let mklambda ty (c, id, cty) =
let conclvar = subst_term_occ all_occurrences c ty in
mkNamedLambda id cty conclvar
in
let subst = (c, varname c, cty) :: List.rev_map (fun c -> (c, varname c, pf_type_of gl c)) deps in
let concllda = List.fold_left mklambda (pf_concl gl) subst in
let conclapp = applistc concllda (List.rev_map pi1 subst) in
convert_concl_no_check conclapp DEFAULTcast gl
(* [occur_rel n c] tells whether de Bruijn index [n] occurs in [c]
   (the negation of [noccurn]). *)
let occur_rel n c = not (noccurn n c)
(* [list_filter_firsts f l] splits [l] into its longest prefix of elements
   satisfying [f] (in original order) and the remaining suffix.
   Rewritten with a reversed accumulator: the original appended with
   [acc@[e]], which is quadratic in the prefix length. *)
let list_filter_firsts f l =
  let rec aux acc = function
    | e :: rest when f e -> aux (e :: acc) rest
    | rest -> (List.rev acc, rest)
  in
  aux [] l
(* [count_rels_from n c] counts how many consecutive de Bruijn indices
   n, n+1, n+2, ... all occur free in [c], stopping at the first gap. *)
let count_rels_from n c =
let rels = free_rels c in
let cpt,rg = ref 0, ref n in
while Intset.mem !rg rels do
cpt:= !cpt+1; rg:= !rg+1;
done;
!cpt
(* [count_nonfree_rels_from n c] counts how many consecutive indices
   starting at [n] do NOT occur free in [c], stopping at the first one that
   does.  Raises [Not_found] when no index >= [n] occurs free at all
   (the while loop would otherwise not terminate). *)
let count_nonfree_rels_from n c =
let rels = free_rels c in
if Intset.exists (fun x -> x >= n) rels then
let cpt,rg = ref 0, ref n in
while not (Intset.mem !rg rels) do
cpt:= !cpt+1; rg:= !rg+1;
done;
!cpt
else raise Not_found
(* [cut_list n l] cuts [l] in two parts, the first of length [n] (empty for
   [n <= 0]); [l] must contain at least [n] elements.
   Rewritten with a reversed accumulator: the original appended with
   [acc@[e]], which is quadratic in [n]. *)
let cut_list n l =
  let rec cut_list_aux acc n l =
    if n <= 0 then (List.rev acc, l)
    else match l with
      | [] -> assert false
      | e :: l' -> cut_list_aux (e :: acc) (n - 1) l'
  in
  cut_list_aux [] n l
This function splits the products of the induction scheme [ elimt ] into four
parts :
- branches , easily detectable ( they are not referred by rels in the subterm )
- what was found before branches ( acc1 ) that is : parameters and predicates
- what was found after branches ( acc3 ) that is : args and indarg if any
if there is no branch , we try to fill in acc3 with args / indargs .
We also return the conclusion .
parts:
- branches, easily detectable (they are not referred by rels in the subterm)
- what was found before branches (acc1) that is: parameters and predicates
- what was found after branches (acc3) that is: args and indarg if any
if there is no branch, we try to fill in acc3 with args/indargs.
We also return the conclusion.
*)
(* [decompose_paramspred_branch_args elimt] splits the products of the
   induction scheme type [elimt] into (params+predicates, branches,
   args+indarg, conclusion); see the long comment above for the heuristics.
   NOTE(review): the prose lines before the final [if] were originally a
   comment whose opening delimiter was lost in extraction. *)
let decompose_paramspred_branch_args elimt =
let rec cut_noccur elimt acc2 : rel_context * rel_context * types =
match kind_of_term elimt with
| Prod(nme,tpe,elimt') ->
let hd_tpe,_ = decompose_app (snd (decompose_prod_assum tpe)) in
if not (occur_rel 1 elimt') && isRel hd_tpe
then cut_noccur elimt' ((nme,None,tpe)::acc2)
else let acc3,ccl = decompose_prod_assum elimt in acc2 , acc3 , ccl
| App(_, _) | Rel _ -> acc2 , [] , elimt
| _ -> error_ind_scheme "" in
let rec cut_occur elimt acc1 : rel_context * rel_context * rel_context * types =
match kind_of_term elimt with
| Prod(nme,tpe,c) when occur_rel 1 c -> cut_occur c ((nme,None,tpe)::acc1)
| Prod(nme,tpe,c) -> let acc2,acc3,ccl = cut_noccur elimt [] in acc1,acc2,acc3,ccl
| App(_, _) | Rel _ -> acc1,[],[],elimt
| _ -> error_ind_scheme "" in
let acc1, acc2 , acc3, ccl = cut_occur elimt [] in
Particular treatment when dealing with a dependent empty type elim scheme :
if there is no branch , then acc1 contains all hyps which is wrong ( acc1
should contain parameters and predicate only ) . This happens for an empty
type ( See for example Empty_set_ind , as False would actually be ok ) . Then
we must find the predicate of the conclusion to separate params_pred from
args . We suppose there is only one predicate here .
if there is no branch, then acc1 contains all hyps which is wrong (acc1
should contain parameters and predicate only). This happens for an empty
type (See for example Empty_set_ind, as False would actually be ok). Then
we must find the predicate of the conclusion to separate params_pred from
args. We suppose there is only one predicate here. *)
if List.length acc2 <> 0 then acc1, acc2 , acc3, ccl
else
let hyps,ccl = decompose_prod_assum elimt in
let hd_ccl_pred,_ = decompose_app ccl in
match kind_of_term hd_ccl_pred with
| Rel i -> let acc3,acc1 = cut_list (i-1) hyps in acc1 , [] , acc3 , ccl
| _ -> error_ind_scheme ""
(* [exchange_hd_app subst_hd t] replaces the head of the application [t]
   with [subst_hd], keeping the original arguments unchanged. *)
let exchange_hd_app subst_hd t =
  let _hd, args = decompose_app t in
  mkApp (subst_hd, Array.of_list args)
(* [rebuild_elimtype_from_scheme scheme] rebuilds the type of an eliminator
   from its [scheme_info].  The idea is to build variants of an eliminator
   by modifying their scheme_info, then rebuild the eliminator type, then
   prove it (with tactics).  The conclusion is successively wrapped under
   the (optional) indarg, then the args, branches, predicates and params. *)
let rebuild_elimtype_from_scheme (scheme:elim_scheme): types =
  let ccl_with_indarg =
    match scheme.indarg with
    | None -> scheme.concl
    | Some decl -> mkProd_or_LetIn decl scheme.concl
  in
  List.fold_left
    (fun ccl ctx -> it_mkProd_or_LetIn ccl ctx)
    ccl_with_indarg
    [scheme.args; scheme.branches; scheme.predicates; scheme.params]
(* NOTE(review): these exceptions are not raised nor caught in the code
   visible here; presumably used elsewhere in the file or dead — confirm
   before removing. *)
exception NoLastArg
exception NoLastArgCcl
Builds an elim_scheme from its type and calling form ( const+binding ) . We
first separate branches . We obtain branches , hyps before ( params + preds ) ,
hyps after ( args < + indarg if present > ) and conclusion . Then we proceed as
follows :
- separate parameters and predicates in params_preds . For that we build :
forall ( x1 : Ti_1)(xni : Ti_ni ) ( HI : I prm1 .. ... xni ) , DUMMY x1 ... xni HI / farg
^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^
optional opt
Free rels appearing in this term are parameters ( branches should not
appear , and the only predicate would have been but we replaced it by
DUMMY ) . We guess this heuristic catches all params . TODO : generalize to
the case where args are merged with branches ( ? ) and/or where several
predicates are cited in the conclusion .
- finish to fill in the elim_scheme : indarg / farg / args and finally indref .
first separate branches. We obtain branches, hyps before (params + preds),
hyps after (args <+ indarg if present>) and conclusion. Then we proceed as
follows:
- separate parameters and predicates in params_preds. For that we build:
forall (x1:Ti_1)(xni:Ti_ni) (HI:I prm1..prmp x1...xni), DUMMY x1...xni HI/farg
^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^
optional opt
Free rels appearing in this term are parameters (branches should not
appear, and the only predicate would have been Qi but we replaced it by
DUMMY). We guess this heuristic catches all params. TODO: generalize to
the case where args are merged with branches (?) and/or where several
predicates are cited in the conclusion.
- finish to fill in the elim_scheme: indarg/farg/args and finally indref. *)
(* [compute_elim_sig ?elimc elimt] builds an [elim_scheme] from the type
   [elimt] (and optional calling form [elimc]) of an elimination principle;
   see the long comment above for the overall strategy.  [Exit] is used
   internally for early termination of the successive tests.
   NOTE(review): several comment delimiters were lost in extraction below —
   the French fragments ("hi est d'un ...", "hi a le bon nbre d'arguments",
   "FIXME : tests ne sont pas suffisants") were comments, and the bindings
   [let hi_is_ind =] / [let hi_args_enough =] that preceded them appear to
   be missing; restore from the pristine source before compiling. *)
let compute_elim_sig ?elimc elimt =
let params_preds,branches,args_indargs,conclusion =
decompose_paramspred_branch_args elimt in
let ccl = exchange_hd_app (mkVar (id_of_string "__QI_DUMMY__")) conclusion in
let concl_with_args = it_mkProd_or_LetIn ccl args_indargs in
let nparams = Intset.cardinal (free_rels concl_with_args) in
let preds,params = cut_list (List.length params_preds - nparams) params_preds in
A first approximation , further analysis will tweak it
let res = ref { empty_scheme with
(* This fields are ok: *)
elimc = elimc; elimt = elimt; concl = conclusion;
predicates = preds; npredicates = List.length preds;
branches = branches; nbranches = List.length branches;
farg_in_concl = isApp ccl && isApp (last_arg ccl);
params = params; nparams = nparams;
(* all other fields are unsure at this point. Including these:*)
args = args_indargs; nargs = List.length args_indargs; } in
try
(* Order of tests below is important. Each of them exits if successful. *)
(* 1- First see if (f x...) is in the conclusion. *)
if !res.farg_in_concl
then begin
res := { !res with
indarg = None;
indarg_in_concl = false; farg_in_concl = true };
raise Exit
end;
(* 2- If no args_indargs (=!res.nargs at this point) then no indarg *)
if !res.nargs=0 then raise Exit;
(* 3- Look at last arg: is it the indarg? *)
ignore (
match List.hd args_indargs with
| hiname,Some _,hi -> error_ind_scheme ""
| hiname,None,hi ->
let hi_ind, hi_args = decompose_app hi in
hi est d'un
match kind_of_term hi_ind with
| Ind (mind,_) -> true
| Var _ -> true
| Const _ -> true
| Construct _ -> true
| _ -> false in
hi a le bon nbre d'arguments
List.length hi_args = List.length params + !res.nargs -1 in
FIXME : tests ne sont pas suffisants .
if not (hi_is_ind & hi_args_enough) then raise Exit (* No indarg *)
else (* Last arg is the indarg *)
res := {!res with
indarg = Some (List.hd !res.args);
indarg_in_concl = occur_rel 1 ccl;
args = List.tl !res.args; nargs = !res.nargs - 1;
};
raise Exit);
raise Exit(* exit anyway *)
with Exit -> (* Ending by computing indrev: *)
match !res.indarg with
| None -> !res (* No indref *)
| Some ( _,Some _,_) -> error_ind_scheme ""
| Some ( _,None,ind) ->
let indhd,indargs = decompose_app ind in
try {!res with indref = Some (global_of_constr indhd) }
with _ -> error "Cannot find the inductive type of the inductive scheme.";;
Check that the elimination scheme has a form similar to the
elimination schemes built by Coq . Schemes may have the standard
form computed from an inductive type OR ( feb . 2006 ) a non standard
form . That is : with no main induction argument and with an optional
extra final argument of the form ( f x y ... ) in the conclusion . In
the non standard case , naming of generated hypos is slightly
different .
elimination schemes built by Coq. Schemes may have the standard
form computed from an inductive type OR (feb. 2006) a non standard
form. That is: with no main induction argument and with an optional
extra final argument of the form (f x y ...) in the conclusion. In
the non standard case, naming of generated hypos is slightly
different. *)
(* [compute_elim_signature elimc elimt names_info ind_type_guess] analyses
   the scheme of type [elimt], checks each branch (classifying premises as
   IndArg/RecArg/OtherArg) and returns (per-branch naming signature, scheme).
   The [None] case handles non-standard schemes (no main induction arg);
   the [Some] case handles standard schemes built from an inductive type.
   NOTE(review): the French line "Vérifier que les arguments ." below was
   originally a comment whose delimiters were lost in extraction. *)
let compute_elim_signature elimc elimt names_info ind_type_guess =
let scheme = compute_elim_sig ~elimc:elimc elimt in
let f,l = decompose_app scheme.concl in
Vérifier que les arguments .
match scheme.indarg with
| Some (_,Some _,_) -> error "Strange letin, cannot recognize an induction scheme."
| None -> (* Non standard scheme *)
let is_pred n c =
let hd = fst (decompose_app c) in match kind_of_term hd with
| Rel q when n < q & q <= n+scheme.npredicates -> IndArg
| _ when hd = ind_type_guess & not scheme.farg_in_concl -> RecArg
| _ -> OtherArg in
let rec check_branch p c =
match kind_of_term c with
| Prod (_,t,c) -> is_pred p t :: check_branch (p+1) c
| LetIn (_,_,_,c) -> OtherArg :: check_branch (p+1) c
| _ when is_pred p c = IndArg -> []
| _ -> raise Exit in
let rec find_branches p lbrch =
match lbrch with
| (_,None,t)::brs ->
(try
let lchck_brch = check_branch p t in
let n = List.fold_left
(fun n b -> if b=RecArg then n+1 else n) 0 lchck_brch in
let recvarname, hyprecname, avoid =
make_up_names n scheme.indref names_info in
let namesign =
List.map (fun b -> (b,if b=IndArg then hyprecname else recvarname))
lchck_brch in
(avoid,namesign) :: find_branches (p+1) brs
with Exit-> error_ind_scheme "the branches of")
| (_,Some _,_)::_ -> error_ind_scheme "the branches of"
| [] -> [] in
let indsign = Array.of_list (find_branches 0 (List.rev scheme.branches)) in
indsign,scheme
| Some ( _,None,ind) -> (* Standard scheme from an inductive type *)
let indhd,indargs = decompose_app ind in
let is_pred n c =
let hd = fst (decompose_app c) in match kind_of_term hd with
| Rel q when n < q & q <= n+scheme.npredicates -> IndArg
| _ when hd = indhd -> RecArg
| _ -> OtherArg in
let rec check_branch p c = match kind_of_term c with
| Prod (_,t,c) -> is_pred p t :: check_branch (p+1) c
| LetIn (_,_,_,c) -> OtherArg :: check_branch (p+1) c
| _ when is_pred p c = IndArg -> []
| _ -> raise Exit in
let rec find_branches p lbrch =
match lbrch with
| (_,None,t)::brs ->
(try
let lchck_brch = check_branch p t in
let n = List.fold_left
(fun n b -> if b=RecArg then n+1 else n) 0 lchck_brch in
let recvarname, hyprecname, avoid =
make_up_names n scheme.indref names_info in
let namesign =
List.map (fun b -> (b,if b=IndArg then hyprecname else recvarname))
lchck_brch in
(avoid,namesign) :: find_branches (p+1) brs
with Exit -> error_ind_scheme "the branches of")
| (_,Some _,_)::_ -> error_ind_scheme "the branches of"
| [] ->
(* Check again conclusion *)
let ccl_arg_ok = is_pred (p + scheme.nargs + 1) f = IndArg in
let ind_is_ok =
list_lastn scheme.nargs indargs
= extended_rel_list 0 scheme.args in
if not (ccl_arg_ok & ind_is_ok) then
error_ind_scheme "the conclusion of";
[]
in
let indsign = Array.of_list (find_branches 0 (List.rev scheme.branches)) in
indsign,scheme
(* [find_elim_signature isrec elim hyp0 gl] resolves the elimination
   principle to use: either the one supplied in [elim], or (when [None])
   the standard eliminator/case scheme of the inductive type of hypothesis
   [hyp0].  Returns the (naming signature, elim_scheme) pair computed by
   [compute_elim_signature]. *)
let find_elim_signature isrec elim hyp0 gl =
let tmptyp0 = pf_get_hyp_typ gl hyp0 in
let (elimc,elimt),ind = match elim with
| None ->
let mind,_ = pf_reduce_to_quantified_ind gl tmptyp0 in
let s = elimination_sort_of_goal gl in
let elimc =
if isrec then lookup_eliminator mind s
else pf_apply make_case_gen gl mind s in
let elimt = pf_type_of gl elimc in
((elimc, NoBindings), elimt), mkInd mind
| Some (elimc,lbind as e) ->
let ind_type_guess,_ = decompose_app (snd (decompose_prod tmptyp0)) in
(e, pf_type_of gl elimc), ind_type_guess in
let indsign,elim_scheme =
compute_elim_signature elimc elimt hyp0 ind in
(indsign,elim_scheme)
Instantiate all meta variables of elimclause using lid , some elts
of lid are parameters ( first ones ) , the other are
arguments . Returns the clause obtained .
of lid are parameters (first ones), the other are
arguments. Returns the clause obtained. *)
(* [recolle_clenv scheme lid elimclause gl] instantiates the meta variables
   of [elimclause] with the hypotheses of [lid]: the first [scheme.nparams]
   elements of [lid] fill the parameter metas, the rest fill the trailing
   argument metas.
   NOTE(review): [clause_indarg] is [] in both branches of its match — the
   [Some] case looks like a stub; confirm intent against pristine source.
   The prose lines inside the fold were originally a comment whose opening
   delimiter was lost in extraction. *)
let recolle_clenv scheme lid elimclause gl =
let _,arr = destApp elimclause.templval.rebus in
let lindmv =
Array.map
(fun x ->
match kind_of_term x with
| Meta mv -> mv
| _ -> errorlabstrm "elimination_clause"
(str "The type of the elimination clause is not well-formed."))
arr in
let nmv = Array.length lindmv in
let lidparams,lidargs = cut_list (scheme.nparams) lid in
let nidargs = List.length lidargs in
(* parameters correspond to first elts of lid. *)
let clauses_params =
list_map_i (fun i id -> mkVar id , pf_get_hyp_typ gl id , lindmv.(i))
0 lidparams in
(* arguments correspond to last elts of lid. *)
let clauses_args =
list_map_i
(fun i id -> mkVar id , pf_get_hyp_typ gl id , lindmv.(nmv-nidargs+i))
0 lidargs in
let clause_indarg =
match scheme.indarg with
| None -> []
| Some (x,_,typx) -> []
in
let clauses = clauses_params@clauses_args@clause_indarg in
(* iteration of clenv_fchain with all infos we have. *)
List.fold_right
(fun e acc ->
let x,y,i = e in
from_n ( Some 0 ) means that x should be taken " as is " without
trying to unify ( which would lead to trying to apply it to
evars if y is a product ) .
trying to unify (which would lead to trying to apply it to
evars if y is a product). *)
let indclause = mk_clenv_from_n gl (Some 0) (x,y) in
let elimclause' = clenv_fchain i acc indclause in
elimclause')
(List.rev clauses)
elimclause
(* Unification of the goal and the principle applied to meta variables:
(elimc ?i ?j ?k...?l). This solves partly meta variables (and may
produce new ones). Then refine with the resulting term with holes.
*)
(* [induction_tac_felim with_evars indvars scheme gl] refines the goal with
   the scheme's principle applied to metas, instantiating all parameter and
   argument metas from the hypotheses [indvars] (via [recolle_clenv]).
   Errors out when the scheme carries no [elimc].
   NOTE(review): the two prose lines below were comments whose delimiters
   were lost in extraction. *)
let induction_tac_felim with_evars indvars scheme gl =
let elimt = scheme.elimt in
let elimc,lbindelimc =
match scheme.elimc with | Some x -> x | None -> error "No definition of the principle." in
(* elimclause contains this: (elimc ?i ?j ?k...?l) *)
let elimclause =
make_clenv_binding gl (mkCast (elimc,DEFAULTcast, elimt),elimt) lbindelimc in
elimclause ' is built from elimclause by instanciating all args and params .
let elimclause' = recolle_clenv scheme indvars elimclause gl in
one last resolution ( useless ? )
let resolved = clenv_unique_resolver true elimclause' gl in
clenv_refine with_evars resolved gl
(* [apply_induction_in_context isrec hyp0 indsign indvars names induct_tac
   gl] performs the common context bookkeeping around an induction: cook the
   sign, generalize then clear the dependent hypotheses, run [induct_tac],
   and discharge each branch with the computed names.
   NOTE(review): the prose lines inside the tclTHENLIST were comments whose
   delimiters were lost in extraction. *)
let apply_induction_in_context isrec hyp0 indsign indvars names induct_tac gl =
let env = pf_env gl in
let statlists,lhyp0,indhyps,deps = cook_sign hyp0 indvars env in
let deps = List.map (fun (id,c,t)-> (id,c,refresh_universes_strict t)) deps in
let tmpcl = it_mkNamedProd_or_LetIn (pf_concl gl) deps in
let names = compute_induction_names (Array.length indsign) names in
let dephyps = List.map (fun (id,_,_) -> id) deps in
let deps_cstr =
List.fold_left
(fun a (id,b,_) -> if b = None then (mkVar id)::a else a) [] deps in
tclTHENLIST
[
dependent hyps ( but not args )
if deps = [] then tclIDTAC else apply_type tmpcl deps_cstr;
clear dependent hyps
thin dephyps;
side - conditions in elim ( resp case ) schemes come last ( resp first )
(if isrec then tclTHENFIRSTn else tclTHENLASTn)
(tclTHEN induct_tac (tclTRY (thin (List.rev indhyps))))
(array_map2
(induct_discharge statlists lhyp0 (List.rev dephyps)) indsign names)
]
gl
(* Induction with several induction arguments, main differences with
induction_from_context is that there is no main induction argument,
so we chose one to be the positioning reference. On the other hand,
all args and params must be given, so we help a bit the unifier by
making the "pattern" by hand before calling induction_tac_felim
FIXME: REUNIF AVEC induction_tac_felim? *)
(* [induction_from_context_l isrec with_evars elim_info lid names gl]:
   induction with several (all) induction arguments given in [lid]; there is
   no main induction argument, so the first element of [lid] is used as the
   positioning reference (see the comment above this function).
   NOTE(review): several comment delimiters below were lost in extraction
   (the prose lines between statements were originally comments). *)
let induction_from_context_l isrec with_evars elim_info lid names gl =
let indsign,scheme = elim_info in
(* number of all args, counting farg and indarg if present. *)
let nargs_indarg_farg = scheme.nargs
+ (if scheme.farg_in_concl then 1 else 0)
+ (if scheme.indarg <> None then 1 else 0) in
(* Number of given induction args must be exact. *)
if List.length lid <> nargs_indarg_farg + scheme.nparams then
error "Not the right number of arguments given to induction scheme.";
hyp0 is used for re - introducing hyps at the right place afterward .
We chose the first element of the list of variables on which to
induct . It is probably the first of them appearing in the
context .
We chose the first element of the list of variables on which to
induct. It is probably the first of them appearing in the
context. *)
let hyp0,indvars,lid_params =
match lid with
| [] -> anomaly "induction_from_context_l"
| e::l ->
let nargs_without_first = nargs_indarg_farg - 1 in
let ivs,lp = cut_list nargs_without_first l in
e, ivs, lp in
terms to patternify we must or farg if present in concl
let lid_in_pattern =
if scheme.indarg <> None & not scheme.indarg_in_concl then List.rev indvars
else List.rev (hyp0::indvars) in
let lidcstr = List.map (fun x -> mkVar x) lid_in_pattern in
let realindvars = (* hyp0 is a real induction arg if it is not the
farg in the conclusion of the induction scheme *)
List.rev ((if scheme.farg_in_concl then indvars else hyp0::indvars) @ lid_params) in
let induct_tac = tclTHENLIST [
(* pattern to make the predicate appear. *)
reduce (Pattern (List.map inj_with_occurrences lidcstr)) onConcl;
(* Induction by "refine (indscheme ?i ?j ?k...)" + resolution of all
possible holes using arguments given by the user (but the
functional one). *)
FIXME : Tester ca principe dependant et non - dependant
induction_tac_felim with_evars realindvars scheme
] in
apply_induction_in_context isrec
None indsign (hyp0::indvars) names induct_tac gl
(* [induction_from_context isrec with_evars elim_info (hyp0,lbind) names
   inhyps gl]: standard induction over hypothesis [hyp0], whose type must
   reduce to the inductive referenced by the scheme (the scheme must have an
   [indref]; asserts otherwise).  The hypothesis is consumed: its possible
   body is unfolded and it is cleared after the elimination. *)
let induction_from_context isrec with_evars elim_info (hyp0,lbind) names
inhyps gl =
let indsign,scheme = elim_info in
let indref = match scheme.indref with | None -> assert false | Some x -> x in
let tmptyp0 = pf_get_hyp_typ gl hyp0 in
let typ0 = pf_apply reduce_to_quantified_ref gl indref tmptyp0 in
let indvars =
find_atomic_param_of_ind scheme.nparams (snd (decompose_prod typ0)) in
let induct_tac = tclTHENLIST [
induction_tac with_evars (hyp0,lbind) typ0 scheme;
tclTRY (unfold_body hyp0);
thin [hyp0]
] in
apply_induction_in_context isrec
(Some (hyp0,inhyps)) indsign indvars names induct_tac gl
exception TryNewInduct of exn

(* Induction on [hyp0]: first make the inductive argument atomic (turn
   non-variable indices into local definitions) so the standard scheme
   applies; if the scheme has no [indarg], fall back to the general
   multi-argument mechanism. *)
let induction_with_atomization_of_ind_arg isrec with_evars elim names (hyp0,lbind) inhyps gl =
  let (indsign,scheme as elim_info) = find_elim_signature isrec elim hyp0 gl in
  if scheme.indarg = None then (* This is not a standard induction scheme (the
				  argument is probably a parameter) So try the
				  more general induction mechanism. *)
    induction_from_context_l isrec with_evars elim_info [hyp0] names gl
  else
    let indref = match scheme.indref with | None -> assert false | Some x -> x in
    tclTHEN
      (atomize_param_of_ind (indref,scheme.nparams) hyp0)
      (induction_from_context isrec with_evars elim_info
	(hyp0,lbind) names inhyps) gl
(* Induction on a list of induction arguments. Analyse the elim
scheme (which is mandatory for multiple ind args), check that all
parameters and arguments are given (mandatory too). *)
(* Induction with an explicit scheme on a list of arguments: check that
   the number of arguments matches exactly what the scheme expects
   (params + args + optional farg + optional indarg), then defer to
   [induction_from_context_l]. *)
let induction_without_atomization isrec with_evars elim names lid gl =
  let (indsign,scheme as elim_info) =
    find_elim_signature isrec elim (List.hd lid) gl in
  let awaited_nargs =
    scheme.nparams + scheme.nargs
    + (if scheme.farg_in_concl then 1 else 0)
    + (if scheme.indarg <> None then 1 else 0)
  in
  let nlid = List.length lid in
  if nlid <> awaited_nargs
  then error "Not the right number of induction arguments."
  else induction_from_context_l isrec with_evars elim_info lid names gl

(* Force an anonymous or "fresh"-style intro pattern to a concrete
   identifier derived from [id] (prefix "Heq"), so the generated
   equality hypothesis gets a predictable name. *)
let enforce_eq_name id gl = function
  | (b,(loc,IntroAnonymous)) ->
      (b,(loc,IntroIdentifier (fresh_id [id] (add_prefix "Heq" id) gl)))
  | (b,(loc,IntroFresh heq_base)) ->
      (b,(loc,IntroIdentifier (fresh_id [id] heq_base gl)))
  | x ->
      x

(* Tell whether the clause [cls] restricts occurrences (in the
   conclusion or in some hypothesis), as opposed to selecting all
   occurrences everywhere. *)
let has_selected_occurrences = function
  | None -> false
  | Some cls ->
      cls.concl_occs <> all_occurrences_expr ||
      cls.onhyps <> None && List.exists (fun ((occs,_),hl) ->
	occs <> all_occurrences_expr || hl <> InHyp) (Option.get cls.onhyps)
(* assume that no occurrences are selected *)
(* Assuming no occurrences are selected, clear from the context every
   hypothesis that is not selected by the "in" clause [cls] and that
   depends on [id] or on one of the selected hypotheses [inhyps], so
   that induction does not fail on dependent but unselected material.
   Fails if the conclusion depends on [id] while not being selected. *)
let clear_unselected_context id inhyps cls gl =
  match cls with
  | None -> tclIDTAC gl
  | Some cls ->
      if occur_var (pf_env gl) id (pf_concl gl) &&
	 cls.concl_occs = no_occurrences_expr
      then errorlabstrm ""
	    (str "Conclusion must be mentioned: it depends on " ++ pr_id id
	     ++ str ".");
      match cls.onhyps with
      | Some hyps ->
	  let to_erase (id',_,_ as d) =
	    if List.mem id' inhyps then (* if selected, do not erase *) None
	    else
	      (* erase if not selected and dependent on id or selected hyps *)
	      let test id = occur_var_in_decl (pf_env gl) id d in
	      if List.exists test (id::inhyps) then Some id' else None in
	  let ids = list_map_filter to_erase (pf_hyps gl) in
	  thin ids gl
      | None -> tclIDTAC gl
(* General entry point for induction/destruct on a single argument
   [(c,lbind)].  When [c] is a goal variable with no bindings, no "as"
   equation, and no selected occurrences, induct on it directly (after
   clearing unselected dependent context); otherwise first bind [c] to a
   fresh let-in variable and induct on that variable. *)
let new_induct_gen isrec with_evars elim (eqname,names) (c,lbind) cls gl =
  let inhyps = match cls with
    | Some {onhyps=Some hyps} -> List.map (fun ((_,id),_) -> id) hyps
    | _ -> [] in
  match kind_of_term c with
    | Var id when not (mem_named_context id (Global.named_context()))
	& lbind = NoBindings & not with_evars & eqname = None
	& not (has_selected_occurrences cls) ->
	tclTHEN
	  (clear_unselected_context id inhyps cls)
	  (induction_with_atomization_of_ind_arg
	    isrec with_evars elim names (id,lbind) inhyps) gl
    | _ ->
	let x = id_of_name_using_hdchar (Global.env()) (pf_type_of gl c)
		  Anonymous in
	let id = fresh_id [] x gl in
	(* We need the equality name now *)
	let with_eq = Option.map (fun eq -> (false,eq)) eqname in
	(* TODO: if ind has predicate parameters, use JMeq instead of eq *)
	tclTHEN
	  (letin_tac_gen with_eq (Name id) c None (Option.default allClauses cls,false))
	  (induction_with_atomization_of_ind_arg
	    isrec with_evars elim names (id,lbind) inhyps) gl
(* Induction on a list of arguments. First make induction arguments
   atomic (using letins), then do induction. The specificity here is
   that all arguments and parameters of the scheme are given
   (mandatory for the moment), so we don't need to deal with
   parameters of the inductive type as in new_induct_gen. *)
(* Induction on a list of arguments: first replace each non-variable
   argument by a fresh let-bound variable ([atomize_list]), then run the
   multi-argument induction on the collected variables, and finally try
   to unfold the letins that were introduced. *)
let new_induct_gen_l isrec with_evars elim (eqname,names) lc gl =
  if eqname <> None then
    errorlabstrm "" (str "Do not know what to do with " ++
      pr_intro_pattern (Option.get eqname));
  let newlc = ref [] in
  let letids = ref [] in
  (* Atomize arguments one by one, recording the (possibly fresh)
     variable names in [newlc], and in [letids] when a letin was made. *)
  let rec atomize_list l gl =
    match l with
      | [] -> tclIDTAC gl
      | c::l' ->
	  match kind_of_term c with
	    | Var id when not (mem_named_context id (Global.named_context()))
		& not with_evars ->
		let _ = newlc:= id::!newlc in
		atomize_list l' gl
	    | _ ->
		let x =
		  id_of_name_using_hdchar (Global.env()) (pf_type_of gl c) Anonymous in
		let id = fresh_id [] x gl in
		let newl' = List.map (replace_term c (mkVar id)) l' in
		let _ = newlc:=id::!newlc in
		let _ = letids:=id::!letids in
		tclTHEN
		  (letin_tac None (Name id) c None allClauses)
		  (atomize_list newl') gl in
  tclTHENLIST
    [
      (atomize_list lc);
      (fun gl' -> (* recompute each time to have the new value of newlc *)
	induction_without_atomization isrec with_evars elim names !newlc gl') ;
      (* after induction, try to unfold all letins created by atomize_list
	 FIXME: unfold_all does not exist anywhere else? *)
      (fun gl' -> (* recompute each time to have the new value of letids *)
	tclMAP (fun x -> tclTRY (unfold_all x)) !letids gl')
    ]
    gl
(* Induction/destruct over several arguments: an explicit scheme is
   mandatory, an "in" clause is not supported, and every argument must
   be a plain term (no bindings). *)
let induct_destruct_l isrec with_evars lc elim names cls =
  (* Several induction hyps: induction scheme is mandatory *)
  let _ =
    if elim = None
    then
      errorlabstrm "" (strbrk "Induction scheme must be given when several induction hypothesis are given.\n" ++
      str "Example: induction x1 x2 x3 using my_scheme.") in
  let newlc =
    List.map
      (fun x ->
	match x with (* FIXME: should we deal with ElimOnIdent? *)
	  | ElimOnConstr (x,NoBindings) -> x
	  | _ -> error "Don't know where to find some argument.")
      lc in
  if cls <> None then
    error
      "'in' clause not supported when several induction hypothesis are given.";
  new_induct_gen_l isrec with_evars elim names newlc
(* Induction either over a term, over a quantified premisse, or over
   several quantified premisses (like with functional induction
   principles).
   TODO: really unify induction with one and induction with several
   args *)
(* Dispatch between the single-argument mechanism (with fallback to the
   multi-argument one on failure, re-raising the first exception) and
   the multi-argument mechanism. *)
let induct_destruct isrec with_evars (lc,elim,names,cls) =
  assert (List.length lc > 0); (* ensured by syntax, but if called inside caml? *)
  if List.length lc = 1 then
    (* induction on one arg: use old mechanism *)
    try
      onInductionArg
	(fun c -> new_induct_gen isrec with_evars elim names c cls)
	(List.hd lc)
    with (* If this fails, try with new mechanism but if it fails too,
	    then the exception is the first one. *)
      | x ->
	  (try induct_destruct_l isrec with_evars lc elim names cls
	   with _ -> raise x)
  else
    induct_destruct_l isrec with_evars lc elim names cls
(* Apply a list of induction/destruct requests in sequence; only the
   first one keeps the caller's [isrec] flag, the following ones are
   treated as case analyses. *)
let induction_destruct isrec with_evars = function
  | [] -> tclIDTAC
  | [a] -> induct_destruct isrec with_evars a
  | a::l ->
      tclTHEN
	(induct_destruct isrec with_evars a)
	(tclMAP (induct_destruct false with_evars) l)

(* User-level entry points: recursive (induction) vs non-recursive
   (destruct) elimination. *)
let new_induct ev lc e idl cls = induct_destruct true ev (lc,e,idl,cls)
let new_destruct ev lc e idl cls = induct_destruct false ev (lc,e,idl,cls)
(* The registered tactic, which calls the default elimination
* if no elimination constant is provided. *)
(* Induction tactics *)
(* This was Induction before 6.3 (induction only in quantified premisses) *)
(* Introduce up to the named (resp. numbered) premise, then run
   induction (elim) on the last introduced hypothesis. *)
let raw_induct s = tclTHEN (intros_until_id s) (tclLAST_HYP simplest_elim)
let raw_induct_nodep n = tclTHEN (intros_until_n n) (tclLAST_HYP simplest_elim)

let simple_induct_id hyp = raw_induct hyp
let simple_induct_nodep = raw_induct_nodep

let simple_induct = function
  | NamedHyp id -> simple_induct_id id
  | AnonHyp n -> simple_induct_nodep n

(* Destruction tactics *)

(* Same as above but with case analysis instead of induction. *)
let simple_destruct_id s    =
    (tclTHEN (intros_until_id s) (tclLAST_HYP simplest_case))
let simple_destruct_nodep n =
    (tclTHEN (intros_until_n n) (tclLAST_HYP simplest_case))

let simple_destruct = function
  | NamedHyp id -> simple_destruct_id id
  | AnonHyp n -> simple_destruct_nodep n
(*
* Eliminations giving the type instead of the proof.
* These tactics use the default elimination constant and
* no substitutions at all.
* May be they should be integrated into Elim ...
*)
(* Apply eliminator [elim] to prove the goal, instantiating its main
   (last) meta argument with the type [t] instead of a proof of it. *)
let elim_scheme_type elim t gl =
  let clause = mk_clenv_type_of gl elim in
  match kind_of_term (last_arg clause.templval.rebus) with
    | Meta mv ->
	let clause' =
	  (* t is inductive, then CUMUL or CONV is irrelevant *)
	  clenv_unify true Reduction.CUMUL t
	    (clenv_meta_type clause mv) clause in
	res_pf clause' ~allow_K:true gl
    | _ -> anomaly "elim_scheme_type"
(* [elim_type t]: reduce [t] to an atomic inductive type and apply its
   default eliminator at the current goal's sort. *)
let elim_type t gl =
  let (ind,t) = pf_reduce_to_atomic_ind gl t in
  let elimc = lookup_eliminator ind (elimination_sort_of_goal gl) in
  elim_scheme_type elimc t gl

(* Same as [elim_type] but with a (non-recursive) case-analysis scheme
   built on the fly. *)
let case_type t gl =
  let (ind,t) = pf_reduce_to_atomic_ind gl t in
  let env = pf_env gl in
  let elimc = make_case_gen env (project gl) ind (elimination_sort_of_goal gl) in
  elim_scheme_type elimc t gl
(* Some eliminations frequently used *)
(* These elimination tactics are particularly adapted for sequent
   calculus. They take a clause as argument, and yield the
   elimination rule if the clause is of the form (Some id) and a
   suitable introduction rule otherwise. They do not depend on
   the name of the eliminated constant, so they can be also
   used on ad-hoc disjunctions and conjunctions introduced by
   the user.
   -- Eduardo Gimenez (11/8/97)

   HH (29/5/99) replaces failures by specific error messages
 *)
(* Eliminate hypothesis [id], expected to be a conjunction, and
   introduce its two components; fails with a message otherwise. *)
let andE id gl =
  let t = pf_get_hyp_typ gl id in
  if is_conjunction (pf_hnf_constr gl t) then
    (tclTHEN (simplest_elim (mkVar id)) (tclDO 2 intro)) gl
  else
    errorlabstrm "andE"
      (str("Tactic andE expects "^(string_of_id id)^" is a conjunction."))

(* Sequent-style "and": split the goal when no hypothesis is given,
   eliminate the conjunction [id] otherwise. *)
let dAnd cls =
  onClauses
    (function
      | None -> simplest_split
      | Some ((_,id),_) -> andE id)
    cls

(* Eliminate hypothesis [id], expected to be a disjunction, introducing
   the component in each branch; fails with a message otherwise. *)
let orE id gl =
  let t = pf_get_hyp_typ gl id in
  if is_disjunction (pf_hnf_constr gl t) then
    (tclTHEN (simplest_elim (mkVar id)) intro) gl
  else
    errorlabstrm "orE"
      (str("Tactic orE expects "^(string_of_id id)^" is a disjunction."))

(* Sequent-style "or": pick the right/left constructor (selected by [b])
   when no hypothesis is given, eliminate [id] otherwise. *)
let dorE b cls =
  onClauses
    (function
      | (Some ((_,id),_)) -> orE id
      | None -> (if b then right else left) NoBindings)
    cls

(* Eliminate an implication [id]: cut its conclusion and apply [id] to a
   fresh meta standing for the premise. *)
let impE id gl =
  let t = pf_get_hyp_typ gl id in
  if is_imp_term (pf_hnf_constr gl t) then
    let (dom, _, rng) = destProd (pf_hnf_constr gl t) in
    tclTHENLAST
      (cut_intro rng)
      (apply_term (mkVar id) [mkMeta (new_meta())]) gl
  else
    errorlabstrm "impE"
      (str("Tactic impE expects "^(string_of_id id)^
	   " is a an implication."))

(* Sequent-style "imp": introduce when no hypothesis is given,
   eliminate the implication [id] otherwise. *)
let dImp cls =
  onClauses
    (function
      | None -> intro
      | Some ((_,id),_) -> impE id)
    cls
(************************************************)
(* Tactics related with logic connectives *)
(************************************************)
(* Reflexivity tactics *)
(* Hook installed by the setoid machinery; used as fallback when the
   goal is not a primitive equality. *)
let setoid_reflexivity = ref (fun _ -> assert false)
let register_setoid_reflexivity f = setoid_reflexivity := f

(* Return [Some tac] proving reflexivity when the (possibly reduced)
   conclusion is an equality-like type, [None] otherwise. *)
let reflexivity_red allowred gl =
  (* PL: usual reflexivity don't perform any reduction when searching
     for an equality, but we may need to do some when called back from
     inside setoid_reflexivity (see Optimize cases in setoid_replace.ml). *)
  let concl = if not allowred then pf_concl gl
    else whd_betadeltaiota (pf_env gl) (project gl) (pf_concl gl)
  in
  match match_with_equality_type concl with
  | None -> None
  | Some _ -> Some (one_constructor 1 NoBindings)

let reflexivity gl =
  match reflexivity_red false gl with
  | None -> !setoid_reflexivity gl
  | Some tac -> tac gl

let intros_reflexivity = (tclTHEN intros reflexivity)
(* Symmetry tactics *)
(* This tactic first tries to apply a constant named sym_eq, where eq
   is the name of the equality predicate. If this constant is not
   defined and the conclusion is a=b, it solves the goal doing (Cut
   b=a;Intro H;Case H;Constructor 1) *)
(* Hook installed by the setoid machinery; fallback for non-primitive
   equalities. *)
let setoid_symmetry = ref (fun _ -> assert false)
let register_setoid_symmetry f = setoid_symmetry := f

(* Return [Some tac] turning an equational conclusion into its symmetric
   form, [None] when the (possibly reduced) conclusion is not an
   equation. *)
let symmetry_red allowred gl =
  (* PL: usual symmetry don't perform any reduction when searching
     for an equality, but we may need to do some when called back from
     inside setoid_reflexivity (see Optimize cases in setoid_replace.ml). *)
  let concl = if not allowred then pf_concl gl
    else whd_betadeltaiota (pf_env gl) (project gl) (pf_concl gl)
  in
  match match_with_equation concl with
  | None -> None
  | Some (hdcncl,args) -> Some (fun gl ->
      let hdcncls = string_of_inductive hdcncl in
      begin
	try
	  (* Fast path: apply the "sym_<eq>" lemma when it exists. *)
	  tclTHEN
	    (convert_concl_no_check concl DEFAULTcast)
	    (apply (pf_parse_const gl ("sym_"^hdcncls))) gl
	with _ ->
	  (* Otherwise cut the symmetric statement and prove it by case
	     analysis on the equation. *)
	  let symc = match args with
	    | [t1; c1; t2; c2] -> mkApp (hdcncl, [| t2; c2; t1; c1 |])
	    | [typ;c1;c2] -> mkApp (hdcncl, [| typ; c2; c1 |])
	    | [c1;c2] -> mkApp (hdcncl, [| c2; c1 |])
	    | _ -> assert false
	  in
	  tclTHENFIRST (cut symc)
	    (tclTHENLIST
	      [ intro;
		tclLAST_HYP simplest_case;
		one_constructor 1 NoBindings ])
	    gl
      end)

let symmetry gl =
  match symmetry_red false gl with
  | None -> !setoid_symmetry gl
  | Some tac -> tac gl
(* Hook for the setoid variant of symmetry in a hypothesis. *)
let setoid_symmetry_in = ref (fun _ _ -> assert false)
let register_setoid_symmetry_in f = setoid_symmetry_in := f

(* Symmetry in hypothesis [id]: cut the symmetric statement (under the
   same prenex context), replace [id] by it, and prove the cut from the
   old hypothesis.  Falls back to the setoid version when [id] is not a
   recognized equation. *)
let symmetry_in id gl =
  let ctype = pf_type_of gl (mkVar id) in
  let sign,t = decompose_prod_assum ctype in
  match match_with_equation t with
    | None -> !setoid_symmetry_in id gl
    | Some (hdcncl,args) ->
	let symccl = match args with
	  | [t1; c1; t2; c2] -> mkApp (hdcncl, [| t2; c2; t1; c1 |])
	  | [typ;c1;c2] -> mkApp (hdcncl, [| typ; c2; c1 |])
	  | [c1;c2] -> mkApp (hdcncl, [| c2; c1 |])
	  | _ -> assert false in
	tclTHENS (cut (it_mkProd_or_LetIn symccl sign))
	  [ intro_replacing id;
	    tclTHENLIST [ intros; symmetry; apply (mkVar id); assumption ] ]
	  gl

(* Symmetry in the conclusion (after intros) or in a given hypothesis,
   depending on the clause. *)
let intros_symmetry =
  onClauses
    (function
      | None -> tclTHEN intros symmetry
      | Some ((_,id),_) -> symmetry_in id)
(* Transitivity tactics *)
(* This tactic first tries to apply a constant named trans_eq, where eq
   is the name of the equality predicate. If this constant is not
   defined and the conclusion is a=b, it solves the goal doing
   Cut x1=x2;
       [Cut x2=x3; [Intros e1 e2; Case e2;Assumption
                   | Idtac]
       | Idtac]
   --Eduardo (19/8/97)
*)
(* Hook installed by the setoid machinery; fallback for non-primitive
   equalities. *)
let setoid_transitivity = ref (fun _ _ -> assert false)
let register_setoid_transitivity f = setoid_transitivity := f

(* Return [Some tac] proving an equational conclusion by transitivity
   through [t], [None] when the (possibly reduced) conclusion is not an
   equation. *)
let transitivity_red allowred t gl =
  (* PL: usual transitivity don't perform any reduction when searching
     for an equality, but we may need to do some when called back from
     inside setoid_reflexivity (see Optimize cases in setoid_replace.ml). *)
  let concl = if not allowred then pf_concl gl
    else whd_betadeltaiota (pf_env gl) (project gl) (pf_concl gl)
  in
  match match_with_equation concl with
  | None -> None
  | Some (hdcncl,args) -> Some (fun gl ->
      let hdcncls = string_of_inductive hdcncl in
      begin
	try
	  (* Fast path: apply the "trans_<eq>" lemma when it exists. *)
	  apply_list [(pf_parse_const gl ("trans_"^hdcncls));t] gl
	with _ ->
	  (* Otherwise cut the two intermediate equations and finish by
	     case analysis on the second one. *)
	  let eq1, eq2 = match args with
	    | [typ1;c1;typ2;c2] -> let typt = pf_type_of gl t in
		( mkApp(hdcncl, [| typ1; c1; typt ;t |]),
		  mkApp(hdcncl, [| typt; t; typ2; c2 |]) )
	    | [typ;c1;c2] ->
		( mkApp (hdcncl, [| typ; c1; t |]),
		  mkApp (hdcncl, [| typ; t; c2 |]) )
	    | [c1;c2] ->
		( mkApp (hdcncl, [| c1; t|]),
		  mkApp (hdcncl, [| t; c2 |]) )
	    | _ -> assert false
	  in
	  tclTHENFIRST (cut eq2)
	    (tclTHENFIRST (cut eq1)
	      (tclTHENLIST
		[ tclDO 2 intro;
		  tclLAST_HYP simplest_case;
		  assumption ])) gl
      end)

let transitivity t gl =
  match transitivity_red false t gl with
  | None -> !setoid_transitivity t gl
  | Some tac -> tac gl

let intros_transitivity n = tclTHEN intros (transitivity n)
(* tactical to save as name a subproof such that the generalisation of
   the current goal, abstracted with respect to the local signature,
   is solved by tac *)
(* Compatibility test between a section declaration [d1] and a goal
   declaration [d2]: a defined declaration only matches a declaration
   with an equal body and type, while an assumption matches any
   declaration of the same type. *)
let interpretable_as_section_decl d1 d2 = match d1,d2 with
  | (_,Some _,_), (_,None,_) -> false
  | (_,Some b1,t1), (_,Some b2,t2) -> eq_constr b1 b2 & eq_constr t1 t2
  | (_,None,t1), (_,_,t2) -> eq_constr t1 t2
(* Save as lemma [name] a subproof: the current goal is generalised over
   the non-section part of its local signature, proved completely by
   [tac], declared as an opaque constant, and the goal is then closed by
   applying that constant to the context instance.  Fails when the
   statement contains existential variables. *)
let abstract_subproof name tac gl =
  let current_sign = Global.named_context()
  and global_sign = pf_hyps gl in
  let sign,secsign =
    List.fold_right
      (fun (id,_,_ as d) (s1,s2) ->
	if mem_named_context id current_sign &
	  interpretable_as_section_decl (Sign.lookup_named id current_sign) d
	then (s1,push_named_context_val d s2)
	else (add_named_decl d s1,s2))
      global_sign (empty_named_context,empty_named_context_val) in
  let na = next_global_ident_away false name (pf_ids_of_hyps gl) in
  let concl = it_mkNamedProd_or_LetIn (pf_concl gl) sign in
  if occur_existential concl then
    error "\"abstract\" cannot handle existentials.";
  let lemme =
    start_proof na (Global, Proof Lemma) secsign concl (fun _ _ -> ());
    let _,(const,_,kind,_) =
      try
	by (tclCOMPLETE (tclTHEN (tclDO (List.length sign) intro) tac));
	let r = cook_proof ignore in
	delete_current_proof (); r
      with
	e ->
	  (* Clean the pending proof state before re-raising. *)
	  (delete_current_proof(); raise e)
    in (* Faudrait un peu fonctionnaliser cela *)
    let cd = Entries.DefinitionEntry const in
    let con = Declare.declare_internal_constant na (cd,IsProof Lemma) in
    constr_of_global (ConstRef con)
  in
  exact_no_check
    (applist (lemme,
	      List.rev (Array.to_list (instance_from_named_context sign))))
    gl
(* Run [tac] in an "abstract": the subproof is saved as a separate
   lemma named [name_op] (default: "<current proof>_subproof"). *)
let tclABSTRACT name_op tac gl =
  let s = match name_op with
    | Some s -> s
    | None -> add_suffix (get_current_proof_name ()) "_subproof"
  in
  abstract_subproof s tac gl
(* Admit the current goal: declare its statement (generalised over the
   non-section part of the context) as a logical axiom named
   "<current proof>_admitted", and close the goal with it.  Fails when
   the statement contains existential variables. *)
let admit_as_an_axiom gl =
  let current_sign = Global.named_context()
  and global_sign = pf_hyps gl in
  let sign,secsign =
    List.fold_right
      (fun (id,_,_ as d) (s1,s2) ->
	if mem_named_context id current_sign &
	  interpretable_as_section_decl (Sign.lookup_named id current_sign) d
	then (s1,add_named_decl d s2)
	else (add_named_decl d s1,s2))
      global_sign (empty_named_context,empty_named_context) in
  let name = add_suffix (get_current_proof_name ()) "_admitted" in
  let na = next_global_ident_away false name (pf_ids_of_hyps gl) in
  let concl = it_mkNamedProd_or_LetIn (pf_concl gl) sign in
  if occur_existential concl then error"\"admit\" cannot handle existentials.";
  let axiom =
    let cd = Entries.ParameterEntry (concl,false) in
    let con = Declare.declare_internal_constant na (cd,IsAssumption Logical) in
    constr_of_global (ConstRef con)
  in
  exact_no_check
    (applist (axiom,
	      List.rev (Array.to_list (instance_from_named_context sign))))
    gl
(* First-order unification of [x] and [y] in the current goal's
   environment, with delta/conversion governed by [state]; on success
   the resulting evar assignments are installed in the goal, otherwise
   the tactic fails with "Not unifiable". *)
let unify ?(state=full_transparent_state) x y gl =
  try
    let flags =
      {default_unify_flags with
	modulo_delta = state;
	modulo_conv_on_closed_terms = Some state}
    in
    let evd = w_unify false (pf_env gl) Reduction.CONV
      ~flags x y (Evd.create_evar_defs (project gl))
    in tclEVARS (Evd.evars_of evd) gl
  with _ -> tclFAIL 0 (str"Not unifiable") gl
(************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, * CNRS-Ecole Polytechnique-INRIA Futurs-Universite Paris Sud *)
(*   \VV/  **************************************************************)
(*    //   *      This file is distributed under the terms of the       *)
(*         *       GNU Lesser General Public License Version 2.1        *)
(************************************************************************)

(* $Id: tactics.ml 12956 2010-04-20 08:49:15Z herbelin $ *)
open Pp
open Util
open Names
open Nameops
open Sign
open Term
open Termops
open Declarations
open Inductive
open Inductiveops
open Reductionops
open Environ
open Libnames
open Evd
open Pfedit
open Tacred
open Rawterm
open Tacmach
open Proof_trees
open Proof_type
open Logic
open Evar_refiner
open Clenv
open Clenvtac
open Refiner
open Tacticals
open Hipattern
open Coqlib
open Nametab
open Genarg
open Tacexpr
open Decl_kinds
open Evarutil
open Indrec
open Pretype_errors
open Unification
(* Raised by head-analysis helpers when the head is a bound variable. *)
exception Bound

(* [nb_prod x] counts the leading products of [x], going through
   let-ins (substituting their body) and casts.  The outer [rec] flag
   of the original was unused (recursion happens through [count]). *)
let nb_prod x =
  let rec count n c =
    match kind_of_term c with
	Prod(_,_,t) -> count (n+1) t
      | LetIn(_,a,_,t) -> count n (subst1 a t)
      | Cast(c,_,_) -> count n c
      | _ -> n
  in count 0 x
(* Injections lifting plain arguments into the "with occurrences" /
   "with evar map" variants expected downstream. *)
let inj_with_occurrences e = (all_occurrences_expr,e)

let inj_open c = (Evd.empty,c)

let inj_occ (occ,c) = (occ,inj_open c)

(* Lift a red_expr over plain constrs to one over open constrs. *)
let inj_red_expr = function
  | Simpl lo -> Simpl (Option.map inj_occ lo)
  | Fold l -> Fold (List.map inj_open l)
  | Pattern l -> Pattern (List.map inj_occ l)
  | (ExtraRedExpr _ | CbvVm | Red _ | Hnf | Cbv _ | Lazy _ | Unfold _ as c)
    -> c

(* Lift bindings over plain constrs to bindings over open constrs. *)
let inj_ebindings = function
  | NoBindings -> NoBindings
  | ImplicitBindings l -> ImplicitBindings (List.map inj_open l)
  | ExplicitBindings l ->
      ExplicitBindings (List.map (fun (l,id,c) -> (l,id,inj_open c)) l)

let dloc = dummy_loc

(* Name of the inductive type heading [c]; user error otherwise. *)
let string_of_inductive c =
  try match kind_of_term c with
  | Ind ind_sp ->
      let (mib,mip) = Global.lookup_inductive ind_sp in
      string_of_id mip.mind_typename
  | _ -> raise Bound
  with Bound -> error "Bound head variable."

(* Head constant of [t] (under casts, a product prefix and application);
   raises [Bound] when the head is not a constant/inductive/constructor/
   variable. *)
let rec head_constr_bound t =
  let t = strip_outer_cast t in
  let _,ccl = decompose_prod_assum t in
  let hd,args = decompose_app ccl in
  match kind_of_term hd with
  | Const _ | Ind _ | Construct _ | Var _ -> (hd,args)
  | _ -> raise Bound

(* Same as [head_constr_bound] but turns [Bound] into a user error. *)
let head_constr c =
  try head_constr_bound c with Bound -> error "Bound head variable."
(* Re-exports of the primitive refiner tactics. *)
let introduction = Tacmach.introduction
let refine = Tacmach.refine
let convert_concl = Tacmach.convert_concl
let convert_hyp = Tacmach.convert_hyp
let thin_body = Tacmach.thin_body

(* Turn a clear-dependency failure into a user-level error message
   naming the offending hypothesis or existential. *)
let error_clear_dependency env id = function
  | Evarutil.OccurHypInSimpleClause None ->
      errorlabstrm "" (pr_id id ++ str " is used in conclusion.")
  | Evarutil.OccurHypInSimpleClause (Some id') ->
      errorlabstrm ""
	(pr_id id ++ strbrk " is used in hypothesis " ++ pr_id id' ++ str".")
  | Evarutil.EvarTypingBreak ev ->
      errorlabstrm ""
	(str "Cannot remove " ++ pr_id id ++
	 strbrk " without breaking the typing of " ++
	 Printer.pr_existential env ev ++ str".")

(* Clear hypotheses [l], reporting dependency errors nicely (shadows
   the primitive [thin]). *)
let thin l gl =
  try thin l gl
  with Evarutil.ClearDependencyError (id,err) ->
    error_clear_dependency (pf_env gl) id err

(* internal_cut variants wrapped with the same error reporting; the
   boolean tells whether the cut replaces an existing hypothesis. *)
let internal_cut_gen b d t gl =
  try internal_cut b d t gl
  with Evarutil.ClearDependencyError (id,err) ->
    error_clear_dependency (pf_env gl) id err

let internal_cut = internal_cut_gen false
let internal_cut_replace = internal_cut_gen true

let internal_cut_rev_gen b d t gl =
  try internal_cut_rev b d t gl
  with Evarutil.ClearDependencyError (id,err) ->
    error_clear_dependency (pf_env gl) id err

let internal_cut_rev = internal_cut_rev_gen false
let internal_cut_rev_replace = internal_cut_rev_gen true

(* Moving / renaming hypotheses. *)
let move_hyp = Tacmach.move_hyp
let order_hyps = Tacmach.order_hyps
let rename_hyp = Tacmach.rename_hyp

(* Fresh names: pick an identifier close to [id] clashing neither with
   [avoid] nor (for [fresh_id]) with the goal's hypotheses. *)
let fresh_id_avoid avoid id =
  next_global_ident_away true id avoid

let fresh_id avoid id gl =
  fresh_id_avoid (avoid@(pf_ids_of_hyps gl)) id
(* Fixpoints and CoFixpoints *)
(* [fix ido n] starts a proof by fixpoint on the [n]-th argument; the
   fix hypothesis is named [ido], or fresh from the current proof name. *)
let mutual_fix = Tacmach.mutual_fix
let fix ido n gl = match ido with
  | None ->
      mutual_fix (fresh_id [] (Pfedit.get_current_proof_name ()) gl) n [] gl
  | Some id ->
      mutual_fix id n [] gl
(* Cofixpoint introduction, symmetric to [fix]. *)
let mutual_cofix = Tacmach.mutual_cofix
let cofix ido gl = match ido with
  | None ->
      mutual_cofix (fresh_id [] (Pfedit.get_current_proof_name ()) gl) [] gl
  | Some id ->
      mutual_cofix id [] gl
(* A reduction function acting in a typing environment. *)
type tactic_reduction = env -> evar_map -> constr -> constr
(* Apply [redfun] to the parts of hypothesis [(id,c,ty)] selected by
   [where] (type only, value only, or both); fails when asked to reduce
   the value of a value-less hypothesis. *)
let pf_reduce_decl redfun where (id,c,ty) gl =
  let redfun' = pf_reduce redfun gl in
  match c with
  | None ->
      if where = InHypValueOnly then
        errorlabstrm "" (pr_id id ++ str "has no value.");
      (id,None,redfun' ty)
  | Some b ->
      let b' = if where <> InHypTypeOnly then redfun' b else b in
      let ty' = if where <> InHypValueOnly then redfun' ty else ty in
      (id,Some b',ty')
(* The following two tactics apply an arbitrary
   reduction function either to the conclusion or to a
   certain hypothesis *)
(* Apply a reduction function to the goal's conclusion (with cast kind
   [sty]) or to a hypothesis part selected by [where]. *)
let reduct_in_concl (redfun,sty) gl =
  convert_concl_no_check (pf_reduce redfun gl (pf_concl gl)) sty gl
let reduct_in_hyp redfun ((_,id),where) gl =
  convert_hyp_no_check
    (pf_reduce_decl redfun where (pf_get_hyp gl id) gl) gl
let reduct_option redfun = function
  | Some id -> reduct_in_hyp (fst redfun) id
  | None -> reduct_in_concl redfun
(* Run the reduction on every location of a clause. *)
let redin_combinator redfun =
  onClauses (reduct_option redfun)
(* "Reduction" that replaces [c] by [t] only if they are convertible
   (under [cv_pb]); errors otherwise.  Used to implement [change]. *)
let change_and_check cv_pb t env sigma c =
  if is_fconv cv_pb env sigma t c then
    t
  else
    errorlabstrm "convert-check-hyp" (str "Not convertible.")
(* Use cumulativity only if changing the conclusion, not a subterm *)
(* Change either the whole location (no occurrences given) or only the
   selected occurrences; subterm changes always use plain conversion. *)
let change_on_subterm cv_pb t = function
  | None -> change_and_check cv_pb t
  | Some occl -> contextually false occl (change_and_check Reduction.CONV t)
let change_in_concl occl t =
  reduct_in_concl ((change_on_subterm Reduction.CUMUL t occl),DEFAULTcast)
let change_in_hyp occl t id  =
  with_check (reduct_in_hyp (change_on_subterm Reduction.CONV t occl) id)
let change_option occl t = function
    Some id -> change_in_hyp occl t id
  | None -> change_in_concl occl t
(* Extract the resolved value of an [or_var]; interpretation is expected
   to have substituted all variables by this point. *)
let out_arg = function
  | ArgVar _ -> anomaly "Unevaluated or_var variable"
  | ArgArg x -> x
(* [adjust_clause occl cls] reconciles an optional occurrence+term pair
   [occl] (from "change ... at occs") with the clause [cls] it applies
   to: occurrences may only be given when a single location (goal or one
   hypothesis) is targeted, and "at" modifiers that cannot be honoured
   are dropped with a warning.
   NOTE(review): the stripped-out comment marker before "get at clause"
   has been restored so the definition parses again. *)
let adjust_clause occl cls =
  (match cls, occl with
      ({onhyps=(Some(_::_::_)|None)}
      |{onhyps=Some(_::_);concl_occs=((false,_)|(true,_::_))}),
      Some _ ->
        error "No occurrences expected when changing several hypotheses."
    | _ -> ());
  (* get at clause from cls if only goal or one hyp specified *)
  let occl,cls = match occl with
  | None -> None,cls
  | Some (occs,c) ->
      if cls.onhyps=Some[] && occs=all_occurrences then
        Some (on_snd (List.map out_arg) cls.concl_occs,c),
        {cls with concl_occs=all_occurrences_expr}
      else
        match cls.onhyps with
        | Some[(occs',id),l] when
            cls.concl_occs=no_occurrences_expr && occs=all_occurrences ->
            Some (on_snd (List.map out_arg) occs',c),
            {cls with onhyps=Some[(all_occurrences_expr,id),l]}
        | _ ->
            occl,cls in
  (* Warn when remaining "at" information is ignored. *)
  if cls.onhyps <> None &&
    List.exists (fun ((occs,_),_) -> occs <> all_occurrences_expr)
      (Option.get cls.onhyps)
    || cls.concl_occs <> all_occurrences_expr &&
       cls.concl_occs <> no_occurrences_expr
  then
    Flags.if_verbose Pp.msg_warning
      (if cls.onhyps=Some[] then
         str "Trailing \"at\" modifier not taken into account."
       else
         str "\"at\" modifier in clause \"in\" not taken into account.");
  if cls.concl_occs=no_occurrences_expr then cls
  else {cls with concl_occs=all_occurrences_expr}
(* The user-level [change] tactic: adjust the clause then apply the
   conversion-checked replacement at each location. *)
let change occl c cls =
  onClauses (change_option occl c) (adjust_clause occl cls)
(* Standard reduction tactics, specialised from the generic combinators. *)
let red_in_concl = reduct_in_concl (red_product,DEFAULTcast)
let red_in_hyp = reduct_in_hyp red_product
let red_option = reduct_option (red_product,DEFAULTcast)
let hnf_in_concl = reduct_in_concl (hnf_constr,DEFAULTcast)
let hnf_in_hyp = reduct_in_hyp hnf_constr
let hnf_option = reduct_option (hnf_constr,DEFAULTcast)
let simpl_in_concl = reduct_in_concl (simpl,DEFAULTcast)
let simpl_in_hyp = reduct_in_hyp simpl
let simpl_option = reduct_option (simpl,DEFAULTcast)
let normalise_in_concl = reduct_in_concl (compute,DEFAULTcast)
let normalise_in_hyp = reduct_in_hyp compute
let normalise_option = reduct_option (compute,DEFAULTcast)
(* vm_compute in the conclusion uses a VM cast so the kernel re-checks
   with the virtual machine. *)
let normalise_vm_in_concl = reduct_in_concl (Redexpr.cbv_vm,VMcast)
let unfold_in_concl loccname = reduct_in_concl (unfoldn loccname,DEFAULTcast)
let unfold_in_hyp loccname = reduct_in_hyp (unfoldn loccname)
let unfold_option loccname = reduct_option (unfoldn loccname,DEFAULTcast)
let pattern_option l = reduct_option (pattern_occs l,DEFAULTcast)
(* A function which reduces accordingly to a reduction expression,
   as the command Eval does. *)
(* Wrap the reduction in a full typing re-check for the reduction
   expressions whose user-provided terms may yield ill-typed contexts.
   NOTE(review): the stripped-out comment markers inside this definition
   have been restored so the code parses again. *)
let checking_fun = function
  (* Expansion is not necessarily well-typed: e.g. expansion of t into x is
     not well-typed in [H:(P t); x:=t |- G] because x is defined after H *)
  | Fold _ -> with_check
  | Pattern _ -> with_check
  | _ -> (fun x -> x)
(* The user-level [reduce] tactic: interpret [redexp] and run it over the
   clause [cl], re-type-checking for Fold/Pattern (see [checking_fun]). *)
let reduce redexp cl goal =
  let red = Redexpr.reduction_of_red_expr redexp in
  match redexp with
      (Fold _|Pattern _) -> with_check (redin_combinator red cl) goal
    | _ -> redin_combinator red cl goal
(* Unfold a global reference in the conclusion; only constants and
   section variables are unfoldable. *)
let unfold_constr = function
  | ConstRef sp -> unfold_in_concl [all_occurrences,EvalConstRef sp]
  | VarRef id -> unfold_in_concl [all_occurrences,EvalVarRef id]
  | _ -> errorlabstrm "unfold_constr" (str "Cannot unfold a non-constant.")
(* Default naming of introduced hypotheses: "H" in Prop, "X" in Type,
   unless the binder already carries a name. *)
let id_of_name_with_default id = function
  | Anonymous -> id
  | Name id   -> id
let hid = id_of_string "H"
let xid = id_of_string "X"
let default_id_of_sort = function Prop _ -> hid | Type _ -> xid
let default_id env sigma = function
  | (name,None,t) ->
      let dft = default_id_of_sort (Typing.sort_of env sigma t) in
      id_of_name_with_default dft name
  | (name,Some b,_) -> id_of_name_using_hdchar env b name
(* Non primitive introduction tactics.
   There is possibly renaming, with possibly names to avoid and
   possibly a move to do after the introduction *)
(* How the name of an introduced hypothesis is to be chosen. *)
type intro_name_flag =
  | IntroAvoid of identifier list        (* fresh default name, avoiding these *)
  | IntroBasedOn of identifier * identifier list (* fresh name based on this one *)
  | IntroMustBe of identifier            (* exactly this name, or fail *)
(* Pick the name for introducing [decl], according to the flag. *)
let find_name loc decl gl = function
  | IntroAvoid idl ->
      let id = fresh_id idl (default_id (pf_env gl) gl.sigma decl) gl in id
  | IntroBasedOn (id,idl) -> fresh_id idl id gl
  | IntroMustBe id ->
      let id' = fresh_id [] id gl in
      if id'<>id then user_err_loc (loc,"",pr_id id ++ str" is already used.");
      id'
(* Returns the names that would be created by intros, without doing
   intros.  This function is supposed to be compatible with an
   iteration of [find_name] above.  As [default_id] checks the sort of
   the type to build hyp names, we maintain an environment to be able
   to type dependent hyps. *)
(* Predict the names a sequence of intros over [ctxt] would produce,
   pushing each declaration so later types can be checked. *)
let find_intro_names ctxt gl =
  let _, res = List.fold_right
    (fun decl acc ->
      let wantedname,x,typdecl = decl in
      let env,idl = acc in
      let name = fresh_id idl (default_id env gl.sigma decl) gl in
      let newenv = push_rel (wantedname,x,typdecl) env in
      (newenv,(name::idl)))
    ctxt (pf_env gl , []) in
  List.rev res
(* Introduce [id] then optionally move it to [dest]. *)
let build_intro_tac id = function
  | MoveToEnd true -> introduction id
  | dest -> tclTHEN (introduction id) (move_hyp true id dest)
(* Generic introduction: handles products and let-ins; when [force_flag]
   holds and the goal is not a product, Red-reduce it first and retry. *)
let rec intro_gen loc name_flag move_flag force_flag gl =
  match kind_of_term (pf_concl gl) with
    | Prod (name,t,_) ->
	build_intro_tac (find_name loc (name,None,t) gl name_flag) move_flag gl
    | LetIn (name,b,t,_) ->
	build_intro_tac (find_name loc (name,Some b,t) gl name_flag) move_flag
	  gl
    | _ ->
	if not force_flag then raise (RefinerError IntroNeedsProduct);
	try
	  tclTHEN
	    (reduce (Red true) onConcl)
	    (intro_gen loc name_flag move_flag force_flag) gl
	with Redelimination ->
	  user_err_loc(loc,"Intro",str "No product even after head-reduction.")
(* Common specialisations of [intro_gen]. *)
let intro_mustbe_force id = intro_gen dloc (IntroMustBe id) no_move true
let intro_using id = intro_gen dloc (IntroBasedOn (id,[])) no_move false
let intro_force force_flag = intro_gen dloc (IntroAvoid []) no_move force_flag
let intro = intro_force false
let introf = intro_force true
let intro_avoiding l = intro_gen dloc (IntroAvoid l) no_move false
let introf_move_name destopt = intro_gen dloc (IntroAvoid []) destopt true
(* Introduce each name of the list in turn. *)
let rec intros_using = function
  | []     -> tclIDTAC
  | str::l -> tclTHEN (intro_using str) (intros_using l)
let intros = tclREPEAT (intro_force false)
(* Replace an existing hypothesis [id] by re-introducing under its name. *)
let intro_erasing id = tclTHEN (thin [id]) (introduction id)
(* Position just after [id] in the hypothesis list, for reinsertion. *)
let rec get_next_hyp_position id = function
  | [] -> error ("No such hypothesis: " ^ string_of_id id)
  | (hyp,_,_) :: right ->
      if hyp = id then
	match right with (id,_,_)::_ -> MoveBefore id | [] -> MoveToEnd true
      else
	get_next_hyp_position id right
(* [thin] variant whose error messages speak of "changing" a hypothesis. *)
let thin_for_replacing l gl =
  try Tacmach.thin l gl
  with Evarutil.ClearDependencyError (id,err) -> match err with
  | Evarutil.OccurHypInSimpleClause None ->
      errorlabstrm ""
      (str "Cannot change " ++ pr_id id ++ str ", it is used in conclusion.")
  | Evarutil.OccurHypInSimpleClause (Some id') ->
      errorlabstrm ""
        (str "Cannot change " ++ pr_id id ++
	 strbrk ", it is used in hypothesis " ++ pr_id id' ++ str".")
  | Evarutil.EvarTypingBreak ev ->
      errorlabstrm ""
        (str "Cannot change " ++ pr_id id ++
	 strbrk " without breaking the typing of " ++
	 Printer.pr_existential (pf_env gl) ev ++ str".")
(* Clear [id], introduce it again, and put it back in its old position. *)
let intro_replacing id gl =
  let next_hyp = get_next_hyp_position id (pf_hyps gl) in
  tclTHENLIST
    [thin_for_replacing [id]; introduction id; move_hyp true id next_hyp] gl
(* Reintroduce each name, falling back to plain intro if clearing fails. *)
let intros_replacing ids gl =
  let rec introrec = function
    | [] -> tclIDTAC
    | id::tl ->
	tclTHEN (tclORELSE (intro_replacing id) (intro_using id))
           (introrec tl)
  in
  introrec ids gl
(* Introduce one hypothesis (named or fresh) and move it to [hto]. *)
let intro_move idopt hto = match idopt with
  | None -> intro_gen dloc (IntroAvoid []) hto true
  | Some id -> intro_gen dloc (IntroMustBe id) hto true
(* Locate a quantified hypothesis (by name or non-dependent index) in a
   conclusion. *)
let pf_lookup_hypothesis_as_renamed env ccl = function
  | AnonHyp n -> pf_lookup_index_as_renamed env ccl n
  | NamedHyp id -> pf_lookup_name_as_renamed env ccl id
(* Same, head-reducing the conclusion as long as [red] holds and the
   hypothesis is not found; [None] if reduction gets stuck. *)
let pf_lookup_hypothesis_as_renamed_gen red h gl =
  let env = pf_env gl in
  let rec aux ccl =
    match pf_lookup_hypothesis_as_renamed env ccl h with
      | None when red ->
          aux
	    ((fst (Redexpr.reduction_of_red_expr (Red true)))
	       env (project gl) ccl)
      | x -> x
  in
  try aux (pf_concl gl)
  with Redelimination -> None
(* Is [id] still quantified in the goal (possibly after reduction)? *)
let is_quantified_hypothesis id g =
  match pf_lookup_hypothesis_as_renamed_gen true (NamedHyp id) g with
    | Some _ -> true
    | None -> false
let msg_quantified_hypothesis = function
  | NamedHyp id ->
      str "quantified hypothesis named " ++ pr_id id
  | AnonHyp n ->
      int n ++ str (match n with 1 -> "st" | 2 -> "nd" | _ -> "th") ++
      str " non dependent hypothesis"
(* Number of intros needed to reach [h]; user error if not found. *)
let depth_of_quantified_hypothesis red h gl =
  match pf_lookup_hypothesis_as_renamed_gen red h gl with
    | Some depth -> depth
    | None ->
	errorlabstrm "lookup_quantified_hypothesis"
	  (str "No " ++ msg_quantified_hypothesis h ++
	  strbrk " in current goal" ++
	  (if red then strbrk " even after head-reduction" else mt ()) ++
	  str".")
(* Introduce until the given quantified hypothesis is in context. *)
let intros_until_gen red h g =
  tclDO (depth_of_quantified_hypothesis red h g) intro g
let intros_until_id id = intros_until_gen true (NamedHyp id)
let intros_until_n_gen red n = intros_until_gen red (AnonHyp n)
let intros_until = intros_until_gen true
let intros_until_n = intros_until_n_gen true
let intros_until_n_wored = intros_until_n_gen false
(* Run [tac] on a hypothesis, introducing it first when needed. *)
let try_intros_until tac = function
  | NamedHyp id -> tclTHEN (tclTRY (intros_until_id id)) (tac id)
  | AnonHyp n -> tclTHEN (intros_until_n n) (onLastHyp tac)
(* Introduce hypotheses at the requested positions. *)
let rec intros_move = function
  | [] -> tclIDTAC
  | (hyp,destopt) :: rest ->
      tclTHEN (intro_gen dloc (IntroMustBe hyp) destopt false)
	(intros_move rest)
(* Does [a] occur in the declaration (value or type)? *)
let dependent_in_decl a (_,c,t) =
  match c with
    | None -> dependent a t
    | Some body -> dependent a body || dependent a t
(* Run [tac] on the argument of an induction-like tactic, introducing
   named/anonymous hypotheses first when possible. *)
let onInductionArg tac = function
  | ElimOnConstr (c,lbindc as cbl) ->
      if isVar c & lbindc = NoBindings then
	tclTHEN (tclTRY (intros_until_id (destVar c))) (tac cbl)
      else
	tac cbl
  | ElimOnAnonHyp n ->
      tclTHEN (intros_until_n n) (tclLAST_HYP (fun c -> tac (c,NoBindings)))
  | ElimOnIdent (_,id) ->
      tclTHEN (tclTRY (intros_until_id id)) (tac (mkVar id,NoBindings))
(* Refine with a term of type [hdcty] applied to [argl]; the head is a
   fresh meta cast to [hdcty]. *)
let apply_type hdcty argl gl =
  refine (applist (mkCast (Evarutil.mk_new_meta(),DEFAULTcast, hdcty),argl)) gl
let apply_term hdc argl gl =
  refine (applist (hdc,argl)) gl
(* Generalize the goal over the given hypotheses and immediately
   re-apply, so they appear as products while staying provable. *)
let bring_hyps hyps =
  if hyps = [] then Refiner.tclIDTAC
  else
    (fun gl ->
      let newcl = List.fold_right mkNamedProd_or_LetIn hyps (pf_concl gl) in
      let f = mkCast (Evarutil.mk_new_meta(),DEFAULTcast, newcl) in
      refine_no_check (mkApp (f, instance_from_named_context hyps)) gl)
(* Resolve pending typeclass constraints in the goal's evar map. *)
let resolve_classes gl =
  let env = pf_env gl and evd = project gl in
    if evd = Evd.empty then tclIDTAC gl
    else
      let evd' = Typeclasses.resolve_typeclasses env (Evd.create_evar_defs evd) in
	(tclTHEN (tclEVARS (Evd.evars_of evd')) tclNORMEVAR) gl
(* [cut c]: add [c] as a new subgoal and assume it in the main goal;
   [c] must be a proposition or a type. *)
let cut c gl =
  match kind_of_term (hnf_type_of gl c) with
    | Sort _ ->
        let id=next_name_away_with_default "H" Anonymous (pf_ids_of_hyps gl) in
        let t = mkProd (Anonymous, c, pf_concl gl) in
          tclTHENFIRST
            (internal_cut_rev id c)
            (tclTHEN (apply_type t [mkVar id]) (thin [id]))
            gl
    | _  -> error "Not a proposition or a type."
let cut_intro t = tclTHENFIRST (cut t) intro
(* Replace hypothesis [id] by a new statement [t], proved by [tac]
   (which receives the tactic that closes the goal with the new hyp). *)
let cut_replacing id t tac =
  tclTHENLAST (internal_cut_rev_replace id t)
    (tac (refine_no_check (mkVar id)))
(* Cut each statement of [l] in turn, keeping the original goal first. *)
let cut_in_parallel l =
  let rec prec = function
    | [] -> tclIDTAC
    | h::t -> tclTHENFIRST (cut h) (prec t)
  in
    prec (List.rev l)
(* Report the first remaining meta of [t] as an uninstantiable instance. *)
let error_uninstantiated_metas t clenv =
  let na = meta_name clenv.evd (List.hd (Metaset.elements (metavars_of t))) in
  let id = match na with Name id -> id | _ -> anomaly "unnamed dependent meta"
  in errorlabstrm "" (str "Cannot find an instance for " ++ pr_id id ++ str".")
(* Refine the statement of hypothesis [id] with the value of [clenv]:
   pose dependent evars, optionally resolve typeclasses, then replace
   [id] by the clenv's type, proved by its value.  Rejects remaining
   metas unless [with_evars]. *)
let clenv_refine_in with_evars ?(with_classes=true) id clenv gl =
  let clenv = clenv_pose_dependent_evars with_evars clenv in
  let clenv =
    if with_classes then
      { clenv with evd = Typeclasses.resolve_typeclasses ~fail:(not with_evars) clenv.env clenv.evd }
    else clenv
  in
  let new_hyp_typ = clenv_type clenv in
  if not with_evars & occur_meta new_hyp_typ then
    error_uninstantiated_metas new_hyp_typ clenv;
  let new_hyp_prf = clenv_value clenv in
  tclTHEN
    (tclEVARS (evars_of clenv.evd))
    (cut_replacing id new_hyp_typ
      (fun x gl -> refine_no_check new_hyp_prf gl)) gl
(* Last argument of an application; anomaly otherwise. *)
let last_arg c = match kind_of_term c with
  | App (f,cl) ->
      array_last cl
  | _ -> anomaly "last_arg"
(* Unification flags used for elimination: conversion on closed terms,
   eager metas, no delta. *)
let elim_flags = {
  modulo_conv_on_closed_terms = Some full_transparent_state;
  use_metas_eagerly = true;
  modulo_delta = empty_transparent_state;
}
(* Chain the induction argument [indclause] into the last meta of the
   elimination scheme [elimclause] and refine the goal with the result. *)
let elimination_clause_scheme with_evars allow_K elimclause indclause gl =
  let indmv =
    (match kind_of_term (last_arg elimclause.templval.rebus) with
       | Meta mv -> mv
       | _  -> errorlabstrm "elimination_clause"
             (str "The type of elimination clause is not well-formed."))
  in
  let elimclause' = clenv_fchain indmv elimclause indclause in
  res_pf elimclause' ~with_evars:with_evars ~allow_K:allow_K ~flags:elim_flags
    gl
let type_clenv_binding wc (c,t) lbind =
  clenv_type (make_clenv_binding wc (c,t) lbind)
(*
 * Elimination tactic with bindings and using an arbitrary
 * elimination constant called elimc. This constant should end
 * with a clause (x:I)(P .. ), where P is a bound variable.
 * The term c is of type t, which is a product ending with a type
 * matching I, lbindc are the expected terms for c arguments
 *)
(* Build the clenvs for the induction argument and the eliminator, then
   hand them to [elimtac]. *)
let general_elim_clause elimtac (c,lbindc) (elimc,lbindelimc) gl =
  let ct = pf_type_of gl c in
  let t = try snd (pf_reduce_to_quantified_ind gl ct) with UserError _ -> ct in
  let indclause  = make_clenv_binding gl (c,t) lbindc  in
  let elimt      = pf_type_of gl elimc in
  let elimclause = make_clenv_binding gl (elimc,elimt) lbindelimc in
    elimtac elimclause indclause gl
let general_elim with_evars c e ?(allow_K=true) =
  general_elim_clause (elimination_clause_scheme with_evars allow_K) c e
(* Default eliminator for the inductive type of [c], at the goal's sort. *)
let find_eliminator c gl =
  let (ind,t) = pf_reduce_to_quantified_ind gl (pf_type_of gl c) in
  lookup_eliminator ind (elimination_sort_of_goal gl)
let default_elim with_evars (c,_ as cx) gl =
  general_elim with_evars cx (find_eliminator c gl,NoBindings) gl
let elim_in_context with_evars c = function
  | Some elim -> general_elim with_evars c elim ~allow_K:true
  | None -> default_elim with_evars c
(* [elim]: introduce the argument first when it is a plain variable. *)
let elim with_evars (c,lbindc as cx) elim =
  match kind_of_term c with
    | Var id when lbindc = NoBindings ->
	tclTHEN (tclTRY (intros_until_id id))
	  (elim_in_context with_evars cx elim)
    | _ -> elim_in_context with_evars cx elim
let simplest_elim c = default_elim false (c,NoBindings)
(* Typically, elimclause := (eq_ind ?x ?P ?H ?y ?Heq : ?P ?y)
   indclause : forall ..., hyps -> a=b    (to take place of ?Heq)
   id : phi(a)                            (to take place of ?H)
   and the result is to overwrite id with the proof of phi(b)

   but this generalizes to any elimination scheme with one constructor
   (e.g. it could replace id:A->B->C by id:C, knowing A/\B)
*)
(* [clenv_fchain] wrapper attaching the hypothesis name to any
   "no occurrence found" pretype error. *)
let clenv_fchain_in id elim_flags mv elimclause hypclause =
  try clenv_fchain ~allow_K:false ~flags:elim_flags mv elimclause hypclause
  with PretypeError (env,NoOccurrenceFound (op,_)) ->
    raise (PretypeError (env,NoOccurrenceFound (op,Some id)))
(* Elimination *in a hypothesis*: chain the induction argument and the
   hypothesis into the scheme's two independent metas, then replace the
   hypothesis by the resulting statement; fails if nothing changed. *)
let elimination_in_clause_scheme with_evars id elimclause indclause gl =
  let (hypmv,indmv) =
    match clenv_independent elimclause with
        [k1;k2] -> (k1,k2)
      | _  -> errorlabstrm "elimination_clause"
          (str "The type of elimination clause is not well-formed.") in
  let elimclause'  = clenv_fchain indmv elimclause indclause in
  let hyp = mkVar id in
  let hyp_typ = pf_type_of gl hyp in
  let hypclause = mk_clenv_from_n gl (Some 0) (hyp, hyp_typ) in
  let elimclause'' =
    clenv_fchain_in id elim_flags hypmv elimclause' hypclause in
  let new_hyp_typ  = clenv_type elimclause'' in
  if eq_constr hyp_typ new_hyp_typ then
    errorlabstrm "general_rewrite_in"
      (str "Nothing to rewrite in " ++ pr_id id ++ str".");
  clenv_refine_in with_evars id elimclause'' gl
let general_elim_in with_evars id =
  general_elim_clause (elimination_in_clause_scheme with_evars id)
(* Case analysis: use the dependent case scheme only when [c] occurs in
   the conclusion. *)
let general_case_analysis_in_context with_evars (c,lbindc) gl =
  let (mind,_) = pf_reduce_to_quantified_ind gl (pf_type_of gl c) in
  let sort     = elimination_sort_of_goal gl in
  let case =
    if occur_term c (pf_concl gl) then make_case_dep else make_case_gen in
  let elim     = pf_apply case gl mind sort in
  general_elim with_evars (c,lbindc) (elim,NoBindings) gl
(* Introduce the scrutinee first when it is a plain variable. *)
let general_case_analysis with_evars (c,lbindc as cx) =
  match kind_of_term c with
    | Var id when lbindc = NoBindings ->
	tclTHEN (tclTRY (intros_until_id id))
	(general_case_analysis_in_context with_evars cx)
    | _ ->
	general_case_analysis_in_context with_evars cx
let simplest_case c = general_case_analysis false (c,NoBindings)
(* If the type of [c] is a record-like inductive (one constructor), case
   on it, introduce its components, and try [tac] on each of them;
   otherwise run [exit]. *)
let descend_in_conjunctions with_evars tac exit c gl =
  try
    let (mind,t) = pf_reduce_to_quantified_ind gl (pf_type_of gl c) in
    match match_with_record (snd (decompose_prod t)) with
      | Some _ ->
	  let n = (mis_constr_nargs mind).(0) in
	  let sort = elimination_sort_of_goal gl in
	  let elim = pf_apply make_case_gen gl mind sort in
	  tclTHENLAST
	    (general_elim with_evars (c,NoBindings) (elim,NoBindings))
	    (tclTHENLIST [
	      tclDO n intro;
	      tclLAST_NHYPS n (fun l ->
		tclFIRST
		  (List.map (fun id -> tclTHEN (tac (mkVar id)) (thin l)) l))])
             gl
      | None ->
	  raise Exit
  with RefinerError _|UserError _|Exit -> exit ()
(* Fail unless every evar created during the tactic (not already in the
   original goal) has been instantiated. *)
let check_evars sigma evm gl =
  let origsigma = gl.sigma in
  let rest =
    Evd.fold (fun ev evi acc ->
      if not (Evd.mem origsigma ev) && not (Evd.is_defined sigma ev)
      then Evd.add acc ev evi else acc)
      evm Evd.empty
  in
  if rest <> Evd.empty then
    errorlabstrm "apply" (str"Uninstantiated existential variables: " ++
      fnl () ++ pr_evar_map rest)
(* Core of the [apply] tactic: unify the conclusion of the type of [c]
   (with bindings [lbind]) against the goal.  On failure the theorem's
   type is head-reduced and retried; as a last resort second-order
   unification is attempted, and with [with_destruct] the argument is
   decomposed through conjunctions.  Delta is used in unification only
   when [with_delta]; leftover evars are rejected unless [with_evars].
   NOTE(review): the stripped-out comment marker in the Redelimination
   branch has been restored so the definition parses again. *)
let general_apply with_delta with_destruct with_evars (c,lbind) gl0 =
  let flags =
    if with_delta then default_unify_flags else default_no_delta_unify_flags in
  let concl_nprod = nb_prod (pf_concl gl0) in
  let evm, c = c in
  let rec try_main_apply c gl =
    let thm_ty0 = nf_betaiota (project gl) (pf_type_of gl c) in
    let try_apply thm_ty nprod =
      let n = nb_prod thm_ty - nprod in
      if n<0 then error "Applied theorem has not enough premisses.";
      let clause = make_clenv_binding_apply gl (Some n) (c,thm_ty) lbind in
      let res = Clenvtac.res_pf clause ~with_evars:with_evars ~flags:flags gl in
      if not with_evars then check_evars (fst res).sigma evm gl0;
      res
    in
    try try_apply thm_ty0 concl_nprod
    with PretypeError _|RefinerError _|UserError _|Failure _ as exn ->
      let rec try_red_apply thm_ty =
        try
          (* Head-reduce the theorem's type and retry the unification. *)
          let red_thm = try_red_product (pf_env gl) (project gl) thm_ty in
          try try_apply red_thm concl_nprod
          with PretypeError _|RefinerError _|UserError _|Failure _ ->
            try_red_apply red_thm
        with Redelimination ->
          (* Last chance: if the head is a variable, apply may try
             second order unification *)
          try if concl_nprod <> 0 then try_apply thm_ty 0 else raise Exit
          with PretypeError _|RefinerError _|UserError _|Failure _|Exit ->
            if with_destruct then
              descend_in_conjunctions with_evars
                try_main_apply (fun _ -> raise exn) c gl
            else
              raise exn
      in try_red_apply thm_ty0
  in
  if evm = Evd.empty then try_main_apply c gl0
  else
    tclTHEN (tclEVARS (Evd.merge gl0.sigma evm)) (try_main_apply c) gl0
(* Apply a list of terms, each to the last generated subgoal. *)
let rec apply_with_ebindings_gen b e = function
  | [] ->
      tclIDTAC
  | [cb] ->
      general_apply b b e cb
  | cb::cbl ->
      tclTHENLAST (general_apply b b e cb) (apply_with_ebindings_gen b e cbl)
(* Entry points: non-eapply vs eapply, open vs closed terms. *)
let apply_with_ebindings cb = apply_with_ebindings_gen false false [cb]
let eapply_with_ebindings cb = apply_with_ebindings_gen false true [cb]
let apply_with_bindings (c,bl) =
  apply_with_ebindings (inj_open c,inj_ebindings bl)
let eapply_with_bindings (c,bl) =
  apply_with_ebindings_gen false true [inj_open c,inj_ebindings bl]
let apply c =
  apply_with_ebindings (inj_open c,NoBindings)
(* Head term with implicit arguments; the list is never empty. *)
let apply_list = function
  | c::l -> apply_with_bindings (c,ImplicitBindings l)
  | _ -> assert false
(* Refine with the clenv of [c]'s type, without reduction attempts. *)
let apply_without_reduce c gl =
  let clause = mk_clenv_type_of gl c in
  res_pf clause gl
(* [apply_in hyp c] replaces

   hyp : forall y1, ti -> t             hyp : rho(u)
   ========================    with    ============  and the subgoals
   goal                                 goal          rho(ti)

   assuming that [c] has type [forall x1..xn -> t' -> u] for some [t]
   unifiable with [t'] with unifier [rho]
*)
(* Repeatedly push products of [clause] until [unifier] succeeds. *)
let find_matching_clause unifier clause =
  let rec find clause =
    try unifier clause
    with exn when catchable_exception exn ->
    try find (clenv_push_prod clause)
    with NotExtensibleClause -> failwith "Cannot apply"
  in find clause
(* Try to chain [innerclause] into each independent meta of [clause],
   in reverse order, until one unifies. *)
let progress_with_clause flags innerclause clause =
  let ordered_metas = List.rev (clenv_independent clause) in
  if ordered_metas = [] then error "Statement without assumptions.";
  let f mv =
    find_matching_clause (clenv_fchain mv ~flags clause) innerclause in
  try list_try_find f ordered_metas
  with Failure _ -> error "Unable to unify."
(* Build the clenv for [d] (with bindings) and chain the hypothesis
   clause into it, pushing products as needed. *)
let apply_in_once_main flags innerclause (d,lbind) gl =
  let thm = nf_betaiota gl.sigma (pf_type_of gl d) in
  let rec aux clause =
    try progress_with_clause flags innerclause clause
    with err ->
    try aux (clenv_push_prod clause)
    with NotExtensibleClause -> raise err in
  aux (make_clenv_binding gl (d,thm) lbind)
(* [apply ... in id]: rewrite hypothesis [id] with [d]; with
   [with_destruct], fall back to decomposing [d] through conjunctions. *)
let apply_in_once with_delta with_destruct with_evars id ((sigma,d),lbind) gl0 =
  let flags =
    if with_delta then default_unify_flags else default_no_delta_unify_flags in
  let t' = pf_get_hyp_typ gl0 id in
  let innerclause = mk_clenv_from_n gl0 (Some 0) (mkVar id,t') in
  let rec aux c gl =
    try
      let clause = apply_in_once_main flags innerclause (c,lbind) gl in
      let res = clenv_refine_in with_evars id clause gl in
      if not with_evars then check_evars (fst res).sigma sigma gl0;
      res
    with exn when with_destruct ->
      descend_in_conjunctions true aux (fun _ -> raise exn) c gl
  in
  if sigma = Evd.empty then aux d gl0
  else
    tclTHEN (tclEVARS (Evd.merge gl0.sigma sigma)) (aux d) gl0
(* A useful resolution tactic which, if c:A->B, transforms |- C into
   |- B -> C and |- A

   -------------------
   Gamma |- c : A -> B      Gamma |- ?2 : A
   ----------------------------------------
             Gamma |- B                      Gamma |- ?1 : B -> C
   -----------------------------------------------------
                  Gamma |- ? : C

   Ltac lapply c :=
     let ty := check c in
     match eval hnf in ty with
       ?A -> ?B => cut B; [ idtac | apply c ]
     end.
*)
(* [lapply]: for [c : A -> B] with [B] not depending on [A], cut [B]
   into the goal and prove it by applying [c]. *)
let cut_and_apply c gl =
  let goal_constr = pf_concl gl in
    match kind_of_term (pf_hnf_constr gl (pf_type_of gl c)) with
      | Prod (_,c1,c2) when not (dependent (mkRel 1) c2) ->
	  tclTHENLAST
	    (apply_type (mkProd (Anonymous,c2,goal_constr)) [mkMeta(new_meta())])
	    (apply_term c [mkMeta (new_meta())]) gl
      | _ -> error "lapply needs a non-dependent product."
(* [exact]: close the goal with [c], after checking its type is a
   subtype of the conclusion. *)
let exact_check c gl =
  let concl = (pf_concl gl) in
  let ct = pf_type_of gl c in
  if pf_conv_x_leq gl ct concl then
    refine_no_check c gl
  else
    error "Not an exact proof."
let exact_no_check = refine_no_check
(* Close the goal with [c], re-checked by the VM via a VMcast. *)
let vm_cast_no_check c gl =
  let concl = pf_concl gl in
  refine_no_check (Term.mkCast(c,Term.VMcast,concl)) gl
(* Interpret a raw term against the conclusion's type and close with it. *)
let exact_proof c gl =
  let c = Constrintern.interp_casted_constr (project gl) (pf_env gl) c (pf_concl gl)
  in refine_no_check c gl
(* [assumption]: close the goal with a hypothesis; first pass looks for
   a syntactically equal type, second pass uses conversion. *)
let (assumption : tactic) = fun gl ->
  let concl =  pf_concl gl in
  let hyps = pf_hyps gl in
  let rec arec only_eq = function
    | [] ->
        if only_eq then arec false hyps else error "No such assumption."
    | (id,c,t)::rest ->
	if (only_eq & eq_constr t concl)
          or (not only_eq & pf_conv_x_leq gl t concl)
        then refine_no_check (mkVar id) gl
	else arec only_eq rest
  in
  arec true hyps
(* Remove hypotheses [ids] from the context; a no-op on the empty list.
   NOTE(review): the "let clear ids =" header had been swallowed together
   with a stripped comment opener, leaving the body dangling; it is
   reconstructed here from the visible body and call sites below. *)
let clear ids = (* avant seul dyn_clear n'echouait pas en [] *)
  if ids=[] then tclIDTAC else thin ids
let clear_body = thin_body
(* Clear the hypotheses introduced for wildcard intro patterns, turning
   clear-dependency failures into located errors naming "_".
   NOTE(review): the stripped-out comment marker before the raise has
   been restored so the definition parses again. *)
let clear_wildcards ids =
  tclMAP (fun (loc,id) gl ->
    try with_check (Tacmach.thin_no_check [id]) gl
    with ClearDependencyError (id,err) ->
      (* Intercept standard [thin] error message *)
      Stdpp.raise_with_loc loc
        (error_clear_dependency (pf_env gl) (id_of_string "_") err))
    ids
(* Introduce one hypothesis per list element; when the element is
   [true], immediately clear the hypothesis just introduced. *)
let rec intros_clearing = function
  | [] -> tclIDTAC
  | do_clear :: rest ->
      let step =
        if do_clear then tclTHEN intro (onLastHyp (fun id -> clear [id]))
        else intro
      in
      tclTHEN step (intros_clearing rest)
(* [specialize mopt (c,lbind)]: instantiate the (possibly universally
   quantified) term [c] with the bindings [lbind], keeping at most
   [mopt] arguments (or all meta-free leading arguments), and replace
   the corresponding hypothesis (when [c] is headed by one) or cut the
   specialised statement into the goal. *)
let specialize mopt (c,lbind) g =
  let evars, term =
    if lbind = NoBindings then None, c
    else
      let clause = make_clenv_binding g (c,pf_type_of g c) lbind in
      let clause = clenv_unify_meta_types clause in
      let (thd,tstack) =
        whd_stack (evars_of clause.evd) (clenv_value clause) in
      let nargs = List.length tstack in
      let tstack = match mopt with
	| Some m ->
	    if m < nargs then list_firstn m tstack else tstack
	| None ->
            (* Keep the longest meta-free prefix of the arguments. *)
	    let rec chk = function
	      | [] -> []
	      | t::l -> if occur_meta t then [] else t :: chk l
	    in chk tstack
      in
      let term = applist(thd,tstack) in
      if occur_meta term then
	errorlabstrm "" (str "Cannot infer an instance for " ++
          pr_name (meta_name clause.evd (List.hd (collect_metas term))) ++
	  str ".");
      Some (evars_of clause.evd), term
  in
  tclTHEN
    (match evars with Some e -> tclEVARS e | _ -> tclIDTAC)
    (match kind_of_term (fst(decompose_app (snd(decompose_lam_assum c)))) with
       | Var id when List.mem id (pf_ids_of_hyps g) ->
           (* [c] is headed by a hypothesis: replace it in place. *)
	   tclTHENFIRST
	     (fun g -> internal_cut_replace id (pf_type_of g term) g)
	     (exact_no_check term)
       | _ -> tclTHENLAST
	   (fun g -> cut (pf_type_of g term) g)
	   (exact_no_check term))
    g
(* Keep only [hyps] plus whatever the kept hypotheses or the conclusion
   depend on; clear everything else. *)
let keep hyps gl =
  let env = Global.env() in
  let ccl = pf_concl gl in
  let cl,_ =
    fold_named_context_reverse (fun (clear,keep) (hyp,_,_ as decl) ->
      if List.mem hyp hyps
	or List.exists (occur_var_in_decl env hyp) keep
	or occur_var env hyp ccl
      then (clear,decl::keep)
      else (hyp::clear,keep))
      ~init:([],[]) (pf_env gl)
  in thin cl gl
(* Validate constructor index [i] against [nconstr], the actual number
   of constructors, and the optionally expected count [expctdnumopt].
   Indices are 1-based; every failure is a user error. *)
let check_number_of_constructors expctdnumopt i nconstr =
  if i=0 then error "The constructors are numbered starting from 1.";
  (match expctdnumopt with
   | Some n when n <> nconstr ->
       error ("Not an inductive goal with "^
              string_of_int n^plural n " constructor"^".")
   | _ -> ());
  if i > nconstr then error "Not enough constructors."
(* Apply the [i]-th constructor of the goal's inductive type, after
   checking [i] is in range (and matches the expected count if given). *)
let constructor_tac with_evars expctdnumopt i lbind gl =
  let cl = pf_concl gl in
  let (mind,redcl) = pf_reduce_to_quantified_ind gl cl in
  let nconstr =
    Array.length (snd (Global.lookup_inductive mind)).mind_consnames in
  check_number_of_constructors expctdnumopt i nconstr;
  let cons = mkConstruct (ith_constructor_of_inductive mind i) in
  let apply_tac = general_apply true false with_evars (inj_open cons,lbind) in
  (tclTHENLIST
     [convert_concl_no_check redcl DEFAULTcast; intros; apply_tac]) gl
let one_constructor i = constructor_tac false None i
(* Try to apply the constructors of the inductive definition, each
   followed by a tactic t given as an argument.
   Should be generalized in Constructor (Fun c : I -> tactic)
*)
(* Try each constructor in turn, followed by [tacopt] (default: idtac);
   fail if the inductive type has no constructor. *)
let any_constructor with_evars tacopt gl =
  let t = match tacopt with None -> tclIDTAC | Some t -> t in
  let mind = fst (pf_reduce_to_quantified_ind gl (pf_concl gl)) in
  let nconstr =
    Array.length (snd (Global.lookup_inductive mind)).mind_consnames in
  if nconstr = 0 then error "The type has no constructors.";
  tclFIRST
    (List.map
       (fun i -> tclTHEN (constructor_tac with_evars None i NoBindings) t)
       (interval 1 nconstr)) gl
(* [left]/[right] expect a 2-constructor inductive; [split] expects one. *)
let left_with_ebindings with_evars = constructor_tac with_evars (Some 2) 1
let right_with_ebindings with_evars = constructor_tac with_evars (Some 2) 2
let split_with_ebindings with_evars = constructor_tac with_evars (Some 1) 1
let left l = left_with_ebindings false (inj_ebindings l)
let simplest_left = left NoBindings
let right l = right_with_ebindings false (inj_ebindings l)
let simplest_right = right NoBindings
let split l = split_with_ebindings false (inj_ebindings l)
let simplest_split = split NoBindings
(* Forward reference to the rewriting tactic, registered later to break
   the module dependency cycle. *)
let forward_general_multi_rewrite =
  ref (fun _ -> failwith "general_multi_rewrite undefined")
let register_general_multi_rewrite f =
  forward_general_multi_rewrite := f
(* Error for an intro pattern appearing past the expected arity. *)
let error_unexpected_extra_pattern loc nb pat =
  let s1,s2,s3 = match pat with
  | IntroIdentifier _ -> "name", (plural nb " introduction pattern"), "no"
  | _ -> "introduction pattern", "", "none" in
  user_err_loc (loc,"",str "Unexpected " ++ str s1 ++ str " (" ++
    (if nb = 0 then (str s3 ++ str s2) else
      (str "at most " ++ int nb ++ str s2)) ++ spc () ++
    str (if nb = 1 then "was" else "were") ++
    strbrk " expected in the branch).")
(* Destruct hypothesis [id] and dispatch the branches of the intro
   pattern [ll] (padded or truncated to each constructor's arity when
   [bracketed]) to [tac], appending the trailing patterns [l']. *)
let intro_or_and_pattern loc b ll l' tac id gl =
  let c = mkVar id in
  let ind,_ = pf_reduce_to_quantified_ind gl (pf_type_of gl c) in
  let nv = mis_constr_nargs ind in
  let bracketed = b or not (l'=[]) in
  let rec adjust_names_length nb n = function
    | [] when n = 0 or not bracketed -> []
    | [] -> (dloc,IntroAnonymous) :: adjust_names_length nb (n-1) []
    | (loc',pat) :: _ as l when n = 0 ->
	if bracketed then error_unexpected_extra_pattern loc' nb pat;
	l
    | ip :: l -> ip :: adjust_names_length nb (n-1) l in
  let ll = fix_empty_or_and_pattern (Array.length nv) ll in
  check_or_and_pattern_size loc ll (Array.length nv);
  tclTHENLASTn
    (tclTHEN (simplest_case c) (clear [id]))
    (array_map2 (fun n l -> tac ((adjust_names_length n n l)@l'))
       nv (Array.of_list ll))
    gl
(* Rewrite everywhere with the equation proved by hypothesis [id]
   ([l2r] gives the orientation), then clear [id]; when one side is a
   variable not occurring in the other side, that variable is cleared
   as well (substitution-like behaviour).  Fails if the hypothesis is
   not recognised as an equality. *)
let rewrite_hyp l2r id gl =
  let rew_on l2r =
    !forward_general_multi_rewrite l2r false (inj_open (mkVar id),NoBindings) in
  let clear_var_and_eq c =
    tclTRY (tclTHEN (clear [id]) (tclTRY (clear [destVar c]))) in
  let t = pf_whd_betadeltaiota gl (pf_type_of gl (mkVar id)) in
  TODO : detect equality ? better detect the different equalities
  match match_with_equality_type t with
  | Some (hdcncl,[_;lhs;rhs]) ->
      (* binary equation: substitute whichever side is a free variable *)
      if l2r & isVar lhs & not (occur_var (pf_env gl) (destVar lhs) rhs) then
        tclTHEN (rew_on l2r allClauses) (clear_var_and_eq lhs) gl
      else if not l2r & isVar rhs & not (occur_var (pf_env gl) (destVar rhs) lhs) then
        tclTHEN (rew_on l2r allClauses) (clear_var_and_eq rhs) gl
      else
        tclTHEN (rew_on l2r onConcl) (tclTRY (clear [id])) gl
  | Some (hdcncl,[c]) ->
      (* unary case (e.g. reflexive predicate applied to one argument) *)
      if isVar c then
        tclTHEN (rew_on l2r allClauses) (clear_var_and_eq c) gl
      else
        tclTHEN (rew_on l2r onConcl) (tclTRY (clear [id])) gl
  | _ ->
      error "Cannot find a known equation."
(* Collect the identifiers explicitly named by a list of intro-patterns
   (descending into or/and sub-patterns); used to compute the set of
   names that fresh-name generation must avoid. *)
let rec explicit_intro_names = function
  | (_, IntroIdentifier id) :: l ->
      id :: explicit_intro_names l
  | (_, (IntroWildcard | IntroAnonymous | IntroFresh _ | IntroRewrite _)) :: l ->
      explicit_intro_names l
  | (_, IntroOrAndPattern ll) :: l' ->
      List.flatten (List.map (fun l -> explicit_intro_names (l@l')) ll)
  | [] ->
      []
(* We delay thinning until the completion of the whole intros tactic
   to ensure that dependent hypotheses are cleared in the right
   dependency order (see bug #1000); we use fresh names, not used in
   the tactic, for the hyps to clear *)
(* Interpret a list of introduction patterns.  [b] is propagated to the
   or/and sub-interpreter; [avoid] are names to stay away from; [thin]
   accumulates wildcard hypotheses whose clearing is delayed to the end
   (see comment above); [destopt] says where to move introduced
   hypotheses. *)
let rec intros_patterns b avoid thin destopt = function
  | (loc, IntroWildcard) :: l ->
      (* introduce under a fresh name; clear it now if possible,
         otherwise record it in [thin] for delayed clearing *)
      tclTHEN
        (intro_gen loc (IntroAvoid(avoid@explicit_intro_names l)) no_move true)
        (onLastHyp (fun id ->
          tclORELSE
            (tclTHEN (clear [id]) (intros_patterns b avoid thin destopt l))
            (intros_patterns b avoid ((loc,id)::thin) destopt l)))
  | (loc, IntroIdentifier id) :: l ->
      tclTHEN
        (intro_gen loc (IntroMustBe id) destopt true)
        (intros_patterns b avoid thin destopt l)
  | (loc, IntroAnonymous) :: l ->
      tclTHEN
        (intro_gen loc (IntroAvoid (avoid@explicit_intro_names l))
          destopt true)
        (intros_patterns b avoid thin destopt l)
  | (loc, IntroFresh id) :: l ->
      tclTHEN
        (intro_gen loc (IntroBasedOn (id, avoid@explicit_intro_names l))
          destopt true)
        (intros_patterns b avoid thin destopt l)
  | (loc, IntroOrAndPattern ll) :: l' ->
      (* introduce, then destruct the freshly introduced hypothesis *)
      tclTHEN
        introf
        (onLastHyp
          (intro_or_and_pattern loc b ll l'
            (intros_patterns b avoid thin destopt)))
  | (loc, IntroRewrite l2r) :: l ->
      (* introduce an equation and immediately rewrite with it *)
      tclTHEN
        (intro_gen loc (IntroAvoid(avoid@explicit_intro_names l)) no_move true)
        (onLastHyp (fun id ->
          tclTHEN
            (rewrite_hyp l2r id)
            (intros_patterns b avoid thin destopt l)))
  | [] -> clear_wildcards thin
(* Entry points: interpret intro-patterns with no names to avoid and no
   pending wildcard clearings. *)
let intros_pattern = intros_patterns false [] []
let intro_pattern destopt pat = intros_patterns false [] [] destopt [dloc,pat]
let intro_patterns = function
  | [] -> tclREPEAT intro (* no explicit pattern: introduce everything *)
  | l -> intros_pattern no_move l
(* Fresh hypothesis name based on the default name for sort [s]. *)
let make_id s = fresh_id [] (default_id_of_sort s)
(* From an optional intro-pattern, compute the name under which to
   introduce a cut hypothesis of sort [s] and the follow-up tactic that
   realises the pattern (no-op, delayed clearing, rewriting, or
   destruction). *)
let prepare_intros s ipat gl = match ipat with
  | None -> make_id s gl, tclIDTAC
  | Some (loc,ipat) -> match ipat with
    | IntroIdentifier id -> id, tclIDTAC
    | IntroAnonymous -> make_id s gl, tclIDTAC
    | IntroFresh id -> fresh_id [] id gl, tclIDTAC
    | IntroWildcard -> let id = make_id s gl in id, clear_wildcards [dloc,id]
    | IntroRewrite l2r ->
        let id = make_id s gl in
        id, !forward_general_multi_rewrite l2r false (inj_open (mkVar id),NoBindings) allClauses
    | IntroOrAndPattern ll -> make_id s gl,
        onLastHyp
          (intro_or_and_pattern loc true ll []
            (intros_patterns true [] [] no_move))
(* Turn a binder name into the corresponding introduction pattern
   ([None] for an anonymous binder). *)
let ipat_of_name = function
  | Anonymous -> None
  | Name id -> Some (dloc, IntroIdentifier id)
| Some (_, IntroIdentifier id) ->
fst (decompose_app (snd (decompose_lam_assum c))) = mkVar id
| _ ->
false
(* [assert_as first ipat c] cuts [c] (which must be a proposition or a
   type): one subgoal proves [c], the other receives it as a hypothesis
   introduced according to [ipat].  [first] chooses whether the proof
   obligation comes first or second. *)
let assert_as first ipat c gl =
  match kind_of_term (hnf_type_of gl c) with
  | Sort s ->
      let id,tac = prepare_intros s ipat gl in
      let repl = allow_replace c gl ipat in
      tclTHENS
        ((if first then internal_cut_gen else internal_cut_rev_gen) repl id c)
        (if first then [tclIDTAC; tac] else [tac; tclIDTAC]) gl
  | _ -> error "Not a proposition or a type."

(* [assert_tac na c]: assert [c], naming the new hypothesis after [na]. *)
let assert_tac na = assert_as true (ipat_of_name na)
(* Apply an "as" intro-pattern [ipat] to the existing hypothesis [id]
   (after an "apply in"): rewrite with it, destruct it, or do nothing;
   simple naming patterns are rejected here. *)
let as_tac id ipat = match ipat with
  | Some (loc,IntroRewrite l2r) ->
      !forward_general_multi_rewrite l2r false (inj_open (mkVar id),NoBindings) allClauses
  | Some (loc,IntroOrAndPattern ll) ->
      intro_or_and_pattern loc true ll [] (intros_patterns true [] [] no_move)
        id
  | Some (loc,
      (IntroIdentifier _ | IntroAnonymous | IntroFresh _ | IntroWildcard)) ->
      user_err_loc (loc,"", str "Disjunctive/conjunctive pattern expected")
  | None -> tclIDTAC
(* Apply each lemma of [lemmas] in hypothesis [id], then process the
   optional "as" intro-pattern [ipat] on the result. *)
let general_apply_in with_delta with_destruct with_evars id lemmas ipat gl =
  tclTHEN
    (tclMAP (apply_in_once with_delta with_destruct with_evars id) lemmas)
    (as_tac id ipat)
    gl

(* [apply_in simple] passes [simple] for both the delta and destruct
   flags of [general_apply_in]. *)
let apply_in simple with_evars = general_apply_in simple simple with_evars
(* Generalize tactics *)
let generalized_name c t ids cl = function
| Name id as na ->
if List.mem id ids then
errorlabstrm "" (pr_id id ++ str " is already used");
na
| Anonymous ->
match kind_of_term c with
| Var id ->
Name id
| _ ->
if noccurn 1 cl then Anonymous else
      (* On ne s'etait pas casse la tete : on avait pris pour nom de
         variable la premiere lettre du type, meme si "c" avait ete une
         constante dont on aurait pu prendre directement le nom *)
named_hd (Global.env()) t Anonymous
(* Abstract term [c] (at occurrences [occs]) out of the goal [cl],
   producing a product named [na] (or a name derived from [c]'s type);
   [i] is the number of products already generalized, which must be
   skipped before substituting. *)
let generalize_goal gl i ((occs,c),na) cl =
  let t = pf_type_of gl c in
  let decls,cl = decompose_prod_n_assum i cl in
  let dummy_prod = it_mkProd_or_LetIn mkProp decls in
  let newdecls,_ = decompose_prod_n_assum i (subst_term c dummy_prod) in
  let cl' = subst_term_occ occs c (it_mkProd_or_LetIn cl newdecls) in
  let na = generalized_name c t (pf_ids_of_hyps gl) cl' na in
  mkProd (na,t,cl')
(* Generalize [c] together with every hypothesis that (transitively)
   depends on it; the collected hypotheses that do not belong to the
   global section context are cleared afterwards. *)
let generalize_dep c gl =
  let env = pf_env gl in
  let sign = pf_hyps gl in
  let init_ids = ids_of_named_context (Global.named_context()) in
  (* collect, right to left, the hypotheses depending on [c] or on an
     already collected hypothesis *)
  let rec seek d toquant =
    if List.exists (fun (id,_,_) -> occur_var_in_decl env id d) toquant
      or dependent_in_decl c d then
      d::toquant
    else
      toquant in
  let to_quantify = Sign.fold_named_context seek sign ~init:[] in
  let to_quantify_rev = List.rev to_quantify in
  let qhyps = List.map (fun (id,_,_) -> id) to_quantify_rev in
  let tothin = List.filter (fun id -> not (List.mem id init_ids)) qhyps in
  let tothin' =
    match kind_of_term c with
    | Var id when mem_named_context id sign & not (List.mem id init_ids)
        -> id::tothin
    | _ -> tothin
  in
  let cl' = it_mkNamedProd_or_LetIn (pf_concl gl) to_quantify in
  let cl'' = generalize_goal gl 0 ((all_occurrences,c),Anonymous) cl' in
  let args = Array.to_list (instance_from_named_context to_quantify_rev) in
  tclTHEN
    (apply_type cl'' (c::args))
    (thin (List.rev tothin'))
    gl
(* Generalize the goal over a list of (occurrences, term, name)
   specifications, then apply the generalized statement to the terms. *)
let generalize_gen lconstr gl =
  let newcl =
    list_fold_right_i (generalize_goal gl) 0 lconstr (pf_concl gl) in
  apply_type newcl (List.map (fun ((_,c),_) -> c) lconstr) gl

(* Generalize over plain terms, at all occurrences, with derived names. *)
let generalize l =
  generalize_gen (List.map (fun c -> ((all_occurrences,c),Anonymous)) l)
(* Move hypotheses [hyps] back into the goal: generalize the conclusion
   over them, then remove them from the context. *)
let revert hyps gl =
  let terms = List.map mkVar hyps in
  let gen_then_clear = tclTHEN (generalize terms) (clear hyps) in
  gen_then_clear gl
(* Faudra-t-il une version avec plusieurs args de generalize_dep ?
Cela peut-être troublant de faire "Generalize Dependent H n" dans
"n:nat; H:n=n |- P(n)" et d'échouer parce que H a disparu après la
généralisation dépendante par n.

let quantify lconstr =
  List.fold_right
    (fun com tac -> tclTHEN tac (tactic_com generalize_dep c))
    lconstr
    tclIDTAC
*)
(* A dependent cut rule à la sequent calculus
   ------------------------------------------
   Sera simplifiable le jour où il y aura un let in primitif dans constr

   [letin_tac b na c (occ_hyp,occ_ccl) gl] transforms
   [...x1:T1(c),...,x2:T2(c),... |- G(c)] into
   [...x:T;Heqx:(x=c);x1:T1(x),...,x2:T2(x),... |- G(x)] if [b] is false or
   [...x:=c:T;x1:T1(x),...,x2:T2(x),... |- G(x)] if [b] is true

   [occ_hyp,occ_ccl] tells which occurrences of [c] have to be substituted;
   if [occ_hyp = []] and [occ_ccl = None] then [c] is substituted
   wherever it occurs, otherwise [c] is substituted only in hyps
   present in [occ_hyps] at the specified occurrences (everywhere if
   the list of occurrences is empty), and in the goal at the specified
   occurrences if [occ_goal] is not [None];

   if name = Anonymous, the name is build from the first letter of the type;

   The tactic first quantify the goal over x1, x2,... then substitute then
   re-intro x1, x2,... at their initial place ([marks] is internally
   used to remember the place of x1, x2, ...: it is the list of hypotheses on
   the left of each x1, ...).
*)
(* Look up the occurrence specification for hypothesis [id] in a clause;
   [None] means the hypothesis is not selected, and a clause with
   [onhyps = None] selects every hypothesis at all occurrences. *)
let occurrences_of_hyp id cls =
  let rec hyp_occ = function
      [] -> None
    | (((b,occs),id'),hl)::_ when id=id' -> Some ((b,List.map out_arg occs),hl)
    | _::l -> hyp_occ l in
  match cls.onhyps with
      None -> Some (all_occurrences,InHyp)
    | Some l -> hyp_occ l

(* Occurrence specification for the conclusion, or [None] when the
   clause does not select it. *)
let occurrences_of_goal cls =
  if cls.concl_occs = no_occurrences_expr then None
  else Some (on_snd (List.map out_arg) cls.concl_occs)

(* A clause with [onhyps = None] selects every hypothesis. *)
let in_every_hyp cls = (cls.onhyps=None)
(* Implementation with generalisation then re-intro: introduces noise *)
(*
let letin_abstract id c occs gl =
  let env = pf_env gl in
  let compute_dependency _ (hyp,_,_ as d) ctxt =
    let d' =
      try
        match occurrences_of_hyp hyp occs with
          | None -> raise Not_found
          | Some occ ->
              let newdecl = subst_term_occ_decl occ c d in
              if occ = [] & d = newdecl then
                if not (in_every_hyp occs)
                then raise (RefinerError (DoesNotOccurIn (c,hyp)))
                else raise Not_found
              else
                (subst1_named_decl (mkVar id) newdecl, true)
        with Not_found ->
          (d,List.exists
              (fun ((id,_,_),dep) -> dep && occur_var_in_decl env id d) ctxt)
    in d'::ctxt
  in
  let ctxt' = fold_named_context compute_dependency env ~init:[] in
  let compute_marks ((depdecls,marks as accu),lhyp) ((hyp,_,_) as d,b) =
    if b then ((d::depdecls,(hyp,lhyp)::marks), lhyp)
    else (accu, Some hyp) in
  let (depdecls,marks),_ = List.fold_left compute_marks (([],[]),None) ctxt' in
  let ccl = match occurrences_of_goal occs with
    | None -> pf_concl gl
    | Some occ -> subst1 (mkVar id) (subst_term_occ occ c (pf_concl gl))
  in
  (depdecls,marks,ccl)

let letin_tac with_eq name c occs gl =
  let x = id_of_name_using_hdchar (Global.env()) (pf_type_of gl c) name in
  let id =
    if name = Anonymous then fresh_id [] x gl else
      if not (mem_named_context x (pf_hyps gl)) then x else
        error ("The variable "^(string_of_id x)^" is already declared") in
  let (depdecls,marks,ccl)= letin_abstract id c occs gl in
  let t = pf_type_of gl c in
  let tmpcl = List.fold_right mkNamedProd_or_LetIn depdecls ccl in
  let args = Array.to_list (instance_from_named_context depdecls) in
  let newcl = mkNamedLetIn id c t tmpcl in
  let lastlhyp = if marks=[] then None else snd (List.hd marks) in
  tclTHENLIST
    [ apply_type newcl args;
      thin (List.map (fun (id,_,_) -> id) depdecls);
      intro_gen (IntroMustBe id) lastlhyp false;
      if with_eq then tclIDTAC else thin_body [id];
      intros_move marks ] gl
*)
(* Implementation without generalisation: abbrev will be lost in hyps in *)
(* invisible position *)
(* Abstract the occurrences [occs] of term [c] as the variable [id] in
   the hypotheses and the conclusion.  Returns the modified hypotheses,
   the position after which the let-binding must be introduced, and the
   new conclusion.  [check_occs] controls whether a selected hypothesis
   with no occurrence of [c] is an error. *)
let letin_abstract id c (occs,check_occs) gl =
  let env = pf_env gl in
  let compute_dependency _ (hyp,_,_ as d) depdecls =
    match occurrences_of_hyp hyp occs with
    | None -> depdecls
    | Some occ ->
        let newdecl = subst_term_occ_decl occ c d in
        if occ = (all_occurrences,InHyp) & d = newdecl then
          if check_occs & not (in_every_hyp occs)
          then raise (RefinerError (DoesNotOccurIn (c,hyp)))
          else depdecls
        else
          (subst1_named_decl (mkVar id) newdecl)::depdecls in
  let depdecls = fold_named_context compute_dependency env ~init:[] in
  let ccl = match occurrences_of_goal occs with
    | None -> pf_concl gl
    | Some occ -> subst1 (mkVar id) (subst_term_occ occ c (pf_concl gl)) in
  let lastlhyp =
    if depdecls = [] then no_move else MoveAfter(pi1(list_last depdecls)) in
  (depdecls,lastlhyp,ccl)
(* Core of the "set"/"remember" tactics: bind [c] as a local definition
   [id := c : t] in the goal.  [with_eq = Some (lr,(loc,ipat))]
   additionally introduces an equation between [id] and [c] (oriented
   by [lr]) named according to [ipat]. *)
let letin_tac_gen with_eq name c ty occs gl =
  let id =
    let x = id_of_name_using_hdchar (Global.env()) (pf_type_of gl c) name in
    if name = Anonymous then fresh_id [] x gl else
      if not (mem_named_context x (pf_hyps gl)) then x else
        error ("The variable "^(string_of_id x)^" is already declared.") in
  let (depdecls,lastlhyp,ccl)= letin_abstract id c occs gl in
  let t = match ty with Some t -> t | None -> pf_type_of gl c in
  let newcl,eq_tac = match with_eq with
    | Some (lr,(loc,ido)) ->
        let heq = match ido with
          | IntroAnonymous -> fresh_id [id] (add_prefix "Heq" id) gl
          | IntroFresh heq_base -> fresh_id [id] heq_base gl
          | IntroIdentifier id -> id
          | _ -> error"Expect an introduction pattern naming one hypothesis." in
        let eqdata = build_coq_eq_data () in
        (* orientation: lr gives [id = c], otherwise [c = id] *)
        let args = if lr then [t;mkVar id;c] else [t;c;mkVar id]in
        let eq = applist (eqdata.eq,args) in
        let refl = applist (eqdata.refl, [t;mkVar id]) in
        mkNamedLetIn id c t (mkLetIn (Name heq, refl, eq, ccl)),
        tclTHEN
          (intro_gen loc (IntroMustBe heq) lastlhyp true)
          (thin_body [heq;id])
    | None ->
        mkNamedLetIn id c t ccl, tclIDTAC in
  tclTHENLIST
    [ convert_concl_no_check newcl DEFAULTcast;
      intro_gen dloc (IntroMustBe id) lastlhyp true;
      eq_tac;
      tclMAP convert_hyp_no_check depdecls ] gl
(* Public entry point: [letin_tac_gen] with occurrence checking on. *)
let letin_tac with_eq name c ty occs =
  letin_tac_gen with_eq name c ty (occs,true)
(* [forward (Some tac) ipat c]: assert statement [c] and prove it with
   [tac].  [forward None ipat c]: pose proof term [c], asserting its
   type and closing the side goal with [c] itself.  In both cases the
   new hypothesis is introduced according to [ipat]. *)
let forward usetac ipat c gl =
  match usetac with
  | None ->
      let t = pf_type_of gl c in
      tclTHENFIRST (assert_as true ipat t) (exact_no_check c) gl
  | Some tac ->
      tclTHENFIRST (assert_as true ipat c) tac gl

(* Add the type of proof term [c] as a hypothesis named after [na]. *)
let pose_proof na c = forward None (ipat_of_name na) c
(* Add statement [t] as hypothesis [na], proved by tactic [tac]. *)
let assert_by na t tac = forward (Some tac) (ipat_of_name na) t
(* The two following functions should already exist, but found nowhere *)
(* Replace every occurrence of the local definition [x := xval] by its
   body, in the hypotheses introduced after [x] and in the conclusion.
   Fails if [x] is not a defined hypothesis. *)
let unfold_body x gl =
  let hyps = pf_hyps gl in
  let xval =
    match Sign.lookup_named x hyps with
        (_,Some xval,_) -> xval
      | _ -> errorlabstrm "unfold_body"
          (pr_id x ++ str" is not a defined hypothesis.") in
  let aft = afterHyp x gl in
  let hl = List.fold_right (fun (y,yval,_) cl -> (([],y),InHyp) :: cl) aft [] in
  let xvar = mkVar x in
  let rfun _ _ c = replace_term xvar xval c in
  tclTHENLIST
    [tclMAP (fun h -> reduct_in_hyp rfun h) hl;
     reduct_in_concl (rfun,DEFAULTcast)] gl
(* Unfold the local definition [x] everywhere then clear it; a no-op
   when [x] is an ordinary (non-defined) hypothesis. *)
let unfold_all x gl =
  let (_,xval,_) = pf_get_hyp gl x in
  if xval <> None then tclTHEN (unfold_body x) (clear [x]) gl
  else tclIDTAC gl
(* A "natural" induction tactic

  - [H0:T0, ..., Hi:Ti, hyp0:P->I(args), Hi+1:Ti+1, ..., Hn:Tn |-G] is the goal
  - [hyp0] is the induction hypothesis
  - we extract from [args] the variables which are not rigid parameters
    of the inductive type, this is [indvars] (other terms are forgotten);
    [indhyps] are the ones which actually are declared in context
    (done in [find_atomic_param_of_ind])
  - we look for all hyps depending of [hyp0] or one of [indvars]:
    this is [dephyps] of types [deptyps] respectively
  - [statuslist] tells for each hyps in [dephyps] after which other hyp
    fixed in the context they must be moved (when induction is done)
  - [hyp0succ] is the name of the hyp fixed in the context after which to
    move the subterms of [hyp0succ] in the i-th branch where it is supposed
    to be the i-th constructor of the inductive type.

  Strategy: (cf in [induction_from_context])
  - requantify and clear all [dephyps]
  - apply induction on [hyp0]
  - clear [indhyps] and [hyp0]
  - in the i-th subgoal, intro the arguments of the i-th constructor
    of the inductive type after [hyp0succ] (done in
    [induct_discharge]) let the induction hypotheses on top of the
    hyps because they may depend on variables between [hyp0] and the
    top. A counterpart is that the dep hyps programmed to be intro-ed
    on top must now be intro-ed after the induction hypotheses
  - move each of [dephyps] at the right place following the
    [statuslist]
*)
(* Warn (in verbose mode only) about intro-patterns left unused by a
   destruction/induction. *)
let check_unused_names names =
  if names <> [] & Flags.is_verbose () then
    msg_warning
      (str"Unused introduction " ++ str (plural (List.length names) "pattern")
       ++ str": " ++ prlist_with_sep spc pr_intro_pattern names)
(* First hypothesis name denoted by an intro-pattern, used as a move
   target; known to be unreliable for or-patterns whose branches start
   with different names (hence the name).
   NOTE(review): the match looks non-exhaustive here (no IntroAnonymous
   or IntroFresh case is visible); possibly truncated -- confirm
   against upstream. *)
let rec first_name_buggy avoid gl (loc,pat) = match pat with
  | IntroOrAndPattern [] -> no_move
  | IntroOrAndPattern ([]::l) ->
      first_name_buggy avoid gl (loc,IntroOrAndPattern l)
  | IntroOrAndPattern ((p::_)::_) -> first_name_buggy avoid gl p
  | IntroWildcard -> no_move
  | IntroRewrite _ -> no_move
  | IntroIdentifier id -> MoveAfter id
(* Pop the next intro-pattern from the list, materialising missing,
   anonymous and "fresh" patterns into a concrete identifier based on
   [id], avoiding [avoid] plus the names claimed later in the list. *)
let consume_pattern avoid id gl = function
  | [] -> ((dloc, IntroIdentifier (fresh_id avoid id gl)), [])
  | (loc,IntroAnonymous)::names ->
      let avoid = avoid@explicit_intro_names names in
      ((loc,IntroIdentifier (fresh_id avoid id gl)), names)
  | (loc,IntroFresh id')::names ->
      let avoid = avoid@explicit_intro_names names in
      ((loc,IntroIdentifier (fresh_id avoid id' gl)), names)
  | pat::names -> (pat,names)
let re_intro_dependent_hypotheses tophyp (lstatus,rstatus) =
if some IH has taken place at the top of hyps
List.map (function (hyp,MoveToEnd true) -> (hyp,tophyp) | x -> x) lstatus
in
tclTHEN
(intros_move rstatus)
(intros_move newlstatus)
(* Keep the user-requested destination when one was given; otherwise
   fall back to [tophyp]. *)
let update destopt tophyp = if destopt <> no_move then destopt else tophyp
(* Role of an argument position in an elimination scheme: the recursive
   argument itself, its induction hypothesis, or any other argument. *)
type elim_arg_kind = RecArg | IndArg | OtherArg
(* Introduce, in one branch produced by an elimination, the constructor
   arguments and induction hypotheses described by [ra], naming them
   from the intro-patterns [names] (or generated names), then
   re-introduce the dependent hypotheses recorded in [statuslists]. *)
let induct_discharge statuslists destopt avoid' (avoid,ra) names gl =
  let avoid = avoid @ avoid' in
  let rec peel_tac ra names tophyp gl =
    match ra with
    | (RecArg,recvarname) ::
        (IndArg,hyprecname) :: ra' ->
        (* recursive argument immediately followed by its IH: when a
           single name was given, reuse it and derive "IH<name>" *)
        let recpat,names = match names with
          | [loc,IntroIdentifier id as pat] ->
              let id' = next_ident_away (add_prefix "IH" id) avoid in
              (pat, [dloc, IntroIdentifier id'])
          | _ -> consume_pattern avoid recvarname gl names in
        let hyprec,names = consume_pattern avoid hyprecname gl names in
        IH stays at top : we need to update tophyp
        This is buggy for intro - or - patterns with different first hypnames
        let newtophyp =
          if tophyp=no_move then first_name_buggy avoid gl hyprec else tophyp
        in
        tclTHENLIST
          [ intros_patterns true avoid [] (update destopt tophyp) [recpat];
            intros_patterns true avoid [] no_move [hyprec];
            peel_tac ra' names newtophyp] gl
    | (IndArg,hyprecname) :: ra' ->
        Rem : does not happen in Coq schemes , only in user - defined schemes
        let pat,names = consume_pattern avoid hyprecname gl names in
        tclTHEN (intros_patterns true avoid [] (update destopt tophyp) [pat])
          (peel_tac ra' names tophyp) gl
    | (RecArg,recvarname) :: ra' ->
        let pat,names = consume_pattern avoid recvarname gl names in
        tclTHEN (intros_patterns true avoid [] (update destopt tophyp) [pat])
          (peel_tac ra' names tophyp) gl
    | (OtherArg,_) :: ra' ->
        let pat,names = match names with
          | [] -> (dloc, IntroAnonymous), []
          | pat::names -> pat,names in
        tclTHEN (intros_patterns true avoid [] (update destopt tophyp) [pat])
          (peel_tac ra' names tophyp) gl
    | [] ->
        check_unused_names names;
        re_intro_dependent_hypotheses tophyp statuslists gl
  in
  peel_tac ra names no_move gl
(* - le recalcul de indtyp à chaque itération de atomize_one est pour ne pas
     s'embêter à regarder si un letin_tac ne fait pas des
     substitutions aussi sur l'argument voisin *)

(* Marche pas... faut prendre en compte l'occurrence précise... *)
(* Replace the non-parameter arguments of the inductive statement of
   [hyp0] that are not distinct fresh variables by let-bound variables,
   so that induction can abstract over them. *)
let atomize_param_of_ind (indref,nparams) hyp0 gl =
  let tmptyp0 = pf_get_hyp_typ gl hyp0 in
  let typ0 = pf_apply reduce_to_quantified_ref gl indref tmptyp0 in
  let prods, indtyp = decompose_prod typ0 in
  let argl = snd (decompose_app indtyp) in
  let params = list_firstn nparams argl in
  le gl est important pour ne pas préévaluer
  let rec atomize_one i avoid gl =
    if i<>nparams then
      (* recompute the hypothesis type at each step: a previous
         letin_tac may have substituted inside neighbouring arguments *)
      let tmptyp0 = pf_get_hyp_typ gl hyp0 in
      let indtyp = pf_apply reduce_to_atomic_ref gl indref tmptyp0 in
      let argl = snd (decompose_app indtyp) in
      let c = List.nth argl (i-1) in
      match kind_of_term c with
      | Var id when not (List.exists (occur_var (pf_env gl) id) avoid) ->
          atomize_one (i-1) ((mkVar id)::avoid) gl
      | Var id ->
          (* variable already used: alias it under a fresh name *)
          let x = fresh_id [] id gl in
          tclTHEN
            (letin_tac None (Name x) (mkVar id) None allClauses)
            (atomize_one (i-1) ((mkVar x)::avoid)) gl
      | _ ->
          (* arbitrary term: bind it to a fresh variable *)
          let id = id_of_name_using_hdchar (Global.env()) (pf_type_of gl c)
              Anonymous in
          let x = fresh_id [] id gl in
          tclTHEN
            (letin_tac None (Name x) c None allClauses)
            (atomize_one (i-1) ((mkVar x)::avoid)) gl
    else
      tclIDTAC gl
  in
  atomize_one (List.length argl) params gl
(* Collect the variables occurring as non-parameter arguments of the
   (atomic) applied inductive type [indtyp], excluding variables that
   occur inside the parameters themselves. *)
let find_atomic_param_of_ind nparams indtyp =
  let argl = snd (decompose_app indtyp) in
  let argv = Array.of_list argl in
  let params = list_firstn nparams argl in
  let indvars = ref Idset.empty in
  for i = nparams to (Array.length argv)-1 do
    match kind_of_term argv.(i) with
    | Var id
        when not (List.exists (occur_var (Global.env()) id) params) ->
        indvars := Idset.add id !indvars
    | _ -> ()
  done;
  Idset.elements !indvars;
(* [cook_sign] builds the lists [indhyps] of hyps that must be
   erased, the lists of hyps to be generalize [(hdeps,tdeps)] on the
   goal together with the places [(lstatus,rstatus)] where to re-intro
   them after induction. To know where to re-intro the dep hyp, we
   remember the name of the hypothesis [lhyp] after which (if the dep
   hyp is more recent than [hyp0]) or [rhyp] before which (if older
   than [hyp0]) its equivalent must be moved when the induction has
   been applied. Since computation of dependencies and [rhyp] is from
   more ancient (on the right) to more recent hyp (on the left) but
   the computation of [lhyp] progresses from the other way, [cook_hyp]
   is in two passes (an alternative would have been to write an
   higher-order algorithm). We use references to reduce
   the accumulation of arguments.

   To summarize, the situation looks like this

   Goal(n,x) -| H6:(Q n); x:A; H5:True; H4:(le O n); H3:(P n); H2:True; n:nat
                Left                                                    Right

   Induction hypothesis is H4 ([hyp0])
   Variable parameters of (le O n) is the singleton list with "n" ([indvars])
   Part of [indvars] really in context is the same ([indhyps])
   The dependent hyps are H3 and H6 ([dephyps])
   For H3 the memorized places are H5 ([lhyp]) and H2 ([rhyp])
    because these names are among the hyp which are fixed through the induction
   For H6 the neighbours are None ([lhyp]) and H5 ([rhyp])
   For H3, because on the right of H4, we remember rhyp (here H2)
   For H6, because on the left of H4, we remember lhyp (here None)
   For H4, we remember lhyp (here H5)

   The right neighbour is then translated into the left neighbour
   because move_hyp tactic needs the name of the hyp _after_ which we
   move the hyp to move.

   But, say in the 2nd subgoal of the hypotheses, the goal will be

   (m:nat)((P m)->(Q m)->(Goal m)) -> (P Sm)-> (Q Sm)-> (Goal Sm)

   ^^^^^^^^^^^^^^^^^^^^^^^^^^^          ^^^^
        both go where H4 was        goes where  goes where
                                      H3 was      H6 was

   We have to intro and move m and the recursive hyp first, but then
   where to move H3 ??? Only the hyp on its right is relevant, but we
   have to translate it into the name of the hyp on the left

   Note: this case where some hyp(s) in [dephyps] has(have) the same
   left neighbour as [hyp0] is the only problematic case with right
   neighbours. For the other cases (e.g. an hyp H1:(R n) between n and H2
   would have posed no problem. But for uniformity, we decided to use
   the right hyp for all hyps on the right of H4.

   Others solutions are welcome

   PC 9 fev 06: Adapted to accept multi argument principle with no
   main arg hyp. hyp0 is now optional, meaning that it is possible
   that there is no main induction hypotheses. In this case, we
   consider the last "parameter" (in [indvars]) as the limit between
   "left" and "right", BUT it must be included in indhyps.

   Other solutions are still welcome
*)
(* Used by [cook_sign] to escape the context fold once [hyp0] is met,
   carrying the left neighbour found so far. *)
exception Shunt of identifier move_location
(* Compute, for an induction over [hyp0_opt] with variable arguments
   [indvars]: the status lists for re-introducing dependent hypotheses,
   the move target for the induction hypotheses, the hypotheses to
   erase, and the dependent declarations to generalize (see the long
   specification comment above).
   NOTE(review): [ldeps] is read below but never assigned in the code
   visible here, and no return value is visible after the [indhyps]
   update in the [hyp = hyp0] branch; some code next to original
   comments appears to have been lost -- confirm against upstream. *)
let cook_sign hyp0_opt indvars env =
  let hyp0,inhyps =
    match hyp0_opt with
    | None -> List.hd (List.rev indvars), []
    | Some (hyp0,at_least_in_hyps) -> hyp0, at_least_in_hyps in
  First phase from L to R : get [ indhyps ] , [ decldep ] and [ statuslist ]
  for the hypotheses before (= more ancient than ) hyp0 ( see above )
  for the hypotheses before (= more ancient than) hyp0 (see above) *)
  let allindhyps = hyp0::indvars in
  let indhyps = ref [] in
  let decldeps = ref [] in
  let ldeps = ref [] in
  let rstatus = ref [] in
  let lstatus = ref [] in
  let before = ref true in
  let seek_deps env (hyp,_,_ as decl) rhyp =
    if hyp = hyp0 then begin
      before:=false;
      If there was no main induction hypotheses , then hyp is one of
      indvars too , so add it to indhyps .
      indvars too, so add it to indhyps. *)
      (if hyp0_opt=None then indhyps := hyp::!indhyps);
    end else if List.mem hyp indvars then begin
      e.g. if the goal ( t hyp hyp0 ) with other occs of hyp in t
      indhyps := hyp::!indhyps;
      rhyp
    end else
      if inhyps <> [] && List.mem hyp inhyps || inhyps = [] &&
        (List.exists (fun id -> occur_var_in_decl env id decl) allindhyps ||
         List.exists (fun (id,_,_) -> occur_var_in_decl env id decl) !decldeps)
      then begin
        decldeps := decl::!decldeps;
        if !before then
          rstatus := (hyp,rhyp)::!rstatus
        else
          status computed in 2nd phase
          MoveBefore hyp end
      else
        MoveBefore hyp
  in
  let _ = fold_named_context seek_deps env ~init:(MoveToEnd false) in
  2nd phase from R to L : get left hyp of [ hyp0 ] and [ lhyps ]
  let compute_lstatus lhyp (hyp,_,_) =
    if hyp = hyp0 then raise (Shunt lhyp);
    if List.mem hyp !ldeps then begin
      lstatus := (hyp,lhyp)::!lstatus;
      lhyp
    end else
      if List.mem hyp !indhyps then lhyp else MoveAfter hyp
  in
  try
    let _ =
      fold_named_context_reverse compute_lstatus ~init:(MoveToEnd true) env in
  with Shunt lhyp0 ->
    let statuslists = (!lstatus,List.rev !rstatus) in
    (statuslists, (if hyp0_opt=None then MoveToEnd true else lhyp0),
     !indhyps, !decldeps)
(* The general form of an induction principle is the following:

   forall prm1 prm2 ... prmp,                          (induction parameters)
   forall Q1...,(Qi:Ti_1 -> Ti_2 ->...-> Ti_ni),...Qq, (predicates)
   branch1, branch2, ... , branchr,                    (branches of the principle)
   forall (x1:Ti_1) (x2:Ti_2) ... (xni:Ti_ni),         (induction arguments)
   (HI: I prm1..prmp x1...xni)                         (optional main induction arg)
   -> (Qi x1...xni HI        (f prm1...prmp x1...xni)).(conclusion)
                   ^^        ^^^^^^^^^^^^^^^^^^^^^^^^
               optional        optional argument added if
               even if HI      principle generated by functional
               present above   induction, only if HI does not exist
               [indarg]        [farg]

  HI is not present when the induction principle does not come directly from an
  inductive type (like when it is generated by functional induction for
  example). HI is present otherwise BUT may not appear in the conclusion
  (dependent principle). HI and (f...) cannot be both present.

  Principles taken from functional induction have the final (f...). *)
(* Decomposition of an elimination scheme into the pieces named in the
   diagram above.
   NOTE(review): the field declarations after [indref] were lost when the
   file's comments were stripped; the names below are reconstructed from
   [empty_scheme], which initializes every field — confirm the declared
   types against the original source. *)
type elim_scheme = {
  elimc: constr with_ebindings option; (* constant + bindings of the scheme, if known *)
  elimt: types;                        (* type of the scheme *)
  indref: global_reference option;     (* inductive type eliminated, if any *)
  params: rel_context;                 (* (prmp,tprmp)...(prm1,tprm1) *)
  nparams: int;                        (* number of parameters *)
  predicates: rel_context;             (* (Qq,Tq)...(Q1,T1) *)
  npredicates: int;                    (* number of predicates *)
  branches: rel_context;               (* branchr...branch1 *)
  nbranches: int;                      (* number of branches *)
  args: rel_context;                   (* (xni, Ti_ni) ... (x1, Ti_1) *)
  nargs: int;                          (* number of arguments *)
  indarg: rel_declaration option;      (* Some (H,I prm1..prmp x1...xni)
                                          if HI is in premisses, None otherwise *)
  concl: types;                        (* conclusion of the scheme *)
  indarg_in_concl: bool;               (* true if HI occurs in the conclusion *)
  farg_in_concl: bool;                 (* true if (f...) occurs in the conclusion *)
}
(* The trivial elimination scheme: every field empty/zero/false.  Used as the
   base record that [compute_elim_sig] progressively refines. *)
let empty_scheme =
  {
    elimc = None;
    elimt = mkProp;
    indref = None;
    params = [];
    nparams = 0;
    predicates = [];
    npredicates = 0;
    branches = [];
    nbranches = 0;
    args = [];
    nargs = 0;
    indarg = None;
    concl = mkProp;
    indarg_in_concl = false;
    farg_in_concl = false;
  }
(* Basic induction tactic: builds a clause for hypothesis [varname] (of type
   [typ], with bindings [lbind]) and for the scheme's eliminator, then runs
   the standard elimination machinery on them.
   Fails with a user error when the scheme has no known constant. *)
let induction_tac with_evars (varname,lbind) typ scheme gl =
  let elimc,lbindelimc =
    match scheme.elimc with | Some x -> x | None -> error "No definition of the principle." in
  let elimt = scheme.elimt in
  (* clause for the hypothesis being inducted on *)
  let indclause = make_clenv_binding gl (mkVar varname,typ) lbind in
  (* clause for the eliminator, cast to its known type *)
  let elimclause =
    make_clenv_binding gl
      (mkCast (elimc,DEFAULTcast, elimt),elimt) lbindelimc in
  elimination_clause_scheme with_evars true elimclause indclause gl
(* [make_base n id] returns [id] unchanged when zero or one name is needed;
   otherwise normalizes [id] so that numeric suffixes (id0, id1, ...) can be
   generated from the bare stem without clashing with existing digits. *)
let make_base n id =
  if n=0 or n=1 then id
  else
    (* This extends the name to accept new digits if it already ends
       with digits. *)
    id_of_string (atompart_of_id (make_ident (string_of_id id) (Some 0)))
(* Builds two different names from an optional inductive type and a
   number, also deals with a list of names to avoid.  If the inductive
   type is None, then hyprecname is IHi where i is a number. *)
(* Builds the base name for induction variables and the recursive-hypothesis
   name prefix ("IH..."), together with a list of identifiers to avoid.
   NOTE(review): the guard computing [avoid] was reconstructed — the original
   "if" line carried an inline comment ("Only one recursive argument") and
   was lost when comments were stripped; verify against the original source. *)
let make_up_names n ind_opt cname =
  let is_hyp = atompart_of_id cname = "H" in
  let base = string_of_id (make_base n cname) in
  let ind_prefix = "IH" in
  let base_ind =
    if is_hyp then
      match ind_opt with
        | None -> id_of_string ind_prefix
        | Some ind_id -> add_prefix ind_prefix (Nametab.id_of_global ind_id)
    else add_prefix ind_prefix cname in
  let hyprecname = make_base n base_ind in
  let avoid =
    if not is_hyp or (n=1 or n=0) (* only one recursive argument *)
    then []
    else
      (* Forbid to use cname, cname0, hyprecname and hyprecname0 *)
      (* in order to get names such as f1, f2, ... *)
      let avoid =
        (make_ident (string_of_id hyprecname) None) ::
        (make_ident (string_of_id hyprecname) (Some 0)) :: [] in
      if atompart_of_id cname <> "H" then
        (make_ident base (Some 0)) :: (make_ident base None) :: avoid
      else avoid in
  id_of_string base, hyprecname, avoid
(* [is_indhyp p n t] tells whether [t], stripped of its leading products, is
   an application headed by a de Bruijn index in ]p; p+n] — i.e. headed by
   one of the [n] predicates bound just above position [p]. *)
let is_indhyp p n t =
  let l, c = decompose_prod t in
  let c,_ = decompose_app c in
  (* account for the binders we just crossed *)
  let p = p + List.length l in
  match kind_of_term c with
    | Rel k when p < k & k <= p + n -> true
    | _ -> false
(* [chop_context n l] splits rel-context [l] after [n] declarations, where
   let-in declarations (middle component [Some _]) are carried over without
   decrementing the count — presumably so only real assumptions are counted;
   TODO(review): confirm against callers.
   @raise Anomaly if [l] is exhausted before the count reaches zero. *)
let chop_context n l =
  let rec chop_aux acc = function
    | n, (_,Some _,_ as h :: t) -> chop_aux (h::acc) (n, t)
    | 0, l2 -> (List.rev acc, l2)
    | n, (h::t) -> chop_aux (h::acc) (n-1, t)
    | _, [] -> anomaly "chop_context"
  in
  chop_aux [] (n,l)
(* Raise a user error reporting an unrecognizable induction scheme; [s]
   optionally names the offending part (e.g. "the branches of"). *)
let error_ind_scheme s =
  let prefix = match s with "" -> "" | _ -> s ^ " " in
  error ("Cannot recognize " ^ prefix ^ "an induction scheme.")
(* [mkEq t x y] builds the homogeneous equality statement [@eq t x y]. *)
let mkEq t x y =
  mkApp (build_coq_eq (), [| t; x; y |])

(* [mkRefl t x] builds the reflexivity proof [@eq_refl t x]. *)
let mkRefl t x =
  mkApp ((build_coq_eq_data ()).refl, [| t; x |])

(* [mkHEq t x u y] builds the heterogeneous statement [@JMeq t x u y]. *)
let mkHEq t x u y =
  mkApp (coq_constant "mkHEq" ["Logic";"JMeq"] "JMeq",
        [| t; x; u; y |])

(* [mkHRefl t x] builds the proof [@JMeq_refl t x]. *)
let mkHRefl t x =
  mkApp (coq_constant "mkHEq" ["Logic";"JMeq"] "JMeq_refl",
        [| t; x |])
(* Commented-out EqdepFacts-based variants, kept from the original source:
     let mkHEq t x u y =
       let ty = new_Type () in
       ... [| ty; mkApp (Lazy.force id, [|ty|]); t; x; u; y |])
     let ty = new_Type () in
     mkApp (coq_constant "mkHEq" ["Logic";"EqdepFacts"] ...,
       [| ty; mkApp (Lazy.force id, [|ty|]); t; x |]) *)
(* [mkCoe a x p px y eq] transports [px : p x] along [eq] using [eq_rect]. *)
let mkCoe a x p px y eq =
  mkApp (Option.get (build_coq_eq_data ()).rect, [| a; x; p; px; y; eq |])
(* [lift_togethern n l] lifts the elements of [l] by increasing amounts from
   right to left: the last element by [n], the previous one by [n+1], etc.,
   so the terms can live together under the corresponding telescope. *)
let lift_togethern n l =
  let l', _ =
    List.fold_right
      (fun x (acc, n) ->
        (lift n x :: acc, succ n))
      l ([], n)
  in l'

(* [lift_together l] is [lift_togethern 0 l]. *)
let lift_together l = lift_togethern 0 l

(* Lift every term of [l] over one extra binder. *)
let lift_list l = List.map (lift 1) l
(* Collect the named variables occurring in [c], consing them onto [vars]
   without duplicates.  When the head of an application is an inductive or a
   constructor, its uniform parameters are skipped and only the remaining
   arguments are scanned. *)
let ids_of_constr vars c =
  let rec aux vars c =
    match kind_of_term c with
    | Var id -> if List.mem id vars then vars else id :: vars
    | App (f, args) ->
        (match kind_of_term f with
        | Construct (ind,_)
        | Ind ind ->
            let (mib,mip) = Global.lookup_inductive ind in
            (* start folding after the uniform parameters *)
            array_fold_left_from mib.Declarations.mind_nparams
              aux vars args
        | _ -> fold_constr aux vars c)
    | _ -> fold_constr aux vars c
  in aux vars c
(* Builds the proof term that generalizes the goal over the abstracted
   arguments [args] of hypothesis [id], re-applying the equalities [eqs] and
   their reflexivity proofs [refls]; [dep] tells whether the conclusion
   depends on the hypothesis itself.
   NOTE(review): the two mid-function comments below were bare prose in the
   corrupted source; they have been re-delimited, the code is unchanged. *)
let make_abstract_generalize gl id concl dep ctx c eqs args refls =
  let meta = Evarutil.new_meta() in
  let term, typ = mkVar id, pf_get_hyp_typ gl id in
  let eqslen = List.length eqs in
  (* In the dependent case, add a heterogeneous equation on the hyp itself. *)
  let abshypeq =
    if dep then
      mkProd (Anonymous, mkHEq (lift 1 c) (mkRel 1) typ term, lift 1 concl)
    else concl
  in
  (* Abstract by the equalities. *)
  let abseqs = it_mkProd_or_LetIn ~init:(lift eqslen abshypeq) (List.map (fun x -> (Anonymous, None, x)) eqs) in
  (* Abstract by the generalized hypothesis, then by the new context. *)
  let genarg = mkProd (Name id, c, abseqs) in
  let genctyp = it_mkProd_or_LetIn ~init:genarg ctx in
  let genc = mkCast (mkMeta meta, DEFAULTcast, genctyp) in
  (* Apply the old arguments giving the proper instantiation of the hyp *)
  let instc = mkApp (genc, Array.of_list args) in
  (* Then apply to the original instanciated hyp. *)
  let instc = mkApp (instc, [| mkVar id |]) in
  (* Apply the reflexivity proofs, and the final JMeq_refl when dependent. *)
  let appeqs = mkApp (instc, Array.of_list refls) in
  let newc = if dep then mkApp (appeqs, [| mkHRefl typ term |]) else appeqs in
  newc
(* Abstract the arguments of the applicative type of hypothesis [id],
   producing the generalized goal term (via [make_abstract_generalize])
   together with the dependency flag, the number of introduced binders, and
   the variables occurring in the arguments.  Returns [None] when the type
   of [id] is not an application.
   NOTE(review): the large comment below was bare prose in the corrupted
   source; it has been re-delimited, the code is unchanged. *)
let abstract_args gl id =
  let c = pf_get_hyp_typ gl id in
  let sigma = project gl in
  let env = pf_env gl in
  let concl = pf_concl gl in
  let dep = dependent (mkVar id) concl in
  let avoid = ref [] in
  let get_id name =
    let id = fresh_id !avoid (match name with Name n -> n | Anonymous -> id_of_string "gen_x") gl in
      avoid := id :: !avoid; id
  in
  match kind_of_term c with
      App (f, args) ->
        (* Build application generalized w.r.t. the argument plus the
           necessary eqs.
           From env |- c : forall G, T and args : G we build
           (T[G'], G' : ctx, env ; G' |- args' : G,
            eqs := G'_i = G_i, refls : G' = G, vars to generalize).
           eqs are not lifted w.r.t. each other yet
           (* will be needed when going to dependent indexes *). *)
        let aux (prod, ctx, ctxenv, c, args, eqs, refls, vars, env) arg =
          let (name, _, ty), arity =
            let rel, c = Reductionops.decomp_n_prod env sigma 1 prod in
              List.hd rel, c
          in
          let argty = pf_type_of gl arg in
          let liftargty = lift (List.length ctx) argty in
          let convertible = Reductionops.is_conv_leq ctxenv sigma liftargty ty in
            match kind_of_term arg with
            | Var _ | Rel _ | Ind _ when convertible ->
                (* convertible atomic argument: keep it as-is *)
                (subst1 arg arity, ctx, ctxenv, mkApp (c, [|arg|]), args, eqs, refls, vars, env)
            | _ ->
                let name = get_id name in
                let decl = (Name name, None, ty) in
                let ctx = decl :: ctx in
                let c' = mkApp (lift 1 c, [|mkRel 1|]) in
                let args = arg :: args in
                let liftarg = lift (List.length ctx) arg in
                (* homogeneous eq when convertible, JMeq otherwise *)
                let eq, refl =
                  if convertible then
                    mkEq (lift 1 ty) (mkRel 1) liftarg, mkRefl argty arg
                  else
                    mkHEq (lift 1 ty) (mkRel 1) liftargty liftarg, mkHRefl argty arg
                in
                let eqs = eq :: lift_list eqs in
                let refls = refl :: refls in
                let vars = ids_of_constr vars arg in
                  (arity, ctx, push_rel decl ctxenv, c', args, eqs, refls, vars, env)
        in
        (* Skip the uniform parameters of an inductive/constructor head. *)
        let f, args =
          match kind_of_term f with
          | Construct (ind,_)
          | Ind ind ->
              let (mib,mip) = Global.lookup_inductive ind in
              let first = mib.Declarations.mind_nparams in
              let pars, args = array_chop first args in
                mkApp (f, pars), args
          | _ -> f, args
        in
        let arity, ctx, ctxenv, c', args, eqs, refls, vars, env =
          Array.fold_left aux (pf_type_of gl f,[],env,f,[],[],[],[],env) args
        in
        let args, refls = List.rev args, List.rev refls in
          Some (make_abstract_generalize gl id concl dep ctx c' eqs args refls,
               dep, succ (List.length ctx), vars)
    | _ -> None
(* Generalize the goal over the arguments of hypothesis [id] (computed by
   [abstract_args]), refine with the produced term and re-introduce the new
   binders; when [generalize_vars] is set, additionally generalize the
   variables occurring in those arguments.  Requires the JMeq library. *)
let abstract_generalize id ?(generalize_vars=true) gl =
  Coqlib.check_required_library ["Coq";"Logic";"JMeq"];
  let oldid = pf_get_new_id id gl in
  let newc = abstract_args gl id in
    match newc with
      | None -> tclIDTAC gl
      | Some (newc, dep, n, vars) ->
          let tac =
            if dep then
              (* dependent case: keep the hypothesis around under [oldid] *)
              tclTHENLIST [refine newc; rename_hyp [(id, oldid)]; tclDO n intro;
                           generalize_dep (mkVar oldid)]
            else
              tclTHENLIST [refine newc; clear [id]; tclDO n intro]
          in
            if generalize_vars then tclTHEN tac
              (tclFIRST [revert (List.rev vars) ;
                         tclMAP (fun id -> tclTRY (generalize_dep (mkVar id))) vars]) gl
            else tac gl
(* Rewrite the conclusion as the application of a lambda abstraction over
   [c] and over the arguments of [c]'s (applied) type, so the dependencies
   appear explicitly in the goal. *)
let dependent_pattern c gl =
  let cty = pf_type_of gl c in
  (* the subterms the conclusion must additionally be abstracted over *)
  let deps =
    match kind_of_term cty with
    | App (f, args) -> Array.to_list args
    | _ -> []
  in
  (* choose a display name for each abstracted subterm *)
  let varname c = match kind_of_term c with
    | Var id -> id
    | _ -> id_of_string (hdchar (pf_env gl) c)
  in
  (* abstract every occurrence of [c] in [ty] under a named lambda *)
  let mklambda ty (c, id, cty) =
    let conclvar = subst_term_occ all_occurrences c ty in
      mkNamedLambda id cty conclvar
  in
  let subst = (c, varname c, cty) :: List.rev_map (fun c -> (c, varname c, pf_type_of gl c)) deps in
  let concllda = List.fold_left mklambda (pf_concl gl) subst in
  let conclapp = applistc concllda (List.rev_map pi1 subst) in
    convert_concl_no_check conclapp DEFAULTcast gl
(* [occur_rel n c] tells whether de Bruijn index [n] occurs in [c]. *)
let occur_rel n c = not (noccurn n c)
(* [list_filter_firsts f l] returns the longest prefix of [l] whose elements
   all satisfy [f], paired with the remaining suffix.
   Fixed: the original accumulated with [acc@[e]], which is quadratic; we
   now cons and reverse once, which is linear and returns the same result. *)
let list_filter_firsts f l =
  let rec aux acc = function
    | e :: l' when f e -> aux (e :: acc) l'
    | rest -> (List.rev acc, rest)
  in
  aux [] l
(* Number of consecutive de Bruijn indices, starting at [n], that occur free
   in [c]: counts [n], [n+1], ... while each is in [free_rels c]. *)
let count_rels_from n c =
  let rels = free_rels c in
  let cpt,rg = ref 0, ref n in
  while Intset.mem !rg rels do
    cpt:= !cpt+1; rg:= !rg+1;
  done;
  !cpt
(* Number of consecutive indices starting at [n] that are NOT free in [c],
   up to the first free index at or above [n].
   @raise Not_found when no index >= [n] occurs free in [c] (the loop would
   otherwise never terminate). *)
let count_nonfree_rels_from n c =
  let rels = free_rels c in
  if Intset.exists (fun x -> x >= n) rels then
    let cpt,rg = ref 0, ref n in
    while not (Intset.mem !rg rels) do
      cpt:= !cpt+1; rg:= !rg+1;
    done;
    !cpt
  else raise Not_found
(* Cuts a list in two parts, the first of size [n]; the list must contain at
   least [n] elements. *)
(* [cut_list n l] splits [l] into its first [n] elements and the rest; a
   non-positive [n] yields [([], l)].
   Fixed: the original accumulated with [acc@[e]], which is quadratic; we
   now cons and reverse once, which is linear and returns the same result.
   @raise Assert_failure if [l] has fewer than [n] elements. *)
let cut_list n l =
  let rec cut_list_aux acc n l =
    if n <= 0 then (List.rev acc, l)
    else match l with
      | [] -> assert false
      | e :: l' -> cut_list_aux (e :: acc) (n - 1) l'
  in
  cut_list_aux [] n l
(* This function splits the products of the induction scheme [elimt] into
   four parts:
   - branches, easily detectable (they are not referred by rels in the subterm)
   - what was found before branches (acc1) that is: parameters and predicates
   - what was found after branches (acc3) that is: args and indarg if any
     if there is no branch, we try to fill in acc3 with args/indargs.
   We also return the conclusion. *)
(* Splits the products of scheme type [elimt] into (params+preds, branches,
   args+indarg, conclusion) as described in the comment above.
   NOTE(review): the mid-function comment below was bare prose in the
   corrupted source; it has been re-delimited, the code is unchanged. *)
let decompose_paramspred_branch_args elimt =
  (* collect hypotheses whose body does not mention the bound variable *)
  let rec cut_noccur elimt acc2 : rel_context * rel_context * types =
    match kind_of_term elimt with
      | Prod(nme,tpe,elimt') ->
          let hd_tpe,_ = decompose_app (snd (decompose_prod_assum tpe)) in
          if not (occur_rel 1 elimt') && isRel hd_tpe
          then cut_noccur elimt' ((nme,None,tpe)::acc2)
          else let acc3,ccl = decompose_prod_assum elimt in acc2 , acc3 , ccl
      | App(_, _) | Rel _ -> acc2 , [] , elimt
      | _ -> error_ind_scheme "" in
  (* collect hypotheses that ARE referred to further down (params/preds) *)
  let rec cut_occur elimt acc1 : rel_context * rel_context * rel_context * types =
    match kind_of_term elimt with
      | Prod(nme,tpe,c) when occur_rel 1 c -> cut_occur c ((nme,None,tpe)::acc1)
      | Prod(nme,tpe,c) -> let acc2,acc3,ccl = cut_noccur elimt [] in acc1,acc2,acc3,ccl
      | App(_, _) | Rel _ -> acc1,[],[],elimt
      | _ -> error_ind_scheme "" in
  let acc1, acc2 , acc3, ccl = cut_occur elimt [] in
  (* Particular treatment when dealing with a dependent empty type elim
     scheme: if there is no branch, then acc1 contains all hyps which is
     wrong (acc1 should contain parameters and predicate only).  This
     happens for an empty type (See for example Empty_set_ind, as False
     would actually be ok).  Then we must find the predicate of the
     conclusion to separate params_pred from args.  We suppose there is
     only one predicate here. *)
  if List.length acc2 <> 0 then acc1, acc2 , acc3, ccl
  else
    let hyps,ccl = decompose_prod_assum elimt in
    let hd_ccl_pred,_ = decompose_app ccl in
    match kind_of_term hd_ccl_pred with
      | Rel i -> let acc3,acc1 = cut_list (i-1) hyps in acc1 , [] , acc3 , ccl
      | _ -> error_ind_scheme ""
(* Replace the head of application [t] by [subst_hd], keeping the spine. *)
let exchange_hd_app subst_hd t =
  let _, args = decompose_app t in
  mkApp (subst_hd, Array.of_list args)
(* Rebuild the full type of an induction principle from a decomposed
   [elim_scheme], re-wrapping (inside-out) the conclusion with indarg, args,
   branches, predicates and finally parameters. *)
let rebuild_elimtype_from_scheme (scheme:elim_scheme): types =
  let hiconcl =
    match scheme.indarg with
      | None -> scheme.concl
      | Some x -> mkProd_or_LetIn x scheme.concl in
  let xihiconcl = it_mkProd_or_LetIn hiconcl scheme.args in
  let brconcl = it_mkProd_or_LetIn xihiconcl scheme.branches in
  let predconcl = it_mkProd_or_LetIn brconcl scheme.predicates in
  let paramconcl = it_mkProd_or_LetIn predconcl scheme.params in
  paramconcl
(* Internal exceptions of the scheme analysis.
   NOTE(review): neither is raised anywhere in this chunk. *)
exception NoLastArg
exception NoLastArgCcl
(* Builds an elim_scheme from its type and calling form (const+binding). We
   first separate branches. We obtain branches, hyps before (params + preds),
   hyps after (args <+ indarg if present>) and conclusion. Then we proceed as
   follows:

   - separate parameters and predicates in params_preds. For that we build:
 forall (x1:Ti_1)(xni:Ti_ni) (HI:I prm1..prmp x1...xni), DUMMY x1...xni HI/farg
                             ^^^^^^^^^^^^^^^^^^^^^^^^^       ^^^^^^^
                                       optional                opt

     Free rels appearing in this term are parameters (branches should not
     appear, and the only predicate would have been Qi but we replaced it by
     DUMMY). We guess this heuristic catches all params. TODO: generalize to
     the case where args are merged with branches (?) and/or where several
     predicates are cited in the conclusion.

   - finish to fill in the elim_scheme: indarg/farg/args and finally indref. *)
(* Builds an [elim_scheme] from the type (and optional constant+bindings) of
   an induction principle, following the decomposition documented above.
   NOTE(review): several lines lost to comment-stripping were reconstructed:
   the [hi_is_ind] and [hi_args_enough] bindings, the trailing [raise Exit],
   the [with Exit ->] handler and the [None -> !res] branch.  Verify against
   the original source. *)
let compute_elim_sig ?elimc elimt =
  let params_preds,branches,args_indargs,conclusion =
    decompose_paramspred_branch_args elimt in
  let ccl = exchange_hd_app (mkVar (id_of_string "__QI_DUMMY__")) conclusion in
  let concl_with_args = it_mkProd_or_LetIn ccl args_indargs in
  let nparams = Intset.cardinal (free_rels concl_with_args) in
  let preds,params = cut_list (List.length params_preds - nparams) params_preds in
  (* A first approximation, further analysis will tweak it. *)
  let res = ref { empty_scheme with
    elimc = elimc; elimt = elimt; concl = conclusion;
    predicates = preds; npredicates = List.length preds;
    branches = branches; nbranches = List.length branches;
    farg_in_concl = isApp ccl && isApp (last_arg ccl);
    params = params; nparams = nparams;
    args = args_indargs; nargs = List.length args_indargs; } in
  try
    (* 1- If (f x...) ends the conclusion, there is no indarg. *)
    if !res.farg_in_concl
    then begin
      res := { !res with
        indarg = None;
        indarg_in_concl = false; farg_in_concl = true };
      raise Exit
    end;
    (* 2- No arguments at all: no indarg either. *)
    if !res.nargs=0 then raise Exit;
    (* 3- Otherwise check whether the last argument is the indarg. *)
    ignore (
      match List.hd args_indargs with
        | hiname,Some _,hi -> error_ind_scheme ""
        | hiname,None,hi ->
            let hi_ind, hi_args = decompose_app hi in
            let hi_is_ind = (* hi has a globalizable head *)
              match kind_of_term hi_ind with
                | Ind (mind,_) -> true
                | Var _ -> true
                | Const _ -> true
                | Construct _ -> true
                | _ -> false in
            let hi_args_enough = (* hi has the right number of arguments *)
              List.length hi_args = List.length params + !res.nargs -1 in
            (* FIXME: these tests are not sufficient. *)
            if not (hi_is_ind & hi_args_enough) then raise Exit
            else
              res := {!res with
                indarg = Some (List.hd !res.args);
                indarg_in_concl = occur_rel 1 ccl;
                args = List.tl !res.args; nargs = !res.nargs - 1;
              };
            raise Exit);
    raise Exit (* exit anyway *)
  with Exit -> (* Ending the procedure: resolve indref if possible. *)
    match !res.indarg with
      | None -> !res
      | Some ( _,Some _,_) -> error_ind_scheme ""
      | Some ( _,None,ind) ->
          let indhd,indargs = decompose_app ind in
          try {!res with indref = Some (global_of_constr indhd) }
          with _ -> error "Cannot find the inductive type of the inductive scheme.";;
(* Check that the elimination scheme has a form similar to the
   elimination schemes built by Coq. Schemes may have the standard
   form computed from an inductive type OR (feb. 2006) a non standard
   form. That is: with no main induction argument and with an optional
   extra final argument of the form (f x y ...) in the conclusion. In
   the non standard case, naming of generated hypos is slightly
   different. *)
(* Computes the branch signature (names to avoid + IndArg/RecArg/OtherArg
   tagging per branch hypothesis) of an elimination scheme, together with the
   analysed [elim_scheme].
   NOTE(review): the "| None ->" and "| Some (_,None,ind) ->" branch headers
   were reconstructed — both carried inline comments and were lost to
   comment-stripping.  Verify against the original source. *)
let compute_elim_signature elimc elimt names_info ind_type_guess =
  let scheme = compute_elim_sig ~elimc:elimc elimt in
  let f,l = decompose_app scheme.concl in
  (* Check that the arguments of Qi are really the xi. *)
  match scheme.indarg with
    | Some (_,Some _,_) -> error "Strange letin, cannot recognize an induction scheme."
    | None ->
        (* Non-standard scheme: no main induction argument. *)
        let is_pred n c =
          let hd = fst (decompose_app c) in match kind_of_term hd with
            | Rel q when n < q & q <= n+scheme.npredicates -> IndArg
            | _ when hd = ind_type_guess & not scheme.farg_in_concl -> RecArg
            | _ -> OtherArg in
        let rec check_branch p c =
          match kind_of_term c with
            | Prod (_,t,c) -> is_pred p t :: check_branch (p+1) c
            | LetIn (_,_,_,c) -> OtherArg :: check_branch (p+1) c
            | _ when is_pred p c = IndArg -> []
            | _ -> raise Exit in
        let rec find_branches p lbrch =
          match lbrch with
            | (_,None,t)::brs ->
                (try
                  let lchck_brch = check_branch p t in
                  let n = List.fold_left
                    (fun n b -> if b=RecArg then n+1 else n) 0 lchck_brch in
                  let recvarname, hyprecname, avoid =
                    make_up_names n scheme.indref names_info in
                  let namesign =
                    List.map (fun b -> (b,if b=IndArg then hyprecname else recvarname))
                      lchck_brch in
                  (avoid,namesign) :: find_branches (p+1) brs
                with Exit-> error_ind_scheme "the branches of")
            | (_,Some _,_)::_ -> error_ind_scheme "the branches of"
            | [] -> [] in
        let indsign = Array.of_list (find_branches 0 (List.rev scheme.branches)) in
        indsign,scheme

    | Some ( _,None,ind) ->
        (* Standard scheme coming from an inductive type. *)
        let indhd,indargs = decompose_app ind in
        let is_pred n c =
          let hd = fst (decompose_app c) in match kind_of_term hd with
            | Rel q when n < q & q <= n+scheme.npredicates -> IndArg
            | _ when hd = indhd -> RecArg
            | _ -> OtherArg in
        let rec check_branch p c = match kind_of_term c with
          | Prod (_,t,c) -> is_pred p t :: check_branch (p+1) c
          | LetIn (_,_,_,c) -> OtherArg :: check_branch (p+1) c
          | _ when is_pred p c = IndArg -> []
          | _ -> raise Exit in
        let rec find_branches p lbrch =
          match lbrch with
            | (_,None,t)::brs ->
                (try
                  let lchck_brch = check_branch p t in
                  let n = List.fold_left
                    (fun n b -> if b=RecArg then n+1 else n) 0 lchck_brch in
                  let recvarname, hyprecname, avoid =
                    make_up_names n scheme.indref names_info in
                  let namesign =
                    List.map (fun b -> (b,if b=IndArg then hyprecname else recvarname))
                      lchck_brch in
                  (avoid,namesign) :: find_branches (p+1) brs
                with Exit -> error_ind_scheme "the branches of")
            | (_,Some _,_)::_ -> error_ind_scheme "the branches of"
            | [] ->
                (* check the conclusion: the predicate applied to the args *)
                let ccl_arg_ok = is_pred (p + scheme.nargs + 1) f = IndArg in
                let ind_is_ok =
                  list_lastn scheme.nargs indargs
                  = extended_rel_list 0 scheme.args in
                if not (ccl_arg_ok & ind_is_ok) then
                  error_ind_scheme "the conclusion of";
                []
        in
        let indsign = Array.of_list (find_branches 0 (List.rev scheme.branches)) in
        indsign,scheme
(* Compute the elimination scheme and its branch signature for induction
   (when [isrec]) or case analysis on hypothesis [hyp0]; when no scheme is
   supplied, look up the standard one from [hyp0]'s inductive type. *)
let find_elim_signature isrec elim hyp0 gl =
  let tmptyp0 = pf_get_hyp_typ gl hyp0 in
  let (elimc,elimt),ind = match elim with
    | None ->
        let mind,_ = pf_reduce_to_quantified_ind gl tmptyp0 in
        let s = elimination_sort_of_goal gl in
        let elimc =
          if isrec then lookup_eliminator mind s
          else pf_apply make_case_gen gl mind s in
        let elimt = pf_type_of gl elimc in
        ((elimc, NoBindings), elimt), mkInd mind
    | Some (elimc,lbind as e) ->
        (* user-supplied scheme: guess the inductive from the hyp's type *)
        let ind_type_guess,_ = decompose_app (snd (decompose_prod tmptyp0)) in
        (e, pf_type_of gl elimc), ind_type_guess in
  let indsign,elim_scheme =
    compute_elim_signature elimc elimt hyp0 ind in
  (indsign,elim_scheme)
(* Instantiate all meta variables of elimclause using lid: the first
   elements of lid are parameters, the others are arguments.  Returns the
   clause obtained. *)
let recolle_clenv scheme lid elimclause gl =
let _,arr = destApp elimclause.templval.rebus in
let lindmv =
Array.map
(fun x ->
match kind_of_term x with
| Meta mv -> mv
| _ -> errorlabstrm "elimination_clause"
(str "The type of the elimination clause is not well-formed."))
arr in
let nmv = Array.length lindmv in
let lidparams,lidargs = cut_list (scheme.nparams) lid in
let nidargs = List.length lidargs in
let clauses_params =
list_map_i (fun i id -> mkVar id , pf_get_hyp_typ gl id , lindmv.(i))
0 lidparams in
let clauses_args =
list_map_i
(fun i id -> mkVar id , pf_get_hyp_typ gl id , lindmv.(nmv-nidargs+i))
0 lidargs in
let clause_indarg =
match scheme.indarg with
| None -> []
| Some (x,_,typx) -> []
in
let clauses = clauses_params@clauses_args@clause_indarg in
List.fold_right
(fun e acc ->
let x,y,i = e in
from_n ( Some 0 ) means that x should be taken " as is " without
trying to unify ( which would lead to trying to apply it to
evars if y is a product ) .
trying to unify (which would lead to trying to apply it to
evars if y is a product). *)
let indclause = mk_clenv_from_n gl (Some 0) (x,y) in
let elimclause' = clenv_fchain i acc indclause in
elimclause')
(List.rev clauses)
elimclause
(* Induction tactic for functional-induction-style schemes: all parameters
   and arguments of the scheme are instantiated from [indvars].
   NOTE(review): the two mid-function comments below were bare prose in the
   corrupted source; they have been re-delimited, the code is unchanged. *)
let induction_tac_felim with_evars indvars scheme gl =
  let elimt = scheme.elimt in
  let elimc,lbindelimc =
    match scheme.elimc with | Some x -> x | None -> error "No definition of the principle." in
  let elimclause =
    make_clenv_binding gl (mkCast (elimc,DEFAULTcast, elimt),elimt) lbindelimc in
  (* elimclause' is built from elimclause by instanciating all args and params. *)
  let elimclause' = recolle_clenv scheme indvars elimclause gl in
  (* one last resolution (useless?) *)
  let resolved = clenv_unique_resolver true elimclause' gl in
  clenv_refine with_evars resolved gl
(* Common driver for the induction tactics: generalizes the hypotheses that
   depend on the induction variables, clears them, runs [induct_tac], then
   discharges the generated branches according to [indsign]/[names].
   NOTE(review): the three comments inside the tactic list below were bare
   prose in the corrupted source; they have been re-delimited, the code is
   unchanged. *)
let apply_induction_in_context isrec hyp0 indsign indvars names induct_tac gl =
  let env = pf_env gl in
  let statlists,lhyp0,indhyps,deps = cook_sign hyp0 indvars env in
  let deps = List.map (fun (id,c,t)-> (id,c,refresh_universes_strict t)) deps in
  let tmpcl = it_mkNamedProd_or_LetIn (pf_concl gl) deps in
  let names = compute_induction_names (Array.length indsign) names in
  let dephyps = List.map (fun (id,_,_) -> id) deps in
  let deps_cstr =
    List.fold_left
      (fun a (id,b,_) -> if b = None then (mkVar id)::a else a) [] deps in
  tclTHENLIST
    [
      (* generalize dependent hyps (but not args) *)
      if deps = [] then tclIDTAC else apply_type tmpcl deps_cstr;
      (* clear dependent hyps *)
      thin dephyps;
      (* side-conditions in elim (resp case) schemes come last (resp first) *)
      (if isrec then tclTHENFIRSTn else tclTHENLASTn)
        (tclTHEN induct_tac (tclTRY (thin (List.rev indhyps))))
        (array_map2
          (induct_discharge statlists lhyp0 (List.rev dephyps)) indsign names)
    ]
    gl
(* Induction over a list [lid] of variables covering ALL parameters and
   arguments of a (typically functional-induction) scheme.
   NOTE(review): the "let realindvars =" binding was reconstructed — its line
   carried an inline comment and was lost to comment-stripping; [realindvars]
   is referenced below, confirming the missing binding.  Verify against the
   original source. *)
let induction_from_context_l isrec with_evars elim_info lid names gl =
  let indsign,scheme = elim_info in
  (* number of arguments, counting farg and indarg when present *)
  let nargs_indarg_farg = scheme.nargs
    + (if scheme.farg_in_concl then 1 else 0)
    + (if scheme.indarg <> None then 1 else 0) in
  if List.length lid <> nargs_indarg_farg + scheme.nparams then
    error "Not the right number of arguments given to induction scheme.";
  (* hyp0 is used for re-introducing hyps at the right place afterward.
     We chose the first element of the list of variables on which to
     induct.  It is probably the first of them appearing in the context. *)
  let hyp0,indvars,lid_params =
    match lid with
      | [] -> anomaly "induction_from_context_l"
      | e::l ->
          let nargs_without_first = nargs_indarg_farg - 1 in
          let ivs,lp = cut_list nargs_without_first l in
          e, ivs, lp in
  (* terms to patternify: we must patternify indarg or farg if present in concl *)
  let lid_in_pattern =
    if scheme.indarg <> None & not scheme.indarg_in_concl then List.rev indvars
    else List.rev (hyp0::indvars) in
  let lidcstr = List.map (fun x -> mkVar x) lid_in_pattern in
  let realindvars = (* hyp0 is a real induction arg if it is not the farg
                       in the conclusion of the induction scheme *)
    List.rev ((if scheme.farg_in_concl then indvars else hyp0::indvars) @ lid_params) in
  let induct_tac = tclTHENLIST [
    reduce (Pattern (List.map inj_with_occurrences lidcstr)) onConcl;
    (* FIXME: test this with dependent and non-dependent principles *)
    induction_tac_felim with_evars realindvars scheme
  ] in
  apply_induction_in_context isrec
    None indsign (hyp0::indvars) names induct_tac gl
(* Induction on hypothesis [hyp0] with a standard scheme: locate the atomic
   parameters in [hyp0]'s type, run the basic induction tactic, clear the
   hypothesis, and reorganize the context around the generated branches. *)
let induction_from_context isrec with_evars elim_info (hyp0,lbind) names
  inhyps gl =
  let indsign,scheme = elim_info in
  (* a standard scheme always carries its inductive reference *)
  let indref = match scheme.indref with | None -> assert false | Some x -> x in
  let tmptyp0 = pf_get_hyp_typ gl hyp0 in
  let typ0 = pf_apply reduce_to_quantified_ref gl indref tmptyp0 in
  let indvars =
    find_atomic_param_of_ind scheme.nparams (snd (decompose_prod typ0)) in
  let induct_tac = tclTHENLIST [
    induction_tac with_evars (hyp0,lbind) typ0 scheme;
    tclTRY (unfold_body hyp0);
    thin [hyp0]
  ] in
  apply_induction_in_context isrec
    (Some (hyp0,inhyps)) indsign indvars names induct_tac gl
exception TryNewInduct of exn
(* Induction on [hyp0] after atomizing the inductive's parameters; when the
   scheme has no main induction argument (non-standard scheme), fall back to
   the more general list-based mechanism.
   NOTE(review): the "if scheme.indarg = None then" guard was reconstructed —
   its line carried an inline comment and was lost to comment-stripping. *)
let induction_with_atomization_of_ind_arg isrec with_evars elim names (hyp0,lbind) inhyps gl =
  let (indsign,scheme as elim_info) = find_elim_signature isrec elim hyp0 gl in
  if scheme.indarg = None then
    (* Not a standard induction scheme (the argument is probably a
       parameter), so try the more general induction mechanism. *)
    induction_from_context_l isrec with_evars elim_info [hyp0] names gl
  else
    let indref = match scheme.indref with | None -> assert false | Some x -> x in
    tclTHEN
      (atomize_param_of_ind (indref,scheme.nparams) hyp0)
      (induction_from_context isrec with_evars elim_info
        (hyp0,lbind) names inhyps) gl
(* Induction on a list of arguments: the scheme is analysed from the first
   one, and the caller must provide exactly as many hypotheses as the scheme
   has parameters and arguments (plus farg/indarg when present). *)
let induction_without_atomization isrec with_evars elim names lid gl =
  let (indsign,scheme as elim_info) =
    find_elim_signature isrec elim (List.hd lid) gl in
  let awaited_nargs =
    scheme.nparams + scheme.nargs
    + (if scheme.farg_in_concl then 1 else 0)
    + (if scheme.indarg <> None then 1 else 0)
  in
  let nlid = List.length lid in
  if nlid <> awaited_nargs
  then error "Not the right number of induction arguments."
  else induction_from_context_l isrec with_evars elim_info lid names gl
(* Turn an anonymous or "fresh"-style equation intro-pattern into a concrete
   identifier — "Heq"^[id] (resp. the given base), made fresh w.r.t. [id] in
   [gl].  Any other pattern is returned unchanged. *)
let enforce_eq_name id gl = function
  | (b,(loc,IntroAnonymous)) ->
      (b,(loc,IntroIdentifier (fresh_id [id] (add_prefix "Heq" id) gl)))
  | (b,(loc,IntroFresh heq_base)) ->
      (b,(loc,IntroIdentifier (fresh_id [id] heq_base gl)))
  | x ->
      x
(* Tells whether a clause restricts the induction to selected occurrences,
   either in the conclusion or in some hypothesis. *)
let has_selected_occurrences = function
  | None -> false
  | Some cls ->
      if cls.concl_occs <> all_occurrences_expr then true
      else
        match cls.onhyps with
        | None -> false
        | Some hyps ->
            List.exists
              (fun ((occs,_),hl) ->
                occs <> all_occurrences_expr || hl <> InHyp)
              hyps
(* Clear the hypotheses not selected by clause [cls] that depend on [id] or
   on the selected hypotheses [inhyps]; errors out if the conclusion depends
   on [id] but is excluded by the clause.
   NOTE(review): the "if selected, do not erase" guard inside [to_erase] was
   reconstructed — its line carried an inline comment and was lost to
   comment-stripping. *)
let clear_unselected_context id inhyps cls gl =
  match cls with
    | None -> tclIDTAC gl
    | Some cls ->
        if occur_var (pf_env gl) id (pf_concl gl) &&
          cls.concl_occs = no_occurrences_expr
        then errorlabstrm ""
            (str "Conclusion must be mentioned: it depends on " ++ pr_id id
             ++ str ".");
        match cls.onhyps with
          | Some hyps ->
              let to_erase (id',_,_ as d) =
                if List.mem id' inhyps then (* if selected, do not erase *) None
                else
                  (* erase if not selected and dependent on id or selected hyps *)
                  let test id = occur_var_in_decl (pf_env gl) id d in
                  if List.exists test (id::inhyps) then Some id' else None in
              let ids = list_map_filter to_erase (pf_hyps gl) in
              thin ids gl
          | None -> tclIDTAC gl
(* General single-argument induction/destruct: if [c] is an unselected local
   variable with no bindings, induct on it directly (after clearing the
   unselected context); otherwise introduce a fresh letin for [c] first.
   NOTE(review): the TODO comment below was bare prose in the corrupted
   source; it has been re-delimited, the code is unchanged. *)
let new_induct_gen isrec with_evars elim (eqname,names) (c,lbind) cls gl =
  let inhyps = match cls with
    | Some {onhyps=Some hyps} -> List.map (fun ((_,id),_) -> id) hyps
    | _ -> [] in
  match kind_of_term c with
    | Var id when not (mem_named_context id (Global.named_context()))
        & lbind = NoBindings & not with_evars & eqname = None
        & not (has_selected_occurrences cls) ->
        tclTHEN
          (clear_unselected_context id inhyps cls)
          (induction_with_atomization_of_ind_arg
            isrec with_evars elim names (id,lbind) inhyps) gl
    | _ ->
        let x = id_of_name_using_hdchar (Global.env()) (pf_type_of gl c)
                  Anonymous in
        let id = fresh_id [] x gl in
        let with_eq = Option.map (fun eq -> (false,eq)) eqname in
        (* TODO: if the inductive has predicate parameters, use JMeq instead
           of eq *)
        tclTHEN
          (letin_tac_gen with_eq (Name id) c None (Option.default allClauses cls,false))
          (induction_with_atomization_of_ind_arg
            isrec with_evars elim names (id,lbind) inhyps) gl
(* Induction on a list of arguments. First make induction arguments
   atomic (using letins), then do induction. The specificity here is
   that all arguments and parameters of the scheme are given
   (mandatory for the moment), so we don't need to deal with
   parameters of the inductive type as in new_induct_gen. *)
(* Induction on a list of terms [lc]: non-variable terms are first
   introduced as letins; then induction is performed on the accumulated
   variables and the introduced letins are unfolded again.
   NOTE(review): the two "(fun gl' -> ...)" closures in the final tactic
   list were reconstructed — their header lines carried inline comments and
   were lost to comment-stripping. *)
let new_induct_gen_l isrec with_evars elim (eqname,names) lc gl =
  if eqname <> None then
    errorlabstrm "" (str "Do not know what to do with " ++
      pr_intro_pattern (Option.get eqname));
  let newlc = ref [] in
  let letids = ref [] in
  let rec atomize_list l gl =
    match l with
      | [] -> tclIDTAC gl
      | c::l' ->
          match kind_of_term c with
            | Var id when not (mem_named_context id (Global.named_context()))
                & not with_evars ->
                let _ = newlc:= id::!newlc in
                atomize_list l' gl
            | _ ->
                let x =
                  id_of_name_using_hdchar (Global.env()) (pf_type_of gl c) Anonymous in
                let id = fresh_id [] x gl in
                (* replace [c] in the remaining terms by the new variable *)
                let newl' = List.map (replace_term c (mkVar id)) l' in
                let _ = newlc:=id::!newlc in
                let _ = letids:=id::!letids in
                tclTHEN
                  (letin_tac None (Name id) c None allClauses)
                  (atomize_list newl') gl in
  tclTHENLIST
    [
      (atomize_list lc);
      (fun gl' -> (* recompute each time to have the new value of newlc *)
        induction_without_atomization isrec with_evars elim names !newlc gl') ;
      (* after induction, try to unfold all letins created by atomize_list
         FIXME: unfold_all does not exist anywhere else? *)
      (fun gl' -> (* recompute each time to have the new value of letids *)
        tclMAP (fun x -> tclTRY (unfold_all x)) !letids gl')
    ]
    gl
(* Induction/destruct over several hypotheses: requires an explicit scheme,
   rejects any 'in' clause, and extracts the raw terms from the arguments.
   NOTE(review): the "match x with" line inside the mapping function was
   reconstructed — it carried an inline comment and was lost to
   comment-stripping. *)
let induct_destruct_l isrec with_evars lc elim names cls =
  (* Several induction hyps: induction scheme is mandatory. *)
  let _ =
    if elim = None
    then
      errorlabstrm "" (strbrk "Induction scheme must be given when several induction hypothesis are given.\n" ++
      str "Example: induction x1 x2 x3 using my_scheme.") in
  let newlc =
    List.map
      (fun x ->
        match x with
          | ElimOnConstr (x,NoBindings) -> x
          | _ -> error "Don't know where to find some argument.")
      lc in
  if cls <> None then
    error
      "'in' clause not supported when several induction hypothesis are given.";
  new_induct_gen_l isrec with_evars elim names newlc
(* Induction either over a term, over a quantified premisse, or over
   several quantified premisses (like with functional induction
   principles).
   TODO: really unify induction with one and induction with several
   args *)
(* Dispatch between single-argument induction (old mechanism, with fallback
   to the new one on failure) and multi-argument induction.
   NOTE(review): the "if List.length lc = 1 then" test and the "with"
   keyword of the handler were reconstructed — both lines carried inline
   comments and were lost to comment-stripping; the "else" branch below
   confirms the missing conditional. *)
let induct_destruct isrec with_evars (lc,elim,names,cls) =
  if List.length lc = 1 then (* induction on one arg: use old mechanism *)
    try
      onInductionArg
        (fun c -> new_induct_gen isrec with_evars elim names c cls)
        (List.hd lc)
    with (* If this fails, try with new mechanism but if it fails too,
            then the exception is the first one. *)
      | x ->
          (try induct_destruct_l isrec with_evars lc elim names cls
           with _ -> raise x)
  else induct_destruct_l isrec with_evars lc elim names cls
(* Run a sequence of induction/destruct requests; only the first one may be
   recursive ([isrec]) — the following ones are plain case analyses. *)
let induction_destruct isrec with_evars = function
  | [] -> tclIDTAC
  | [a] -> induct_destruct isrec with_evars a
  | a::l ->
      tclTHEN
        (induct_destruct isrec with_evars a)
        (tclMAP (induct_destruct false with_evars) l)
(* Entry points: [new_induct] uses the recursive scheme, [new_destruct] the
   case-analysis one. *)
let new_induct ev lc e idl cls = induct_destruct true ev (lc,e,idl,cls)
let new_destruct ev lc e idl cls = induct_destruct false ev (lc,e,idl,cls)
(* This was Induction before 6.3 (induction only in quantified premisses) *)
(* Pre-6.3 style induction: introduce up to the named/numbered premiss,
   then eliminate the last hypothesis introduced. *)
let raw_induct s = tclTHEN (intros_until_id s) (tclLAST_HYP simplest_elim)
let raw_induct_nodep n = tclTHEN (intros_until_n n) (tclLAST_HYP simplest_elim)

let simple_induct_id hyp = raw_induct hyp
let simple_induct_nodep = raw_induct_nodep

(* Dispatch on whether the premiss is designated by name or by position. *)
let simple_induct = function
  | NamedHyp id -> simple_induct_id id
  | AnonHyp n -> simple_induct_nodep n
(* Old-style destruct: like [raw_induct]/[simple_induct] above, but the last
   introduced hypothesis is destructed with [simplest_case] (case analysis)
   instead of being eliminated with an induction principle. *)
let simple_destruct_id s =
  (tclTHEN (intros_until_id s) (tclLAST_HYP simplest_case))
let simple_destruct_nodep n =
  (tclTHEN (intros_until_n n) (tclLAST_HYP simplest_case))

(* Dispatch on named vs. positional designation of the premise. *)
let simple_destruct = function
  | NamedHyp id -> simple_destruct_id id
  | AnonHyp n -> simple_destruct_nodep n
let elim_scheme_type elim t gl =
let clause = mk_clenv_type_of gl elim in
match kind_of_term (last_arg clause.templval.rebus) with
| Meta mv ->
let clause' =
t is inductive , then CUMUL or CONV is irrelevant
clenv_unify true Reduction.CUMUL t
(clenv_meta_type clause mv) clause in
res_pf clause' ~allow_K:true gl
| _ -> anomaly "elim_scheme_type"
(* [elim_type t]: reduce [t] to an atomic inductive type, look up its
   standard eliminator at the sort of the current goal, and apply it via
   [elim_scheme_type]. *)
let elim_type t gl =
  let (ind,t) = pf_reduce_to_atomic_ind gl t in
  let elimc = lookup_eliminator ind (elimination_sort_of_goal gl) in
  elim_scheme_type elimc t gl

(* [case_type t]: same as [elim_type], but builds a case-analysis scheme on
   the fly with [make_case_gen] instead of looking up the recursive
   eliminator. *)
let case_type t gl =
  let (ind,t) = pf_reduce_to_atomic_ind gl t in
  let env = pf_env gl in
  let elimc = make_case_gen env (project gl) ind (elimination_sort_of_goal gl) in
  elim_scheme_type elimc t gl
These elimination tactics are particularly adapted for sequent
calculus . They take a clause as argument , and yield the
elimination rule if the clause is of the form ( Some i d ) and a
suitable introduction rule otherwise . They do not depend on
the name of the eliminated constant , so they can be also
used on ad - hoc disjunctions and conjunctions introduced by
the user .
-- ( 11/8/97 )
HH ( 29/5/99 ) replaces failures by specific error messages
calculus. They take a clause as argument, and yield the
elimination rule if the clause is of the form (Some id) and a
suitable introduction rule otherwise. They do not depend on
the name of the eliminated constant, so they can be also
used on ad-hoc disjunctions and conjunctions introduced by
the user.
-- Eduardo Gimenez (11/8/97)
HH (29/5/99) replaces failures by specific error messages
*)
(* [andE id]: eliminate hypothesis [id] whose (head-normalized) type must be
   a conjunction, then introduce the two components; user error otherwise. *)
let andE id gl =
  let t = pf_get_hyp_typ gl id in
  if is_conjunction (pf_hnf_constr gl t) then
    (tclTHEN (simplest_elim (mkVar id)) (tclDO 2 intro)) gl
  else
    errorlabstrm "andE"
      (str("Tactic andE expects "^(string_of_id id)^" is a conjunction."))

(* [dAnd cls]: clause-directed conjunction tactic -- on the conclusion
   ([None]) split the goal; on a hypothesis, run [andE] on it. *)
let dAnd cls =
  onClauses
    (function
      | None -> simplest_split
      | Some ((_,id),_) -> andE id)
    cls

(* [orE id]: eliminate hypothesis [id] whose (head-normalized) type must be
   a disjunction, introducing the selected component in each subgoal. *)
let orE id gl =
  let t = pf_get_hyp_typ gl id in
  if is_disjunction (pf_hnf_constr gl t) then
    (tclTHEN (simplest_elim (mkVar id)) intro) gl
  else
    errorlabstrm "orE"
      (str("Tactic orE expects "^(string_of_id id)^" is a disjunction."))

(* [dorE b cls]: clause-directed disjunction tactic -- on a hypothesis run
   [orE]; on the conclusion pick [right] when [b] is true, [left]
   otherwise. *)
let dorE b cls =
  onClauses
    (function
      | (Some ((_,id),_)) -> orE id
      | None -> (if b then right else left) NoBindings)
    cls
(* [impE id]: eliminates an implication hypothesis [id : A -> B] by cutting
   [B] and applying [id] to a fresh metavariable standing for the missing
   [A] premise.  Raises a user error when the head-normalized type of [id]
   is not an implication.
   Changes vs. the original: the head-normal form is computed once instead
   of twice, the unused [dom] binding is dropped, and the grammar of the
   error message is fixed ("is a an implication" -> "is an implication"). *)
let impE id gl =
  let t = pf_get_hyp_typ gl id in
  let ht = pf_hnf_constr gl t in
  if is_imp_term ht then
    let (_, _, rng) = destProd ht in
    tclTHENLAST
      (cut_intro rng)
      (apply_term (mkVar id) [mkMeta (new_meta())]) gl
  else
    errorlabstrm "impE"
      (str("Tactic impE expects "^(string_of_id id)^
           " is an implication."))

(* [dImp cls]: clause-directed implication tactic -- on the conclusion
   ([None]) introduce the premise; on a hypothesis run [impE] on it. *)
let dImp cls =
  onClauses
    (function
      | None -> intro
      | Some ((_,id),_) -> impE id)
    cls
(* Forward hook for setoid-aware reflexivity.  The placeholder asserts if
   called before a real implementation is installed via the registration
   function (presumably by the setoid machinery -- not visible in this
   chunk). *)
let setoid_reflexivity = ref (fun _ -> assert false)
let register_setoid_reflexivity f = setoid_reflexivity := f
let reflexivity_red allowred gl =
PL : usual reflexivity do n't perform any reduction when searching
for an equality , but we may need to do some when called back from
inside setoid_reflexivity ( see Optimize cases in setoid_replace.ml ) .
for an equality, but we may need to do some when called back from
inside setoid_reflexivity (see Optimize cases in setoid_replace.ml). *)
let concl = if not allowred then pf_concl gl
else whd_betadeltaiota (pf_env gl) (project gl) (pf_concl gl)
in
match match_with_equality_type concl with
| None -> None
| Some _ -> Some (one_constructor 1 NoBindings)
(* [reflexivity]: try the syntactic rule first ([reflexivity_red] with no
   reduction); if the goal is not recognized as an equality ([None]), fall
   back to the registered setoid reflexivity hook. *)
let reflexivity gl =
  match reflexivity_red false gl with
  | None -> !setoid_reflexivity gl
  | Some tac -> tac gl

(* Convenience composite: introduce premises, then reflexivity. *)
let intros_reflexivity = (tclTHEN intros reflexivity)
This tactic first tries to apply a constant named sym_eq , where eq
is the name of the equality predicate . If this constant is not
defined and the conclusion is a = b , it solves the goal doing ( Cut
b = a;Intro H;Case H;Constructor 1 )
is the name of the equality predicate. If this constant is not
defined and the conclusion is a=b, it solves the goal doing (Cut
b=a;Intro H;Case H;Constructor 1) *)
(* Forward hook for setoid-aware symmetry; asserts if invoked before
   registration (registered elsewhere -- not visible in this chunk). *)
let setoid_symmetry = ref (fun _ -> assert false)
let register_setoid_symmetry f = setoid_symmetry := f
let symmetry_red allowred gl =
PL : usual symmetry do n't perform any reduction when searching
for an equality , but we may need to do some when called back from
inside setoid_reflexivity ( see Optimize cases in setoid_replace.ml ) .
for an equality, but we may need to do some when called back from
inside setoid_reflexivity (see Optimize cases in setoid_replace.ml). *)
let concl = if not allowred then pf_concl gl
else whd_betadeltaiota (pf_env gl) (project gl) (pf_concl gl)
in
match match_with_equation concl with
| None -> None
| Some (hdcncl,args) -> Some (fun gl ->
let hdcncls = string_of_inductive hdcncl in
begin
try
tclTHEN
(convert_concl_no_check concl DEFAULTcast)
(apply (pf_parse_const gl ("sym_"^hdcncls))) gl
with _ ->
let symc = match args with
| [t1; c1; t2; c2] -> mkApp (hdcncl, [| t2; c2; t1; c1 |])
| [typ;c1;c2] -> mkApp (hdcncl, [| typ; c2; c1 |])
| [c1;c2] -> mkApp (hdcncl, [| c2; c1 |])
| _ -> assert false
in
tclTHENFIRST (cut symc)
(tclTHENLIST
[ intro;
tclLAST_HYP simplest_case;
one_constructor 1 NoBindings ])
gl
end)
(* [symmetry]: try the syntactic rule first ([symmetry_red] with no
   reduction); when the goal is not an equation, fall back to the
   registered setoid symmetry hook. *)
let symmetry gl =
  match symmetry_red false gl with
  | None -> !setoid_symmetry gl
  | Some tac -> tac gl
(* Forward hook for setoid-aware symmetry in a hypothesis; asserts if
   invoked before registration. *)
let setoid_symmetry_in = ref (fun _ _ -> assert false)
let register_setoid_symmetry_in f = setoid_symmetry_in := f

(* [symmetry_in id]: turn an (possibly quantified) equational hypothesis
   [id : forall ..., R a b] into [forall ..., R b a].  The swapped statement
   is cut, the original hypothesis is replaced by it, and the cut is closed
   with intros/symmetry/apply/assumption.  Non-equational hypotheses are
   delegated to the setoid hook.  The [args] match handles heterogeneous
   (4 args), homogeneous (3 args) and unapplied-type (2 args) equations. *)
let symmetry_in id gl =
  let ctype = pf_type_of gl (mkVar id) in
  let sign,t = decompose_prod_assum ctype in
  match match_with_equation t with
  | None -> !setoid_symmetry_in id gl
  | Some (hdcncl,args) ->
      let symccl = match args with
        | [t1; c1; t2; c2] -> mkApp (hdcncl, [| t2; c2; t1; c1 |])
        | [typ;c1;c2] -> mkApp (hdcncl, [| typ; c2; c1 |])
        | [c1;c2] -> mkApp (hdcncl, [| c2; c1 |])
        | _ -> assert false in
      tclTHENS (cut (it_mkProd_or_LetIn symccl sign))
        [ intro_replacing id;
          tclTHENLIST [ intros; symmetry; apply (mkVar id); assumption ] ]
        gl

(* Clause-directed symmetry: on the conclusion intro then [symmetry]; on a
   hypothesis, [symmetry_in]. *)
let intros_symmetry =
  onClauses
    (function
      | None -> tclTHEN intros symmetry
      | Some ((_,id),_) -> symmetry_in id)
This tactic first tries to apply a constant named trans_eq , where eq
is the name of the equality predicate . If this constant is not
defined and the conclusion is a = b , it solves the goal doing
Cut x1 = x2 ;
[ Cut x2 = x3 ; [ Intros e1 e2 ; Case e2;Assumption
| Idtac ]
| Idtac ]
--Eduardo ( 19/8/97 )
is the name of the equality predicate. If this constant is not
defined and the conclusion is a=b, it solves the goal doing
Cut x1=x2;
[Cut x2=x3; [Intros e1 e2; Case e2;Assumption
| Idtac]
| Idtac]
--Eduardo (19/8/97)
*)
(* Forward hook for setoid-aware transitivity; asserts if invoked before
   registration (registered elsewhere -- not visible in this chunk). *)
let setoid_transitivity = ref (fun _ _ -> assert false)
let register_setoid_transitivity f = setoid_transitivity := f
let transitivity_red allowred t gl =
PL : usual transitivity do n't perform any reduction when searching
for an equality , but we may need to do some when called back from
inside setoid_reflexivity ( see Optimize cases in setoid_replace.ml ) .
for an equality, but we may need to do some when called back from
inside setoid_reflexivity (see Optimize cases in setoid_replace.ml). *)
let concl = if not allowred then pf_concl gl
else whd_betadeltaiota (pf_env gl) (project gl) (pf_concl gl)
in
match match_with_equation concl with
| None -> None
| Some (hdcncl,args) -> Some (fun gl ->
let hdcncls = string_of_inductive hdcncl in
begin
try
apply_list [(pf_parse_const gl ("trans_"^hdcncls));t] gl
with _ ->
let eq1, eq2 = match args with
| [typ1;c1;typ2;c2] -> let typt = pf_type_of gl t in
( mkApp(hdcncl, [| typ1; c1; typt ;t |]),
mkApp(hdcncl, [| typt; t; typ2; c2 |]) )
| [typ;c1;c2] ->
( mkApp (hdcncl, [| typ; c1; t |]),
mkApp (hdcncl, [| typ; t; c2 |]) )
| [c1;c2] ->
( mkApp (hdcncl, [| c1; t|]),
mkApp (hdcncl, [| t; c2 |]) )
| _ -> assert false
in
tclTHENFIRST (cut eq2)
(tclTHENFIRST (cut eq1)
(tclTHENLIST
[ tclDO 2 intro;
tclLAST_HYP simplest_case;
assumption ])) gl
end)
(* [transitivity t]: try the syntactic rule first ([transitivity_red] with
   no reduction, using [t] as the middle term); when the goal is not an
   equation, fall back to the registered setoid transitivity hook. *)
let transitivity t gl =
  match transitivity_red false t gl with
  | None -> !setoid_transitivity t gl
  | Some tac -> tac gl

(* Convenience composite: introduce premises, then transitivity via [n]. *)
let intros_transitivity n = tclTHEN intros (transitivity n)
(* [interpretable_as_section_decl d1 d2]: can declaration [d2] be
   interpreted as the section declaration [d1]?  (At the call sites below,
   [d1] comes from the global/section context and [d2] from the goal.)
   - a definition (body [Some _]) never matches an assumption ([None]);
   - two definitions match when bodies and types are syntactically equal
     ([eq_constr]);
   - an assumption matches any declaration with a syntactically equal
     type. *)
let interpretable_as_section_decl d1 d2 = match d1,d2 with
  | (_,Some _,_), (_,None,_) -> false
  | (_,Some b1,t1), (_,Some b2,t2) -> eq_constr b1 b2 & eq_constr t1 t2
  | (_,None,t1), (_,_,t2) -> eq_constr t1 t2
let abstract_subproof name tac gl =
let current_sign = Global.named_context()
and global_sign = pf_hyps gl in
let sign,secsign =
List.fold_right
(fun (id,_,_ as d) (s1,s2) ->
if mem_named_context id current_sign &
interpretable_as_section_decl (Sign.lookup_named id current_sign) d
then (s1,push_named_context_val d s2)
else (add_named_decl d s1,s2))
global_sign (empty_named_context,empty_named_context_val) in
let na = next_global_ident_away false name (pf_ids_of_hyps gl) in
let concl = it_mkNamedProd_or_LetIn (pf_concl gl) sign in
if occur_existential concl then
error "\"abstract\" cannot handle existentials.";
let lemme =
start_proof na (Global, Proof Lemma) secsign concl (fun _ _ -> ());
let _,(const,_,kind,_) =
try
by (tclCOMPLETE (tclTHEN (tclDO (List.length sign) intro) tac));
let r = cook_proof ignore in
delete_current_proof (); r
with
e ->
(delete_current_proof(); raise e)
Faudrait
let cd = Entries.DefinitionEntry const in
let con = Declare.declare_internal_constant na (cd,IsProof Lemma) in
constr_of_global (ConstRef con)
in
exact_no_check
(applist (lemme,
List.rev (Array.to_list (instance_from_named_context sign))))
gl
(* [tclABSTRACT name_op tac]: prove the current goal by running [tac] in an
   auxiliary lemma (see [abstract_subproof] above) and applying that lemma.
   The lemma is named [name_op] when given, otherwise
   "<current-proof-name>_subproof". *)
let tclABSTRACT name_op tac gl =
  let s = match name_op with
    | Some s -> s
    | None -> add_suffix (get_current_proof_name ()) "_subproof"
  in
  abstract_subproof s tac gl
(* [admit_as_an_axiom]: closes the current goal by declaring a fresh global
   axiom "<current-proof-name>_admitted<n>" whose statement is the goal
   generalized over the hypotheses that are NOT interpretable as section
   declarations, then applying that axiom to the corresponding context
   instance.  Fails when the generalized statement still contains
   existential variables ("admit" cannot quantify over those). *)
let admit_as_an_axiom gl =
  let current_sign = Global.named_context()
  and global_sign = pf_hyps gl in
  (* Split the goal hypotheses: [sign] must be generalized in the axiom
     statement, [secsign] is already available as section context. *)
  let sign,secsign =
    List.fold_right
      (fun (id,_,_ as d) (s1,s2) ->
         if mem_named_context id current_sign &
           interpretable_as_section_decl (Sign.lookup_named id current_sign) d
         then (s1,add_named_decl d s2)
         else (add_named_decl d s1,s2))
      global_sign (empty_named_context,empty_named_context) in
  let name = add_suffix (get_current_proof_name ()) "_admitted" in
  (* Pick a name not clashing with the goal's hypotheses. *)
  let na = next_global_ident_away false name (pf_ids_of_hyps gl) in
  let concl = it_mkNamedProd_or_LetIn (pf_concl gl) sign in
  if occur_existential concl then error"\"admit\" cannot handle existentials.";
  let axiom =
    (* Declare the statement as a logical assumption (Parameter). *)
    let cd = Entries.ParameterEntry (concl,false) in
    let con = Declare.declare_internal_constant na (cd,IsAssumption Logical) in
    constr_of_global (ConstRef con)
  in
  (* Close the goal by instantiating the axiom with the generalized
     hypotheses (no type-check needed: it holds by construction). *)
  exact_no_check
    (applist (axiom,
              List.rev (Array.to_list (instance_from_named_context sign))))
    gl
(* [unify ?state x y]: tactic that unifies terms [x] and [y] (conversion
   check, [Reduction.CONV]) with delta/conversion controlled by [state]
   (all constants transparent by default), recording the resulting evar
   instantiations in the goal via [tclEVARS].  Any exception raised during
   unification is deliberately swallowed and turned into the tactic
   failure "Not unifiable". *)
let unify ?(state=full_transparent_state) x y gl =
  try
    let flags =
      {default_unify_flags with
        modulo_delta = state;
        modulo_conv_on_closed_terms = Some state}
    in
    let evd = w_unify false (pf_env gl) Reduction.CONV
      ~flags x y (Evd.create_evar_defs (project gl))
    in tclEVARS (Evd.evars_of evd) gl
  with _ -> tclFAIL 0 (str"Not unifiable") gl
|
82cae85fcc4280b7ff9723ae22659dad01af65e567d89fd36f939a95ad0bfdb9 | maybevoid/casimir | Free.hs | # OPTIONS_GHC -fno - warn - orphans #
module Casimir.Ops.Io.Free
where
import Casimir.Ops.Io.Base
import Casimir.Free
-- | Co-operation functor for the IO effect: pairs an 'IO' action with a
-- continuation consuming its result.  The result type @x@ is
-- existentially quantified (GADT), so it is hidden from the carrier @a@.
data IoCoOp a where
  LiftIoOp :: forall x a . IO x -> (x -> a) -> IoCoOp a

-- Associate the IO effect signature with its co-operation functor.
instance EffCoOp IoEff where
  type CoOperation IoEff = IoCoOp

-- Functor maps over the carrier by post-composing onto the continuation;
-- the wrapped IO action itself is untouched.
instance Functor IoCoOp where
  fmap
    :: forall a b
     . (a -> b)
    -> IoCoOp a
    -> IoCoOp b
  fmap f (LiftIoOp io cont) = LiftIoOp io (f . cont)

-- Build the free operations record: lifting an IO action wraps it in
-- 'LiftIoOp' with the identity continuation.
instance FreeOps IoEff where
  mkFreeOps liftCoOp = IoOps {
    liftIoOp = \io -> liftCoOp $ LiftIoOp io id
  }
| null | https://raw.githubusercontent.com/maybevoid/casimir/ebbfa403739d6f258e6ac6793549006a0e8bff42/casimir/src/lib/Casimir/Ops/Io/Free.hs | haskell | # OPTIONS_GHC -fno - warn - orphans #
module Casimir.Ops.Io.Free
where
import Casimir.Ops.Io.Base
import Casimir.Free
data IoCoOp a where
LiftIoOp :: forall x a . IO x -> (x -> a) -> IoCoOp a
instance EffCoOp IoEff where
type CoOperation IoEff = IoCoOp
instance Functor IoCoOp where
fmap
:: forall a b
. (a -> b)
-> IoCoOp a
-> IoCoOp b
fmap f (LiftIoOp io cont) = LiftIoOp io (f . cont)
instance FreeOps IoEff where
mkFreeOps liftCoOp = IoOps {
liftIoOp = \io -> liftCoOp $ LiftIoOp io id
}
| |
75879e69339a3c4472a61d40e6d1edcfb96955b02011090834a7491ea3e1c148 | OtpChatBot/Ybot | ybot_notification.erl | %%%-----------------------------------------------------------------------------
%%% @author 0xAX <>
%%% @doc
Ybot notification manager
%%% @end
%%%-----------------------------------------------------------------------------
-module(ybot_notification).
-behaviour(gen_server).

%% API
-export([start_link/2]).

%% gen_server callbacks
-export([init/1,
         handle_call/3,
         handle_cast/2,
         handle_info/2,
         terminate/2,
         code_change/3]).

%% No server state is kept after start-up.
-record(state, {}).

%% Start the notification manager as a locally registered gen_server.
%% Notification: list of notification specs to start handlers for;
%% NotificationsDir: directory passed through to each handler process.
start_link(Notification, NotificationsDir) ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [Notification, NotificationsDir], []).

%% Defer the actual start-up work to an asynchronous cast so that init/1
%% returns immediately and does not block the caller/supervisor.
init([Notification, NotificationsDir]) ->
    % init notification manager
    gen_server:cast(?MODULE, {init, Notification, NotificationsDir}),
    % return
    {ok, #state{}}.

%% No synchronous calls are supported.
handle_call(_Request, _From, State) ->
    {reply, ignored, State}.

%% One-shot initialization: start one handler process per notification
%% under ybot_notification_sup.
handle_cast({init, Notification, NotificationsDir}, State) ->
    % traverse all notifications and start notification handler
    lists:foreach(fun(Not) ->
                      % start notification handler
                      ybot_notification_sup:start_notification_proc(Not, NotificationsDir)
                  end,
                  Notification),
    % return
    {noreply, State};

%% Ignore any other cast.
handle_cast(_Msg, State) ->
    {noreply, State}.

%% Ignore out-of-band messages.
handle_info(_Info, State) ->
    {noreply, State}.

terminate(_Reason, _State) ->
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
Internal functions | null | https://raw.githubusercontent.com/OtpChatBot/Ybot/5ce05fea0eb9001d1c0ff89702729f4c80743872/src/ybot_notification.erl | erlang | -----------------------------------------------------------------------------
@author 0xAX <>
@doc
@end
-----------------------------------------------------------------------------
gen_server callbacks
init notification manager
return
traverse all notifications and start notification handler
start notification handler
return | Ybot notification manager
-module(ybot_notification).
-behaviour(gen_server).
-export([start_link/2]).
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-record(state, {}).
start_link(Notification, NotificationsDir) ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [Notification, NotificationsDir], []).
init([Notification, NotificationsDir]) ->
gen_server:cast(?MODULE, {init, Notification, NotificationsDir}),
{ok, #state{}}.
handle_call(_Request, _From, State) ->
{reply, ignored, State}.
handle_cast({init, Notification, NotificationsDir}, State) ->
lists:foreach(fun(Not) ->
ybot_notification_sup:start_notification_proc(Not, NotificationsDir)
end,
Notification),
{noreply, State};
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions |
63b77dacf46d5960acc5a74c34d306e022d8dff40302cfb7b6585c42c98a1b35 | metosin/reitit | core.cljc | (ns reitit.core
(:require [reitit.exception :as exception]
[reitit.impl :as impl]
[reitit.trie :as trie]))
;;
;; Expand
;;
;; Protocol for expanding a shorthand route argument (keyword, map,
;; function, nil -- see the extend-protocol below) into route data.
(defprotocol Expand
  (expand [this opts]))
;; Default expansions (both Clojure and ClojureScript types):
;;   keyword  -> {:name kw}
;;   map      -> itself (already route data)
;;   function -> {:handler f}
;;   nil      -> nil (no data)
(extend-protocol Expand

  #?(:clj clojure.lang.Keyword
     :cljs cljs.core.Keyword)
  (expand [this _] {:name this})

  #?(:clj clojure.lang.PersistentArrayMap
     :cljs cljs.core.PersistentArrayMap)
  (expand [this _] this)

  #?(:clj clojure.lang.PersistentHashMap
     :cljs cljs.core.PersistentHashMap)
  (expand [this _] this)

  #?(:clj clojure.lang.Fn
     :cljs function)
  (expand [this _] {:handler this})

  nil
  (expand [_ _]))
;;
;; Router
;;
;; Router abstraction implemented by all router flavours below:
;; introspection (name, routes, compiled routes, options, route names)
;; plus path-based and (reverse) name-based matching.
(defprotocol Router
  (router-name [this])
  (routes [this])
  (compiled-routes [this])
  (options [this])
  (route-names [this])
  (match-by-path [this path])
  (match-by-name [this name] [this name path-params]))
;; True when x implements the Router protocol.
(defn router? [x]
  (satisfies? Router x))

;; A complete match (path fully resolved) and a partial match (some
;; required path parameters were missing when resolving by name).
(defrecord Match [template data result path-params path])
(defrecord PartialMatch [template data result path-params required])

;; True when x is a PartialMatch record.
(defn partial-match? [x]
  (instance? PartialMatch x))
(defn match-by-name!
  "Like [[match-by-name]], but throws (via
  `impl/throw-on-missing-path-params`) when only a partial match could be
  produced because required path parameters are missing. Returns nil when
  no route matches `name`."
  ([this name]
   (match-by-name! this name nil))
  ([this name path-params]
   (when-let [match (match-by-name this name path-params)]
     (if (partial-match? match)
       (impl/throw-on-missing-path-params
        (:template match) (:required match) path-params)
       match))))
(defn match->path
  "Returns the path of a Match, optionally appending a query string built
  from `query-params`. Returns nil when `match` is nil or has no path."
  ([match]
   (match->path match nil))
  ([match query-params]
   (when-let [path (:path match)]
     (if (seq query-params)
       (str path "?" (impl/query-string query-params))
       path))))
;;
;; Different routers
;;
(defn linear-router
  "Creates a linear-router from resolved routes and optional
  expanded options. See [[router]] for available options, plus the following:

  | key | description |
  | -----------------------------|-------------|
  | `:reitit.trie/trie-compiler` | Optional trie-compiler.
  | `:reitit.trie/parameters` | Optional function to create empty map(-like) path parameters value from sequence of keys."
  ([compiled-routes]
   (linear-router compiled-routes {}))
  ([compiled-routes opts]
   (let [compiler (::trie/trie-compiler opts (trie/compiler))
         names (impl/find-names compiled-routes opts)
         ;; pl: one compiled single-route trie per route, kept in insertion
         ;; order (routes are tried linearly, hence the name).
         ;; nl: route-name -> (fn [path-params] Match-or-PartialMatch).
         [pl nl] (reduce
                  (fn [[pl nl] [p {:keys [name] :as data} result]]
                    (let [{:keys [path-params] :as route} (impl/parse p opts)
                          ;; reverse-routing fn: a PartialMatch is produced
                          ;; when required path params are missing.
                          f #(if-let [path (impl/path-for route %)]
                               (->Match p data result (impl/url-decode-coll %) path)
                               (->PartialMatch p data result (impl/url-decode-coll %) path-params))]
                      [(conj pl (-> (trie/insert nil p (->Match p data result nil nil) opts) (trie/compile)))
                       (if name (assoc nl name f) nl)]))
                  [[] {}]
                  compiled-routes)
         lookup (impl/fast-map nl)
         matcher (trie/linear-matcher compiler pl true)
         match-by-path (trie/path-matcher matcher compiler)
         routes (impl/uncompile-routes compiled-routes)]
     ^{:type ::router}
     (reify
       Router
       (router-name [_] :linear-router)
       (routes [_] routes)
       (compiled-routes [_] compiled-routes)
       (options [_] opts)
       (route-names [_] names)
       (match-by-path [_ path]
         ;; the stored Match carries no params/path -> fill them in here
         (if-let [match (match-by-path path)]
           (-> (:data match)
               (assoc :path-params (:params match))
               (assoc :path path))))
       (match-by-name [_ name]
         (if-let [match (impl/fast-get lookup name)]
           (match nil)))
       (match-by-name [_ name path-params]
         (if-let [match (impl/fast-get lookup name)]
           (match (impl/path-params path-params))))))))
(defn lookup-router
  "Creates a lookup-router from resolved routes and optional
  expanded options. See [[router]] for available options."
  ([compiled-routes]
   (lookup-router compiled-routes {}))
  ([compiled-routes opts]
   ;; Wildcard routes cannot be matched by exact path lookup, so this
   ;; router refuses them up front.
   (when-let [wilds (seq (filter (impl/->wild-route? opts) compiled-routes))]
     (exception/fail!
      (str "can't create :lookup-router with wildcard routes: " wilds)
      {:wilds wilds
       :routes compiled-routes}))
   (let [names (impl/find-names compiled-routes opts)
         ;; pl: exact path -> Match, nl: route-name -> (fn [params] Match)
         [pl nl] (reduce
                  (fn [[pl nl] [p {:keys [name] :as data} result]]
                    [(assoc pl p (->Match p data result {} p))
                     (if name
                       (assoc nl name #(->Match p data result % p))
                       nl)])
                  [{} {}]
                  compiled-routes)
         data (impl/fast-map pl)
         lookup (impl/fast-map nl)
         routes (impl/uncompile-routes compiled-routes)]
     ^{:type ::router}
     (reify Router
       (router-name [_] :lookup-router)
       (routes [_] routes)
       (compiled-routes [_] compiled-routes)
       (options [_] opts)
       (route-names [_] names)
       (match-by-path [_ path]
         ;; exact path lookup -- no path parameters possible
         (impl/fast-get data path))
       (match-by-name [_ name]
         (if-let [match (impl/fast-get lookup name)]
           (match nil)))
       (match-by-name [_ name path-params]
         (if-let [match (impl/fast-get lookup name)]
           (match (impl/path-params path-params))))))))
(defn trie-router
  "Creates a special prefix-tree router from resolved routes and optional
  expanded options. See [[router]] for available options, plus the following:

  | key | description |
  | -----------------------------|-------------|
  | `:reitit.trie/trie-compiler` | Optional trie-compiler.
  | `:reitit.trie/parameters` | Optional function to create empty map(-like) path parameters value from sequence of keys."
  ([compiled-routes]
   (trie-router compiled-routes {}))
  ([compiled-routes opts]
   (let [compiler (::trie/trie-compiler opts (trie/compiler))
         names (impl/find-names compiled-routes opts)
         ;; pl: single shared trie holding all routes.
         ;; nl: route-name -> (fn [path-params] Match-or-PartialMatch).
         [pl nl] (reduce
                  (fn [[pl nl] [p {:keys [name] :as data} result]]
                    (let [{:keys [path-params] :as route} (impl/parse p opts)
                          f #(if-let [path (impl/path-for route %)]
                               (->Match p data result (impl/url-decode-coll %) path)
                               (->PartialMatch p data result (impl/url-decode-coll %) path-params))]
                      [(trie/insert pl p (->Match p data result nil nil) opts)
                       (if name (assoc nl name f) nl)]))
                  [nil {}]
                  compiled-routes)
         matcher (trie/compile pl compiler)
         ;; matcher is nil when there are no routes at all
         match-by-path (if matcher (trie/path-matcher matcher compiler))
         lookup (impl/fast-map nl)
         routes (impl/uncompile-routes compiled-routes)]
     ^{:type ::router}
     (reify
       Router
       (router-name [_] :trie-router)
       (routes [_] routes)
       (compiled-routes [_] compiled-routes)
       (options [_] opts)
       (route-names [_] names)
       (match-by-path [_ path]
         (if-let [match (and match-by-path (match-by-path path))]
           (-> (:data match)
               (assoc :path-params (:params match))
               (assoc :path path))))
       (match-by-name [_ name]
         (if-let [match (impl/fast-get lookup name)]
           (match nil)))
       (match-by-name [_ name path-params]
         (if-let [match (impl/fast-get lookup name)]
           (match (impl/path-params path-params))))))))
(defn single-static-path-router
  "Creates a fast router of 1 static route(s) and optional
  expanded options. See [[router]] for available options."
  ([compiled-routes]
   (single-static-path-router compiled-routes {}))
  ([compiled-routes opts]
   ;; Hard precondition: exactly one route, and it must be static.
   (when (or (not= (count compiled-routes) 1) (some (impl/->wild-route? opts) compiled-routes))
     (exception/fail!
      (str ":single-static-path-router requires exactly 1 static route: " compiled-routes)
      {:routes compiled-routes}))
   (let [[n :as names] (impl/find-names compiled-routes opts)
         [[p data result]] compiled-routes
         ;; On the JVM the path is interned so `.equals` below can
         ;; short-circuit on reference identity for interned inputs.
         p #?(:clj (.intern ^String p) :cljs p)
         ;; The single Match is pre-built once; matching is just a
         ;; string/name comparison.
         match (->Match p data result {} p)
         routes (impl/uncompile-routes compiled-routes)]
     ^{:type ::router}
     (reify Router
       (router-name [_] :single-static-path-router)
       (routes [_] routes)
       (compiled-routes [_] compiled-routes)
       (options [_] opts)
       (route-names [_] names)
       (match-by-path [_ path]
         (if (#?(:clj .equals :cljs =) p path) match))
       (match-by-name [_ name]
         (if (= n name) match))
       (match-by-name [_ name path-params]
         (if (= n name) (impl/fast-assoc match :path-params (impl/path-params path-params))))))))
(defn mixed-router
  "Creates two routers: [[lookup-router]] or [[single-static-path-router]] for
  static routes and [[segment-router]] for wildcard routes. All
  routes should be non-conflicting. Takes resolved routes and optional
  expanded options. See [[router]] for options."
  ([compiled-routes]
   (mixed-router compiled-routes {}))
  ([compiled-routes opts]
   ;; Partition routes into wildcard vs. static and delegate each group
   ;; to the router best suited for it; static routes are tried first.
   (let [{wild true, lookup false} (group-by (impl/->wild-route? opts) compiled-routes)
         ->static-router (if (= 1 (count lookup)) single-static-path-router lookup-router)
         wildcard-router (trie-router wild opts)
         static-router (->static-router lookup opts)
         names (impl/find-names compiled-routes opts)
         routes (impl/uncompile-routes compiled-routes)]
     ^{:type ::router}
     (reify Router
       (router-name [_] :mixed-router)
       (routes [_] routes)
       (compiled-routes [_] compiled-routes)
       (options [_] opts)
       (route-names [_] names)
       (match-by-path [_ path]
         (or (match-by-path static-router path)
             (match-by-path wildcard-router path)))
       (match-by-name [_ name]
         (or (match-by-name static-router name)
             (match-by-name wildcard-router name)))
       (match-by-name [_ name path-params]
         (or (match-by-name static-router name path-params)
             (match-by-name wildcard-router name path-params)))))))
(defn quarantine-router
  "Creates two routers: [[mixed-router]] for non-conflicting routes
  and [[linear-router]] for conflicting routes. Takes resolved routes
  and optional expanded options. See [[router]] for options."
  ([compiled-routes]
   (quarantine-router compiled-routes {}))
  ([compiled-routes opts]
   ;; Routes whose paths conflict are "quarantined" into a linear router
   ;; (ordered, first match wins); the rest use the faster mixed router.
   ;; The conflict analysis may be precomputed and passed via opts.
   (let [conflicting-paths (impl/conflicting-paths (or (::path-conflicting opts) (impl/path-conflicting-routes compiled-routes opts)))
         conflicting? #(contains? conflicting-paths (first %))
         {conflicting true, non-conflicting false} (group-by conflicting? compiled-routes)
         linear-router (linear-router conflicting opts)
         mixed-router (mixed-router non-conflicting opts)
         names (impl/find-names compiled-routes opts)
         routes (impl/uncompile-routes compiled-routes)]
     ^{:type ::router}
     (reify Router
       (router-name [_] :quarantine-router)
       (routes [_] routes)
       (compiled-routes [_] compiled-routes)
       (options [_] opts)
       (route-names [_] names)
       (match-by-path [_ path]
         (or (match-by-path mixed-router path)
             (match-by-path linear-router path)))
       (match-by-name [_ name]
         (or (match-by-name mixed-router name)
             (match-by-name linear-router name)))
       (match-by-name [_ name path-params]
         (or (match-by-name mixed-router name path-params)
             (match-by-name linear-router name path-params)))))))
;;
Creating Routers
;;
;; Default values for the [[router]] options map; user-supplied options
;; are merged on top of these. :conflicts throws by default.
(defn ^:no-doc default-router-options []
  {:lookup (fn lookup [[_ {:keys [name]}] _] (if name #{name}))
   :expand expand
   :coerce (fn coerce [route _] route)
   :compile (fn compile [[_ {:keys [handler]}] _] handler)
   :exception exception/exception
   :conflicts (fn throw! [conflicts] (exception/fail! :path-conflicts conflicts))})
(defn router
  "Create a [[Router]] from raw route data and optionally an options map.
  Selects implementation based on route details. The following options
  are available:

  | key | description
  | -------------|-------------
  | `:path` | Base-path for routes
  | `:routes` | Initial resolved routes (default `[]`)
  | `:data` | Initial route data (default `{}`)
  | `:spec` | clojure.spec definition for a route data, see `reitit.spec` on how to use this
  | `:syntax` | Path-parameter syntax as keyword or set of keywords (default #{:bracket :colon})
  | `:expand` | Function of `arg opts => data` to expand route arg to route data (default `reitit.core/expand`)
  | `:coerce` | Function of `route opts => route` to coerce resolved route, can throw or return `nil`
  | `:compile` | Function of `route opts => result` to compile a route handler
  | `:validate` | Function of `routes opts => ()` to validate route (data) via side-effects
  | `:conflicts` | Function of `{route #{route}} => ()` to handle conflicting routes
  | `:exception` | Function of `Exception => Exception ` to handle creation time exceptions (default `reitit.exception/exception`)
  | `:router` | Function of `routes opts => router` to override the actual router implementation"
  ([raw-routes]
   (router raw-routes {}))
  ([raw-routes opts]
   (let [{:keys [router conflicts] :as opts} (merge (default-router-options) opts)]
     (try
       (let [routes (impl/resolve-routes raw-routes opts)
             ;; conflict analysis can be skipped only when a custom router
             ;; is forced AND no conflict handler is configured
             path-conflicting (if-not (and router (not conflicts)) (impl/path-conflicting-routes routes opts))
             name-conflicting (impl/name-conflicting-routes routes)
             compiled-routes (impl/compile-routes routes opts)
             wilds? (boolean (some (impl/->wild-route? opts) compiled-routes))
             all-wilds? (every? (impl/->wild-route? opts) compiled-routes)
             ;; pick the cheapest implementation that can handle the routes
             router (cond
                      router router
                      (and (= 1 (count compiled-routes)) (not wilds?)) single-static-path-router
                      path-conflicting quarantine-router
                      (not wilds?) lookup-router
                      all-wilds? trie-router
                      :else mixed-router)]
         ;; report conflicts and run optional validation before building
         (when-let [conflict-report (and conflicts (impl/unresolved-conflicts path-conflicting))]
           (conflicts conflict-report))
         (when name-conflicting
           (exception/fail! :name-conflicts name-conflicting))
         (when-let [validate (:validate opts)]
           (validate compiled-routes opts))
         (router compiled-routes (assoc opts ::path-conflicting path-conflicting)))
       (catch #?(:clj Exception, :cljs js/Error) e
         ;; all creation-time errors flow through the :exception handler
         (throw ((get opts :exception identity) e)))))))
| null | https://raw.githubusercontent.com/metosin/reitit/ae73d031b9fd1dfe05e969af6221626956465698/modules/reitit-core/src/reitit/core.cljc | clojure |
Expand
Router
Different routers
| (ns reitit.core
(:require [reitit.exception :as exception]
[reitit.impl :as impl]
[reitit.trie :as trie]))
(defprotocol Expand
(expand [this opts]))
(extend-protocol Expand
#?(:clj clojure.lang.Keyword
:cljs cljs.core.Keyword)
(expand [this _] {:name this})
#?(:clj clojure.lang.PersistentArrayMap
:cljs cljs.core.PersistentArrayMap)
(expand [this _] this)
#?(:clj clojure.lang.PersistentHashMap
:cljs cljs.core.PersistentHashMap)
(expand [this _] this)
#?(:clj clojure.lang.Fn
:cljs function)
(expand [this _] {:handler this})
nil
(expand [_ _]))
(defprotocol Router
(router-name [this])
(routes [this])
(compiled-routes [this])
(options [this])
(route-names [this])
(match-by-path [this path])
(match-by-name [this name] [this name path-params]))
(defn router? [x]
(satisfies? Router x))
(defrecord Match [template data result path-params path])
(defrecord PartialMatch [template data result path-params required])
(defn partial-match? [x]
(instance? PartialMatch x))
(defn match-by-name!
([this name]
(match-by-name! this name nil))
([this name path-params]
(if-let [match (match-by-name this name path-params)]
(if-not (partial-match? match)
match
(impl/throw-on-missing-path-params
(:template match) (:required match) path-params)))))
(defn match->path
([match]
(match->path match nil))
([match query-params]
(some-> match :path (cond-> (seq query-params) (str "?" (impl/query-string query-params))))))
(defn linear-router
"Creates a linear-router from resolved routes and optional
expanded options. See [[router]] for available options, plus the following:
| key | description |
| -----------------------------|-------------|
| `:reitit.trie/trie-compiler` | Optional trie-compiler.
| `:reitit.trie/parameters` | Optional function to create empty map(-like) path parameters value from sequence of keys."
([compiled-routes]
(linear-router compiled-routes {}))
([compiled-routes opts]
(let [compiler (::trie/trie-compiler opts (trie/compiler))
names (impl/find-names compiled-routes opts)
[pl nl] (reduce
(fn [[pl nl] [p {:keys [name] :as data} result]]
(let [{:keys [path-params] :as route} (impl/parse p opts)
f #(if-let [path (impl/path-for route %)]
(->Match p data result (impl/url-decode-coll %) path)
(->PartialMatch p data result (impl/url-decode-coll %) path-params))]
[(conj pl (-> (trie/insert nil p (->Match p data result nil nil) opts) (trie/compile)))
(if name (assoc nl name f) nl)]))
[[] {}]
compiled-routes)
lookup (impl/fast-map nl)
matcher (trie/linear-matcher compiler pl true)
match-by-path (trie/path-matcher matcher compiler)
routes (impl/uncompile-routes compiled-routes)]
^{:type ::router}
(reify
Router
(router-name [_] :linear-router)
(routes [_] routes)
(compiled-routes [_] compiled-routes)
(options [_] opts)
(route-names [_] names)
(match-by-path [_ path]
(if-let [match (match-by-path path)]
(-> (:data match)
(assoc :path-params (:params match))
(assoc :path path))))
(match-by-name [_ name]
(if-let [match (impl/fast-get lookup name)]
(match nil)))
(match-by-name [_ name path-params]
(if-let [match (impl/fast-get lookup name)]
(match (impl/path-params path-params))))))))
(defn lookup-router
"Creates a lookup-router from resolved routes and optional
expanded options. See [[router]] for available options."
([compiled-routes]
(lookup-router compiled-routes {}))
([compiled-routes opts]
(when-let [wilds (seq (filter (impl/->wild-route? opts) compiled-routes))]
(exception/fail!
(str "can't create :lookup-router with wildcard routes: " wilds)
{:wilds wilds
:routes compiled-routes}))
(let [names (impl/find-names compiled-routes opts)
[pl nl] (reduce
(fn [[pl nl] [p {:keys [name] :as data} result]]
[(assoc pl p (->Match p data result {} p))
(if name
(assoc nl name #(->Match p data result % p))
nl)])
[{} {}]
compiled-routes)
data (impl/fast-map pl)
lookup (impl/fast-map nl)
routes (impl/uncompile-routes compiled-routes)]
^{:type ::router}
(reify Router
(router-name [_] :lookup-router)
(routes [_] routes)
(compiled-routes [_] compiled-routes)
(options [_] opts)
(route-names [_] names)
(match-by-path [_ path]
(impl/fast-get data path))
(match-by-name [_ name]
(if-let [match (impl/fast-get lookup name)]
(match nil)))
(match-by-name [_ name path-params]
(if-let [match (impl/fast-get lookup name)]
(match (impl/path-params path-params))))))))
(defn trie-router
"Creates a special prefix-tree router from resolved routes and optional
expanded options. See [[router]] for available options, plus the following:
| key | description |
| -----------------------------|-------------|
| `:reitit.trie/trie-compiler` | Optional trie-compiler.
| `:reitit.trie/parameters` | Optional function to create empty map(-like) path parameters value from sequence of keys."
([compiled-routes]
(trie-router compiled-routes {}))
([compiled-routes opts]
(let [compiler (::trie/trie-compiler opts (trie/compiler))
names (impl/find-names compiled-routes opts)
[pl nl] (reduce
(fn [[pl nl] [p {:keys [name] :as data} result]]
(let [{:keys [path-params] :as route} (impl/parse p opts)
f #(if-let [path (impl/path-for route %)]
(->Match p data result (impl/url-decode-coll %) path)
(->PartialMatch p data result (impl/url-decode-coll %) path-params))]
[(trie/insert pl p (->Match p data result nil nil) opts)
(if name (assoc nl name f) nl)]))
[nil {}]
compiled-routes)
matcher (trie/compile pl compiler)
match-by-path (if matcher (trie/path-matcher matcher compiler))
lookup (impl/fast-map nl)
routes (impl/uncompile-routes compiled-routes)]
^{:type ::router}
(reify
Router
(router-name [_] :trie-router)
(routes [_] routes)
(compiled-routes [_] compiled-routes)
(options [_] opts)
(route-names [_] names)
(match-by-path [_ path]
(if-let [match (and match-by-path (match-by-path path))]
(-> (:data match)
(assoc :path-params (:params match))
(assoc :path path))))
(match-by-name [_ name]
(if-let [match (impl/fast-get lookup name)]
(match nil)))
(match-by-name [_ name path-params]
(if-let [match (impl/fast-get lookup name)]
(match (impl/path-params path-params))))))))
(defn single-static-path-router
"Creates a fast router of 1 static route(s) and optional
expanded options. See [[router]] for available options."
([compiled-routes]
(single-static-path-router compiled-routes {}))
([compiled-routes opts]
(when (or (not= (count compiled-routes) 1) (some (impl/->wild-route? opts) compiled-routes))
(exception/fail!
(str ":single-static-path-router requires exactly 1 static route: " compiled-routes)
{:routes compiled-routes}))
(let [[n :as names] (impl/find-names compiled-routes opts)
[[p data result]] compiled-routes
p #?(:clj (.intern ^String p) :cljs p)
match (->Match p data result {} p)
routes (impl/uncompile-routes compiled-routes)]
^{:type ::router}
(reify Router
(router-name [_] :single-static-path-router)
(routes [_] routes)
(compiled-routes [_] compiled-routes)
(options [_] opts)
(route-names [_] names)
(match-by-path [_ path]
(if (#?(:clj .equals :cljs =) p path) match))
(match-by-name [_ name]
(if (= n name) match))
(match-by-name [_ name path-params]
(if (= n name) (impl/fast-assoc match :path-params (impl/path-params path-params))))))))
(defn mixed-router
"Creates two routers: [[lookup-router]] or [[single-static-path-router]] for
static routes and [[segment-router]] for wildcard routes. All
routes should be non-conflicting. Takes resolved routes and optional
expanded options. See [[router]] for options."
([compiled-routes]
(mixed-router compiled-routes {}))
([compiled-routes opts]
(let [{wild true, lookup false} (group-by (impl/->wild-route? opts) compiled-routes)
->static-router (if (= 1 (count lookup)) single-static-path-router lookup-router)
wildcard-router (trie-router wild opts)
static-router (->static-router lookup opts)
names (impl/find-names compiled-routes opts)
routes (impl/uncompile-routes compiled-routes)]
^{:type ::router}
(reify Router
(router-name [_] :mixed-router)
(routes [_] routes)
(compiled-routes [_] compiled-routes)
(options [_] opts)
(route-names [_] names)
(match-by-path [_ path]
(or (match-by-path static-router path)
(match-by-path wildcard-router path)))
(match-by-name [_ name]
(or (match-by-name static-router name)
(match-by-name wildcard-router name)))
(match-by-name [_ name path-params]
(or (match-by-name static-router name path-params)
(match-by-name wildcard-router name path-params)))))))
(defn quarantine-router
"Creates two routers: [[mixed-router]] for non-conflicting routes
and [[linear-router]] for conflicting routes. Takes resolved routes
and optional expanded options. See [[router]] for options."
([compiled-routes]
(quarantine-router compiled-routes {}))
([compiled-routes opts]
(let [conflicting-paths (impl/conflicting-paths (or (::path-conflicting opts) (impl/path-conflicting-routes compiled-routes opts)))
conflicting? #(contains? conflicting-paths (first %))
{conflicting true, non-conflicting false} (group-by conflicting? compiled-routes)
linear-router (linear-router conflicting opts)
mixed-router (mixed-router non-conflicting opts)
names (impl/find-names compiled-routes opts)
routes (impl/uncompile-routes compiled-routes)]
^{:type ::router}
(reify Router
(router-name [_] :quarantine-router)
(routes [_] routes)
(compiled-routes [_] compiled-routes)
(options [_] opts)
(route-names [_] names)
(match-by-path [_ path]
(or (match-by-path mixed-router path)
(match-by-path linear-router path)))
(match-by-name [_ name]
(or (match-by-name mixed-router name)
(match-by-name linear-router name)))
(match-by-name [_ name path-params]
(or (match-by-name mixed-router name path-params)
(match-by-name linear-router name path-params)))))))
Creating Routers
(defn ^:no-doc default-router-options []
{:lookup (fn lookup [[_ {:keys [name]}] _] (if name #{name}))
:expand expand
:coerce (fn coerce [route _] route)
:compile (fn compile [[_ {:keys [handler]}] _] handler)
:exception exception/exception
:conflicts (fn throw! [conflicts] (exception/fail! :path-conflicts conflicts))})
(defn router
"Create a [[Router]] from raw route data and optionally an options map.
Selects implementation based on route details. The following options
are available:
| key | description
| -------------|-------------
| `:path` | Base-path for routes
| `:routes` | Initial resolved routes (default `[]`)
| `:data` | Initial route data (default `{}`)
| `:spec` | clojure.spec definition for a route data, see `reitit.spec` on how to use this
| `:syntax` | Path-parameter syntax as keyword or set of keywords (default #{:bracket :colon})
| `:expand` | Function of `arg opts => data` to expand route arg to route data (default `reitit.core/expand`)
| `:coerce` | Function of `route opts => route` to coerce resolved route, can throw or return `nil`
| `:compile` | Function of `route opts => result` to compile a route handler
| `:validate` | Function of `routes opts => ()` to validate route (data) via side-effects
| `:conflicts` | Function of `{route #{route}} => ()` to handle conflicting routes
| `:exception` | Function of `Exception => Exception ` to handle creation time exceptions (default `reitit.exception/exception`)
| `:router` | Function of `routes opts => router` to override the actual router implementation"
([raw-routes]
(router raw-routes {}))
([raw-routes opts]
(let [{:keys [router conflicts] :as opts} (merge (default-router-options) opts)]
(try
(let [routes (impl/resolve-routes raw-routes opts)
path-conflicting (if-not (and router (not conflicts)) (impl/path-conflicting-routes routes opts))
name-conflicting (impl/name-conflicting-routes routes)
compiled-routes (impl/compile-routes routes opts)
wilds? (boolean (some (impl/->wild-route? opts) compiled-routes))
all-wilds? (every? (impl/->wild-route? opts) compiled-routes)
router (cond
router router
(and (= 1 (count compiled-routes)) (not wilds?)) single-static-path-router
path-conflicting quarantine-router
(not wilds?) lookup-router
all-wilds? trie-router
:else mixed-router)]
(when-let [conflict-report (and conflicts (impl/unresolved-conflicts path-conflicting))]
(conflicts conflict-report))
(when name-conflicting
(exception/fail! :name-conflicts name-conflicting))
(when-let [validate (:validate opts)]
(validate compiled-routes opts))
(router compiled-routes (assoc opts ::path-conflicting path-conflicting)))
(catch #?(:clj Exception, :cljs js/Error) e
(throw ((get opts :exception identity) e)))))))
|
44990fc69bc9fa455dd921c8ea4ac76de64f5ba7d32d001f7852a2ccf1584173 | kafka4beam/kflow | kflow_app.erl | %%%===================================================================
2019 Klarna Bank AB ( publ )
%%%
@private This module reads kflow pipe configuration
%%% @end
%%%===================================================================
-module(kflow_app).
-behaviour(application).
-include("kflow_int.hrl").
-ifndef(TEST).
-define(TEST, false).
-endif.
%% Application callbacks
-export([start/2, stop/1]).
%%=============================================================================
%% API
%%=============================================================================
start(_StartType, _StartArgs) ->
%% Avoid writing duplicate messages to the default log (unless in
test build , where we want all logs in one place ):
Filter = {fun logger_filters:domain/2, {stop, sub, [kflow_pipe]}},
?TEST orelse logger:add_handler_filter(default, no_kflow_pipe, Filter),
%% Start healthcheck listener:
ok = kflow_http:init(),
%% Load pipe configuration:
case load_pipe_config() of
ok -> kflow_sup:start_link();
Err -> Err
end.
%% -----------------------------------------------------------------------------
stop(_State) ->
ok.
%%=============================================================================
Internal functions :
%%=============================================================================
-spec load_pipe_config() -> ok | {error, term()}.
load_pipe_config() ->
case {?cfg(config_module_dir), ?cfg(pipes)} of
{undefined, _} ->
Pipe configuration is baked into the OTP release , do n't do
%% anything:
ok;
{_, undefined} ->
%% Pipe configuration is dynamic, don't do anything:
ok;
{Dir, {Module, _, _}} when is_list(Dir) ->
Path = filename:join(Dir, atom_to_list(Module)),
?slog(notice, #{ what => "Loading pipe configuration"
, path => Path ++ ".erl"
}),
Options = [binary, verbose, return],
case compile:file(Path, Options) of
{ok, Module, Binary, Warnings} ->
?slog(notice, #{ what => "Pipe configuration has been loaded"
, warnings => Warnings
}),
code:delete(Module),
code:purge(Module),
code:load_binary(Module, Path, Binary),
ok;
{error, Errors, Warnings} ->
?slog(critical, #{ what => "Failed to compile pipe config"
, errors => Errors
, warnings => Warnings
}),
{error, badconfig};
error ->
?log(critical, "Failed to compile pipe config! Missing file?", []),
{error, badconfig}
end
end.
%%%_* Emacs ====================================================================
%%% Local Variables:
%%% allout-layout: t
erlang - indent - level : 2
%%% End:
| null | https://raw.githubusercontent.com/kafka4beam/kflow/2c44379a2934385a17fb504e7a933c859da7ab06/src/framework/kflow_app.erl | erlang | ===================================================================
@end
===================================================================
Application callbacks
=============================================================================
API
=============================================================================
Avoid writing duplicate messages to the default log (unless in
Start healthcheck listener:
Load pipe configuration:
-----------------------------------------------------------------------------
=============================================================================
=============================================================================
anything:
Pipe configuration is dynamic, don't do anything:
_* Emacs ====================================================================
Local Variables:
allout-layout: t
End: | 2019 Klarna Bank AB ( publ )
@private This module reads kflow pipe configuration
-module(kflow_app).
-behaviour(application).
-include("kflow_int.hrl").
-ifndef(TEST).
-define(TEST, false).
-endif.
-export([start/2, stop/1]).
start(_StartType, _StartArgs) ->
test build , where we want all logs in one place ):
Filter = {fun logger_filters:domain/2, {stop, sub, [kflow_pipe]}},
?TEST orelse logger:add_handler_filter(default, no_kflow_pipe, Filter),
ok = kflow_http:init(),
case load_pipe_config() of
ok -> kflow_sup:start_link();
Err -> Err
end.
stop(_State) ->
ok.
Internal functions :
-spec load_pipe_config() -> ok | {error, term()}.
load_pipe_config() ->
case {?cfg(config_module_dir), ?cfg(pipes)} of
{undefined, _} ->
Pipe configuration is baked into the OTP release , do n't do
ok;
{_, undefined} ->
ok;
{Dir, {Module, _, _}} when is_list(Dir) ->
Path = filename:join(Dir, atom_to_list(Module)),
?slog(notice, #{ what => "Loading pipe configuration"
, path => Path ++ ".erl"
}),
Options = [binary, verbose, return],
case compile:file(Path, Options) of
{ok, Module, Binary, Warnings} ->
?slog(notice, #{ what => "Pipe configuration has been loaded"
, warnings => Warnings
}),
code:delete(Module),
code:purge(Module),
code:load_binary(Module, Path, Binary),
ok;
{error, Errors, Warnings} ->
?slog(critical, #{ what => "Failed to compile pipe config"
, errors => Errors
, warnings => Warnings
}),
{error, badconfig};
error ->
?log(critical, "Failed to compile pipe config! Missing file?", []),
{error, badconfig}
end
end.
erlang - indent - level : 2
|
a448fd8df9f615d10c4831d8411964a0eef18960a08557d87870589a1f1db92e | maitria/avi | pervasive.clj | (ns avi.pervasive)
(defn n-times
[thing n a-fn]
(reduce
(fn [thing n]
(a-fn thing))
thing
(range n)))
(defn subs-with-spaces
"Like subs, except that it is not an error to index past the end of the
string. If `start` is greater, we pretend that the string was longer. If
`end` is greater, we pretend as theough the string were padded with spaces."
([s start]
{:pre (string? s)}
(if (> start (count s))
""
(subs s start)))
([s start end]
{:pre [(string? s)]
:post [(= (count %) (- end start))]}
(let [s-start (min start (count s))
s-end (min end (count s))]
(apply str
(subs s s-start s-end)
(repeat (- end start (- s-end s-start)) \space)))))
| null | https://raw.githubusercontent.com/maitria/avi/c641e9e32af4300ea7273a41e86b4f47d0f2c092/src/avi/pervasive.clj | clojure | (ns avi.pervasive)
(defn n-times
[thing n a-fn]
(reduce
(fn [thing n]
(a-fn thing))
thing
(range n)))
(defn subs-with-spaces
"Like subs, except that it is not an error to index past the end of the
string. If `start` is greater, we pretend that the string was longer. If
`end` is greater, we pretend as theough the string were padded with spaces."
([s start]
{:pre (string? s)}
(if (> start (count s))
""
(subs s start)))
([s start end]
{:pre [(string? s)]
:post [(= (count %) (- end start))]}
(let [s-start (min start (count s))
s-end (min end (count s))]
(apply str
(subs s s-start s-end)
(repeat (- end start (- s-end s-start)) \space)))))
| |
0ce1cf4a00dea5daf2401c5fc11913135b31ee4e2f4e7d22a536b6635e1d0b68 | jaked/ocamljs | syntax_quotations.ml |
* This file is part of ocamljs , OCaml to Javascript compiler
* Copyright ( C ) 2007 - 9 Skydeck , Inc
* Copyright ( C ) 2010
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation ; either
* version 2 of the License , or ( at your option ) any later version .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Library General Public License for more details .
*
* You should have received a copy of the GNU Library General Public
* License along with this library ; if not , write to the Free
* Software Foundation , Inc. , 59 Temple Place - Suite 330 , Boston ,
* MA 02111 - 1307 , USA
* This file is part of ocamljs, OCaml to Javascript compiler
* Copyright (C) 2007-9 Skydeck, Inc
* Copyright (C) 2010 Jake Donham
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the Free
* Software Foundation, Inc., 59 Temple Place - Suite 330, Boston,
* MA 02111-1307, USA
*)
open Camlp4.PreCast
module Q = Syntax.Quotation
module TheAntiquotSyntax = Syntax.AntiquotSyntax
I do n't totally understand what 's going on here but this is how
Camlp4QuotationCommon.ml does it .
Camlp4QuotationCommon.ml does it. *)
module MetaLocHere = Jslib_ast.Meta.MetaLoc
module MetaLoc =
struct
module
let loc_name = ref None
let meta_loc_expr _loc loc =
match !loc_name with
| None -> <:expr< $lid:!Loc.name$ >>
| Some "here" -> MetaLocHere.meta_loc_expr _loc loc
| Some x -> <:expr< $lid:x$ >>
let meta_loc_patt _loc _ = <:patt< _ >>;
end
module MetaAst = Jslib_ast.Meta.Make(MetaLoc)
module ME = MetaAst.Expr
module MP = MetaAst.Patt
let is_antiquot s =
let len = String.length s in
len > 2 && s.[0] = '\\' && s.[1] = '$'
let handle_antiquot_in_string s term parse loc decorate =
(* prerr_endline ("handle_antiquot_in_string " ^ s); *)
if is_antiquot s then
let pos = String.index s ':' in
let name = String.sub s 2 (pos - 2)
and code = String.sub s (pos + 1) (String.length s - pos - 1) in
decorate name (parse loc code)
else term
let antiquot_expander =
object
inherit Ast.map as super
method patt =
function
| <:patt@_loc< $anti:s$ >>
| <:patt@_loc< $str:s$ >> as p ->
handle_antiquot_in_string s p TheAntiquotSyntax.parse_patt _loc (fun n p -> p)
| p -> super#patt p
method expr =
function
| <:expr@_loc< $anti:s$ >>
| <:expr@_loc< $str:s$ >> as e ->
handle_antiquot_in_string s e TheAntiquotSyntax.parse_expr _loc (fun n e ->
match n with
| "`int" -> <:expr< string_of_int $e$ >>
| "`flo" -> <:expr< string_of_float $e$ >>
| "listexp" -> <:expr< Jslib_ast.exp_of_list $e$ >>
| "liststmt" -> <:expr< Jslib_ast.stmt_of_list $e$ >>
(* | "`str" -> <:expr< Ast.safe_string_escaped $e$ >> *)
| _ -> e )
| e -> super#expr e
end
let add_quotation name entry mexpr mpatt =
let = Jslib_parse.Gram.Entry.mk ( Jslib_parse.Gram.Entry.name entry ) in
let entry_eoi = entry in
let parse_quot_string entry loc s =
let q = !Camlp4_config.antiquotations in
let () = Camlp4_config.antiquotations := true in
let res = Jslib_parse.Gram.parse_string entry loc s in
let () = Camlp4_config.antiquotations := q in
res in
let expand_expr loc loc_name_opt s =
let ast = parse_quot_string entry_eoi loc s in
let () = MetaLoc.loc_name := loc_name_opt in
let meta_ast = mexpr loc ast in
let exp_ast = antiquot_expander#expr meta_ast in
exp_ast in
let expand_str_item loc loc_name_opt s =
let exp_ast = expand_expr loc loc_name_opt s in
<:str_item@loc< $exp:exp_ast$ >> in
let expand_patt _loc loc_name_opt s =
let ast = parse_quot_string entry_eoi _loc s in
let meta_ast = mpatt _loc ast in
let exp_ast = antiquot_expander#patt meta_ast in
match loc_name_opt with
| None -> exp_ast
| Some name ->
let rec subst_first_loc =
function
| <:patt@_loc< Ast.$uid:u$ $_$ >> -> <:patt< Ast.$uid:u$ $lid:name$ >>
| <:patt@_loc< $a$ $b$ >> -> <:patt< $subst_first_loc a$ $b$ >>
| p -> p in
subst_first_loc exp_ast in
EXTEND Jslib_parse . Gram
entry_eoi :
[ [ x = entry ; ` EOI - > x ] ]
;
END ;
EXTEND Jslib_parse.Gram
entry_eoi:
[ [ x = entry; `EOI -> x ] ]
;
END;
*)
Q.add name Q.DynAst.expr_tag expand_expr;
Q.add name Q.DynAst.patt_tag expand_patt;
Q.add name Q.DynAst.str_item_tag expand_str_item;
;;
add_quotation "exp" Jslib_parse.expression ME.meta_exp MP.meta_exp;
add_quotation "stmt" Jslib_parse.statementList ME.meta_stmt MP.meta_stmt;
Q.default := "exp";
| null | https://raw.githubusercontent.com/jaked/ocamljs/378080ff1c8033bb15ed2bd29bf1443e301d7af8/src/jslib/syntax_quotations.ml | ocaml | prerr_endline ("handle_antiquot_in_string " ^ s);
| "`str" -> <:expr< Ast.safe_string_escaped $e$ >> |
* This file is part of ocamljs , OCaml to Javascript compiler
* Copyright ( C ) 2007 - 9 Skydeck , Inc
* Copyright ( C ) 2010
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation ; either
* version 2 of the License , or ( at your option ) any later version .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Library General Public License for more details .
*
* You should have received a copy of the GNU Library General Public
* License along with this library ; if not , write to the Free
* Software Foundation , Inc. , 59 Temple Place - Suite 330 , Boston ,
* MA 02111 - 1307 , USA
* This file is part of ocamljs, OCaml to Javascript compiler
* Copyright (C) 2007-9 Skydeck, Inc
* Copyright (C) 2010 Jake Donham
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the Free
* Software Foundation, Inc., 59 Temple Place - Suite 330, Boston,
* MA 02111-1307, USA
*)
open Camlp4.PreCast
module Q = Syntax.Quotation
module TheAntiquotSyntax = Syntax.AntiquotSyntax
I do n't totally understand what 's going on here but this is how
Camlp4QuotationCommon.ml does it .
Camlp4QuotationCommon.ml does it. *)
module MetaLocHere = Jslib_ast.Meta.MetaLoc
module MetaLoc =
struct
module
let loc_name = ref None
let meta_loc_expr _loc loc =
match !loc_name with
| None -> <:expr< $lid:!Loc.name$ >>
| Some "here" -> MetaLocHere.meta_loc_expr _loc loc
| Some x -> <:expr< $lid:x$ >>
let meta_loc_patt _loc _ = <:patt< _ >>;
end
module MetaAst = Jslib_ast.Meta.Make(MetaLoc)
module ME = MetaAst.Expr
module MP = MetaAst.Patt
let is_antiquot s =
let len = String.length s in
len > 2 && s.[0] = '\\' && s.[1] = '$'
let handle_antiquot_in_string s term parse loc decorate =
if is_antiquot s then
let pos = String.index s ':' in
let name = String.sub s 2 (pos - 2)
and code = String.sub s (pos + 1) (String.length s - pos - 1) in
decorate name (parse loc code)
else term
let antiquot_expander =
object
inherit Ast.map as super
method patt =
function
| <:patt@_loc< $anti:s$ >>
| <:patt@_loc< $str:s$ >> as p ->
handle_antiquot_in_string s p TheAntiquotSyntax.parse_patt _loc (fun n p -> p)
| p -> super#patt p
method expr =
function
| <:expr@_loc< $anti:s$ >>
| <:expr@_loc< $str:s$ >> as e ->
handle_antiquot_in_string s e TheAntiquotSyntax.parse_expr _loc (fun n e ->
match n with
| "`int" -> <:expr< string_of_int $e$ >>
| "`flo" -> <:expr< string_of_float $e$ >>
| "listexp" -> <:expr< Jslib_ast.exp_of_list $e$ >>
| "liststmt" -> <:expr< Jslib_ast.stmt_of_list $e$ >>
| _ -> e )
| e -> super#expr e
end
let add_quotation name entry mexpr mpatt =
let = Jslib_parse.Gram.Entry.mk ( Jslib_parse.Gram.Entry.name entry ) in
let entry_eoi = entry in
let parse_quot_string entry loc s =
let q = !Camlp4_config.antiquotations in
let () = Camlp4_config.antiquotations := true in
let res = Jslib_parse.Gram.parse_string entry loc s in
let () = Camlp4_config.antiquotations := q in
res in
let expand_expr loc loc_name_opt s =
let ast = parse_quot_string entry_eoi loc s in
let () = MetaLoc.loc_name := loc_name_opt in
let meta_ast = mexpr loc ast in
let exp_ast = antiquot_expander#expr meta_ast in
exp_ast in
let expand_str_item loc loc_name_opt s =
let exp_ast = expand_expr loc loc_name_opt s in
<:str_item@loc< $exp:exp_ast$ >> in
let expand_patt _loc loc_name_opt s =
let ast = parse_quot_string entry_eoi _loc s in
let meta_ast = mpatt _loc ast in
let exp_ast = antiquot_expander#patt meta_ast in
match loc_name_opt with
| None -> exp_ast
| Some name ->
let rec subst_first_loc =
function
| <:patt@_loc< Ast.$uid:u$ $_$ >> -> <:patt< Ast.$uid:u$ $lid:name$ >>
| <:patt@_loc< $a$ $b$ >> -> <:patt< $subst_first_loc a$ $b$ >>
| p -> p in
subst_first_loc exp_ast in
EXTEND Jslib_parse . Gram
entry_eoi :
[ [ x = entry ; ` EOI - > x ] ]
;
END ;
EXTEND Jslib_parse.Gram
entry_eoi:
[ [ x = entry; `EOI -> x ] ]
;
END;
*)
Q.add name Q.DynAst.expr_tag expand_expr;
Q.add name Q.DynAst.patt_tag expand_patt;
Q.add name Q.DynAst.str_item_tag expand_str_item;
;;
add_quotation "exp" Jslib_parse.expression ME.meta_exp MP.meta_exp;
add_quotation "stmt" Jslib_parse.statementList ME.meta_stmt MP.meta_stmt;
Q.default := "exp";
|
33b9f0e91bcba7edc9ffe293912c77551fa24d9dfc3c442d44a95612f6a40405 | travelping/eradius | eradius_server_top_sup.erl | @private
%% @doc Supervisor for RADIUS server supervisor tree.
%% This is a one_for_all supervisor because the server_mon must always die when the server_sup goes down, and vice-versa.
-module(eradius_server_top_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(SERVER, ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
%% ------------------------------------------------------------------------------------------
%% -- supervisor callbacks
init([]) ->
RestartStrategy = one_for_all,
MaxRestarts = 10,
MaxSecondsBetweenRestarts = 1,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
ServerSup = {sup, {eradius_server_sup, start_link, []}, permanent, infinity, supervisor, [eradius_server_sup]},
ServerMon = {mon, {eradius_server_mon, start_link, []}, permanent, brutal_kill, worker, [eradius_server_mon]},
{ok, {SupFlags, [ServerSup, ServerMon]}}.
| null | https://raw.githubusercontent.com/travelping/eradius/bac1a92f547ac4f8e009e9052f28c430b6f9b82d/src/eradius_server_top_sup.erl | erlang | @doc Supervisor for RADIUS server supervisor tree.
This is a one_for_all supervisor because the server_mon must always die when the server_sup goes down, and vice-versa.
------------------------------------------------------------------------------------------
-- supervisor callbacks | @private
-module(eradius_server_top_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(SERVER, ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
init([]) ->
RestartStrategy = one_for_all,
MaxRestarts = 10,
MaxSecondsBetweenRestarts = 1,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
ServerSup = {sup, {eradius_server_sup, start_link, []}, permanent, infinity, supervisor, [eradius_server_sup]},
ServerMon = {mon, {eradius_server_mon, start_link, []}, permanent, brutal_kill, worker, [eradius_server_mon]},
{ok, {SupFlags, [ServerSup, ServerMon]}}.
|
c4bf0d7b789e2ba4410aafe2672e8a72551ed591168ba7959692387d8f8b2e5b | cark/cark.behavior-tree | state_machine_test.cljc | (ns cark.behavior-tree.state-machine-test
(:require [clojure.test :as t :refer [deftest is]]
[cark.behavior-tree.core :as bt]
[cark.behavior-tree.state-machine :as sm]
[cark.behavior-tree.context :as ctx]
[cark.behavior-tree.hiccup :as h]))
(defn log [value]
(tap> value)
value)
(defn is-thrown?
([func]
(is (thrown? #?(:cljs js/Object :clj Exception) (func))))
([func text]
(is (thrown? #?(:cljs js/Object :clj Exception) (func))
text)))
(defn get-state [ctx]
(:state (:sm (bt/bb-get ctx))))
(deftest simple-test
(let [tree (-> (sm/make [:sm] :green
(sm/state :green
(sm/event :advance
(sm/transition :yellow)))
(sm/state :yellow
(sm/event :advance (sm/transition :red)))
(sm/state :red
(sm/event :advance (sm/transition :green))))
bt/hiccup->context bt/tick)
advance (fn [tree] (-> tree (bt/send-event :advance)))]
(is (= :green (-> tree get-state)))
(is (= :running (-> tree bt/get-status)))
(is (= :yellow (-> tree advance get-state)))
(is (= :running (-> tree advance bt/get-status)))
(is (= :red (-> tree advance advance get-state)))
(is (= :running (-> tree advance advance bt/get-status)))
(is (= :green (-> tree advance advance advance get-state)))
(is (= :running (-> tree advance advance advance bt/get-status)))))
(deftest end-state-test
(let [tree (-> (sm/make [:sm] :start
(sm/state :start (sm/event :advance (sm/transition :end)))
(sm/end-state :end))
bt/hiccup->context bt/tick)
advance (fn [tree] (-> tree (bt/send-event :advance)))]
(is (= :start (-> tree get-state)))
(is (= :running (-> tree bt/get-status)))
(is (= :end (-> tree advance get-state)))
(is (= :success (-> tree advance bt/get-status)))))
(deftest with-enter-event+state-test
(let [tree (-> [:sequence
[:update {:func (bt/bb-assocer-in [:val] 0)}]
(sm/make [:sm] :start
(sm/state :start
(sm/event :advance (sm/transition :end)))
(sm/state :end
(sm/enter-event [:update {:func (bt/bb-updater-in [:val] inc)
:id :increment-here!}])
(sm/event :advance (sm/transition :end))
(sm/event :noop [:update {:func identity}])
(sm/event :stop (sm/transition :real-end)))
(sm/end-state :real-end))]
bt/hiccup->context bt/tick)
advance (fn [tree] (-> tree (bt/send-event :advance)))
noop (fn [tree] (-> tree (bt/send-event :noop)))
stop (fn [tree] (-> tree (bt/send-event :stop)))]
(is (= :start (-> tree get-state)))
(is (= :running (-> tree bt/get-status)))
(is (= 0 (-> tree bt/bb-get :val)))
(is (= :end (-> tree advance get-state)))
(is (= :running (-> tree advance bt/get-status)))
(is (= :running (-> tree advance advance bt/get-status)))
(is (= 1 (-> tree advance bt/bb-get :val)))
(is (= 2 (-> tree advance advance bt/bb-get :val)))
(is (= :real-end (-> tree advance advance stop get-state)))
(is (= :success (-> tree advance advance stop bt/get-status)))
(is (= 1 (-> tree advance noop bt/bb-get :val)))))
(deftest change-state-then-event-also-in-old-state-test
(let [ctx (-> (sm/make [:sm] :start
(sm/state :start
(sm/event :foo (sm/transition :bar))
(sm/event :baz [:send-event {:event :start-baz}]))
(sm/state :bar
(sm/event :baz [:send-event {:event :bar-baz}])))
bt/hiccup->context bt/tick)]
(is (= :running (bt/get-status ctx)))
(is (= :start (get-state ctx)))
(is (= :bar (-> ctx (bt/send-event :foo) get-state)))
(is (= [[:start-baz nil]] (-> (bt/send-event ctx :baz) bt/get-events)))
(is (= [[:bar-baz nil]] (-> ctx (bt/send-event :foo) (bt/send-event :baz) bt/get-events)))))
(deftest parallel-test
(let [ctx (-> [:parallel {:policy :select}
(sm/make [:aba] :a
(sm/state :a (sm/event :a (sm/transition :ab)))
(sm/state :ab (sm/event :b (sm/transition :aba)))
(sm/state :aba (sm/event :a (sm/transition :end)))
(sm/end-state :end))
(sm/make [:abb] :a
(sm/state :a (sm/event :a (sm/transition :ab)))
(sm/state :ab (sm/event :b (sm/transition :abb)))
(sm/state :abb (sm/event :b (sm/transition :end)))
(sm/end-state :end))]
bt/hiccup->context bt/tick)]
(is (= :running (-> ctx bt/get-status)))
(is (= :success (-> ctx
(bt/send-event :a)
(bt/send-event :b)
(bt/send-event :a)
bt/get-status)))
(is (= :success (-> ctx
(bt/send-event :a)
(bt/send-event :b)
(bt/send-event :b)
bt/get-status)))
(is (= :running (-> ctx
(bt/send-event :a)
(bt/send-event :b)
(bt/send-event :c)
bt/get-status)))))
(deftest hierachy-test
(let [b-machine (sm/make [:b] :b
(sm/state :b
(sm/enter-event [:send-event {:event :entered-b-b}])
(sm/event :c (sm/transition :c)))
(sm/state :c
(sm/enter-event [:send-event {:event :entered-b-c}])
(sm/event :d (sm/transition :d)))
(sm/end-state :d
[:send-event {:event :entered-b-d}]))
ctx (-> (sm/make [:a] :a
(sm/state :a
(sm/enter-event
[:sequence
[:send-event {:event :entered-a-a}]
b-machine
[:send-event {:event :entered-a-a-after}]])
(sm/event :e
(sm/transition :e)))
(sm/state :e
(sm/enter-event [:send-event {:event :entered-a-e}])))
bt/hiccup->context bt/tick)]
(is (= :running (-> ctx bt/get-status)))
(is (= [[:entered-a-a nil]
[:entered-b-b nil]]
(-> ctx bt/get-events)))
(is (= [[:entered-b-c nil]]
(-> ctx (bt/send-event :c) bt/get-events)))
(is (= [[:entered-b-d nil]
[:entered-a-a-after nil]]
(-> ctx (bt/send-event :c) (bt/send-event :d) bt/get-events)))
(is (= [[:entered-a-e nil]]
(-> ctx (bt/send-event :e) bt/get-events)))))
| null | https://raw.githubusercontent.com/cark/cark.behavior-tree/4e229fcc2ed3af3c66e74d2c51dda6684927d254/src/test/cark/behavior_tree/state_machine_test.cljc | clojure | (ns cark.behavior-tree.state-machine-test
(:require [clojure.test :as t :refer [deftest is]]
[cark.behavior-tree.core :as bt]
[cark.behavior-tree.state-machine :as sm]
[cark.behavior-tree.context :as ctx]
[cark.behavior-tree.hiccup :as h]))
(defn log [value]
(tap> value)
value)
(defn is-thrown?
([func]
(is (thrown? #?(:cljs js/Object :clj Exception) (func))))
([func text]
(is (thrown? #?(:cljs js/Object :clj Exception) (func))
text)))
(defn get-state [ctx]
(:state (:sm (bt/bb-get ctx))))
(deftest simple-test
(let [tree (-> (sm/make [:sm] :green
(sm/state :green
(sm/event :advance
(sm/transition :yellow)))
(sm/state :yellow
(sm/event :advance (sm/transition :red)))
(sm/state :red
(sm/event :advance (sm/transition :green))))
bt/hiccup->context bt/tick)
advance (fn [tree] (-> tree (bt/send-event :advance)))]
(is (= :green (-> tree get-state)))
(is (= :running (-> tree bt/get-status)))
(is (= :yellow (-> tree advance get-state)))
(is (= :running (-> tree advance bt/get-status)))
(is (= :red (-> tree advance advance get-state)))
(is (= :running (-> tree advance advance bt/get-status)))
(is (= :green (-> tree advance advance advance get-state)))
(is (= :running (-> tree advance advance advance bt/get-status)))))
(deftest end-state-test
(let [tree (-> (sm/make [:sm] :start
(sm/state :start (sm/event :advance (sm/transition :end)))
(sm/end-state :end))
bt/hiccup->context bt/tick)
advance (fn [tree] (-> tree (bt/send-event :advance)))]
(is (= :start (-> tree get-state)))
(is (= :running (-> tree bt/get-status)))
(is (= :end (-> tree advance get-state)))
(is (= :success (-> tree advance bt/get-status)))))
(deftest with-enter-event+state-test
(let [tree (-> [:sequence
[:update {:func (bt/bb-assocer-in [:val] 0)}]
(sm/make [:sm] :start
(sm/state :start
(sm/event :advance (sm/transition :end)))
(sm/state :end
(sm/enter-event [:update {:func (bt/bb-updater-in [:val] inc)
:id :increment-here!}])
(sm/event :advance (sm/transition :end))
(sm/event :noop [:update {:func identity}])
(sm/event :stop (sm/transition :real-end)))
(sm/end-state :real-end))]
bt/hiccup->context bt/tick)
advance (fn [tree] (-> tree (bt/send-event :advance)))
noop (fn [tree] (-> tree (bt/send-event :noop)))
stop (fn [tree] (-> tree (bt/send-event :stop)))]
(is (= :start (-> tree get-state)))
(is (= :running (-> tree bt/get-status)))
(is (= 0 (-> tree bt/bb-get :val)))
(is (= :end (-> tree advance get-state)))
(is (= :running (-> tree advance bt/get-status)))
(is (= :running (-> tree advance advance bt/get-status)))
(is (= 1 (-> tree advance bt/bb-get :val)))
(is (= 2 (-> tree advance advance bt/bb-get :val)))
(is (= :real-end (-> tree advance advance stop get-state)))
(is (= :success (-> tree advance advance stop bt/get-status)))
(is (= 1 (-> tree advance noop bt/bb-get :val)))))
(deftest change-state-then-event-also-in-old-state-test
(let [ctx (-> (sm/make [:sm] :start
(sm/state :start
(sm/event :foo (sm/transition :bar))
(sm/event :baz [:send-event {:event :start-baz}]))
(sm/state :bar
(sm/event :baz [:send-event {:event :bar-baz}])))
bt/hiccup->context bt/tick)]
(is (= :running (bt/get-status ctx)))
(is (= :start (get-state ctx)))
(is (= :bar (-> ctx (bt/send-event :foo) get-state)))
(is (= [[:start-baz nil]] (-> (bt/send-event ctx :baz) bt/get-events)))
(is (= [[:bar-baz nil]] (-> ctx (bt/send-event :foo) (bt/send-event :baz) bt/get-events)))))
(deftest parallel-test
(let [ctx (-> [:parallel {:policy :select}
(sm/make [:aba] :a
(sm/state :a (sm/event :a (sm/transition :ab)))
(sm/state :ab (sm/event :b (sm/transition :aba)))
(sm/state :aba (sm/event :a (sm/transition :end)))
(sm/end-state :end))
(sm/make [:abb] :a
(sm/state :a (sm/event :a (sm/transition :ab)))
(sm/state :ab (sm/event :b (sm/transition :abb)))
(sm/state :abb (sm/event :b (sm/transition :end)))
(sm/end-state :end))]
bt/hiccup->context bt/tick)]
(is (= :running (-> ctx bt/get-status)))
(is (= :success (-> ctx
(bt/send-event :a)
(bt/send-event :b)
(bt/send-event :a)
bt/get-status)))
(is (= :success (-> ctx
(bt/send-event :a)
(bt/send-event :b)
(bt/send-event :b)
bt/get-status)))
(is (= :running (-> ctx
(bt/send-event :a)
(bt/send-event :b)
(bt/send-event :c)
bt/get-status)))))
(deftest hierachy-test
(let [b-machine (sm/make [:b] :b
(sm/state :b
(sm/enter-event [:send-event {:event :entered-b-b}])
(sm/event :c (sm/transition :c)))
(sm/state :c
(sm/enter-event [:send-event {:event :entered-b-c}])
(sm/event :d (sm/transition :d)))
(sm/end-state :d
[:send-event {:event :entered-b-d}]))
ctx (-> (sm/make [:a] :a
(sm/state :a
(sm/enter-event
[:sequence
[:send-event {:event :entered-a-a}]
b-machine
[:send-event {:event :entered-a-a-after}]])
(sm/event :e
(sm/transition :e)))
(sm/state :e
(sm/enter-event [:send-event {:event :entered-a-e}])))
bt/hiccup->context bt/tick)]
(is (= :running (-> ctx bt/get-status)))
(is (= [[:entered-a-a nil]
[:entered-b-b nil]]
(-> ctx bt/get-events)))
(is (= [[:entered-b-c nil]]
(-> ctx (bt/send-event :c) bt/get-events)))
(is (= [[:entered-b-d nil]
[:entered-a-a-after nil]]
(-> ctx (bt/send-event :c) (bt/send-event :d) bt/get-events)))
(is (= [[:entered-a-e nil]]
(-> ctx (bt/send-event :e) bt/get-events)))))
| |
6efe0933802d7c9e515f15733721d34d5d732d7669744723b39487c60d8f0160 | esl/MongooseIM | mongoose_graphql_commands.erl | %% @doc Management and execution of administration commands with GraphQL API
-module(mongoose_graphql_commands).
%% API
-export([start/0, stop/0, process/1]).
%% Internal API
-export([wrap_type/2]).
%% Only for tests
-export([build_specs/1, get_specs/0]).
-ignore_xref([build_specs/1, get_specs/0]).
% Needed to get the 'agent' vCard Fields inside a vCard
-define(MAX_TYPE_RECURSION_DEPTH, 2).
Needed to handle e.g. [ String ! ] ! , which has 3 wrapper types : NON_NULL , LIST , NON_NULL
-define(MAX_INTROSPECTION_DEPTH, 3).
-type context() :: #{args := [string()],
category => category(),
commands => command_map(),
command => command(),
args_spec => [arg_spec()],
doc => doc(),
vars => json_map(),
reason => atom() | tuple(),
result => result(),
status => executed | error | usage}.
-type result() :: {ok, #{atom() => graphql:json()}} | {error, any()}.
-type specs() :: #{category() => category_spec()}.
-type category() :: binary().
-type category_spec() :: #{desc := binary(), commands := command_map()}.
-type command_map() :: #{command() => command_spec()}.
-type command() :: binary().
-type command_spec() :: #{desc := binary(),
op_type := op_type(),
args := [arg_spec()],
fields := [field_spec()],
doc := doc()}.
-type arg_spec() :: #{name := binary(), type := binary(), kind := binary(), wrap := [list | required]}.
-type field_spec() :: #{name | on := binary(), fields => [field_spec()]}.
-type op_type() :: binary().
-type doc() :: binary().
-type ep() :: graphql:endpoint_context().
-type json_map() :: #{binary() => graphql:json()}.
-export_type([category/0, command/0, command_map/0, arg_spec/0, context/0]).
%% API
-spec start() -> ok.
start() ->
Specs = build_specs(admin),
persistent_term:put(?MODULE, Specs).
-spec stop() -> ok.
stop() ->
persistent_term:erase(?MODULE),
ok.
%% The returned context has 'status' with the following values:
- ' executed ' means that a GraphQL command was called , and ' result ' contains the returned value
%% - 'error' means that the arguments were incorrect, and 'reason' contains more information
%% - 'usage' means that help needs to be displayed
-spec process([string()]) -> context().
process(Args) ->
lists:foldl(fun(_, #{status := _} = Ctx) -> Ctx;
(StepF, Ctx) -> StepF(Ctx)
end, #{args => Args}, steps()).
%% Internal API
-spec build_specs(atom()) -> specs().
build_specs(EpName) ->
Ep = mongoose_graphql:get_endpoint(EpName),
CatSpecs = get_category_specs(Ep),
lists:foldl(fun({Category, CategorySpec}, Acc) ->
insert_category(Category, CategorySpec, Acc)
end, #{}, CatSpecs).
-spec get_specs() -> specs().
get_specs() ->
persistent_term:get(?MODULE).
%% Internals
steps() ->
[fun find_category/1, fun find_command/1, fun parse_args/1, fun check_args/1, fun execute/1].
-spec find_category(context()) -> context().
find_category(CtxIn = #{args := [CategoryStr | Args]}) ->
Category = list_to_binary(CategoryStr),
Ctx = CtxIn#{category => Category, args => Args},
case get_specs() of
#{Category := #{commands := Commands}} ->
Ctx#{commands => Commands};
#{} ->
Ctx#{status => error, reason => unknown_category}
end;
find_category(Ctx = #{args := []}) ->
Ctx#{status => error, reason => no_args}.
-spec find_command(context()) -> context().
find_command(CtxIn = #{args := [CommandStr | Args]}) ->
Command = list_to_binary(CommandStr),
Ctx = #{commands := Commands} = CtxIn#{command => Command, args => Args},
case Commands of
#{Command := CommandSpec} ->
#{doc := Doc, args := ArgSpec} = CommandSpec,
Ctx#{doc => Doc, args_spec => ArgSpec};
#{} ->
Ctx#{status => error, reason => unknown_command}
end;
find_command(Ctx) ->
Ctx#{status => usage}.
-spec parse_args(context()) -> context().
parse_args(Ctx = #{args := ["--help"]}) ->
Ctx#{status => usage};
parse_args(Ctx) ->
parse_args_loop(Ctx#{vars => #{}}).
parse_args_loop(Ctx = #{vars := Vars,
args_spec := ArgsSpec,
args := ["--" ++ ArgNameStr, ArgValueStr | Rest]}) ->
ArgName = list_to_binary(ArgNameStr),
case lists:filter(fun(#{name := Name}) -> Name =:= ArgName end, ArgsSpec) of
[] ->
Ctx#{status => error, reason => {unknown_arg, ArgName}};
[ArgSpec] ->
ArgValue = list_to_binary(ArgValueStr),
try parse_arg(ArgValue, ArgSpec) of
ParsedValue ->
NewVars = Vars#{ArgName => ParsedValue},
parse_args_loop(Ctx#{vars := NewVars, args := Rest})
catch _:_ ->
Ctx#{status => error, reason => {invalid_arg_value, ArgName, ArgValue}}
end
end;
parse_args_loop(Ctx = #{args := []}) ->
Ctx;
parse_args_loop(Ctx) ->
Ctx#{status => error, reason => invalid_args}.
-spec parse_arg(binary(), arg_spec()) -> jiffy:json_value().
parse_arg(Value, ArgSpec = #{type := Type}) ->
case is_json_arg(ArgSpec) of
true ->
jiffy:decode(Value, [return_maps]);
false ->
convert_input_type(Type, Value)
end.
%% Used input types that are not parsed from binaries should be handled here
convert_input_type(Type, Value) when Type =:= <<"Int">>;
Type =:= <<"PosInt">>;
Type =:= <<"NonNegInt">> -> binary_to_integer(Value);
convert_input_type(_, Value) -> Value.
Complex argument values should be provided in JSON
-spec is_json_arg(arg_spec()) -> boolean().
is_json_arg(#{kind := <<"INPUT_OBJECT">>}) -> true;
is_json_arg(#{kind := Kind, wrap := Wrap}) when Kind =:= <<"SCALAR">>;
Kind =:= <<"ENUM">> ->
lists:member(list, Wrap).
-spec check_args(context()) -> context().
check_args(Ctx = #{args_spec := ArgsSpec, vars := Vars}) ->
MissingArgs = [Name || #{name := Name, wrap := [required|_]} <- ArgsSpec,
not maps:is_key(Name, Vars)],
case MissingArgs of
[] -> Ctx;
_ -> Ctx#{status => error, reason => {missing_args, MissingArgs}}
end.
-spec execute(context()) -> context().
execute(#{doc := Doc, vars := Vars} = Ctx) ->
Ctx#{status => executed, result => execute(mongoose_graphql:get_endpoint(admin), Doc, Vars)}.
-spec get_category_specs(ep()) -> [{category(), category_spec()}].
get_category_specs(Ep) ->
lists:flatmap(fun(OpType) -> get_category_specs(Ep, OpType) end, op_types()).
get_category_specs(Ep, OpType) ->
OpTypeName = <<OpType/binary, "Type">>,
Doc = iolist_to_binary(["{ __schema { ", OpTypeName, " ", category_spec_query(), " } }"]),
{ok, #{data := #{<<"__schema">> := Schema}}} = mongoose_graphql:execute(Ep, undefined, Doc),
#{OpTypeName := #{<<"fields">> := Categories}} = Schema,
get_category_specs(Ep, OpType, Categories).
op_types() ->
[<<"query">>, <<"mutation">>, <<"subscription">>].
-spec get_category_specs(ep(), op_type(), [json_map()]) -> [{category(), category_spec()}].
get_category_specs(Ep, OpType, Categories) ->
[get_category_spec(Ep, OpType, Category) || Category <- Categories, is_category(Category)].
is_category(#{<<"name">> := <<"checkAuth">>}) ->
false;
is_category(#{}) ->
true.
-spec get_category_spec(ep(), op_type(), json_map()) -> {category(), category_spec()}.
get_category_spec(Ep, OpType, #{<<"name">> := Category, <<"description">> := Desc,
<<"type">> := #{<<"name">> := CategoryType}}) ->
Doc = iolist_to_binary(
["query ($type: String!) { __type(name: $type) "
"{name fields {name description args {name type ", arg_type_query(), "} type ",
field_type_query(), "}}}"]),
Vars = #{<<"type">> => CategoryType},
{ok, #{data := #{<<"__type">> := #{<<"fields">> := Commands}}}} = execute(Ep, Doc, Vars),
CommandSpecs = [get_command_spec(Ep, Category, OpType, Command) || Command <- Commands],
{Category, #{desc => Desc, commands => maps:from_list(CommandSpecs)}}.
-spec get_command_spec(ep(), category(), op_type(), json_map()) -> {command(), command_spec()}.
get_command_spec(Ep, Category, OpType,
#{<<"name">> := Name, <<"args">> := Args, <<"type">> := TypeMap} = Map) ->
Spec = #{op_type => OpType, args => get_args(Args), fields => get_fields(Ep, TypeMap, [])},
Doc = prepare_doc(Category, Name, Spec),
{Name, add_description(Spec#{doc => Doc}, Map)}.
add_description(Spec, #{<<"description">> := Desc}) ->
Spec#{desc => Desc};
add_description(Spec, #{}) ->
Spec.
-spec get_args([json_map()]) -> [arg_spec()].
get_args(Args) ->
lists:map(fun get_arg_info/1, Args).
-spec get_arg_info(json_map()) -> arg_spec().
get_arg_info(#{<<"name">> := ArgName, <<"type">> := Arg}) ->
(get_arg_type(Arg, []))#{name => ArgName}.
get_arg_type(#{<<"kind">> := <<"NON_NULL">>, <<"ofType">> := TypeMap}, Wrap) ->
get_arg_type(TypeMap, [required | Wrap]);
get_arg_type(#{<<"kind">> := <<"LIST">>, <<"ofType">> := TypeMap}, Wrap) ->
get_arg_type(TypeMap, [list | Wrap]);
get_arg_type(#{<<"name">> := Type, <<"kind">> := Kind}, Wrap) when Kind =:= <<"SCALAR">>;
Kind =:= <<"ENUM">>;
Kind =:= <<"INPUT_OBJECT">> ->
#{type => Type, kind => Kind, wrap => lists:reverse(Wrap)}.
-spec get_fields(ep(), json_map(), [binary()]) -> [field_spec()].
get_fields(_Ep, #{<<"kind">> := Kind}, _Path)
when Kind =:= <<"SCALAR">>;
Kind =:= <<"ENUM">> -> [];
get_fields(Ep, #{<<"kind">> := <<"UNION">>, <<"possibleTypes">> := TypeMaps}, Path) ->
[get_union_type(Ep, TypeMap, Path) || TypeMap <- TypeMaps];
get_fields(Ep, #{<<"kind">> := Kind, <<"ofType">> := Type}, Path)
when Kind =:= <<"NON_NULL">>;
Kind =:= <<"LIST">> ->
get_fields(Ep, Type, Path);
get_fields(Ep, #{<<"kind">> := <<"OBJECT">>, <<"name">> := Type}, Path) ->
case length([T || T <- Path, T =:= Type]) >= ?MAX_TYPE_RECURSION_DEPTH of
true ->
[#{name => <<"__typename">>}]; % inform about the type of the trimmed subtree
false ->
Fields = get_object_fields(Ep, Type),
[get_field(Ep, Field, [Type | Path]) || Field <- Fields]
end.
-spec get_union_type(ep(), json_map(), [binary()]) -> field_spec().
get_union_type(Ep, #{<<"kind">> := <<"OBJECT">>, <<"name">> := Type} = M, Path) ->
#{on => Type, fields => get_fields(Ep, M, Path)}.
-spec get_field(ep(), json_map(), [binary()]) -> field_spec().
get_field(Ep, #{<<"type">> := Type, <<"name">> := Name}, Path) ->
case get_fields(Ep, Type, Path) of
[] -> #{name => Name};
Fields -> #{name => Name, fields => Fields}
end.
-spec get_object_fields(ep(), binary()) -> [json_map()].
get_object_fields(Ep, ObjectType) ->
Doc = iolist_to_binary(["query ($type: String!) { __type(name: $type) "
"{name fields {name type ", field_type_query(), "}}}"]),
Vars = #{<<"type">> => ObjectType},
{ok, #{data := #{<<"__type">> := #{<<"fields">> := Fields}}}} = execute(Ep, Doc, Vars),
Fields.
-spec insert_category(category(), category_spec(), specs()) -> specs().
insert_category(Category, NewCatSpec = #{commands := NewCommands}, Specs) ->
case Specs of
#{Category := #{desc := OldDesc, commands := OldCommands}} ->
case maps:with(maps:keys(OldCommands), NewCommands) of
Common when Common =:= #{} ->
Specs#{Category := #{desc => OldDesc,
commands => maps:merge(OldCommands, NewCommands)}};
Common ->
error(#{what => overlapping_graphql_commands,
text => <<"GraphQL query and mutation names are not unique">>,
category => Category,
commands => maps:keys(Common)})
end;
_ ->
Specs#{Category => NewCatSpec}
end.
-spec prepare_doc(category(), command(), map()) -> doc().
prepare_doc(Category, Command, #{op_type := OpType, args := Args, fields := Fields}) ->
iolist_to_binary([OpType, " ", declare_variables(Args), "{ ", Category, " { ", Command,
use_variables(Args), return_fields(Fields), " } }"]).
-spec declare_variables([arg_spec()]) -> iolist().
declare_variables([]) -> "";
declare_variables(Args) ->
["(", lists:join(", ", lists:map(fun declare_variable/1, Args)), ") "].
-spec declare_variable(arg_spec()) -> iolist().
declare_variable(#{name := Name, type := Type, wrap := Wrap}) ->
["$", Name, ": ", wrap_type(Wrap, Type)].
-spec wrap_type([required | list], binary()) -> iolist().
wrap_type([required | Wrap], Type) ->
[wrap_type(Wrap, Type), $!];
wrap_type([list | Wrap], Type) ->
[$[, wrap_type(Wrap, Type), $]];
wrap_type([], Type) ->
[Type].
-spec use_variables([arg_spec()]) -> iolist().
use_variables([]) -> "";
use_variables(Args) ->
["(", lists:join(", ", lists:map(fun use_variable/1, Args)), ")"].
-spec use_variable(arg_spec()) -> iolist().
use_variable(#{name := Name}) ->
[Name, ": $", Name].
-spec return_fields([field_spec()]) -> iolist().
return_fields([]) -> "";
return_fields(Fields) ->
[" { ", lists:join(" ", [return_field(F) || F <- Fields]), " }"].
-spec return_field(field_spec()) -> iodata().
return_field(#{name := Name, fields := Fields}) ->
[Name, return_fields(Fields)];
return_field(#{name := Name}) ->
Name;
return_field(#{on := Type, fields := Fields}) ->
["... on ", Type, return_fields(Fields)].
-spec execute(ep(), doc(), json_map()) -> result().
execute(Ep, Doc, Vars) ->
mongoose_graphql:execute(Ep, #{document => Doc,
operation_name => undefined,
vars => Vars,
authorized => true,
ctx => #{method => cli}}).
field_type_query() ->
nested_type_query("name kind possibleTypes {name kind}").
arg_type_query() ->
nested_type_query("name kind").
nested_type_query(BasicQuery) ->
lists:foldl(fun(_, QueryAcc) -> ["{ ", BasicQuery, " ofType ", QueryAcc, " }"] end,
["{ ", BasicQuery, " }"], lists:seq(1, ?MAX_INTROSPECTION_DEPTH)).
category_spec_query() ->
"{name fields {name description type {name fields {name}}}}".
| null | https://raw.githubusercontent.com/esl/MongooseIM/caab05da825c28505d526890883fb88aaa6e3618/src/graphql/mongoose_graphql_commands.erl | erlang | @doc Management and execution of administration commands with GraphQL API
API
Internal API
Only for tests
Needed to get the 'agent' vCard Fields inside a vCard
API
The returned context has 'status' with the following values:
- 'error' means that the arguments were incorrect, and 'reason' contains more information
- 'usage' means that help needs to be displayed
Internal API
Internals
Used input types that are not parsed from binaries should be handled here
inform about the type of the trimmed subtree |
-module(mongoose_graphql_commands).
-export([start/0, stop/0, process/1]).
-export([wrap_type/2]).
-export([build_specs/1, get_specs/0]).
-ignore_xref([build_specs/1, get_specs/0]).
-define(MAX_TYPE_RECURSION_DEPTH, 2).
Needed to handle e.g. [ String ! ] ! , which has 3 wrapper types : NON_NULL , LIST , NON_NULL
-define(MAX_INTROSPECTION_DEPTH, 3).
-type context() :: #{args := [string()],
category => category(),
commands => command_map(),
command => command(),
args_spec => [arg_spec()],
doc => doc(),
vars => json_map(),
reason => atom() | tuple(),
result => result(),
status => executed | error | usage}.
-type result() :: {ok, #{atom() => graphql:json()}} | {error, any()}.
-type specs() :: #{category() => category_spec()}.
-type category() :: binary().
-type category_spec() :: #{desc := binary(), commands := command_map()}.
-type command_map() :: #{command() => command_spec()}.
-type command() :: binary().
-type command_spec() :: #{desc := binary(),
op_type := op_type(),
args := [arg_spec()],
fields := [field_spec()],
doc := doc()}.
-type arg_spec() :: #{name := binary(), type := binary(), kind := binary(), wrap := [list | required]}.
-type field_spec() :: #{name | on := binary(), fields => [field_spec()]}.
-type op_type() :: binary().
-type doc() :: binary().
-type ep() :: graphql:endpoint_context().
-type json_map() :: #{binary() => graphql:json()}.
-export_type([category/0, command/0, command_map/0, arg_spec/0, context/0]).
-spec start() -> ok.
start() ->
Specs = build_specs(admin),
persistent_term:put(?MODULE, Specs).
-spec stop() -> ok.
stop() ->
persistent_term:erase(?MODULE),
ok.
- ' executed ' means that a GraphQL command was called , and ' result ' contains the returned value
-spec process([string()]) -> context().
process(Args) ->
lists:foldl(fun(_, #{status := _} = Ctx) -> Ctx;
(StepF, Ctx) -> StepF(Ctx)
end, #{args => Args}, steps()).
-spec build_specs(atom()) -> specs().
build_specs(EpName) ->
Ep = mongoose_graphql:get_endpoint(EpName),
CatSpecs = get_category_specs(Ep),
lists:foldl(fun({Category, CategorySpec}, Acc) ->
insert_category(Category, CategorySpec, Acc)
end, #{}, CatSpecs).
-spec get_specs() -> specs().
get_specs() ->
persistent_term:get(?MODULE).
steps() ->
[fun find_category/1, fun find_command/1, fun parse_args/1, fun check_args/1, fun execute/1].
-spec find_category(context()) -> context().
find_category(CtxIn = #{args := [CategoryStr | Args]}) ->
Category = list_to_binary(CategoryStr),
Ctx = CtxIn#{category => Category, args => Args},
case get_specs() of
#{Category := #{commands := Commands}} ->
Ctx#{commands => Commands};
#{} ->
Ctx#{status => error, reason => unknown_category}
end;
find_category(Ctx = #{args := []}) ->
Ctx#{status => error, reason => no_args}.
-spec find_command(context()) -> context().
find_command(CtxIn = #{args := [CommandStr | Args]}) ->
Command = list_to_binary(CommandStr),
Ctx = #{commands := Commands} = CtxIn#{command => Command, args => Args},
case Commands of
#{Command := CommandSpec} ->
#{doc := Doc, args := ArgSpec} = CommandSpec,
Ctx#{doc => Doc, args_spec => ArgSpec};
#{} ->
Ctx#{status => error, reason => unknown_command}
end;
find_command(Ctx) ->
Ctx#{status => usage}.
-spec parse_args(context()) -> context().
parse_args(Ctx = #{args := ["--help"]}) ->
Ctx#{status => usage};
parse_args(Ctx) ->
parse_args_loop(Ctx#{vars => #{}}).
parse_args_loop(Ctx = #{vars := Vars,
args_spec := ArgsSpec,
args := ["--" ++ ArgNameStr, ArgValueStr | Rest]}) ->
ArgName = list_to_binary(ArgNameStr),
case lists:filter(fun(#{name := Name}) -> Name =:= ArgName end, ArgsSpec) of
[] ->
Ctx#{status => error, reason => {unknown_arg, ArgName}};
[ArgSpec] ->
ArgValue = list_to_binary(ArgValueStr),
try parse_arg(ArgValue, ArgSpec) of
ParsedValue ->
NewVars = Vars#{ArgName => ParsedValue},
parse_args_loop(Ctx#{vars := NewVars, args := Rest})
catch _:_ ->
Ctx#{status => error, reason => {invalid_arg_value, ArgName, ArgValue}}
end
end;
parse_args_loop(Ctx = #{args := []}) ->
Ctx;
parse_args_loop(Ctx) ->
Ctx#{status => error, reason => invalid_args}.
-spec parse_arg(binary(), arg_spec()) -> jiffy:json_value().
parse_arg(Value, ArgSpec = #{type := Type}) ->
case is_json_arg(ArgSpec) of
true ->
jiffy:decode(Value, [return_maps]);
false ->
convert_input_type(Type, Value)
end.
convert_input_type(Type, Value) when Type =:= <<"Int">>;
Type =:= <<"PosInt">>;
Type =:= <<"NonNegInt">> -> binary_to_integer(Value);
convert_input_type(_, Value) -> Value.
Complex argument values should be provided in JSON
-spec is_json_arg(arg_spec()) -> boolean().
is_json_arg(#{kind := <<"INPUT_OBJECT">>}) -> true;
is_json_arg(#{kind := Kind, wrap := Wrap}) when Kind =:= <<"SCALAR">>;
Kind =:= <<"ENUM">> ->
lists:member(list, Wrap).
-spec check_args(context()) -> context().
check_args(Ctx = #{args_spec := ArgsSpec, vars := Vars}) ->
MissingArgs = [Name || #{name := Name, wrap := [required|_]} <- ArgsSpec,
not maps:is_key(Name, Vars)],
case MissingArgs of
[] -> Ctx;
_ -> Ctx#{status => error, reason => {missing_args, MissingArgs}}
end.
-spec execute(context()) -> context().
execute(#{doc := Doc, vars := Vars} = Ctx) ->
Ctx#{status => executed, result => execute(mongoose_graphql:get_endpoint(admin), Doc, Vars)}.
-spec get_category_specs(ep()) -> [{category(), category_spec()}].
get_category_specs(Ep) ->
lists:flatmap(fun(OpType) -> get_category_specs(Ep, OpType) end, op_types()).
get_category_specs(Ep, OpType) ->
OpTypeName = <<OpType/binary, "Type">>,
Doc = iolist_to_binary(["{ __schema { ", OpTypeName, " ", category_spec_query(), " } }"]),
{ok, #{data := #{<<"__schema">> := Schema}}} = mongoose_graphql:execute(Ep, undefined, Doc),
#{OpTypeName := #{<<"fields">> := Categories}} = Schema,
get_category_specs(Ep, OpType, Categories).
op_types() ->
[<<"query">>, <<"mutation">>, <<"subscription">>].
-spec get_category_specs(ep(), op_type(), [json_map()]) -> [{category(), category_spec()}].
get_category_specs(Ep, OpType, Categories) ->
[get_category_spec(Ep, OpType, Category) || Category <- Categories, is_category(Category)].
is_category(#{<<"name">> := <<"checkAuth">>}) ->
false;
is_category(#{}) ->
true.
-spec get_category_spec(ep(), op_type(), json_map()) -> {category(), category_spec()}.
get_category_spec(Ep, OpType, #{<<"name">> := Category, <<"description">> := Desc,
<<"type">> := #{<<"name">> := CategoryType}}) ->
Doc = iolist_to_binary(
["query ($type: String!) { __type(name: $type) "
"{name fields {name description args {name type ", arg_type_query(), "} type ",
field_type_query(), "}}}"]),
Vars = #{<<"type">> => CategoryType},
{ok, #{data := #{<<"__type">> := #{<<"fields">> := Commands}}}} = execute(Ep, Doc, Vars),
CommandSpecs = [get_command_spec(Ep, Category, OpType, Command) || Command <- Commands],
{Category, #{desc => Desc, commands => maps:from_list(CommandSpecs)}}.
-spec get_command_spec(ep(), category(), op_type(), json_map()) -> {command(), command_spec()}.
get_command_spec(Ep, Category, OpType,
#{<<"name">> := Name, <<"args">> := Args, <<"type">> := TypeMap} = Map) ->
Spec = #{op_type => OpType, args => get_args(Args), fields => get_fields(Ep, TypeMap, [])},
Doc = prepare_doc(Category, Name, Spec),
{Name, add_description(Spec#{doc => Doc}, Map)}.
add_description(Spec, #{<<"description">> := Desc}) ->
Spec#{desc => Desc};
add_description(Spec, #{}) ->
Spec.
-spec get_args([json_map()]) -> [arg_spec()].
get_args(Args) ->
lists:map(fun get_arg_info/1, Args).
-spec get_arg_info(json_map()) -> arg_spec().
get_arg_info(#{<<"name">> := ArgName, <<"type">> := Arg}) ->
(get_arg_type(Arg, []))#{name => ArgName}.
get_arg_type(#{<<"kind">> := <<"NON_NULL">>, <<"ofType">> := TypeMap}, Wrap) ->
get_arg_type(TypeMap, [required | Wrap]);
get_arg_type(#{<<"kind">> := <<"LIST">>, <<"ofType">> := TypeMap}, Wrap) ->
get_arg_type(TypeMap, [list | Wrap]);
get_arg_type(#{<<"name">> := Type, <<"kind">> := Kind}, Wrap) when Kind =:= <<"SCALAR">>;
Kind =:= <<"ENUM">>;
Kind =:= <<"INPUT_OBJECT">> ->
#{type => Type, kind => Kind, wrap => lists:reverse(Wrap)}.
-spec get_fields(ep(), json_map(), [binary()]) -> [field_spec()].
get_fields(_Ep, #{<<"kind">> := Kind}, _Path)
when Kind =:= <<"SCALAR">>;
Kind =:= <<"ENUM">> -> [];
get_fields(Ep, #{<<"kind">> := <<"UNION">>, <<"possibleTypes">> := TypeMaps}, Path) ->
[get_union_type(Ep, TypeMap, Path) || TypeMap <- TypeMaps];
get_fields(Ep, #{<<"kind">> := Kind, <<"ofType">> := Type}, Path)
when Kind =:= <<"NON_NULL">>;
Kind =:= <<"LIST">> ->
get_fields(Ep, Type, Path);
get_fields(Ep, #{<<"kind">> := <<"OBJECT">>, <<"name">> := Type}, Path) ->
case length([T || T <- Path, T =:= Type]) >= ?MAX_TYPE_RECURSION_DEPTH of
true ->
false ->
Fields = get_object_fields(Ep, Type),
[get_field(Ep, Field, [Type | Path]) || Field <- Fields]
end.
-spec get_union_type(ep(), json_map(), [binary()]) -> field_spec().
get_union_type(Ep, #{<<"kind">> := <<"OBJECT">>, <<"name">> := Type} = M, Path) ->
#{on => Type, fields => get_fields(Ep, M, Path)}.
-spec get_field(ep(), json_map(), [binary()]) -> field_spec().
get_field(Ep, #{<<"type">> := Type, <<"name">> := Name}, Path) ->
case get_fields(Ep, Type, Path) of
[] -> #{name => Name};
Fields -> #{name => Name, fields => Fields}
end.
-spec get_object_fields(ep(), binary()) -> [json_map()].
get_object_fields(Ep, ObjectType) ->
Doc = iolist_to_binary(["query ($type: String!) { __type(name: $type) "
"{name fields {name type ", field_type_query(), "}}}"]),
Vars = #{<<"type">> => ObjectType},
{ok, #{data := #{<<"__type">> := #{<<"fields">> := Fields}}}} = execute(Ep, Doc, Vars),
Fields.
-spec insert_category(category(), category_spec(), specs()) -> specs().
insert_category(Category, NewCatSpec = #{commands := NewCommands}, Specs) ->
case Specs of
#{Category := #{desc := OldDesc, commands := OldCommands}} ->
case maps:with(maps:keys(OldCommands), NewCommands) of
Common when Common =:= #{} ->
Specs#{Category := #{desc => OldDesc,
commands => maps:merge(OldCommands, NewCommands)}};
Common ->
error(#{what => overlapping_graphql_commands,
text => <<"GraphQL query and mutation names are not unique">>,
category => Category,
commands => maps:keys(Common)})
end;
_ ->
Specs#{Category => NewCatSpec}
end.
-spec prepare_doc(category(), command(), map()) -> doc().
prepare_doc(Category, Command, #{op_type := OpType, args := Args, fields := Fields}) ->
iolist_to_binary([OpType, " ", declare_variables(Args), "{ ", Category, " { ", Command,
use_variables(Args), return_fields(Fields), " } }"]).
-spec declare_variables([arg_spec()]) -> iolist().
declare_variables([]) -> "";
declare_variables(Args) ->
["(", lists:join(", ", lists:map(fun declare_variable/1, Args)), ") "].
-spec declare_variable(arg_spec()) -> iolist().
declare_variable(#{name := Name, type := Type, wrap := Wrap}) ->
["$", Name, ": ", wrap_type(Wrap, Type)].
-spec wrap_type([required | list], binary()) -> iolist().
wrap_type([required | Wrap], Type) ->
[wrap_type(Wrap, Type), $!];
wrap_type([list | Wrap], Type) ->
[$[, wrap_type(Wrap, Type), $]];
wrap_type([], Type) ->
[Type].
-spec use_variables([arg_spec()]) -> iolist().
use_variables([]) -> "";
use_variables(Args) ->
["(", lists:join(", ", lists:map(fun use_variable/1, Args)), ")"].
-spec use_variable(arg_spec()) -> iolist().
use_variable(#{name := Name}) ->
[Name, ": $", Name].
-spec return_fields([field_spec()]) -> iolist().
return_fields([]) -> "";
return_fields(Fields) ->
[" { ", lists:join(" ", [return_field(F) || F <- Fields]), " }"].
-spec return_field(field_spec()) -> iodata().
return_field(#{name := Name, fields := Fields}) ->
[Name, return_fields(Fields)];
return_field(#{name := Name}) ->
Name;
return_field(#{on := Type, fields := Fields}) ->
["... on ", Type, return_fields(Fields)].
-spec execute(ep(), doc(), json_map()) -> result().
execute(Ep, Doc, Vars) ->
mongoose_graphql:execute(Ep, #{document => Doc,
operation_name => undefined,
vars => Vars,
authorized => true,
ctx => #{method => cli}}).
field_type_query() ->
nested_type_query("name kind possibleTypes {name kind}").
arg_type_query() ->
nested_type_query("name kind").
nested_type_query(BasicQuery) ->
lists:foldl(fun(_, QueryAcc) -> ["{ ", BasicQuery, " ofType ", QueryAcc, " }"] end,
["{ ", BasicQuery, " }"], lists:seq(1, ?MAX_INTROSPECTION_DEPTH)).
category_spec_query() ->
"{name fields {name description type {name fields {name}}}}".
|
db9531b0cdffe7d8240349febdeed4de367291e73a2efffaa8caccf9d0afb12e | fulcro-legacy/semantic-ui-wrapper | ui_popup.cljs | (ns fulcrologic.semantic-ui.modules.popup.ui-popup
(:require
[fulcrologic.semantic-ui.factory-helpers :as h]
["semantic-ui-react/dist/commonjs/modules/Popup/Popup" :default Popup]))
(def ui-popup
"A Popup displays additional information on top of a page.
Props:
- as (custom): An element type to render as (string or function).
- basic (bool): Display the popup without the pointing arrow.
- children (node): Primary content.
- className (string): Additional classes.
- content (custom): Simple text content for the popover.
- context (object): Existing element the pop-up should be bound to.
- flowing (bool): A flowing Popup has no maximum width and continues to flow to fit its content.
- header (custom): Header displayed above the content in bold.
- hideOnScroll (bool): Hide the Popup when scrolling the window.
- horizontalOffset (number): Horizontal offset in pixels to be applied to the Popup.
- hoverable (bool): Whether the popup should not close on hover.
- inverted (bool): Invert the colors of the Popup.
- keepInViewPort (bool): Element to be rendered within the confines of the viewport whenever possible.
- on (enum|arrayOf): Events triggering the popup. (hover, click, focus)
- onClose (func): Called when a close event happens.
- onMount (func): Called when the portal is mounted on the DOM.
- onOpen (func): Called when an open event happens.
- onUnmount (func): Called when the portal is unmounted from the DOM.
- position (enum): Position for the popover. (top left, top right, bottom right, bottom left, right center, left center, top center, bottom center)
- size (enum): Popup size. (mini, tiny, small, large, huge)
- style (object): Custom Popup style.
- trigger (node): Element to be rendered in-place where the popup is defined.
- verticalOffset (number): Vertical offset in pixels to be applied to the Popup.
- wide (bool|enum): Popup width. (very)"
(h/factory-apply Popup))
| null | https://raw.githubusercontent.com/fulcro-legacy/semantic-ui-wrapper/b0473480ddfff18496df086bf506099ac897f18f/semantic-ui-wrappers-shadow/src/main/fulcrologic/semantic_ui/modules/popup/ui_popup.cljs | clojure | (ns fulcrologic.semantic-ui.modules.popup.ui-popup
(:require
[fulcrologic.semantic-ui.factory-helpers :as h]
["semantic-ui-react/dist/commonjs/modules/Popup/Popup" :default Popup]))
(def ui-popup
"A Popup displays additional information on top of a page.
Props:
- as (custom): An element type to render as (string or function).
- basic (bool): Display the popup without the pointing arrow.
- children (node): Primary content.
- className (string): Additional classes.
- content (custom): Simple text content for the popover.
- context (object): Existing element the pop-up should be bound to.
- flowing (bool): A flowing Popup has no maximum width and continues to flow to fit its content.
- header (custom): Header displayed above the content in bold.
- hideOnScroll (bool): Hide the Popup when scrolling the window.
- horizontalOffset (number): Horizontal offset in pixels to be applied to the Popup.
- hoverable (bool): Whether the popup should not close on hover.
- inverted (bool): Invert the colors of the Popup.
- keepInViewPort (bool): Element to be rendered within the confines of the viewport whenever possible.
- on (enum|arrayOf): Events triggering the popup. (hover, click, focus)
- onClose (func): Called when a close event happens.
- onMount (func): Called when the portal is mounted on the DOM.
- onOpen (func): Called when an open event happens.
- onUnmount (func): Called when the portal is unmounted from the DOM.
- position (enum): Position for the popover. (top left, top right, bottom right, bottom left, right center, left center, top center, bottom center)
- size (enum): Popup size. (mini, tiny, small, large, huge)
- style (object): Custom Popup style.
- trigger (node): Element to be rendered in-place where the popup is defined.
- verticalOffset (number): Vertical offset in pixels to be applied to the Popup.
- wide (bool|enum): Popup width. (very)"
(h/factory-apply Popup))
| |
59ba5691d97fb900c50df2e253c10d97815aad0f59677e58dbb438220f06ce6c | flipstone/haskell-for-beginners | 4_derived_instances.hs | import Data.List (maximumBy)
import Data.Function (on)
-- Augment your complex number data definition from section
3 allow complex numbers to be equated , shown and read
--
data Complex a = Complex a a deriving (Show, Read, Eq)
-- Create a Player type that includes a name and a Score.
-- Then define a function to pick a winner from a list of
-- Players based on who has the highest score.
data Score = Score Int deriving (Eq, Ord, Show)
data Player = Player { name::String, score::Score } deriving (Show)
winner :: [Player] -> Player
winner = maximumBy (compare `on` score)
| null | https://raw.githubusercontent.com/flipstone/haskell-for-beginners/e586a1f3ef08f21d5181171fe7a7b27057391f0b/answers/chapter_08/4_derived_instances.hs | haskell | Augment your complex number data definition from section
Create a Player type that includes a name and a Score.
Then define a function to pick a winner from a list of
Players based on who has the highest score. | import Data.List (maximumBy)
import Data.Function (on)
3 allow complex numbers to be equated , shown and read
data Complex a = Complex a a deriving (Show, Read, Eq)
data Score = Score Int deriving (Eq, Ord, Show)
data Player = Player { name::String, score::Score } deriving (Show)
winner :: [Player] -> Player
winner = maximumBy (compare `on` score)
|
0420f337b5a9e9a4e19dabca86930a4a4657b2449424bccb2bcb8549fc959aed | bennn/dissertation | solver.rkt | #lang racket/base
(require racket/class
; sugar/container sugar/debug
racket/list
racket/bool racket/generator racket/match "helper.rkt")
(provide (all-defined-out))
(define solver%
;; Abstract base class for solvers
(class object%
(super-new)
(abstract get-solution)
(abstract get-solutions)
(abstract get-solution-iter)))
(define solver%? (is-a?/c solver%))
(struct vvp (variable values pushdomains))
(define-syntax-rule (pop-vvp-values! vvps)
(if (empty? vvps)
(error 'pop-vvp-values! (format "~a is null" vvps))
(let ([vvp (car vvps)])
(set! vvps (cdr vvps))
(values (vvp-variable vvp) (vvp-values vvp) (vvp-pushdomains vvp)))))
#|
(define (recursive-backtracking assignment csp)
(if (complete? assignment)
assignment
(let ([var (select-unassigned-variable csp-variables, assignment, csp)])
(for/or ([value (in-list (order-domain-values var assignment csp))])
if ((value . consistent-with? . assignment csp-constraints))
(add-to assignment var value)
(define result (recursive-backtracking assignment csp))
(when result
(and result (remove-from assignment var value)))
#f))))
|#
(define backtracking-solver%
;; Problem solver with backtracking capabilities
(class solver%
(super-new)
(init-field [forwardcheck #t])
(field [_forwardcheck forwardcheck])
(define/override (get-solution-iter domains constraints vconstraints)
(define sorted-variables (sort (hash-keys domains) list-comparator
#:key (λ(var)
(list (- (length (hash-ref vconstraints var)))
(length ((hash-ref domains var)))
var))))
;; state-retention variables
(define possible-solution (make-hash))
(define variable-queue null)
(define variable #f)
(define values null)
(define pushdomains null)
(define (get-next-unassigned-variable)
(for/first ([sorted-variable (in-list sorted-variables)]
#:unless (hash-has-key? possible-solution sorted-variable))
(set! variable sorted-variable)
(set! values ((hash-ref domains variable)))
(set! pushdomains
(if _forwardcheck
(for/list ([(var domain) (in-hash domains)]
#:unless (and (equal? variable var)
(hash-has-key? possible-solution var)))
domain)
null))
variable))
(define (set!-previous-variable)
(set!-values (variable values pushdomains) (pop-vvp-values! variable-queue))
(for-each-send pop-state pushdomains))
(let/ec exit-k
mix the degree and minimum - remaining - values ( MRV ) heuristics
(forever
(unless (get-next-unassigned-variable)
(yield (hash-copy possible-solution)) ; if there are no unassigned variables, solution is complete.
(if (empty? variable-queue)
(exit-k) ; all done, no other solutions possible.
(set!-previous-variable))) ; otherwise return to previous variable
(let value-checking-loop () ; we have a variable. Do we have any values left?
(when (empty? values) ; no, so try going back to last variable and getting some values
(forever/until
(when (empty? variable-queue) (exit-k)) ; no variables left, so solver is done
(hash-remove! possible-solution variable)
(set!-previous-variable)
(not (empty? values))))
;; Got a value. Check it.
(hash-set! possible-solution variable (car-pop! values))
(for-each-send push-state pushdomains)
(unless (for/and ([constraint+variables (in-list (hash-ref vconstraints variable))])
(let ([constraint (car constraint+variables)]
[variables (cadr constraint+variables)])
(send constraint is-true? variables domains possible-solution pushdomains)))
;; constraint failed, so try again
(for-each-send pop-state pushdomains)
(value-checking-loop)))
;; Push state before looking for next variable.
(set! variable-queue (cons (vvp variable values pushdomains) variable-queue)))
(error 'get-solution-iter "impossible to reach this"))
(void))
(define (call-solution-generator domains constraints vconstraints #:first-only [first-only #f])
(for/list ([solution (in-generator (get-solution-iter domains constraints vconstraints))] #:final first-only)
solution))
(define/override (get-solution . args)
(car (apply call-solution-generator #:first-only #t args)))
(define/override (get-solutions . args)
(apply call-solution-generator args))))
(define backtracking-solver%? (is-a?/c backtracking-solver%))
| null | https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/scrbl/jfp-2019/benchmarks/quadU/base/csp/solver.rkt | racket | sugar/container sugar/debug
Abstract base class for solvers
(define (recursive-backtracking assignment csp)
(if (complete? assignment)
assignment
(let ([var (select-unassigned-variable csp-variables, assignment, csp)])
(for/or ([value (in-list (order-domain-values var assignment csp))])
if ((value . consistent-with? . assignment csp-constraints))
(add-to assignment var value)
(define result (recursive-backtracking assignment csp))
(when result
(and result (remove-from assignment var value)))
#f))))
Problem solver with backtracking capabilities
state-retention variables
if there are no unassigned variables, solution is complete.
all done, no other solutions possible.
otherwise return to previous variable
we have a variable. Do we have any values left?
no, so try going back to last variable and getting some values
no variables left, so solver is done
Got a value. Check it.
constraint failed, so try again
Push state before looking for next variable. | #lang racket/base
(require racket/class
racket/list
racket/bool racket/generator racket/match "helper.rkt")
(provide (all-defined-out))
(define solver%
(class object%
(super-new)
(abstract get-solution)
(abstract get-solutions)
(abstract get-solution-iter)))
(define solver%? (is-a?/c solver%))
(struct vvp (variable values pushdomains))
(define-syntax-rule (pop-vvp-values! vvps)
(if (empty? vvps)
(error 'pop-vvp-values! (format "~a is null" vvps))
(let ([vvp (car vvps)])
(set! vvps (cdr vvps))
(values (vvp-variable vvp) (vvp-values vvp) (vvp-pushdomains vvp)))))
(define backtracking-solver%
(class solver%
(super-new)
(init-field [forwardcheck #t])
(field [_forwardcheck forwardcheck])
(define/override (get-solution-iter domains constraints vconstraints)
(define sorted-variables (sort (hash-keys domains) list-comparator
#:key (λ(var)
(list (- (length (hash-ref vconstraints var)))
(length ((hash-ref domains var)))
var))))
(define possible-solution (make-hash))
(define variable-queue null)
(define variable #f)
(define values null)
(define pushdomains null)
(define (get-next-unassigned-variable)
(for/first ([sorted-variable (in-list sorted-variables)]
#:unless (hash-has-key? possible-solution sorted-variable))
(set! variable sorted-variable)
(set! values ((hash-ref domains variable)))
(set! pushdomains
(if _forwardcheck
(for/list ([(var domain) (in-hash domains)]
#:unless (and (equal? variable var)
(hash-has-key? possible-solution var)))
domain)
null))
variable))
(define (set!-previous-variable)
(set!-values (variable values pushdomains) (pop-vvp-values! variable-queue))
(for-each-send pop-state pushdomains))
(let/ec exit-k
mix the degree and minimum - remaining - values ( MRV ) heuristics
(forever
(unless (get-next-unassigned-variable)
(if (empty? variable-queue)
(forever/until
(hash-remove! possible-solution variable)
(set!-previous-variable)
(not (empty? values))))
(hash-set! possible-solution variable (car-pop! values))
(for-each-send push-state pushdomains)
(unless (for/and ([constraint+variables (in-list (hash-ref vconstraints variable))])
(let ([constraint (car constraint+variables)]
[variables (cadr constraint+variables)])
(send constraint is-true? variables domains possible-solution pushdomains)))
(for-each-send pop-state pushdomains)
(value-checking-loop)))
(set! variable-queue (cons (vvp variable values pushdomains) variable-queue)))
(error 'get-solution-iter "impossible to reach this"))
(void))
(define (call-solution-generator domains constraints vconstraints #:first-only [first-only #f])
(for/list ([solution (in-generator (get-solution-iter domains constraints vconstraints))] #:final first-only)
solution))
(define/override (get-solution . args)
(car (apply call-solution-generator #:first-only #t args)))
(define/override (get-solutions . args)
(apply call-solution-generator args))))
(define backtracking-solver%? (is-a?/c backtracking-solver%))
|
19cd23362fb99106f62f223484e40041625a6cf4ca7339aea5eeaddb70551a70 | YoEight/lambda-database-experiment | EndPoint.hs | --------------------------------------------------------------------------------
-- |
Module : Lambda . Client . EndPoint
Copyright : ( C ) 2017
-- License : (see the file LICENSE)
--
Maintainer : < >
-- Stability : provisional
-- Portability : non-portable
--
--------------------------------------------------------------------------------
module Lambda.Client.EndPoint where
--------------------------------------------------------------------------------
import Lambda.Prelude
--------------------------------------------------------------------------------
-- | Gathers both an IPv4 and a port.
data EndPoint =
EndPoint
{ endPointIp :: !String
, endPointPort :: !Int
} deriving Eq
--------------------------------------------------------------------------------
instance Show EndPoint where
show (EndPoint h p) = h <> ":" <> show p
--------------------------------------------------------------------------------
emptyEndPoint :: EndPoint
emptyEndPoint = EndPoint "" 0
--------------------------------------------------------------------------------
data NodeEndPoints =
NodeEndPoints
{ tcpEndPoint :: !EndPoint
, secureEndPoint :: !(Maybe EndPoint)
} deriving Show
| null | https://raw.githubusercontent.com/YoEight/lambda-database-experiment/da4fab8bd358fb8fb78412c805d6f5bc05854432/lambda-client/library/Lambda/Client/EndPoint.hs | haskell | ------------------------------------------------------------------------------
|
License : (see the file LICENSE)
Stability : provisional
Portability : non-portable
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Gathers both an IPv4 and a port.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | Module : Lambda . Client . EndPoint
Copyright : ( C ) 2017
Maintainer : < >
module Lambda.Client.EndPoint where
import Lambda.Prelude
data EndPoint =
EndPoint
{ endPointIp :: !String
, endPointPort :: !Int
} deriving Eq
instance Show EndPoint where
show (EndPoint h p) = h <> ":" <> show p
emptyEndPoint :: EndPoint
emptyEndPoint = EndPoint "" 0
data NodeEndPoints =
NodeEndPoints
{ tcpEndPoint :: !EndPoint
, secureEndPoint :: !(Maybe EndPoint)
} deriving Show
|
a1ca93bbb4d9304ba6ed57439c004d8e9a96ed4e5423396ddb6b396fd48c5d55 | nikita-volkov/rebase | Error.hs | module Rebase.Data.Text.Encoding.Error
(
module Data.Text.Encoding.Error
)
where
import Data.Text.Encoding.Error
| null | https://raw.githubusercontent.com/nikita-volkov/rebase/7c77a0443e80bdffd4488a4239628177cac0761b/library/Rebase/Data/Text/Encoding/Error.hs | haskell | module Rebase.Data.Text.Encoding.Error
(
module Data.Text.Encoding.Error
)
where
import Data.Text.Encoding.Error
| |
cb252c717ee882ebafefe313a264003cc60327f3fc0b6fccc381c2be6bcdd99c | facjure/mesh | styles.clj | (ns typography.styles
(:refer-clojure :exclude [+ - * /])
(:require [garden.def :refer [defstyles defrule]]
[garden.core :refer [css]]
[garden.units :as u :refer [px pt em]]
[garden.arithmetic :refer [+ - * /]]
[facjure.mesh.media :as respond]
[facjure.mesh.typography :as typo :refer [typeset vr-block scales scale-type make-serifs]]
[facjure.mesh.grid :as grid]))
(def alegreya ["Alegreya" "Baskerville" "Georgia" "Times" "serif"])
(def sans ["\"Open Sans\"" "Avenir" "Helvetica" "sans-serif"])
(def mono ["Inconsolata" "Menlo" "Courier" "monospace"])
(defstyles fonts
(typeset alegreya sans mono))
(defrule homepage :section.home)
(defrule body :body)
(defrule h1 :h1)
(defrule h2 :h2)
(defrule h3 :h3)
(defrule h4 :h4)
(defrule h5 :h5)
(defrule h6 :h6)
(defrule small :p.small)
(defrule medium :p.medium)
(defrule large :p.large)
(def ms
(let [f (typo/modular-scale-fn 16 (:golden scales))]
(fn [n]
(px (f n)))))
(defstyles typography
(body
{:font-family alegreya})
(homepage
(h1
(respond/tablet
{:padding [[0 (ms 2)]]
:font-size (ms 5)
:line-height (ms 5)})
(respond/iphone-5
{:padding [[0 (ms 2)]]
:font-size (ms 5)
:line-height (ms 5)}))
(h2
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms 4)
:line-height (ms 4)}))
(h3
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms 3)
:line-height (ms 3)}))
(h4
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms 2)
:line-height (ms 2)}))
(h5
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms 1)
:line-height (ms 1)}))
(h6
(respond/desktop
{:padding [[0 (ms 1)]]
:font-size (ms 0)
:line-height (ms 0)}))
(large
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms -1)
:line-height (ms -1)}))
(medium
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms -2)
:line-height (ms -2)}))
(small
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms -3)
:line-height (ms -3)}))))
(defstyles typesettings
typography)
(def index
typesettings)
| null | https://raw.githubusercontent.com/facjure/mesh/e37887304c271bacfc017055180f56935f893682/examples/typography/styles.clj | clojure | (ns typography.styles
(:refer-clojure :exclude [+ - * /])
(:require [garden.def :refer [defstyles defrule]]
[garden.core :refer [css]]
[garden.units :as u :refer [px pt em]]
[garden.arithmetic :refer [+ - * /]]
[facjure.mesh.media :as respond]
[facjure.mesh.typography :as typo :refer [typeset vr-block scales scale-type make-serifs]]
[facjure.mesh.grid :as grid]))
(def alegreya ["Alegreya" "Baskerville" "Georgia" "Times" "serif"])
(def sans ["\"Open Sans\"" "Avenir" "Helvetica" "sans-serif"])
(def mono ["Inconsolata" "Menlo" "Courier" "monospace"])
(defstyles fonts
(typeset alegreya sans mono))
(defrule homepage :section.home)
(defrule body :body)
(defrule h1 :h1)
(defrule h2 :h2)
(defrule h3 :h3)
(defrule h4 :h4)
(defrule h5 :h5)
(defrule h6 :h6)
(defrule small :p.small)
(defrule medium :p.medium)
(defrule large :p.large)
(def ms
(let [f (typo/modular-scale-fn 16 (:golden scales))]
(fn [n]
(px (f n)))))
(defstyles typography
(body
{:font-family alegreya})
(homepage
(h1
(respond/tablet
{:padding [[0 (ms 2)]]
:font-size (ms 5)
:line-height (ms 5)})
(respond/iphone-5
{:padding [[0 (ms 2)]]
:font-size (ms 5)
:line-height (ms 5)}))
(h2
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms 4)
:line-height (ms 4)}))
(h3
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms 3)
:line-height (ms 3)}))
(h4
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms 2)
:line-height (ms 2)}))
(h5
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms 1)
:line-height (ms 1)}))
(h6
(respond/desktop
{:padding [[0 (ms 1)]]
:font-size (ms 0)
:line-height (ms 0)}))
(large
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms -1)
:line-height (ms -1)}))
(medium
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms -2)
:line-height (ms -2)}))
(small
(respond/tablet
{:padding [[0 (ms 1)]]
:font-size (ms -3)
:line-height (ms -3)}))))
(defstyles typesettings
typography)
(def index
typesettings)
| |
0f4bf8c76ed4860934923201f19d32a8ba7dcbe2dc2ccd2840e5bfcc43806706 | paurkedal/batyr | types.ml | Copyright ( C ) 2022 < >
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program . If not , see < / > .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see </>.
*)
module Decode = Decoders_yojson.Basic.Decode
module Encode = Decoders_yojson.Basic.Encode
type 'a decoder = 'a Decode.decoder
type 'a encoder = 'a Encode.encoder
let ptime_decoder =
let open Decode in
let timestamp_ms =
let* t_ms = one_of ["float", float; "int", int >|= float_of_int] in
(match Ptime.of_float_s (t_ms /. 1000.0) with
| Some ts -> succeed ts
| None -> fail "The argument of $date is out of range.")
in
let rfc3339_date =
let* s = string in
(match Ptime.of_rfc3339 s with
| Ok (t, _tz, _) -> succeed t
| Error _ -> fail "Bad timestamp string.")
in
one_of [
"timestamp/ms", field "$date" timestamp_ms;
"RFC-3339 date", rfc3339_date;
]
let ptime_encoder ts =
let open Encode in
obj ["$date", float (Ptime.to_float_s ts *. 1000.0)]
| null | https://raw.githubusercontent.com/paurkedal/batyr/814791b6ce6476b79ecddc12b7d28fa4d23dc591/rockettime/lib/types.ml | ocaml | Copyright ( C ) 2022 < >
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program . If not , see < / > .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see </>.
*)
module Decode = Decoders_yojson.Basic.Decode
module Encode = Decoders_yojson.Basic.Encode
type 'a decoder = 'a Decode.decoder
type 'a encoder = 'a Encode.encoder
let ptime_decoder =
let open Decode in
let timestamp_ms =
let* t_ms = one_of ["float", float; "int", int >|= float_of_int] in
(match Ptime.of_float_s (t_ms /. 1000.0) with
| Some ts -> succeed ts
| None -> fail "The argument of $date is out of range.")
in
let rfc3339_date =
let* s = string in
(match Ptime.of_rfc3339 s with
| Ok (t, _tz, _) -> succeed t
| Error _ -> fail "Bad timestamp string.")
in
one_of [
"timestamp/ms", field "$date" timestamp_ms;
"RFC-3339 date", rfc3339_date;
]
let ptime_encoder ts =
let open Encode in
obj ["$date", float (Ptime.to_float_s ts *. 1000.0)]
| |
a916ed18cc2b533587289b5235bf8c657eb3341055018097b690062df449dc38 | jaspervdj/advent-of-code | 12.hs | {-# LANGUAGE DeriveFunctor #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
module Main where
import qualified AdventOfCode.NanoParser as NP
import Control.Applicative (many, optional, (<|>))
import Control.Monad (guard)
import Data.Char (isDigit)
import Data.Functor (($>))
import Data.Functor.Fix
import Data.Maybe (catMaybes, mapMaybe)
import Data.Maybe (fromMaybe)
data Json a
= Int Int
| String String
| Array [a]
| Object [(String, a)]
deriving (Eq, Functor, Show)
parseJson :: NP.Parser Char (Fix Json)
parseJson = fmap Fix $
(String <$> string) <|>
(Int <$> int) <|>
(Array <$> array parseJson) <|>
(Object <$> object parseJson)
where
string = NP.char '"' *> many notQuote <* NP.char '"'
int = sign <*> (read <$> NP.many1 digit)
array p = NP.char '[' *> NP.sepBy p (NP.char ',') <* NP.char ']'
object p = NP.char '{' *> NP.sepBy (item p) (NP.char ',') <* NP.char '}'
item p = (,) <$> string <* NP.char ':' <*> p
notQuote = NP.satisfy "non-quote" (/= '"')
sign = fromMaybe id <$> optional (NP.char '-' $> negate)
digit = NP.satisfy "digit" isDigit
jsonSum :: Fix Json -> Int
jsonSum = cata $ \case
Int n -> n
String _ -> 0
Array arr -> sum arr
Object obj -> sum (map snd obj)
removeRed :: Fix Json -> Maybe (Fix Json)
removeRed = cata $ fmap Fix . \case
Int n -> Just $ Int n
String s -> Just $ String s
Array arr -> Just $ Array (catMaybes arr)
Object obj ->
let obj' = mapMaybe (\(k, v) -> (,) k <$> v) obj in
guard (not $ any (isRed . snd) obj') $> Object obj'
where
isRed (Fix (String "red")) = True
isRed _ = False
main :: IO ()
main = do
errOrJson <- NP.runParser parseJson <$> getContents
either fail (print . jsonSum) errOrJson
either fail (print . maybe 0 jsonSum . removeRed) errOrJson
| null | https://raw.githubusercontent.com/jaspervdj/advent-of-code/179ec4301ab642efafead373717203742d92800a/2015/12.hs | haskell | # LANGUAGE DeriveFunctor #
# LANGUAGE OverloadedStrings # | # LANGUAGE LambdaCase #
module Main where
import qualified AdventOfCode.NanoParser as NP
import Control.Applicative (many, optional, (<|>))
import Control.Monad (guard)
import Data.Char (isDigit)
import Data.Functor (($>))
import Data.Functor.Fix
import Data.Maybe (catMaybes, mapMaybe)
import Data.Maybe (fromMaybe)
data Json a
= Int Int
| String String
| Array [a]
| Object [(String, a)]
deriving (Eq, Functor, Show)
parseJson :: NP.Parser Char (Fix Json)
parseJson = fmap Fix $
(String <$> string) <|>
(Int <$> int) <|>
(Array <$> array parseJson) <|>
(Object <$> object parseJson)
where
string = NP.char '"' *> many notQuote <* NP.char '"'
int = sign <*> (read <$> NP.many1 digit)
array p = NP.char '[' *> NP.sepBy p (NP.char ',') <* NP.char ']'
object p = NP.char '{' *> NP.sepBy (item p) (NP.char ',') <* NP.char '}'
item p = (,) <$> string <* NP.char ':' <*> p
notQuote = NP.satisfy "non-quote" (/= '"')
sign = fromMaybe id <$> optional (NP.char '-' $> negate)
digit = NP.satisfy "digit" isDigit
jsonSum :: Fix Json -> Int
jsonSum = cata $ \case
Int n -> n
String _ -> 0
Array arr -> sum arr
Object obj -> sum (map snd obj)
removeRed :: Fix Json -> Maybe (Fix Json)
removeRed = cata $ fmap Fix . \case
Int n -> Just $ Int n
String s -> Just $ String s
Array arr -> Just $ Array (catMaybes arr)
Object obj ->
let obj' = mapMaybe (\(k, v) -> (,) k <$> v) obj in
guard (not $ any (isRed . snd) obj') $> Object obj'
where
isRed (Fix (String "red")) = True
isRed _ = False
main :: IO ()
main = do
errOrJson <- NP.runParser parseJson <$> getContents
either fail (print . jsonSum) errOrJson
either fail (print . maybe 0 jsonSum . removeRed) errOrJson
|
93d45d2d8fa6dd1ac819476e4d4a8be0176a2ef4f0e4b5e513cb27c0b271df48 | mrkkrp/flac | Level2Interface.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE LambdaCase #
-- |
Module : Codec . Audio . FLAC.Metadata . Internal . Level2Interface
Copyright : © 2016 – present
License : BSD 3 clause
--
Maintainer : < >
-- Stability : experimental
-- Portability : portable
--
Low - level wrapper around C functions to work with the level 2
-- FLAC metadata interface, see:
--
-- <>.
module Codec.Audio.FLAC.Metadata.Internal.Level2Interface
( -- * Chain
withChain,
chainStatus,
chainRead,
chainWrite,
chainSortPadding,
-- * Iterator
withIterator,
iteratorGetBlockType,
iteratorGetBlock,
iteratorSetBlock,
iteratorDeleteBlock,
iteratorInsertBlockAfter,
)
where
import Codec.Audio.FLAC.Metadata.Internal.Types
import Codec.Audio.FLAC.Util
import Control.Monad.Catch
import Control.Monad.IO.Class (MonadIO (..))
import Foreign.C.String
import Foreign.C.Types
----------------------------------------------------------------------------
-- Chain
-- | Create and use a 'MetaChain' (metadata chain). The chain is guaranteed
-- to be freed even in the case of exception thrown.
--
-- If memory for the chain cannot be allocated, corresponding
-- 'MetaException' is raised.
withChain :: (MetaChain -> IO a) -> IO a
withChain f = bracket chainNew (mapM_ chainDelete) $ \case
Nothing ->
throwM
(MetaGeneralProblem MetaChainStatusMemoryAllocationError)
Just x -> f x
-- | Create a new 'MetaChain'. In the case of memory allocation problem
-- 'Nothing' is returned.
chainNew :: IO (Maybe MetaChain)
chainNew = maybePtr <$> c_chain_new
foreign import ccall unsafe "FLAC__metadata_chain_new"
c_chain_new :: IO MetaChain
-- | Free a 'MetaChain' instance. Delete the object pointed to by
-- 'MetaChain'.
chainDelete :: MetaChain -> IO ()
chainDelete = c_chain_delete
foreign import ccall unsafe "FLAC__metadata_chain_delete"
c_chain_delete :: MetaChain -> IO ()
-- | Check status of a given 'MetaChain'. This can be used to find out what
-- went wrong. It also resets status to 'MetaChainStatusOK'.
chainStatus :: MetaChain -> IO MetaChainStatus
chainStatus = fmap toEnum' . c_chain_status
foreign import ccall unsafe "FLAC__metadata_chain_status"
c_chain_status :: MetaChain -> IO CUInt
-- | Read all metadata from a FLAC file into the chain. Return 'False' if
-- something went wrong.
chainRead :: MetaChain -> FilePath -> IO Bool
chainRead chain path = withCString path (c_chain_read chain)
foreign import ccall unsafe "FLAC__metadata_chain_read"
c_chain_read :: MetaChain -> CString -> IO Bool
-- | Write all metadata out to the FLAC file.
chainWrite ::
-- | The chain to write
MetaChain ->
-- | Whether to use padding
Bool ->
-- | Whether to preserve file stats
Bool ->
-- | 'False' if something went wrong
IO Bool
chainWrite chain usePadding preserveStats =
c_chain_write chain (fromEnum' usePadding) (fromEnum' preserveStats)
foreign import ccall unsafe "FLAC__metadata_chain_write"
c_chain_write :: MetaChain -> CInt -> CInt -> IO Bool
-- | Move all padding blocks to the end on the metadata, then merge them
-- into a single block. Useful to get maximum padding to have better changes
-- for re-writing only metadata blocks, not entire FLAC file. Any iterator
-- on the current chain will become invalid after this call. You should
-- delete the iterator and get a new one.
--
-- NOTE: this function does not write to the FLAC file, it only modifies the
-- chain.
chainSortPadding :: MetaChain -> IO ()
chainSortPadding = c_chain_sort_padding
foreign import ccall unsafe "FLAC__metadata_chain_sort_padding"
c_chain_sort_padding :: MetaChain -> IO ()
----------------------------------------------------------------------------
-- Iterator
-- | Traverse all metadata blocks from beginning to end, collecting 'Just'
-- values and possibly performing some actions. This is the only way to
-- traverse a metadata chain and get access to a 'MetaIterator'; by exporting
-- only this, we eliminate a certain class of possible errors, making finding
-- and traversing metadata blocks always correct and safe.
--
-- If memory for the iterator cannot be allocated, the corresponding
-- 'MetaException' is raised.
withIterator ::
  (MonadMask m, MonadIO m) =>
  -- | Metadata chain to traverse
  MetaChain ->
  -- | Action to perform on each block
  (MetaIterator -> m (Maybe a)) ->
  -- | Accumulated results
  m [a]
withIterator chain f = bracket acquire release action
  where
    -- Allocate a fresh iterator; yields 'Nothing' on allocation failure.
    acquire = liftIO iteratorNew
    -- Free the iterator only if it was actually allocated.
    release = mapM_ (liftIO . iteratorDelete)
    action mi =
      case mi of
        Nothing ->
          throwM
            (MetaGeneralProblem MetaChainStatusMemoryAllocationError)
        Just i -> do
          liftIO (iteratorInit i chain)
          -- Walk the chain: 'thisNext' records whether the iterator still
          -- points at a valid block (iteratorNext returns False at the end).
          let go thisNext =
                if thisNext
                  then do
                    res <- f i
                    let next = liftIO (iteratorNext i) >>= go
                    case res of
                      Nothing -> next
                      Just x -> (x :) <$> next
                  else return []
          go True
-- | Create a new iterator. Return 'Nothing' if there was a problem with
-- memory allocation.
iteratorNew :: IO (Maybe MetaIterator)
iteratorNew = maybePtr <$> c_iterator_new

foreign import ccall unsafe "FLAC__metadata_iterator_new"
  c_iterator_new :: IO MetaIterator

-- | Free an iterator instance. Delete the object pointed to by the
-- 'MetaIterator'.
iteratorDelete :: MetaIterator -> IO ()
iteratorDelete = c_iterator_delete

foreign import ccall unsafe "FLAC__metadata_iterator_delete"
  c_iterator_delete :: MetaIterator -> IO ()

-- | Initialize the iterator to point to the first metadata block in the
-- given chain.
iteratorInit ::
  -- | Existing iterator
  MetaIterator ->
  -- | Existing initialized chain
  MetaChain ->
  IO ()
iteratorInit = c_iterator_init

foreign import ccall unsafe "FLAC__metadata_iterator_init"
  c_iterator_init :: MetaIterator -> MetaChain -> IO ()

-- | Move the iterator forward one metadata block, returning 'False' if it
-- is already at the end.
iteratorNext :: MetaIterator -> IO Bool
iteratorNext = c_iterator_next

foreign import ccall unsafe "FLAC__metadata_iterator_next"
  c_iterator_next :: MetaIterator -> IO Bool
-- | Get the type of the metadata block at the current position. Useful for
-- fast searching.
iteratorGetBlockType :: MetaIterator -> IO MetadataType
iteratorGetBlockType = fmap toEnum' . c_iterator_get_block_type

foreign import ccall unsafe "FLAC__metadata_iterator_get_block_type"
  c_iterator_get_block_type :: MetaIterator -> IO CUInt

-- | Get the metadata block at the current position.
iteratorGetBlock :: MetaIterator -> IO Metadata
iteratorGetBlock = c_iterator_get_block

foreign import ccall unsafe "FLAC__metadata_iterator_get_block"
  c_iterator_get_block :: MetaIterator -> IO Metadata

-- | Write the given 'Metadata' block at the position pointed to by the
-- 'MetaIterator', replacing the existing block.
iteratorSetBlock :: MetaIterator -> Metadata -> IO Bool
iteratorSetBlock = c_iterator_set_block

foreign import ccall unsafe "FLAC__metadata_iterator_set_block"
  c_iterator_set_block :: MetaIterator -> Metadata -> IO Bool
-- | Remove the current block from the chain.
iteratorDeleteBlock ::
  -- | Iterator that determines the position
  MetaIterator ->
  -- | 'False' if something went wrong
  IO Bool
-- The C function takes an extra boolean flag that is always passed as
-- 'False' here (presumably FLAC's "replace with padding" option — TODO
-- confirm against the FLAC C API documentation).
iteratorDeleteBlock block = c_iterator_delete_block block False

foreign import ccall unsafe "FLAC__metadata_iterator_delete_block"
  c_iterator_delete_block :: MetaIterator -> Bool -> IO Bool

-- | Insert a new block after the current block. You cannot insert a
-- 'StreamInfo' block, as there can be only one — the one that already
-- exists at the head when you read in a chain. The chain takes ownership of
-- the new block and it will be deleted when the chain is deleted. The
-- iterator will be left pointing to the new block.
--
-- The function returns 'False' if something went wrong.
iteratorInsertBlockAfter :: MetaIterator -> Metadata -> IO Bool
iteratorInsertBlockAfter = c_iterator_insert_block_after

foreign import ccall unsafe "FLAC__metadata_iterator_insert_block_after"
  c_iterator_insert_block_after :: MetaIterator -> Metadata -> IO Bool
| null | https://raw.githubusercontent.com/mrkkrp/flac/9a7beb51e74a396ef65f30c6619a601ee6b9655c/Codec/Audio/FLAC/Metadata/Internal/Level2Interface.hs | haskell | |
Stability : experimental
Portability : portable
FLAC metadata interface, see:
<>.
* Chain
* Iterator
--------------------------------------------------------------------------
Chain
| Create and use a 'MetaChain' (metadata chain). The chain is guaranteed
to be freed even in the case of exception thrown.
If memory for the chain cannot be allocated, corresponding
'MetaException' is raised.
| Create a new 'MetaChain'. In the case of memory allocation problem
'Nothing' is returned.
| Free a 'MetaChain' instance. Delete the object pointed to by
'MetaChain'.
| Check status of a given 'MetaChain'. This can be used to find out what
went wrong. It also resets status to 'MetaChainStatusOK'.
| Read all metadata from a FLAC file into the chain. Return 'False' if
something went wrong.
| Write all metadata out to the FLAC file.
| The chain to write
| Whether to use padding
| Whether to preserve file stats
| 'False' if something went wrong
| Move all padding blocks to the end on the metadata, then merge them
into a single block. Useful to get maximum padding to have better changes
for re-writing only metadata blocks, not entire FLAC file. Any iterator
on the current chain will become invalid after this call. You should
delete the iterator and get a new one.
NOTE: this function does not write to the FLAC file, it only modifies the
chain.
--------------------------------------------------------------------------
Iterator
| Traverse all metadata blocks from beginning to end collecting 'Just'
values and possibly performing some actions. This is the only way to
only this, we eliminate a certain class of possible errors making finding
and traversing metadata blocks always correct and safe.
If memory for the iterator cannot be allocated, corresponding
'MetaException' is raised.
| Metadata chain to traverse
| Action to perform on each block
| Accumulated results
| Create a new iterator. Return 'Nothing' if there was a problem with
memory allocation.
| Free an iterator instance. Delete the object pointed to by
given chain.
| Existing iterator
| Existing initialized chain
already at the end.
| Get the type of the metadata block at the current position. Useful for
fast searching.
| Get metadata block at the current position.
| Write given 'Metadata' block at the position pointed to by
| Remove the current block from the chain.
| Iterator that determines the position
| 'False' if something went wrong
| Insert a new block after the current block. You cannot insert a
at the head when you read in a chain. The chain takes ownership of the
new block and it will be deleted when the chain is deleted. The iterator
will be left pointing to the new block.
The function returns 'False' if something went wrong. | # LANGUAGE FlexibleContexts #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE LambdaCase #
Module : Codec . Audio . FLAC.Metadata . Internal . Level2Interface
Copyright : © 2016 – present
License : BSD 3 clause
Maintainer : < >
Low - level wrapper around C functions to work with the level 2
module Codec.Audio.FLAC.Metadata.Internal.Level2Interface
withChain,
chainStatus,
chainRead,
chainWrite,
chainSortPadding,
withIterator,
iteratorGetBlockType,
iteratorGetBlock,
iteratorSetBlock,
iteratorDeleteBlock,
iteratorInsertBlockAfter,
)
where
import Codec.Audio.FLAC.Metadata.Internal.Types
import Codec.Audio.FLAC.Util
import Control.Monad.Catch
import Control.Monad.IO.Class (MonadIO (..))
import Foreign.C.String
import Foreign.C.Types
withChain :: (MetaChain -> IO a) -> IO a
withChain f = bracket chainNew (mapM_ chainDelete) $ \case
Nothing ->
throwM
(MetaGeneralProblem MetaChainStatusMemoryAllocationError)
Just x -> f x
chainNew :: IO (Maybe MetaChain)
chainNew = maybePtr <$> c_chain_new
foreign import ccall unsafe "FLAC__metadata_chain_new"
c_chain_new :: IO MetaChain
chainDelete :: MetaChain -> IO ()
chainDelete = c_chain_delete
foreign import ccall unsafe "FLAC__metadata_chain_delete"
c_chain_delete :: MetaChain -> IO ()
chainStatus :: MetaChain -> IO MetaChainStatus
chainStatus = fmap toEnum' . c_chain_status
foreign import ccall unsafe "FLAC__metadata_chain_status"
c_chain_status :: MetaChain -> IO CUInt
chainRead :: MetaChain -> FilePath -> IO Bool
chainRead chain path = withCString path (c_chain_read chain)
foreign import ccall unsafe "FLAC__metadata_chain_read"
c_chain_read :: MetaChain -> CString -> IO Bool
chainWrite ::
MetaChain ->
Bool ->
Bool ->
IO Bool
chainWrite chain usePadding preserveStats =
c_chain_write chain (fromEnum' usePadding) (fromEnum' preserveStats)
foreign import ccall unsafe "FLAC__metadata_chain_write"
c_chain_write :: MetaChain -> CInt -> CInt -> IO Bool
chainSortPadding :: MetaChain -> IO ()
chainSortPadding = c_chain_sort_padding
foreign import ccall unsafe "FLAC__metadata_chain_sort_padding"
c_chain_sort_padding :: MetaChain -> IO ()
traverse metadata chain and get access to ' MetaIterator ' and by exporting
withIterator ::
(MonadMask m, MonadIO m) =>
MetaChain ->
(MetaIterator -> m (Maybe a)) ->
m [a]
withIterator chain f = bracket acquire release action
where
acquire = liftIO iteratorNew
release = mapM_ (liftIO . iteratorDelete)
action mi =
case mi of
Nothing ->
throwM
(MetaGeneralProblem MetaChainStatusMemoryAllocationError)
Just i -> do
liftIO (iteratorInit i chain)
let go thisNext =
if thisNext
then do
res <- f i
let next = liftIO (iteratorNext i) >>= go
case res of
Nothing -> next
Just x -> (x :) <$> next
else return []
go True
iteratorNew :: IO (Maybe MetaIterator)
iteratorNew = maybePtr <$> c_iterator_new
foreign import ccall unsafe "FLAC__metadata_iterator_new"
c_iterator_new :: IO MetaIterator
' MetaIterator ' .
iteratorDelete :: MetaIterator -> IO ()
iteratorDelete = c_iterator_delete
foreign import ccall unsafe "FLAC__metadata_iterator_delete"
c_iterator_delete :: MetaIterator -> IO ()
| Initialize the iterator to point to the first metadata block in the
iteratorInit ::
MetaIterator ->
MetaChain ->
IO ()
iteratorInit = c_iterator_init
foreign import ccall unsafe "FLAC__metadata_iterator_init"
c_iterator_init :: MetaIterator -> MetaChain -> IO ()
| Move the iterator forward one metadata block , returning ' False ' if
iteratorNext :: MetaIterator -> IO Bool
iteratorNext = c_iterator_next
foreign import ccall unsafe "FLAC__metadata_iterator_next"
c_iterator_next :: MetaIterator -> IO Bool
iteratorGetBlockType :: MetaIterator -> IO MetadataType
iteratorGetBlockType = fmap toEnum' . c_iterator_get_block_type
foreign import ccall unsafe "FLAC__metadata_iterator_get_block_type"
c_iterator_get_block_type :: MetaIterator -> IO CUInt
iteratorGetBlock :: MetaIterator -> IO Metadata
iteratorGetBlock = c_iterator_get_block
foreign import ccall unsafe "FLAC__metadata_iterator_get_block"
c_iterator_get_block :: MetaIterator -> IO Metadata
' MetaIterator ' replacing an existing block .
iteratorSetBlock :: MetaIterator -> Metadata -> IO Bool
iteratorSetBlock = c_iterator_set_block
foreign import ccall unsafe "FLAC__metadata_iterator_set_block"
c_iterator_set_block :: MetaIterator -> Metadata -> IO Bool
iteratorDeleteBlock ::
MetaIterator ->
IO Bool
iteratorDeleteBlock block = c_iterator_delete_block block False
foreign import ccall unsafe "FLAC__metadata_iterator_delete_block"
c_iterator_delete_block :: MetaIterator -> Bool -> IO Bool
' StreamInfo ' block as there can be only one , the one that already exists
iteratorInsertBlockAfter :: MetaIterator -> Metadata -> IO Bool
iteratorInsertBlockAfter = c_iterator_insert_block_after
foreign import ccall unsafe "FLAC__metadata_iterator_insert_block_after"
c_iterator_insert_block_after :: MetaIterator -> Metadata -> IO Bool
|
19719449e0b78a50815a70b81a17c84ef9732686ce420e9277329b737405ba83 | avsm/eeww | input_lines.ml | (* TEST
*)
open Printf
(* Scratch file that the test writes and then reads back. *)
let data_file =
  "data.txt"

(* Index of the last line written; lines are numbered 0..length. *)
let length = 500
(* [check lo hi l] verifies that [l] is exactly the list of decimal strings
   ["lo"; "lo+1"; ...; "hi"], raising [Failure] with a diagnostic message
   otherwise. *)
let rec check lo hi l =
  match l with
  | [] -> if lo <> hi + 1 then failwith "list too short"
  | h :: t ->
      if lo = hi + 1 then failwith "list too long"
      else if int_of_string h <> lo then failwith "wrong value"
      else check (lo + 1) hi t
let _ =
  (* Write "0\n1\n...\n500" (no trailing newline) to the scratch file. *)
  Out_channel.with_open_text data_file
    (fun oc ->
      fprintf oc "0";
      for i = 1 to length do fprintf oc "\n%d" i done);
  (* Read all lines back in one call and verify them. *)
  In_channel.with_open_text data_file In_channel.input_lines
  |> check 0 length;
  (* Same verification via the fold-based API; the fold accumulates in
     reverse order, hence the List.rev. *)
  In_channel.with_open_text data_file
    (In_channel.fold_lines (fun accu line -> line :: accu) [])
  |> List.rev
  |> check 0 length;
  Sys.remove data_file
| null | https://raw.githubusercontent.com/avsm/eeww/4d65720b5dd51376842ffe5c8c220d5329c1dc10/boot/ocaml/testsuite/tests/lib-channels/input_lines.ml | ocaml | TEST
|
open Printf
let data_file =
"data.txt"
let length = 500
let rec check lo hi l =
if lo = hi + 1 then begin
if l <> [] then failwith "list too long"
end else begin
match l with
| [] -> failwith "list too short"
| h :: t ->
if int_of_string h <> lo then failwith "wrong value";
check (lo + 1) hi t
end
let _ =
Out_channel.with_open_text data_file
(fun oc ->
fprintf oc "0";
for i = 1 to length do fprintf oc "\n%d" i done);
In_channel.with_open_text data_file In_channel.input_lines
|> check 0 length;
In_channel.with_open_text data_file
(In_channel.fold_lines (fun accu line -> line :: accu) [])
|> List.rev
|> check 0 length;
Sys.remove data_file
|
b7725b797b18df62b5ac554ad9fc0a1cde7f528157babf6d10c94ec6780299f6 | philnguyen/soft-contract | dvh-5.rkt | Demonstrates shortcoming in 's email from 7/13
;; Weakened: assume L1 * L1 = L3 and (not (zero? L3))
;; verify: (not (zero? L1))
#lang racket
(require soft-contract/fake-contract)
;; phil : number -> (number -> number)
;; Curried constant function: the inner function ignores its argument and
;; returns the outer argument l1 unchanged.
(define (phil l1)
  (lambda (l3) l1))
;; Export phil under a dependent contract: the inner function may only be
;; applied to a nonzero l3 equal to l1*l1, and its result must be nonzero
;; (so the checker must prove l1 itself is nonzero in that case).
(provide/contract
 [phil (->i ([l1 number?])
            (res (l1)
                 (->i ([l3 (and/c number? (not/c zero?) (=/c (* l1 l1)))]) (res (l3) (not/c zero?)))))])
| null | https://raw.githubusercontent.com/philnguyen/soft-contract/5e07dc2d622ee80b961f4e8aebd04ce950720239/soft-contract/test/programs/safe/dvh/dvh-5.rkt | racket | Demonstrates shortcoming in 's email from 7/13
Weakened assume L1 * L1 = L3 and ( not ( zero ? L3 ) )
verify : ( not ( zero ? L1 ) )
#lang racket
(require soft-contract/fake-contract)
(define phil
(lambda (l1)
(lambda (l3)
l1)))
(provide/contract
[phil (->i ([l1 number?])
(res (l1)
(->i ([l3 (and/c number? (not/c zero?) (=/c (* l1 l1)))]) (res (l3) (not/c zero?)))))])
| |
81aa3d2a20b604fc0ca8deb69b8cf7b2e4bf477a1541d79497dd290f23da9ef3 | alex-mckenna/clash-systolic | TestLibrary.hs | module Main (main) where
import Prelude
import Test.Tasty
main :: IO ()
main =
defaultMain $
testGroup "." []
| null | https://raw.githubusercontent.com/alex-mckenna/clash-systolic/decce655ea7dd1039ad7b319d00f731d8d776f46/tests/TestLibrary.hs | haskell | module Main (main) where
import Prelude
import Test.Tasty
main :: IO ()
main =
defaultMain $
testGroup "." []
| |
487d55147b7e10be91051877124f5084d6daf986483388a92586703ec34080cd | cstar/ejabberd-old | gen_muc_handler.erl | -module(gen_muc_handler).
-author('').
-export([behaviour_info/1]).
behaviour_info(callbacks) ->
[{process_groupchat_message,5},
{process_private_message,4},
{process_iq,7},
{process_presence,5},
{process_user_iq,5},
{get_config,4},
{set_config,5},
{init,2},
{init,4},
{process_changed_ra,5},
{can_join,7},
{user_leaving, 4},
{can_change_ra,8},
{can_invite,6},
{is_persistent,1},
{is_anonymous,1},
{get_max_users,1},
{can_get_affiliations,3},
{allow_nick_change,2},
{should_log,1},
{can_get_full_jids,2},
{handle_info,2},
{handle_sync_event,3},
{list_to_role,2},
{list_to_affiliation,2},
{get_disco_info, 2}];
behaviour_info(_Other) ->
undefined.
| null | https://raw.githubusercontent.com/cstar/ejabberd-old/559f8b6b0a935710fe93e9afacb4270d6d6ea00f/src/mod_muc/gen_muc_handler.erl | erlang | -module(gen_muc_handler).
-author('').
-export([behaviour_info/1]).
behaviour_info(callbacks) ->
[{process_groupchat_message,5},
{process_private_message,4},
{process_iq,7},
{process_presence,5},
{process_user_iq,5},
{get_config,4},
{set_config,5},
{init,2},
{init,4},
{process_changed_ra,5},
{can_join,7},
{user_leaving, 4},
{can_change_ra,8},
{can_invite,6},
{is_persistent,1},
{is_anonymous,1},
{get_max_users,1},
{can_get_affiliations,3},
{allow_nick_change,2},
{should_log,1},
{can_get_full_jids,2},
{handle_info,2},
{handle_sync_event,3},
{list_to_role,2},
{list_to_affiliation,2},
{get_disco_info, 2}];
behaviour_info(_Other) ->
undefined.
| |
2426c59d43847b3c1626256b1554c4fbce0fad06826f456e4809e4095a231bfe | Quid2/zm | K551d9f2adb72.hs | {-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.ZM.ADT.Message.K551d9f2adb72 (Message(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
import qualified Test.ZM.ADT.User.K0e1df25dc480
import qualified Test.ZM.ADT.Subject.Kfced5b0f3c1f
import qualified Test.ZM.ADT.Content.K957357183935
data Message = Message {fromUser :: Test.ZM.ADT.User.K0e1df25dc480.User,
subject :: Test.ZM.ADT.Subject.Kfced5b0f3c1f.Subject,
content :: Test.ZM.ADT.Content.K957357183935.Content Test.ZM.ADT.User.K0e1df25dc480.User
Test.ZM.ADT.Message.K551d9f2adb72.Message}
deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)
instance Data.Model.Model Message
| null | https://raw.githubusercontent.com/Quid2/zm/02c0514777a75ac054bfd6251edd884372faddea/test/Test/ZM/ADT/Message/K551d9f2adb72.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveGeneric # | module Test.ZM.ADT.Message.K551d9f2adb72 (Message(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
import qualified Test.ZM.ADT.User.K0e1df25dc480
import qualified Test.ZM.ADT.Subject.Kfced5b0f3c1f
import qualified Test.ZM.ADT.Content.K957357183935
data Message = Message {fromUser :: Test.ZM.ADT.User.K0e1df25dc480.User,
subject :: Test.ZM.ADT.Subject.Kfced5b0f3c1f.Subject,
content :: Test.ZM.ADT.Content.K957357183935.Content Test.ZM.ADT.User.K0e1df25dc480.User
Test.ZM.ADT.Message.K551d9f2adb72.Message}
deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)
instance Data.Model.Model Message
|
8d7e190266975910154726efb591fabec08989466b81c6f3eefc246c4df93c89 | unisonweb/unison | Find.hs | module Unison.Util.Find
( fuzzyFinder,
simpleFuzzyFinder,
simpleFuzzyScore,
fuzzyFindInBranch,
fuzzyFindMatchArray,
prefixFindInBranch,
)
where
import qualified Data.Char as Char
import qualified Data.List as List
import qualified Data.Text as Text
-- -haskell-regular-expression-tutorial/
-13.9/regex-base-0.93.2/Text-Regex-Base-Context.html -- re - exported by
import qualified Text.Regex.TDFA as RE
import qualified Unison.HashQualified as HQ
import qualified Unison.HashQualified' as HQ'
import Unison.Name (Name)
import qualified Unison.Name as Name
import Unison.Names (Names)
import qualified Unison.Names as Names
import Unison.Prelude
import qualified Unison.Reference as Reference
import qualified Unison.Referent as Referent
import Unison.Server.SearchResult (SearchResult)
import qualified Unison.Server.SearchResult as SR
import qualified Unison.ShortHash as SH
import qualified Unison.Syntax.Name as Name (toString)
import Unison.Syntax.NamePrinter (prettyHashQualified)
import Unison.Util.Monoid (intercalateMap)
import qualified Unison.Util.Pretty as P
import qualified Unison.Util.Relation as R
-- | Fuzzy-find over @items@, rendering each via @render@; results are
-- ranked best-first by the regex match quality computed in
-- 'fuzzyFindMatchArray' (its 'RE.MatchArray' key has a useful 'Ord').
fuzzyFinder ::
  forall a.
  String ->
  [a] ->
  (a -> String) ->
  [(a, P.Pretty P.ColorText)]
fuzzyFinder query items render =
  List.map snd (List.sortOn fst (fuzzyFindMatchArray query items render))
-- | Like 'fuzzyFinder' but ranked by the cheap substring-based
-- 'simpleFuzzyScore' (lower score = better) instead of a regex match.
simpleFuzzyFinder ::
  forall a.
  String ->
  [a] ->
  (a -> String) ->
  [(a, P.Pretty P.ColorText)]
simpleFuzzyFinder query items render =
  sortAndCleanup $ do
    -- list monad: items whose score is 'Nothing' are dropped by 'toList'
    a <- items
    let s = render a
    score <- toList (simpleFuzzyScore query s)
    pure ((a, hi s), score)
  where
    hi = highlightSimple query
    -- sort ascending by score, then drop the score
    sortAndCleanup = List.map fst . List.sortOn snd
-- highlights every literal (case-sensitive) occurrence of `query` in `s`;
-- an empty query highlights nothing. Matches are consumed left-to-right,
-- so overlapping occurrences are not re-highlighted.
highlightSimple :: String -> String -> P.Pretty P.ColorText
highlightSimple "" = P.string
highlightSimple query = go
  where
    go [] = mempty
    go s@(h : t)
      -- on a match, emit the highlighted query and resume after it
      | query `List.isPrefixOf` s = hiQuery <> go (drop len s)
      | otherwise = P.string [h] <> go t
    len = length query
    hiQuery = P.hiBlack (P.string query)
-- | Score how well @query@ matches @s@ as a plain substring; smaller is
-- better and 'Nothing' means no match at all. An exact prefix scores 2,
-- an exact suffix 1, an exact infix 3, and a case-insensitive infix 4;
-- names starting with '.' (absolute names) are penalized tenfold so that
-- relative names sort first.
simpleFuzzyScore :: String -> String -> Maybe Int
simpleFuzzyScore query s = weight <$> rawScore
  where
    rawScore
      | query `List.isPrefixOf` s = Just 2
      | query `List.isSuffixOf` s = Just 1
      | query `List.isInfixOf` s = Just 3
      | lowered query `List.isInfixOf` lowered s = Just 4
      | otherwise = Nothing
    lowered = map Char.toLower
    -- prefer relative names: leading '.' means absolute, score 10x worse
    weight n = case s of
      '.' : _ -> n * 10
      _ -> n
-- This logic was split out of fuzzyFinder because the `RE.MatchArray` has
-- an `Ord` instance that helps us sort the fuzzy matches in a nice way
-- (see comment below). `Editor.fuzzyNameDistance` uses this `Ord` instance.
fuzzyFindMatchArray ::
  forall a.
  String ->
  [a] ->
  (a -> String) ->
  [(RE.MatchArray, (a, P.Pretty P.ColorText))]
fuzzyFindMatchArray query items render =
  scoreAndHighlight $ items
  where
    scoreAndHighlight = catMaybes . List.map go
    go :: a -> Maybe (RE.MatchArray, (a, P.Pretty P.ColorText))
    go a =
      let string = render a
          text = Text.pack string
          -- leftmost match of the letter-subsequence regex, if any
          matches = RE.matchOnce regex string
          addContext matches =
            -- drop entry 0 (the whole match) so only the per-letter
            -- capture groups are highlighted
            let highlighted = highlight P.bold text . tail . toList $ matches
             in (matches, (a, highlighted))
       in addContext <$> matches
    -- regex "Foo" = "(\\F).*(\\o).*(\\o)": the query's letters in order,
    -- each escaped and captured so its position can be highlighted
    regex :: RE.Regex
    regex =
      let s =
            if null query
              then ".*"
              else intercalateMap ".*" esc query
            where
              esc c = "(\\" <> [c] <> ")"
       in RE.makeRegexOpts
            RE.defaultCompOpt
              { RE.caseSensitive = False,
                -- newSyntax = False, otherwise "\<" and "\>"
                -- matches word boundaries instead of literal < and >
                RE.newSyntax = False
              }
            RE.defaultExecOpt
            s
-- Sort on:
-- a. length of match group to find the most compact match
-- b. start position of the match group to find the earliest match
-- c. the item itself for alphabetical ranking
-- The 'Ord' on 'RE.MatchArray' already provides a. and b.
-- (original comment garbled in extraction — confirm wording upstream)
prefixFindInBranch ::
  Names -> HQ'.HashQualified Name -> [(SearchResult, P.Pretty P.ColorText)]
prefixFindInBranch b hq =
  fmap getName $
    -- query string includes a name component, so do a prefix find on that
    filter (filterName (HQ'.toName hq)) (candidates b hq)
  where
    -- keep results whose (hash-qualified) name has the query name as prefix
    filterName :: Name -> SearchResult -> Bool
    filterName n1 sr =
      fromMaybe False do
        n2 <- HQ.toName (SR.name sr)
        pure (n1 `Name.isPrefixOf` n2)
-- only fuzzy-search on the name part of the query; the hash part (after
-- the #) is handled by the filtering inside 'candidates'
fuzzyFindInBranch ::
  (HasCallStack) =>
  Names ->
  HQ'.HashQualified Name ->
  [(SearchResult, P.Pretty P.ColorText)]
fuzzyFindInBranch b hq =
  simpleFuzzyFinder
    (Name.toString (HQ'.toName hq))
    (candidates b hq)
    ( \sr ->
        case HQ.toName (SR.name sr) of
          -- see invariant on `candidates` below.
          Nothing -> error "search result without name"
          Just name -> Name.toString name
    )
-- | Pair a search result with its pretty-printed hash-qualified name.
getName :: SearchResult -> (SearchResult, P.Pretty P.ColorText)
getName sr = (sr, P.syntaxToColor $ prettyHashQualified (SR.name sr))
-- Invariant: all `SearchResult` in the output will have names, even though
-- the type allows them to have only hashes.
candidates :: Names.Names -> HQ'.HashQualified Name -> [SearchResult]
candidates b hq = typeCandidates <> termCandidates
  where
    -- when the query carries a hash, keep only referents/references whose
    -- short hash it prefixes
    typeCandidates =
      fmap typeResult . filterTypes . R.toList . Names.types $ b
    termCandidates =
      fmap termResult . filterTerms . R.toList . Names.terms $ b
    filterTerms = case HQ'.toHash hq of
      Just sh -> List.filter $ SH.isPrefixOf sh . Referent.toShortHash . snd
      Nothing -> id
    filterTypes = case HQ'.toHash hq of
      Just sh -> List.filter $ SH.isPrefixOf sh . Reference.toShortHash . snd
      Nothing -> id
    typeResult (n, r) = SR.typeSearchResult b n r
    termResult (n, r) = SR.termSearchResult b n r
type Pos = Int

type Len = Int

-- This [(Pos, Len)] type is the same as `tail . toList` of a regex
-- MatchArray: the (start, length) of each capture group.
highlight ::
  (P.Pretty P.ColorText -> P.Pretty P.ColorText) ->
  Text ->
  [(Pos, Len)] ->
  P.Pretty P.ColorText
highlight on = highlight' on id

-- | Render @t@, applying @on@ to the given spans and @off@ to the gaps
-- between them. Adjacent spans are coalesced so on/off alternate cleanly.
highlight' ::
  (P.Pretty P.ColorText -> P.Pretty P.ColorText) ->
  (P.Pretty P.ColorText -> P.Pretty P.ColorText) ->
  Text ->
  [(Pos, Len)] ->
  P.Pretty P.ColorText
highlight' on off t groups = case groups of
  [] -> (off . P.text) t
  (0, _) : _ -> go groups
  -- emit the unhighlighted prefix before the first span
  (start, _) : _ -> (off . P.text . Text.take start) t <> go groups
  where
    go = \case
      -- unreachable: 'go' is only entered with a non-empty group list
      [] -> error "unpossible I think"
      (start, len) : (start2, len2) : groups
        | start + len == start2 ->
            -- avoid an on/off since there's no gap between groups
            go ((start, len + len2) : groups)
      (start, len) : groups ->
        let (selected, remaining) = Text.splitAt len . Text.drop start $ t
         in (on . P.text) selected <> case groups of
              [] -> (off . P.text) remaining
              (start2, _) : _ ->
                -- unhighlighted gap between this span and the next
                (off . P.text . Text.drop (start + len) . Text.take start2 $ t)
                  <> go groups
re - exported by
highlights `query` if it is a prefix of `s`, or if it
appears in the final segement of s (after the final `.`)
prefer relative names
`Ord` instance that helps us sort the fuzzy matches in a nice way. (see
regex "Foo" = "(\\F).*(\\o).*(\\o)"
newSyntax = False, otherwise "\<" and "\>"
matches word boundaries instead of literal < and >
Sort on:
a. length of match group to find the most compact match
b. start position of the match group to find the earliest match
c. the item itself for alphabetical ranking
query string includes a name component, so do a prefix find on that
only search before the # before the # and after the # after the #
see invariant on `candidates` below.
filter branch by hash
avoid an on/off since there's no gap between groups | module Unison.Util.Find
( fuzzyFinder,
simpleFuzzyFinder,
simpleFuzzyScore,
fuzzyFindInBranch,
fuzzyFindMatchArray,
prefixFindInBranch,
)
where
import qualified Data.Char as Char
import qualified Data.List as List
import qualified Data.Text as Text
import qualified Text.Regex.TDFA as RE
import qualified Unison.HashQualified as HQ
import qualified Unison.HashQualified' as HQ'
import Unison.Name (Name)
import qualified Unison.Name as Name
import Unison.Names (Names)
import qualified Unison.Names as Names
import Unison.Prelude
import qualified Unison.Reference as Reference
import qualified Unison.Referent as Referent
import Unison.Server.SearchResult (SearchResult)
import qualified Unison.Server.SearchResult as SR
import qualified Unison.ShortHash as SH
import qualified Unison.Syntax.Name as Name (toString)
import Unison.Syntax.NamePrinter (prettyHashQualified)
import Unison.Util.Monoid (intercalateMap)
import qualified Unison.Util.Pretty as P
import qualified Unison.Util.Relation as R
fuzzyFinder ::
forall a.
String ->
[a] ->
(a -> String) ->
[(a, P.Pretty P.ColorText)]
fuzzyFinder query items render =
sortAndCleanup $ fuzzyFindMatchArray query items render
where
sortAndCleanup = List.map snd . List.sortOn fst
simpleFuzzyFinder ::
forall a.
String ->
[a] ->
(a -> String) ->
[(a, P.Pretty P.ColorText)]
simpleFuzzyFinder query items render =
sortAndCleanup $ do
a <- items
let s = render a
score <- toList (simpleFuzzyScore query s)
pure ((a, hi s), score)
where
hi = highlightSimple query
sortAndCleanup = List.map fst . List.sortOn snd
highlightSimple :: String -> String -> P.Pretty P.ColorText
highlightSimple "" = P.string
highlightSimple query = go
where
go [] = mempty
go s@(h : t)
| query `List.isPrefixOf` s = hiQuery <> go (drop len s)
| otherwise = P.string [h] <> go t
len = length query
hiQuery = P.hiBlack (P.string query)
simpleFuzzyScore :: String -> String -> Maybe Int
simpleFuzzyScore query s
| query `List.isPrefixOf` s = Just (bonus s 2)
| query `List.isSuffixOf` s = Just (bonus s 1)
| query `List.isInfixOf` s = Just (bonus s 3)
| lowerquery `List.isInfixOf` lowers = Just (bonus s 4)
| otherwise = Nothing
where
bonus ('.' : _) n = n * 10
bonus _ n = n
lowerquery = Char.toLower <$> query
lowers = Char.toLower <$> s
This logic was split out of fuzzyFinder because the ` RE.MatchArray ` has an
comment below . ) ` Editor.fuzzyNameDistance ` uses this ` Ord ` instance .
fuzzyFindMatchArray ::
forall a.
String ->
[a] ->
(a -> String) ->
[(RE.MatchArray, (a, P.Pretty P.ColorText))]
fuzzyFindMatchArray query items render =
scoreAndHighlight $ items
where
scoreAndHighlight = catMaybes . List.map go
go :: a -> Maybe (RE.MatchArray, (a, P.Pretty P.ColorText))
go a =
let string = render a
text = Text.pack string
matches = RE.matchOnce regex string
addContext matches =
let highlighted = highlight P.bold text . tail . toList $ matches
in (matches, (a, highlighted))
in addContext <$> matches
regex :: RE.Regex
regex =
let s =
if null query
then ".*"
else intercalateMap ".*" esc query
where
esc c = "(\\" <> [c] <> ")"
in RE.makeRegexOpts
RE.defaultCompOpt
{ RE.caseSensitive = False,
RE.newSyntax = False
}
RE.defaultExecOpt
s
already provides a. and : c.
prefixFindInBranch ::
Names -> HQ'.HashQualified Name -> [(SearchResult, P.Pretty P.ColorText)]
prefixFindInBranch b hq =
fmap getName $
filter (filterName (HQ'.toName hq)) (candidates b hq)
where
filterName :: Name -> SearchResult -> Bool
filterName n1 sr =
fromMaybe False do
n2 <- HQ.toName (SR.name sr)
pure (n1 `Name.isPrefixOf` n2)
fuzzyFindInBranch ::
(HasCallStack) =>
Names ->
HQ'.HashQualified Name ->
[(SearchResult, P.Pretty P.ColorText)]
fuzzyFindInBranch b hq =
simpleFuzzyFinder
(Name.toString (HQ'.toName hq))
(candidates b hq)
( \sr ->
case HQ.toName (SR.name sr) of
Nothing -> error "search result without name"
Just name -> Name.toString name
)
getName :: SearchResult -> (SearchResult, P.Pretty P.ColorText)
getName sr = (sr, P.syntaxToColor $ prettyHashQualified (SR.name sr))
Invariant : all ` SearchResult ` in the output will have names , even though the type allows them to have only hashes
candidates :: Names.Names -> HQ'.HashQualified Name -> [SearchResult]
candidates b hq = typeCandidates <> termCandidates
where
typeCandidates =
fmap typeResult . filterTypes . R.toList . Names.types $ b
termCandidates =
fmap termResult . filterTerms . R.toList . Names.terms $ b
filterTerms = case HQ'.toHash hq of
Just sh -> List.filter $ SH.isPrefixOf sh . Referent.toShortHash . snd
Nothing -> id
filterTypes = case HQ'.toHash hq of
Just sh -> List.filter $ SH.isPrefixOf sh . Reference.toShortHash . snd
Nothing -> id
typeResult (n, r) = SR.typeSearchResult b n r
termResult (n, r) = SR.termSearchResult b n r
type Pos = Int
type Len = Int
This [ ( Pos , ) ] type is the same as ` tail . toList ` of a regex MatchArray
highlight ::
(P.Pretty P.ColorText -> P.Pretty P.ColorText) ->
Text ->
[(Pos, Len)] ->
P.Pretty P.ColorText
highlight on = highlight' on id
highlight' ::
(P.Pretty P.ColorText -> P.Pretty P.ColorText) ->
(P.Pretty P.ColorText -> P.Pretty P.ColorText) ->
Text ->
[(Pos, Len)] ->
P.Pretty P.ColorText
highlight' on off t groups = case groups of
[] -> (off . P.text) t
(0, _) : _ -> go groups
(start, _) : _ -> (off . P.text . Text.take start) t <> go groups
where
go = \case
[] -> error "unpossible I think"
(start, len) : (start2, len2) : groups
| start + len == start2 ->
go ((start, len + len2) : groups)
(start, len) : groups ->
let (selected, remaining) = Text.splitAt len . Text.drop start $ t
in (on . P.text) selected <> case groups of
[] -> (off . P.text) remaining
(start2, _) : _ ->
(off . P.text . Text.drop (start + len) . Text.take start2 $ t)
<> go groups
|
13b85357a60368a55556351a20b82a09dc6b0ab39db62fcd6715c387943c531b | pallet/pallet-aws | static.clj | (ns pallet.compute.ec2.static
"Static data for EC2 (data not available in ec2 apis)")
(defn GiB [n] (* 1024 n))
(def instance-types
{;;; Burstable Performance Instances
:t2.micro
{:ram (GiB 1)
:cpus [{:cores 1 :speed 1}]
:disks []
:32-bit true
:64-bit true
:io :low
:ebs-optimized false}
:t2.small
{:ram (GiB 2)
:cpus [{:cores 1 :speed 1}]
:disks []
:32-bit true
:64-bit true
:io :low
:ebs-optimized false}
:t2.medium
{:ram (GiB 4)
:cpus [{:cores 2 :speed 1}]
:disks []
:32-bit true
:64-bit true
:io :low
:ebs-optimized false}
:t2.large
{:ram (GiB 8)
:cpus [{:cores 2 :speed 1}]
:disks []
:32-bit true
:64-bit true
:io :low
:ebs-optimized false}
;;; General Purpose
:m3.medium
{:ram (GiB 3.75)
:cpus [{:cores 1 :speed 3}]
:disks [{:size 4}]
:64-bit true
:io :moderate
:ebs-optimized false}
:m3.large
{:ram (GiB 7.5)
:cpus [{:cores 2 :speed 3.25}]
:disks [{:size 32}]
:64-bit true
:io :moderate
:ebs-optimized false}
:m3.xlarge
{:ram (GiB 15)
:cpus [{:cores 4 :speed 3.25}]
:disks [{:size 40} {:size 40}]
:64-bit true
:io :high
:ebs-optimized 500}
:m3.2xlarge
{:ram (GiB 30)
:cpus [{:cores 8 :speed 3.25}]
:disks [{:size 80} {:size 80}]
:64-bit true
:io :high
:ebs-optimized 1000}
;;; Compute Optimized
:c3.large
{:ram (GiB 3.75)
:cpus [{:cores 2 :speed 3.5}]
:disks [{:size 16} {:size 16}]
:64-bit true
:io :moderate
:ebs-optimized false}
:c3.xlarge
{:ram (GiB 7.5)
:cpus [{:cores 4 :speed 3.5}]
:disks [{:size 40} {:size 40}]
:64-bit true
:io :moderate
:ebs-optimized 500}
:c3.2xlarge
{:ram (GiB 15)
:cpus [{:cores 8 :speed 3.5}]
:disks [{:size 80} {:size 80}]
:64-bit true
:io :high
:ebs-optimized 1000}
:c3.4xlarge
{:ram (GiB 30)
:cpus [{:cores 16 :speed 3.4375}]
:disks [{:size 160} {:size 160}]
:64-bit true
:io :high
:ebs-optimized 2000}
:c3.8xlarge
{:ram (GiB 60)
:cpus [{:cores 32 :speed 3.375}]
:disks [{:size 320} {:size 320}]
:64-bit true
:io :very-high
:ebs-optimized false}
;;; GPU Instances
:g2.2xlarge
{:ram (GiB 15)
:cpus [{:cores 8 :speed 3.25}]
:disks [{:size 60}]
:64-bit true
:io :high
:ebs-optimized 1000}
;;; Memory Optimized
:r3.large
{:ram (GiB 15)
:cpus [{:cores 2 :speed 3.25}]
:disks [{:size 32}]
:64-bit true
:io :moderate
:ebs-optimized false}
:r3.xlarge
{:ram (GiB 30.5)
:cpus [{:cores 4 :speed 3.25}]
:disks [{:size 80}]
:64-bit true
:io :moderate
:ebs-optimized 500}
:r3.2xlarge
{:ram (GiB 61)
:cpus [{:cores 8 :speed 3.25}]
:disks [{:size 160}]
:64-bit true
:io :high
:ebs-optimized 1000}
:r3.4xlarge
{:ram (GiB 122)
:cpus [{:cores 16 :speed 3.25}]
:disks [{:size 320}]
:64-bit true
:io :high
:ebs-optimized 2000}
:r3.8xlarge
{:ram (GiB 244)
:cpus [{:cores 32 :speed 3.25}]
:disks [{:size 320} {:size 320}]
:64-bit true
:io :very-high
:ebs-optimized false}
;;; Storage Optimized
:i2.xlarge
{:ram (GiB 30.5)
:cpus [{:cores 4 :speed 3.5}]
:disks [{:size 800}]
:64-bit true
:io :moderate
:ebs-optimized 500}
:i2.2xlarge
{:ram (GiB 61)
:cpus [{:cores 8 :speed 3.375}]
:disks [{:size 800} {:size 800}]
:64-bit true
:io :high
:ebs-optimized 1000}
:i2.4xlarge
{:ram (GiB 122)
:cpus [{:cores 16 :speed 3.3125}]
:disks [(repeat 4 {:size 800})]
:64-bit true
:io :high
:ebs-optimized 2000}
:i2.8xlarge
{:ram (GiB 244)
:cpus [{:cores 32 :speed 3.25}]
:disks [(repeat 8 {:size 800})]
:64-bit true
:io :very-high
:ebs-optimized false}
:hs1.8xlarge
{:ram (GiB 117)
:cpus [{:cores 16 :speed 2.1875}]
:disks [(repeat 24 {:size 2048})]
:64-bit true
:io :very-high
:ebs-optimized false}
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Previous generation instance types
:m1.small
memory
:cpus [{:cores 1 :speed 1}] ; 1 EC2 Compute Unit
:disks [{:size 160}] ; GB instance storage
:32-bit true
:64-bit true
:io :moderate
:ebs-optimised false}
:m1.medium
memory
:cpus [{:cores 1 :speed 2}] ; EC2 Compute Unit
:disks [{:size 410}] ; GB instance storage
:32-bit true
:64-bit true
:io :moderate
:ebs-optimised false}
:m1.large
memory
4 Compute Units
:disks [{:size 850}] ; GB instance storage
:64-bit true
:io :high
:ebs-optimised 500} ; Mbps
:m1.xlarge
memory
8 Compute Units
:disks [{:size 1690}] ; GB instance storage
:64-bit true
:io :high
:ebs-optimised 1000} ; Mbps
:t1.micro
MiB memory
Up to 2 EC2 Compute Units
:disks []
:32-bit true
:64-bit true
:io :low
:ebs-optimised false}
:m2.xlarge
of memory
6.5 Compute Units
:disks [{:size 420}] ; GB of instance storage
:64-bit true
:io :moderate
:ebs-optimised false}
:m2.2xlarge
of memory
13 Compute Units
:disks [{:size 850}] ; GB of instance storage
:64-bit true
:io :high
:ebs-optimised false}
:m2.4xlarge
of memory
26 Compute Units
:disks [{:size 1690}] ; GB of instance storage
:64-bit true
:io :high
:ebs-optimised 1000} ; Mbps
:c1.medium
of memory
5 Compute Units
:disks [{:size 350}] ; GB of instance storage
:32-bit true
:64-bit true
:io :moderate
:ebs-optimised false}
:c1.xlarge
of memory
20 EC2 Compute Units
:disks [{:size 1690}] ; GB of instance storage
:64-bit true
:io :high
:ebs-optimised false}
:cc1.4xlarge
of memory
33.5 EC2 Compute Units
:disks [{:size 1690}] ; GB of instance storage
:64-bit true
( 10 Gigabit Ethernet )
:ebs-optimised false}
:cc2.8xlarge
of memory
88 EC2 Compute Units
:disks [{:size 3370}] ; GB of instance storage
:64-bit true
( 10 Gigabit Ethernet )
:ebs-optimised false}
:cg1.4xlarge
of memory
33.5 EC2 Compute Units
:disks [{:size 1690}] ; GB of instance storage
:64-bit true
( 10 Gigabit Ethernet )
:ebs-optimised false}
:hi1.4xlarge
of memory
35 EC2 Compute Units
2 SSD - based volumes each with 1024 GB
; of instance storage
:64-bit true
( 10 Gigabit Ethernet )
:ebs-optimised false}
:hi1.8xlarge
of memory
35 EC2 Compute Units
24 SSD - based volumes each with 1024 GB
; of instance storage
:64-bit true
( 10 Gigabit Ethernet )
:ebs-optimised false}
})
| null | https://raw.githubusercontent.com/pallet/pallet-aws/6f650ca93c853f8a574ad36875f4d164e46e8e8d/src/pallet/compute/ec2/static.clj | clojure | Burstable Performance Instances
General Purpose
Compute Optimized
GPU Instances
Memory Optimized
Storage Optimized
Previous generation instance types
1 EC2 Compute Unit
GB instance storage
EC2 Compute Unit
GB instance storage
GB instance storage
Mbps
GB instance storage
Mbps
GB of instance storage
GB of instance storage
GB of instance storage
Mbps
GB of instance storage
GB of instance storage
GB of instance storage
GB of instance storage
GB of instance storage
of instance storage
of instance storage | (ns pallet.compute.ec2.static
"Static data for EC2 (data not available in ec2 apis)")
(defn GiB [n] (* 1024 n))
(def instance-types
:t2.micro
{:ram (GiB 1)
:cpus [{:cores 1 :speed 1}]
:disks []
:32-bit true
:64-bit true
:io :low
:ebs-optimized false}
:t2.small
{:ram (GiB 2)
:cpus [{:cores 1 :speed 1}]
:disks []
:32-bit true
:64-bit true
:io :low
:ebs-optimized false}
:t2.medium
{:ram (GiB 4)
:cpus [{:cores 2 :speed 1}]
:disks []
:32-bit true
:64-bit true
:io :low
:ebs-optimized false}
:t2.large
{:ram (GiB 8)
:cpus [{:cores 2 :speed 1}]
:disks []
:32-bit true
:64-bit true
:io :low
:ebs-optimized false}
:m3.medium
{:ram (GiB 3.75)
:cpus [{:cores 1 :speed 3}]
:disks [{:size 4}]
:64-bit true
:io :moderate
:ebs-optimized false}
:m3.large
{:ram (GiB 7.5)
:cpus [{:cores 2 :speed 3.25}]
:disks [{:size 32}]
:64-bit true
:io :moderate
:ebs-optimized false}
:m3.xlarge
{:ram (GiB 15)
:cpus [{:cores 4 :speed 3.25}]
:disks [{:size 40} {:size 40}]
:64-bit true
:io :high
:ebs-optimized 500}
:m3.2xlarge
{:ram (GiB 30)
:cpus [{:cores 8 :speed 3.25}]
:disks [{:size 80} {:size 80}]
:64-bit true
:io :high
:ebs-optimized 1000}
:c3.large
{:ram (GiB 3.75)
:cpus [{:cores 2 :speed 3.5}]
:disks [{:size 16} {:size 16}]
:64-bit true
:io :moderate
:ebs-optimized false}
:c3.xlarge
{:ram (GiB 7.5)
:cpus [{:cores 4 :speed 3.5}]
:disks [{:size 40} {:size 40}]
:64-bit true
:io :moderate
:ebs-optimized 500}
:c3.2xlarge
{:ram (GiB 15)
:cpus [{:cores 8 :speed 3.5}]
:disks [{:size 80} {:size 80}]
:64-bit true
:io :high
:ebs-optimized 1000}
:c3.4xlarge
{:ram (GiB 30)
:cpus [{:cores 16 :speed 3.4375}]
:disks [{:size 160} {:size 160}]
:64-bit true
:io :high
:ebs-optimized 2000}
:c3.8xlarge
{:ram (GiB 60)
:cpus [{:cores 32 :speed 3.375}]
:disks [{:size 320} {:size 320}]
:64-bit true
:io :very-high
:ebs-optimized false}
:g2.2xlarge
{:ram (GiB 15)
:cpus [{:cores 8 :speed 3.25}]
:disks [{:size 60}]
:64-bit true
:io :high
:ebs-optimized 1000}
:r3.large
{:ram (GiB 15)
:cpus [{:cores 2 :speed 3.25}]
:disks [{:size 32}]
:64-bit true
:io :moderate
:ebs-optimized false}
:r3.xlarge
{:ram (GiB 30.5)
:cpus [{:cores 4 :speed 3.25}]
:disks [{:size 80}]
:64-bit true
:io :moderate
:ebs-optimized 500}
:r3.2xlarge
{:ram (GiB 61)
:cpus [{:cores 8 :speed 3.25}]
:disks [{:size 160}]
:64-bit true
:io :high
:ebs-optimized 1000}
:r3.4xlarge
{:ram (GiB 122)
:cpus [{:cores 16 :speed 3.25}]
:disks [{:size 320}]
:64-bit true
:io :high
:ebs-optimized 2000}
:r3.8xlarge
{:ram (GiB 244)
:cpus [{:cores 32 :speed 3.25}]
:disks [{:size 320} {:size 320}]
:64-bit true
:io :very-high
:ebs-optimized false}
:i2.xlarge
{:ram (GiB 30.5)
:cpus [{:cores 4 :speed 3.5}]
:disks [{:size 800}]
:64-bit true
:io :moderate
:ebs-optimized 500}
:i2.2xlarge
{:ram (GiB 61)
:cpus [{:cores 8 :speed 3.375}]
:disks [{:size 800} {:size 800}]
:64-bit true
:io :high
:ebs-optimized 1000}
:i2.4xlarge
{:ram (GiB 122)
:cpus [{:cores 16 :speed 3.3125}]
:disks [(repeat 4 {:size 800})]
:64-bit true
:io :high
:ebs-optimized 2000}
:i2.8xlarge
{:ram (GiB 244)
:cpus [{:cores 32 :speed 3.25}]
:disks [(repeat 8 {:size 800})]
:64-bit true
:io :very-high
:ebs-optimized false}
:hs1.8xlarge
{:ram (GiB 117)
:cpus [{:cores 16 :speed 2.1875}]
:disks [(repeat 24 {:size 2048})]
:64-bit true
:io :very-high
:ebs-optimized false}
:m1.small
memory
:32-bit true
:64-bit true
:io :moderate
:ebs-optimised false}
:m1.medium
memory
:32-bit true
:64-bit true
:io :moderate
:ebs-optimised false}
:m1.large
memory
4 Compute Units
:64-bit true
:io :high
:m1.xlarge
memory
8 Compute Units
:64-bit true
:io :high
:t1.micro
MiB memory
Up to 2 EC2 Compute Units
:disks []
:32-bit true
:64-bit true
:io :low
:ebs-optimised false}
:m2.xlarge
of memory
6.5 Compute Units
:64-bit true
:io :moderate
:ebs-optimised false}
:m2.2xlarge
of memory
13 Compute Units
:64-bit true
:io :high
:ebs-optimised false}
:m2.4xlarge
of memory
26 Compute Units
:64-bit true
:io :high
:c1.medium
of memory
5 Compute Units
:32-bit true
:64-bit true
:io :moderate
:ebs-optimised false}
:c1.xlarge
of memory
20 EC2 Compute Units
:64-bit true
:io :high
:ebs-optimised false}
:cc1.4xlarge
of memory
33.5 EC2 Compute Units
:64-bit true
( 10 Gigabit Ethernet )
:ebs-optimised false}
:cc2.8xlarge
of memory
88 EC2 Compute Units
:64-bit true
( 10 Gigabit Ethernet )
:ebs-optimised false}
:cg1.4xlarge
of memory
33.5 EC2 Compute Units
:64-bit true
( 10 Gigabit Ethernet )
:ebs-optimised false}
:hi1.4xlarge
of memory
35 EC2 Compute Units
2 SSD - based volumes each with 1024 GB
:64-bit true
( 10 Gigabit Ethernet )
:ebs-optimised false}
:hi1.8xlarge
of memory
35 EC2 Compute Units
24 SSD - based volumes each with 1024 GB
:64-bit true
( 10 Gigabit Ethernet )
:ebs-optimised false}
})
|
d46e76b61051b8b212eb334e6e3c964a6ac7ac8ca24a7e2c35e4bda002100963 | fakedata-haskell/fakedata | Hipster.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Faker.Provider.Hipster where
import Config
import Control.Monad.Catch
import Control.Monad.IO.Class
import Data.Map.Strict (Map)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Vector (Vector)
import Data.Yaml
import Faker
import Faker.Internal
import Faker.Provider.TH
import Language.Haskell.TH
parseHipster :: FromJSON a => FakerSettings -> Value -> Parser a
parseHipster settings (Object obj) = do
en <- obj .: (getLocaleKey settings)
faker <- en .: "faker"
hipster <- faker .: "hipster"
pure hipster
parseHipster settings val = fail $ "expected Object, but got " <> (show val)
parseHipsterField ::
(FromJSON a, Monoid a) => FakerSettings -> AesonKey -> Value -> Parser a
parseHipsterField settings txt val = do
hipster <- parseHipster settings val
field <- hipster .:? txt .!= mempty
pure field
parseHipsterFields ::
(FromJSON a, Monoid a) => FakerSettings -> [AesonKey] -> Value -> Parser a
parseHipsterFields settings txts val = do
hipster <- parseHipster settings val
helper hipster txts
where
helper :: (FromJSON a) => Value -> [AesonKey] -> Parser a
helper a [] = parseJSON a
helper (Object a) (x:xs) = do
field <- a .: x
helper field xs
helper a (x:xs) = fail $ "expect Object, but got " <> (show a)
$(genParser "hipster" "words")
$(genProvider "hipster" "words")
| null | https://raw.githubusercontent.com/fakedata-haskell/fakedata/7b0875067386e9bb844c8b985c901c91a58842ff/src/Faker/Provider/Hipster.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE TemplateHaskell #
module Faker.Provider.Hipster where
import Config
import Control.Monad.Catch
import Control.Monad.IO.Class
import Data.Map.Strict (Map)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Vector (Vector)
import Data.Yaml
import Faker
import Faker.Internal
import Faker.Provider.TH
import Language.Haskell.TH
parseHipster :: FromJSON a => FakerSettings -> Value -> Parser a
parseHipster settings (Object obj) = do
en <- obj .: (getLocaleKey settings)
faker <- en .: "faker"
hipster <- faker .: "hipster"
pure hipster
parseHipster settings val = fail $ "expected Object, but got " <> (show val)
parseHipsterField ::
(FromJSON a, Monoid a) => FakerSettings -> AesonKey -> Value -> Parser a
parseHipsterField settings txt val = do
hipster <- parseHipster settings val
field <- hipster .:? txt .!= mempty
pure field
parseHipsterFields ::
(FromJSON a, Monoid a) => FakerSettings -> [AesonKey] -> Value -> Parser a
parseHipsterFields settings txts val = do
hipster <- parseHipster settings val
helper hipster txts
where
helper :: (FromJSON a) => Value -> [AesonKey] -> Parser a
helper a [] = parseJSON a
helper (Object a) (x:xs) = do
field <- a .: x
helper field xs
helper a (x:xs) = fail $ "expect Object, but got " <> (show a)
$(genParser "hipster" "words")
$(genProvider "hipster" "words")
|
4b3a5295343a084d091a56ce1493796fac1bb27024c4130903531c7c950d6f17 | schleyfox/erlang_ann | ann_test.erl | -module(ann_test).
-export([setup/0, run/0]).
setup() ->
ann_graph:start(),
N1_pid = spawn(ann, perceptron, [[],[],[],[]]),
N2_pid = spawn(ann, perceptron, [[],[],[],[]]),
N3_pid = spawn(ann, perceptron, [[],[],[],[]]),
N4_pid = spawn(ann, perceptron, [[],[],[],[]]),
N5_pid = spawn(ann, perceptron, [[],[],[],[]]),
N6_pid = spawn(ann, perceptron, [[],[],[],[]]),
N7_pid = spawn(ann, perceptron, [[],[],[],[]]),
ann:connect(N1_pid, N2_pid),
ann:connect(N1_pid, N3_pid),
ann:connect(N2_pid, N4_pid),
ann:connect(N2_pid, N5_pid),
ann:connect(N2_pid, N6_pid),
ann:connect(N3_pid, N4_pid),
ann:connect(N3_pid, N5_pid),
ann:connect(N3_pid, N6_pid),
ann:connect(N4_pid, N7_pid),
ann:connect(N5_pid, N7_pid),
ann:connect(N6_pid, N7_pid),
N1_pid ! {pass, 0.5, 0.7}.
run() ->
ann_graph:start(),
X1_pid = spawn(ann, perceptron, [[],[],[]]),
X2_pid = spawn(ann, perceptron, [[],[],[]]),
H1_pid = spawn(ann, perceptron, [[],[],[]]),
H2_pid = spawn(ann, perceptron, [[],[],[]]),
O_pid = spawn(ann, perceptron, [[],[],[]]),
Connect input node X1 to hidden nodes H1 and H2
ann:connect(X1_pid, H1_pid),
ann:connect(X1_pid, H2_pid),
Connect input node X2 to hidden nodes H1 and H2
ann:connect(X2_pid, H1_pid),
ann:connect(X2_pid, H2_pid),
% Connect input node H1 and H2 to output node O
ann:connect(H1_pid, O_pid),
ann:connect(H2_pid, O_pid),
X1_pid ! {status},
X2_pid ! {status},
H1_pid ! {status},
H2_pid ! {status},
O_pid ! {status},
X1_pid ! {pass, 1.8},
X2_pid ! {pass, 1.3}.
| null | https://raw.githubusercontent.com/schleyfox/erlang_ann/98c108cdd47d9636799f0ee1a1a284c1e6d190bb/ann_test.erl | erlang | Connect input node H1 and H2 to output node O | -module(ann_test).
-export([setup/0, run/0]).
setup() ->
ann_graph:start(),
N1_pid = spawn(ann, perceptron, [[],[],[],[]]),
N2_pid = spawn(ann, perceptron, [[],[],[],[]]),
N3_pid = spawn(ann, perceptron, [[],[],[],[]]),
N4_pid = spawn(ann, perceptron, [[],[],[],[]]),
N5_pid = spawn(ann, perceptron, [[],[],[],[]]),
N6_pid = spawn(ann, perceptron, [[],[],[],[]]),
N7_pid = spawn(ann, perceptron, [[],[],[],[]]),
ann:connect(N1_pid, N2_pid),
ann:connect(N1_pid, N3_pid),
ann:connect(N2_pid, N4_pid),
ann:connect(N2_pid, N5_pid),
ann:connect(N2_pid, N6_pid),
ann:connect(N3_pid, N4_pid),
ann:connect(N3_pid, N5_pid),
ann:connect(N3_pid, N6_pid),
ann:connect(N4_pid, N7_pid),
ann:connect(N5_pid, N7_pid),
ann:connect(N6_pid, N7_pid),
N1_pid ! {pass, 0.5, 0.7}.
run() ->
ann_graph:start(),
X1_pid = spawn(ann, perceptron, [[],[],[]]),
X2_pid = spawn(ann, perceptron, [[],[],[]]),
H1_pid = spawn(ann, perceptron, [[],[],[]]),
H2_pid = spawn(ann, perceptron, [[],[],[]]),
O_pid = spawn(ann, perceptron, [[],[],[]]),
Connect input node X1 to hidden nodes H1 and H2
ann:connect(X1_pid, H1_pid),
ann:connect(X1_pid, H2_pid),
Connect input node X2 to hidden nodes H1 and H2
ann:connect(X2_pid, H1_pid),
ann:connect(X2_pid, H2_pid),
ann:connect(H1_pid, O_pid),
ann:connect(H2_pid, O_pid),
X1_pid ! {status},
X2_pid ! {status},
H1_pid ! {status},
H2_pid ! {status},
O_pid ! {status},
X1_pid ! {pass, 1.8},
X2_pid ! {pass, 1.3}.
|
48fede92b10dca1607c4620b24dc774a25dec9b8ceffb454c32d665e6abea77f | lambdaforge/wanderung | build.clj | (ns build
(:refer-clojure :exclude [test compile])
(:require [clojure.tools.build.api :as b]
[borkdude.gh-release-artifact :as gh]
[org.corfield.build :as bb]))
(def lib 'io.lambdaforge/wanderung)
(def version (format "0.2.%s" (b/git-count-revs nil)))
(def class-dir "target/classes")
(def basis (b/create-basis {:project "deps.edn"}))
(def jar-file (format "target/%s-%s.jar" (name lib) version))
(defn clean
[_]
(b/delete {:path "target"}))
(defn jar
[opts]
(-> opts
(assoc :class-dir class-dir
:src-pom "./template/pom.xml"
:lib lib
:version version
:basis basis
:jar-file jar-file
:src-dirs ["src"])
bb/jar))
(defn test "Run the tests." [opts]
(bb/run-tests opts))
(defn ci "Run the CI pipeline of tests (and build the JAR)." [opts]
(-> opts
(assoc :lib lib :version version)
(bb/run-tests)
bb/clean
bb/jar))
(defn install "Install the JAR locally." [opts]
(-> opts
jar
(bb/install)))
(defn deploy "Deploy the JAR to Clojars." [opts]
(-> opts
(assoc :lib lib :version version)
(bb/deploy)))
(defn release
[_]
(-> (gh/overwrite-asset {:org "lambdaforge"
:repo (name lib)
:tag version
:commit (gh/current-commit)
:file jar-file
:content-type "application/java-archive"})
:url
println))
| null | https://raw.githubusercontent.com/lambdaforge/wanderung/a7ff572cd454dfb7a60d5cd281a079a0d2ae2db7/build.clj | clojure | (ns build
(:refer-clojure :exclude [test compile])
(:require [clojure.tools.build.api :as b]
[borkdude.gh-release-artifact :as gh]
[org.corfield.build :as bb]))
(def lib 'io.lambdaforge/wanderung)
(def version (format "0.2.%s" (b/git-count-revs nil)))
(def class-dir "target/classes")
(def basis (b/create-basis {:project "deps.edn"}))
(def jar-file (format "target/%s-%s.jar" (name lib) version))
(defn clean
[_]
(b/delete {:path "target"}))
(defn jar
[opts]
(-> opts
(assoc :class-dir class-dir
:src-pom "./template/pom.xml"
:lib lib
:version version
:basis basis
:jar-file jar-file
:src-dirs ["src"])
bb/jar))
(defn test "Run the tests." [opts]
(bb/run-tests opts))
(defn ci "Run the CI pipeline of tests (and build the JAR)." [opts]
(-> opts
(assoc :lib lib :version version)
(bb/run-tests)
bb/clean
bb/jar))
(defn install "Install the JAR locally." [opts]
(-> opts
jar
(bb/install)))
(defn deploy "Deploy the JAR to Clojars." [opts]
(-> opts
(assoc :lib lib :version version)
(bb/deploy)))
(defn release
[_]
(-> (gh/overwrite-asset {:org "lambdaforge"
:repo (name lib)
:tag version
:commit (gh/current-commit)
:file jar-file
:content-type "application/java-archive"})
:url
println))
| |
c3002cc5ecacf6ce5e2239038a13241a781a2d2b75c47df4e02910cf56c3064a | DerekCuevas/interview-cake-clj | project.clj | (defproject balanced-binary-tree "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]]
:main ^:skip-aot balanced-binary-tree.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
| null | https://raw.githubusercontent.com/DerekCuevas/interview-cake-clj/f17d3239bb30bcc17ced473f055a9859f9d1fb8d/balanced-binary-tree/project.clj | clojure | (defproject balanced-binary-tree "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]]
:main ^:skip-aot balanced-binary-tree.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.