_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
3a45d5ce4f4542bfe764be40b4aeff6e02389855ee84abc9b824038d094fdd3a | nickmi11er/tinkoff-invest-haskell | Helpers.hs | {-# LANGUAGE TupleSections #-}
module Invest.Client.Helpers (
runGrpc
, runUnary
, runUnary_
, GrpcIO
, GrpcClient
, (<#>), (#>>), (#>)
, ChanFlow(..)
) where
import Control.Exception (Exception, IOException,
SomeException (SomeException))
import Control.Monad.Except (ExceptT, lift, runExceptT,
throwError)
import Data.Text as T (Text, pack)
import Network.GRPC.Client (RawReply)
import Network.GRPC.Client.Helpers (GrpcClient, rawUnary)
import Network.HTTP2.Client (ClientIO, TooMuchConcurrency,
runClientIO)
type GrpcIO a = ExceptT IOException IO a
type GrpcContext a = (GrpcClient, a)
data ChanFlow = Next | Break
-- | Run a gRPC call in 'ClientIO' and collapse its deeply nested result
-- into 'GrpcIO', mapping every transport- or protocol-level failure to
-- an 'IOException'.
--
-- The nesting unpacked below (outermost first): the 'Either' produced
-- by 'runExceptT' (HTTP/2 client errors), the 'TooMuchConcurrency'
-- back-pressure signal, and the 'RawReply' whose innermost 'Either'
-- holds either an error string or the decoded payload.
--
-- NOTE(review): 'throwError' is used in the 'IO' monad here; this
-- presumably relies on mtl's @MonadError IOException IO@ instance
-- (which delegates to 'ioError') -- confirm.
runGrpc :: ClientIO (Either TooMuchConcurrency (RawReply a)) -> GrpcIO a
runGrpc f = lift $ runExceptT f >>= \case
  Right (Right (Right (_, _, Right res))) -> pure res
  Right (Right (Right (_, _, Left err))) -> throwError $ userError err
  Right (Right (Left err)) -> throwError . userError . show $ err
  Right (Left err) -> throwError . userError . show $ err
  Left err -> throwError . userError . show $ err
-- | Perform a unary (single request, single response) RPC via
-- 'rawUnary' and collapse the result with 'runGrpc'.
-- NOTE(review): no explicit type signature in the original; the type is
-- inferred from 'rawUnary'.
runUnary rpc client req = runGrpc $ rawUnary rpc client req
-- | Like 'runUnary', but post-process the decoded reply with @m@
-- before returning it.
runUnary_ m rpc gc req = m <$> runUnary rpc gc req
-- | Run a client-producing action, then feed the obtained client to
-- @f@, returning the client paired with @f@'s result so further
-- context-threading combinators can reuse it.
(<#>) :: GrpcIO GrpcClient -> (GrpcClient -> GrpcIO a) -> GrpcIO (GrpcContext a)
(<#>) clIO f = do
  client <- clIO
  result <- f client
  pure (client, result)
-- | Thread the client through: run @f@ with the stored client and the
-- previous step's value, keeping the client paired with the new value.
(#>>) :: GrpcIO (GrpcContext a) -> (GrpcClient -> a -> GrpcIO b) -> GrpcIO (GrpcContext b)
(#>>) ctx f = do
  (client, value) <- ctx
  next <- f client value
  pure (client, next)
-- | Terminal step of a context chain: drop the client and run @f@ on
-- the accumulated value alone.
(#>) :: GrpcIO (GrpcContext a) -> (a -> GrpcIO b) -> GrpcIO b
(#>) ctx f = do
  (_, value) <- ctx
  f value
| null | https://raw.githubusercontent.com/nickmi11er/tinkoff-invest-haskell/043c9fa096aa50cc9bf6ae75ac84a80c31e1c7ff/sdk/src/Invest/Client/Helpers.hs | haskell | # LANGUAGE TupleSections #
module Invest.Client.Helpers (
runGrpc
, runUnary
, runUnary_
, GrpcIO
, GrpcClient
, (<#>), (#>>), (#>)
, ChanFlow(..)
) where
import Control.Exception (Exception, IOException,
SomeException (SomeException))
import Control.Monad.Except (ExceptT, lift, runExceptT,
throwError)
import Data.Text as T (Text, pack)
import Network.GRPC.Client (RawReply)
import Network.GRPC.Client.Helpers (GrpcClient, rawUnary)
import Network.HTTP2.Client (ClientIO, TooMuchConcurrency,
runClientIO)
type GrpcIO a = ExceptT IOException IO a
type GrpcContext a = (GrpcClient, a)
data ChanFlow = Next | Break
runGrpc :: ClientIO (Either TooMuchConcurrency (RawReply a)) -> GrpcIO a
runGrpc f = lift $ runExceptT f >>= \case
Right (Right (Right (_, _, Right res))) -> pure res
Right (Right (Right (_, _, Left err))) -> throwError $ userError err
Right (Right (Left err)) -> throwError . userError . show $ err
Right (Left err) -> throwError . userError . show $ err
Left err -> throwError . userError . show $ err
runUnary rpc client req = runGrpc $ rawUnary rpc client req
runUnary_ m rpc gc req = fmap m (runUnary rpc gc req)
(<#>) :: GrpcIO GrpcClient -> (GrpcClient -> GrpcIO a) -> GrpcIO (GrpcContext a)
(<#>) clIO f = clIO >>= \client -> (client,) <$> f client
(#>>) :: GrpcIO (GrpcContext a) -> (GrpcClient -> a -> GrpcIO b) -> GrpcIO (GrpcContext b)
(#>>) ctx f = ctx >>= \(cl, a) -> (cl,) <$> f cl a
(#>) :: GrpcIO (GrpcContext a) -> (a -> GrpcIO b) -> GrpcIO b
(#>) ctx f = ctx >>= \(_, val) -> f val
| |
ac3c2e8229fc025876261b9c515f7cec260de501e82c582780abd625f33a1a6b | awakesecurity/proto3-suite | Lens.hs | {-# LANGUAGE TemplateHaskell #-}
module Proto3.Suite.DotProto.AST.Lens where
import Control.Lens.TH
import Proto3.Suite.DotProto.AST
makePrisms ''DotProtoDefinition
makePrisms ''DotProtoMessagePart
| null | https://raw.githubusercontent.com/awakesecurity/proto3-suite/489d5a3450411ebb9cf0ce7c0854b1c3f2f9eb69/src/Proto3/Suite/DotProto/AST/Lens.hs | haskell | # LANGUAGE TemplateHaskell # | module Proto3.Suite.DotProto.AST.Lens where
import Control.Lens.TH
import Proto3.Suite.DotProto.AST
makePrisms ''DotProtoDefinition
makePrisms ''DotProtoMessagePart
|
b4000efb4b63a7a9d014ef66a58a8db82eeea3f63162c9e67affa674e8e28779 | vbmithr/ocaml-ledger-wallet | ledgerwallet_ssh_agent.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2017 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2017 Vincent Bernardoff. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open Rresult
open Ledgerwallet
type ins =
| Get_public_key
| Sign_ssh_blob
| Sign_generic_hash
| Sign_direct_hash
| Get_ecdh_secret
let int_of_ins = function
| Get_public_key -> 0x02
| Sign_ssh_blob -> 0x04
| Sign_generic_hash -> 0x06
| Sign_direct_hash -> 0x08
| Get_ecdh_secret -> 0x0A
let wrap_ins cmd =
Apdu.create_cmd ~cmd ~cla_of_cmd:(fun _ -> 0x80) ~ins_of_cmd:int_of_ins
type curve = Prime256v1 | Curve25519
let int_of_curve = function Prime256v1 -> 0x01 | Curve25519 -> 0x02
let get_public_key ?pp ?buf ~curve ~path h =
let nb_derivations = List.length path in
if nb_derivations > 10 then invalid_arg "get_public_key: max 10 derivations" ;
let lc = 1 + (4 * nb_derivations) in
let p2 = int_of_curve curve in
let data_init = Cstruct.create lc in
Cstruct.set_uint8 data_init 0 nb_derivations ;
let data = Cstruct.shift data_init 1 in
let _data =
ListLabels.fold_left path ~init:data ~f:(fun cs i ->
Cstruct.BE.set_uint32 cs 0 i ;
Cstruct.shift cs 4)
in
Transport.apdu
?pp
?buf
h
Apdu.(create ~lc ~p2 ~data:data_init (wrap_ins Get_public_key))
>>| fun addr ->
let keylen = Cstruct.get_uint8 addr 0 in
Cstruct.sub addr 1 keylen
---------------------------------------------------------------------------
Copyright ( c ) 2017
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2017 Vincent Bernardoff
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/vbmithr/ocaml-ledger-wallet/9a21b74f3066bca48f7aef1a194ed8fe6496db78/src/ledgerwallet_ssh_agent.ml | ocaml | ---------------------------------------------------------------------------
Copyright ( c ) 2017 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2017 Vincent Bernardoff. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open Rresult
open Ledgerwallet
type ins =
| Get_public_key
| Sign_ssh_blob
| Sign_generic_hash
| Sign_direct_hash
| Get_ecdh_secret
let int_of_ins = function
| Get_public_key -> 0x02
| Sign_ssh_blob -> 0x04
| Sign_generic_hash -> 0x06
| Sign_direct_hash -> 0x08
| Get_ecdh_secret -> 0x0A
let wrap_ins cmd =
Apdu.create_cmd ~cmd ~cla_of_cmd:(fun _ -> 0x80) ~ins_of_cmd:int_of_ins
type curve = Prime256v1 | Curve25519
let int_of_curve = function Prime256v1 -> 0x01 | Curve25519 -> 0x02
let get_public_key ?pp ?buf ~curve ~path h =
let nb_derivations = List.length path in
if nb_derivations > 10 then invalid_arg "get_public_key: max 10 derivations" ;
let lc = 1 + (4 * nb_derivations) in
let p2 = int_of_curve curve in
let data_init = Cstruct.create lc in
Cstruct.set_uint8 data_init 0 nb_derivations ;
let data = Cstruct.shift data_init 1 in
let _data =
ListLabels.fold_left path ~init:data ~f:(fun cs i ->
Cstruct.BE.set_uint32 cs 0 i ;
Cstruct.shift cs 4)
in
Transport.apdu
?pp
?buf
h
Apdu.(create ~lc ~p2 ~data:data_init (wrap_ins Get_public_key))
>>| fun addr ->
let keylen = Cstruct.get_uint8 addr 0 in
Cstruct.sub addr 1 keylen
---------------------------------------------------------------------------
Copyright ( c ) 2017
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2017 Vincent Bernardoff
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| |
eb8289503d390f2a376e9a110ebf69df4a4c4502ac881e021e34a5f1a91d6b44 | Yleisradio/relastic | test_helpers.clj | (ns relastic.test-helpers
(:require [clojure.test :refer :all]
[clojurewerkz.elastisch.rest.index :as esi]
[clojurewerkz.elastisch.rest.document :as esd]
[clojurewerkz.elastisch.query :as q]
[clojurewerkz.elastisch.rest :as elastisch]
[clojurewerkz.elastisch.native :as elastisch-native]
[clojurewerkz.elastisch.rest.index :as eri]
[environ.core :refer [env]]))
(def cluster-name (get env :es-cluster-name "elasticsearch"))
(def elastic-host (get env :es-host "dockerhost"))
(def rest-port (Integer/parseInt (get env :es-rest-port "9200")))
(def binary-port (Integer/parseInt (get env :es-binary-port "9300")))
(def conn (elastisch/connect (str "http://" elastic-host ":" rest-port)))
(def native-conn (elastisch-native/connect [[elastic-host binary-port]]))
(def mapping-v1 {:tweet {:properties {:content {:type "string"}}}})
(def mapping-v2 (assoc-in mapping-v1 [:tweet :properties :user] {:type "string" :index "not_analyzed"}))
(def settings {"index" {"refresh_interval" "20s"}})
(defn- cleanup-db []
(esi/delete conn "relastic_test_v*")
(esi/delete conn "twitter_v*"))
(defn with-clean-slate [tests]
(cleanup-db)
(tests))
| null | https://raw.githubusercontent.com/Yleisradio/relastic/4d5f65b8c203f873caa270fa78097dd5ca23bd60/test/relastic/test_helpers.clj | clojure | (ns relastic.test-helpers
(:require [clojure.test :refer :all]
[clojurewerkz.elastisch.rest.index :as esi]
[clojurewerkz.elastisch.rest.document :as esd]
[clojurewerkz.elastisch.query :as q]
[clojurewerkz.elastisch.rest :as elastisch]
[clojurewerkz.elastisch.native :as elastisch-native]
[clojurewerkz.elastisch.rest.index :as eri]
[environ.core :refer [env]]))
(def cluster-name (get env :es-cluster-name "elasticsearch"))
(def elastic-host (get env :es-host "dockerhost"))
(def rest-port (Integer/parseInt (get env :es-rest-port "9200")))
(def binary-port (Integer/parseInt (get env :es-binary-port "9300")))
(def conn (elastisch/connect (str "http://" elastic-host ":" rest-port)))
(def native-conn (elastisch-native/connect [[elastic-host binary-port]]))
(def mapping-v1 {:tweet {:properties {:content {:type "string"}}}})
(def mapping-v2 (assoc-in mapping-v1 [:tweet :properties :user] {:type "string" :index "not_analyzed"}))
(def settings {"index" {"refresh_interval" "20s"}})
(defn- cleanup-db []
(esi/delete conn "relastic_test_v*")
(esi/delete conn "twitter_v*"))
(defn with-clean-slate [tests]
(cleanup-db)
(tests))
| |
e2c65d0741327840f147baa6ceeb6bf52a4966b31a04aff5c60dd84b0f462a6c | bmeurer/ocaml-experimental | searchpos.mli | (*************************************************************************)
(* *)
(* Objective Caml LablTk library *)
(* *)
, Kyoto University RIMS
(* *)
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
(* General Public License, with the special exception on linking *)
(* described in file ../../../LICENSE. *)
(* *)
(*************************************************************************)
$ Id$
open Widget
val top_widgets : any widget list ref
type module_widgets =
{ mw_frame: frame widget;
mw_title: label widget option;
mw_detach: button widget;
mw_edit: button widget;
mw_intf: button widget }
val add_shown_module : Path.t -> widgets:module_widgets -> unit
val find_shown_module : Path.t -> module_widgets
val is_shown_module : Path.t -> bool
val default_frame : module_widgets option ref
val set_path : (Path.t -> sign:Types.signature -> unit) ref
val view_defined_ref : (Longident.t -> env:Env.t -> unit) ref
val editor_ref :
(?file:string -> ?pos:int -> ?opendialog:bool -> unit -> unit) ref
val view_signature :
?title:string ->
?path:Path.t -> ?env:Env.t -> ?detach:bool -> Types.signature -> unit
val view_signature_item :
Types.signature -> path:Path.t -> env:Env.t -> unit
val view_module_id : Longident.t -> env:Env.t -> unit
val view_type_id : Longident.t -> env:Env.t -> unit
val view_class_id : Longident.t -> env:Env.t -> unit
val view_cltype_id : Longident.t -> env:Env.t -> unit
val view_modtype_id : Longident.t -> env:Env.t -> unit
val view_type_decl : Path.t -> env:Env.t -> unit
type skind = [`Type|`Class|`Module|`Modtype]
val search_pos_signature :
Parsetree.signature -> pos:int -> env:Env.t ->
((skind * Longident.t) * Env.t * Location.t) list
val view_decl : Longident.t -> kind:skind -> env:Env.t -> unit
val view_decl_menu :
Longident.t ->
kind:skind -> env:Env.t -> parent:text widget -> menu widget
type fkind = [
`Exp of
[`Expr|`Pat|`Const|`Val of Path.t|`Var of Path.t|`New of Path.t]
* Types.type_expr
| `Class of Path.t * Types.class_type
| `Module of Path.t * Types.module_type
]
val search_pos_structure :
pos:int -> Typedtree.structure_item list ->
(fkind * Env.t * Location.t) list
val search_pos_info :
pos:int -> Stypes.annotation list -> (fkind * Env.t * Location.t) list
val view_type : fkind -> env:Env.t -> unit
val view_type_menu : fkind -> env:Env.t -> parent:'a widget -> menu widget
val parent_path : Path.t -> Path.t option
val string_of_path : Path.t -> string
val string_of_longident : Longident.t -> string
val lines_to_chars : int -> text:string -> int
| null | https://raw.githubusercontent.com/bmeurer/ocaml-experimental/fe5c10cdb0499e43af4b08f35a3248e5c1a8b541/otherlibs/labltk/browser/searchpos.mli | ocaml | ***********************************************************************
Objective Caml LablTk library
General Public License, with the special exception on linking
described in file ../../../LICENSE.
*********************************************************************** | , Kyoto University RIMS
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
$ Id$
open Widget
val top_widgets : any widget list ref
type module_widgets =
{ mw_frame: frame widget;
mw_title: label widget option;
mw_detach: button widget;
mw_edit: button widget;
mw_intf: button widget }
val add_shown_module : Path.t -> widgets:module_widgets -> unit
val find_shown_module : Path.t -> module_widgets
val is_shown_module : Path.t -> bool
val default_frame : module_widgets option ref
val set_path : (Path.t -> sign:Types.signature -> unit) ref
val view_defined_ref : (Longident.t -> env:Env.t -> unit) ref
val editor_ref :
(?file:string -> ?pos:int -> ?opendialog:bool -> unit -> unit) ref
val view_signature :
?title:string ->
?path:Path.t -> ?env:Env.t -> ?detach:bool -> Types.signature -> unit
val view_signature_item :
Types.signature -> path:Path.t -> env:Env.t -> unit
val view_module_id : Longident.t -> env:Env.t -> unit
val view_type_id : Longident.t -> env:Env.t -> unit
val view_class_id : Longident.t -> env:Env.t -> unit
val view_cltype_id : Longident.t -> env:Env.t -> unit
val view_modtype_id : Longident.t -> env:Env.t -> unit
val view_type_decl : Path.t -> env:Env.t -> unit
type skind = [`Type|`Class|`Module|`Modtype]
val search_pos_signature :
Parsetree.signature -> pos:int -> env:Env.t ->
((skind * Longident.t) * Env.t * Location.t) list
val view_decl : Longident.t -> kind:skind -> env:Env.t -> unit
val view_decl_menu :
Longident.t ->
kind:skind -> env:Env.t -> parent:text widget -> menu widget
type fkind = [
`Exp of
[`Expr|`Pat|`Const|`Val of Path.t|`Var of Path.t|`New of Path.t]
* Types.type_expr
| `Class of Path.t * Types.class_type
| `Module of Path.t * Types.module_type
]
val search_pos_structure :
pos:int -> Typedtree.structure_item list ->
(fkind * Env.t * Location.t) list
val search_pos_info :
pos:int -> Stypes.annotation list -> (fkind * Env.t * Location.t) list
val view_type : fkind -> env:Env.t -> unit
val view_type_menu : fkind -> env:Env.t -> parent:'a widget -> menu widget
val parent_path : Path.t -> Path.t option
val string_of_path : Path.t -> string
val string_of_longident : Longident.t -> string
val lines_to_chars : int -> text:string -> int
|
c0fd28bef6a128a3a22a3590a282c3b91e4a472683882e22ec0f2e9be946b00c | yzh44yzh/practical_erlang | my_crypt_sup.erl | -module(my_crypt_sup).
-behaviour(supervisor).
-export([start_link/0, init/1]).
-include("otp_types.hrl").
-spec(start_link() -> {ok, pid()}).
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-spec(init(gs_args()) -> sup_init_reply()).
init(_Args) ->
SupervisorSpecification =
#{strategy => one_for_one, % one_for_one | one_for_all | rest_for_one
intensity => 10, % max restarts
period => 1000 % in period of time
},
ChildSpecifications =
[
#{id => my_crypt,
start => {my_crypt, start_link, []},
restart => permanent, % permanent | transient | temporary
shutdown => 2000, % milliseconds | brutal_kill | infinity
type => worker, % worker | supervisor
modules => [my_crypt]
}
],
{ok, {SupervisorSpecification, ChildSpecifications}}.
| null | https://raw.githubusercontent.com/yzh44yzh/practical_erlang/c9eec8cf44e152bf50d9bc6d5cb87fee4764f609/14_otp_project/solution/src/my_crypt_sup.erl | erlang | one_for_one | one_for_all | rest_for_one
max restarts
in period of time
permanent | transient | temporary
milliseconds | brutal_kill | infinity
worker | supervisor | -module(my_crypt_sup).
-behaviour(supervisor).
-export([start_link/0, init/1]).
-include("otp_types.hrl").
-spec(start_link() -> {ok, pid()}).
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-spec(init(gs_args()) -> sup_init_reply()).
init(_Args) ->
SupervisorSpecification =
},
ChildSpecifications =
[
#{id => my_crypt,
start => {my_crypt, start_link, []},
modules => [my_crypt]
}
],
{ok, {SupervisorSpecification, ChildSpecifications}}.
|
7a755ef4d16442061f595f127b138a94a183513884d0402b5bcf20aeb253b5dc | tomhanika/conexp-clj | incomplete_contexts.clj | ;; Copyright ⓒ the conexp-clj developers; all rights reserved.
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file LICENSE at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns conexp.io.incomplete-contexts
"Implements IO for Incomplete Contexts."
(:require
[conexp.base :refer [defalias illegal-argument unsupported-operation set-of]]
[conexp.fca.incomplete-contexts.incomplete-contexts :refer :all]
[conexp.fca.incomplete-contexts.conexp-interop :refer :all]
[conexp.io.util :refer [define-format-dispatch with-out-writer with-in-reader get-line get-lines]])
(:import [java.io PushbackReader]))
;;; Input format dispatch
(define-format-dispatch "incomplete-context")
(set-default-incomplete-context-format! :json)
(defalias read-?-context read-incomplete-context)
(defalias write-?-context write-incomplete-context)
;; Data Table Format
;; Note the following restrictions:
- we need at least one object and at least two attributes ( to
;; reliably determine the file type)
- the first line must contain of the attributes in the correct order
;; - if the subsequent lines have the same number of entries as the
first , the resulting mv - context will have the line number as
;; objects,
- if the subsequent lines have one more element as the first , the first
;; entry will be the object for that line
(add-incomplete-context-input-format :data-table
(fn [rdr]
(try
(re-matches #"^[^,]+,[^,]+.*$" (read-line))
(catch Exception _))))
(define-incomplete-context-output-format :data-table
[mv-context file]
(with-out-writer file
(when (> 2 (count (attributes mv-context)))
(unsupported-operation
"Cannot store many-valued contexts with less then 2 attributes in format :data-table."))
(when (= 0 (count (objects mv-context)))
(unsupported-operation
"Cannot store many-valued context without objects in format :data-table."))
(let [write-comma-line (fn [things]
(cond
(empty? things) nil,
(= 1 (count things)) (prn (first things)),
:else (do (pr (first things))
(print ",")
(recur (rest things)))))]
(write-comma-line (attributes mv-context))
(doseq [g (objects mv-context)]
(write-comma-line (cons g (map #((incidence mv-context) [g %])
(attributes mv-context))))))))
(define-incomplete-context-input-format :data-table
[file]
(with-in-reader file
(let [read-comma-line (fn []
(try
(let [line (get-line)]
(read-string (str "(" line ")")))
(catch java.io.EOFException _
nil))),
attributes (read-comma-line),
lines (doall
(take-while #(not (nil? %))
(repeatedly read-comma-line))),
line-lengths (set-of (count line) [line lines])]
(when (< 1 (count line-lengths))
(illegal-argument "Many-Valued Context in file " file " has lines of different length."))
(when (and (not= (count attributes) (first line-lengths))
(not= (inc (count attributes)) (first line-lengths)))
(illegal-argument
"Number of values in lines in file " file " does not match given attributes.\n"
"Number of values given should be equal or once more to the number of attributes."))
(let [lines (if (not= (first line-lengths) (count attributes))
lines
(map #(cons %1 %2) (iterate inc 0) lines)),
objects (map first lines),
object-set (set objects)]
(when (not= (count objects) (count object-set))
(illegal-argument "Given file " file " contains double entries for objects."))
(let [interpretation (into {}
(for [line lines
:let [g (first line),
values (rest line),
mapped_values (map map-true-false-unknown-to-x-o-? values)],
[m w] (map vector attributes mapped_values)]
[[g m] w]))]
(make-incomplete-context object-set attributes interpretation))))))
(add-incomplete-context-input-format :burmeister
(fn [rdr]
(= "B" (read-line))))
(defn convert-incomplete-context-incidences-to-burmeister-output
"converts x to X; o to . and ? to ? for burmeister output"
[in]
(if (= in known-true) "X" (if (= in known-false) "." (if (= in unknown) "?" (throw "input not x,o,?")))))
(define-incomplete-context-output-format :burmeister
[ctx file]
(with-out-writer file
(println \B)
(println)
(println (count (objects ctx)))
(println (count (attributes ctx)))
(println)
(doseq [g (objects ctx)] (println g))
(doseq [m (attributes ctx)] (println m))
(let [inz (incidence ctx)]
(doseq [g (objects ctx)]
(doseq [m (attributes ctx)]
(print (convert-incomplete-context-incidences-to-burmeister-output (inz [g m]))))
(println)))))
(define-incomplete-context-input-format :burmeister
[file]
(with-in-reader file
" B\n\n " , we do n't support names
number-of-objects (Integer/parseInt (.trim (get-line)))
number-of-attributes (Integer/parseInt (.trim (get-line)))
_ (get-line) ; "\n"
seq-of-objects (get-lines number-of-objects)
seq-of-attributes (get-lines number-of-attributes)]
(loop [objs seq-of-objects
incidence {}]
(if (empty? objs)
(make-incomplete-context (set seq-of-objects)
(set seq-of-attributes)
incidence)
(let [line (get-line)]
(recur (rest objs)
(into incidence
(for [idx-m (range number-of-attributes)]
[[(first objs) (nth seq-of-attributes idx-m)] (map-true-false-unknown-to-x-o-? (nth line idx-m))])))))))))
;;; Json
(defn icxt->json
"Returns a formal context as a map that can easily be converted into json format."
[cxt]
(let [icxt (to-incomplete-context cxt)]
{:attributes (attributes icxt)
:objects (objects icxt)
:certain-incidences (mapv first (true-incidence icxt))
:possible-incidences (mapv first (true-or-unknown-incidences icxt))}
))
(defn json->icxt
"Returns a Context object for the given json context."
[json-icxt]
(let [attributes (:attributes json-icxt)
objects (:objects json-icxt)
certain-incidences (into {} (map #(vector % known-true) (:certain-incidences json-icxt)))
possible-incidences (into {} (map #(vector % unknown) (:possible-incidences json-icxt)))
hm (into {} (map #(vector % known-false) (clojure.math.combinatorics/cartesian-product objects attributes)))
incidence (reduce into [hm possible-incidences certain-incidences])
]
(make-incomplete-context objects attributes incidence)))
(add-incomplete-context-input-format :json (fn [rdr]
(try (conexp.io.json/json-object? rdr)
(catch Exception _))))
(define-incomplete-context-output-format :json
[cxt file]
(with-out-writer file
(print (clojure.data.json/write-str (icxt->json cxt)))))
(define-incomplete-context-input-format :json
[file]
(with-in-reader file
(let [file-content (clojure.data.json/read *in* :key-fn keyword)
json-cxt file-content]
(json->icxt json-cxt))))
nil
| null | https://raw.githubusercontent.com/tomhanika/conexp-clj/9f53c71913fa39b95516c388d9e426bec34e3a77/src/main/clojure/conexp/io/incomplete_contexts.clj | clojure | Copyright ⓒ the conexp-clj developers; all rights reserved.
The use and distribution terms for this software are covered by the
which can be found in the file LICENSE at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
Input format dispatch
Data Table Format
Note the following restrictions:
reliably determine the file type)
- if the subsequent lines have the same number of entries as the
objects,
entry will be the object for that line
"\n"
Json | Eclipse Public License 1.0 ( -1.0.php )
(ns conexp.io.incomplete-contexts
"Implements IO for Incomplete Contexts."
(:require
[conexp.base :refer [defalias illegal-argument unsupported-operation set-of]]
[conexp.fca.incomplete-contexts.incomplete-contexts :refer :all]
[conexp.fca.incomplete-contexts.conexp-interop :refer :all]
[conexp.io.util :refer [define-format-dispatch with-out-writer with-in-reader get-line get-lines]])
(:import [java.io PushbackReader]))
(define-format-dispatch "incomplete-context")
(set-default-incomplete-context-format! :json)
(defalias read-?-context read-incomplete-context)
(defalias write-?-context write-incomplete-context)
- we need at least one object and at least two attributes ( to
- the first line must contain of the attributes in the correct order
first , the resulting mv - context will have the line number as
- if the subsequent lines have one more element as the first , the first
(add-incomplete-context-input-format :data-table
(fn [rdr]
(try
(re-matches #"^[^,]+,[^,]+.*$" (read-line))
(catch Exception _))))
(define-incomplete-context-output-format :data-table
[mv-context file]
(with-out-writer file
(when (> 2 (count (attributes mv-context)))
(unsupported-operation
"Cannot store many-valued contexts with less then 2 attributes in format :data-table."))
(when (= 0 (count (objects mv-context)))
(unsupported-operation
"Cannot store many-valued context without objects in format :data-table."))
(let [write-comma-line (fn [things]
(cond
(empty? things) nil,
(= 1 (count things)) (prn (first things)),
:else (do (pr (first things))
(print ",")
(recur (rest things)))))]
(write-comma-line (attributes mv-context))
(doseq [g (objects mv-context)]
(write-comma-line (cons g (map #((incidence mv-context) [g %])
(attributes mv-context))))))))
(define-incomplete-context-input-format :data-table
[file]
(with-in-reader file
(let [read-comma-line (fn []
(try
(let [line (get-line)]
(read-string (str "(" line ")")))
(catch java.io.EOFException _
nil))),
attributes (read-comma-line),
lines (doall
(take-while #(not (nil? %))
(repeatedly read-comma-line))),
line-lengths (set-of (count line) [line lines])]
(when (< 1 (count line-lengths))
(illegal-argument "Many-Valued Context in file " file " has lines of different length."))
(when (and (not= (count attributes) (first line-lengths))
(not= (inc (count attributes)) (first line-lengths)))
(illegal-argument
"Number of values in lines in file " file " does not match given attributes.\n"
"Number of values given should be equal or once more to the number of attributes."))
(let [lines (if (not= (first line-lengths) (count attributes))
lines
(map #(cons %1 %2) (iterate inc 0) lines)),
objects (map first lines),
object-set (set objects)]
(when (not= (count objects) (count object-set))
(illegal-argument "Given file " file " contains double entries for objects."))
(let [interpretation (into {}
(for [line lines
:let [g (first line),
values (rest line),
mapped_values (map map-true-false-unknown-to-x-o-? values)],
[m w] (map vector attributes mapped_values)]
[[g m] w]))]
(make-incomplete-context object-set attributes interpretation))))))
(add-incomplete-context-input-format :burmeister
(fn [rdr]
(= "B" (read-line))))
(defn convert-incomplete-context-incidences-to-burmeister-output
"converts x to X; o to . and ? to ? for burmeister output"
[in]
(if (= in known-true) "X" (if (= in known-false) "." (if (= in unknown) "?" (throw "input not x,o,?")))))
(define-incomplete-context-output-format :burmeister
[ctx file]
(with-out-writer file
(println \B)
(println)
(println (count (objects ctx)))
(println (count (attributes ctx)))
(println)
(doseq [g (objects ctx)] (println g))
(doseq [m (attributes ctx)] (println m))
(let [inz (incidence ctx)]
(doseq [g (objects ctx)]
(doseq [m (attributes ctx)]
(print (convert-incomplete-context-incidences-to-burmeister-output (inz [g m]))))
(println)))))
(define-incomplete-context-input-format :burmeister
[file]
(with-in-reader file
" B\n\n " , we do n't support names
number-of-objects (Integer/parseInt (.trim (get-line)))
number-of-attributes (Integer/parseInt (.trim (get-line)))
seq-of-objects (get-lines number-of-objects)
seq-of-attributes (get-lines number-of-attributes)]
(loop [objs seq-of-objects
incidence {}]
(if (empty? objs)
(make-incomplete-context (set seq-of-objects)
(set seq-of-attributes)
incidence)
(let [line (get-line)]
(recur (rest objs)
(into incidence
(for [idx-m (range number-of-attributes)]
[[(first objs) (nth seq-of-attributes idx-m)] (map-true-false-unknown-to-x-o-? (nth line idx-m))])))))))))
(defn icxt->json
"Returns a formal context as a map that can easily be converted into json format."
[cxt]
(let [icxt (to-incomplete-context cxt)]
{:attributes (attributes icxt)
:objects (objects icxt)
:certain-incidences (mapv first (true-incidence icxt))
:possible-incidences (mapv first (true-or-unknown-incidences icxt))}
))
(defn json->icxt
"Returns a Context object for the given json context."
[json-icxt]
(let [attributes (:attributes json-icxt)
objects (:objects json-icxt)
certain-incidences (into {} (map #(vector % known-true) (:certain-incidences json-icxt)))
possible-incidences (into {} (map #(vector % unknown) (:possible-incidences json-icxt)))
hm (into {} (map #(vector % known-false) (clojure.math.combinatorics/cartesian-product objects attributes)))
incidence (reduce into [hm possible-incidences certain-incidences])
]
(make-incomplete-context objects attributes incidence)))
(add-incomplete-context-input-format :json (fn [rdr]
(try (conexp.io.json/json-object? rdr)
(catch Exception _))))
(define-incomplete-context-output-format :json
[cxt file]
(with-out-writer file
(print (clojure.data.json/write-str (icxt->json cxt)))))
(define-incomplete-context-input-format :json
[file]
(with-in-reader file
(let [file-content (clojure.data.json/read *in* :key-fn keyword)
json-cxt file-content]
(json->icxt json-cxt))))
nil
|
78379e718578f79f0d796dc1dd65f6328e8133a9734b73e48ab63692023f9603 | jordanthayer/ocaml-search | explicit_graph.ml | (** The basic structure of the graph *)
type node = {
id : int;
mutable neighbors : (node * float) list;
}
type graph = {
seed : int;
nodes : node array;
}
(**************************************************************************)
let node_to_string n =
(** Converts a node [n] into a string representation of n *)
let delim = " " in
let rec neighbor_string cur nbrs =
match nbrs with
[] -> cur
| (hd,_)::tl -> neighbor_string
(cur ^ delim ^ string_of_int hd.id) tl in
(Wrutils.str "Name: %i\nNeighbors:%s" n.id
(neighbor_string "" n.neighbors))
let graph_to_string g =
(** Converts a graph [g] to a string *)
Wrutils.str "Seed:%i%s" g.seed
(Array.fold_left (fun accum node -> accum ^ "\n" ^ (node_to_string node))
"" g.nodes)
(************************ Testing Code ***********************************)
let build_unit_connected n =
(** builds a fully connected graph of size n *)
let nodes = Array.init n (fun i -> { id = i; neighbors = []}) in
for current = 0 to (n - 1) do
for adding = 0 to (n - 1) do
if current != adding
then nodes.(current).neighbors <-
(nodes.(adding),1.)::nodes.(current).neighbors
done
done;
{seed = -1;
nodes = nodes;}
EOF
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/synthetic_graph/explicit_graph.ml | ocaml | * The basic structure of the graph
************************************************************************
* Converts a node [n] into a string representation of n
* Converts a graph [g] to a string
*********************** Testing Code **********************************
* builds a fully connected graph of size n |
type node = {
id : int;
mutable neighbors : (node * float) list;
}
type graph = {
seed : int;
nodes : node array;
}
let node_to_string n =
let delim = " " in
let rec neighbor_string cur nbrs =
match nbrs with
[] -> cur
| (hd,_)::tl -> neighbor_string
(cur ^ delim ^ string_of_int hd.id) tl in
(Wrutils.str "Name: %i\nNeighbors:%s" n.id
(neighbor_string "" n.neighbors))
let graph_to_string g =
Wrutils.str "Seed:%i%s" g.seed
(Array.fold_left (fun accum node -> accum ^ "\n" ^ (node_to_string node))
"" g.nodes)
let build_unit_connected n =
let nodes = Array.init n (fun i -> { id = i; neighbors = []}) in
for current = 0 to (n - 1) do
for adding = 0 to (n - 1) do
if current != adding
then nodes.(current).neighbors <-
(nodes.(adding),1.)::nodes.(current).neighbors
done
done;
{seed = -1;
nodes = nodes;}
EOF
|
d07f03bd6cfab2eb5b96551ea31c48ef87d2859bde3af468bb376fe19fb70d26 | TomLisankie/Learning-Lisp | aoc-2018-1b.lisp | (defun open-file (file-name)
"Dumps contents of a text file into a list."
(with-open-file (file-stream file-name)
(loop for line = (read-line file-stream nil)
while line
collect line)))
(defun find-first-duplicate (list-from-file)
"Takes a list containing string representations of integers and finds the first sum that is repeated twice."
(let ((sum 0)
(seen-sums (make-hash-table)))
(dolist (an-int-string list-from-file)
(setf sum (+ sum (parse-integer an-int-string)))
(incf (gethash (write-to-string sum) seen-sums 0))
( format t " ~a~% " ( gethash ( write - to - string sum ) seen - sums ) )
(if (> (gethash (write-to-string sum) seen-sums) 1)
(write-to-string sum)))))
| null | https://raw.githubusercontent.com/TomLisankie/Learning-Lisp/27f9843cbb0c325a6531fd1332c6f19bebfe5a4d/code/Dec2018/9/aoc-2018-1b.lisp | lisp | (defun open-file (file-name)
"Dumps contents of a text file into a list."
(with-open-file (file-stream file-name)
(loop for line = (read-line file-stream nil)
while line
collect line)))
(defun find-first-duplicate (list-from-file)
"Takes a list containing string representations of integers and finds the first sum that is repeated twice."
(let ((sum 0)
(seen-sums (make-hash-table)))
(dolist (an-int-string list-from-file)
(setf sum (+ sum (parse-integer an-int-string)))
(incf (gethash (write-to-string sum) seen-sums 0))
( format t " ~a~% " ( gethash ( write - to - string sum ) seen - sums ) )
(if (> (gethash (write-to-string sum) seen-sums) 1)
(write-to-string sum)))))
| |
624b08eb2994d6cfb8b9080a3a1d6ba05463df8a4a197fe7b14f0c4e1a3b3d1f | supki/liblastfm | User.hs | # LANGUAGE CPP #
# LANGUAGE DataKinds #
{-# LANGUAGE OverloadedStrings #-}
-- | Lastfm user API
--
-- This module is intended to be imported qualified:
--
-- @
-- import qualified Lastfm.User as User
-- @
module Lastfm.User
( getArtistTracks, getBannedTracks, getEvents, getFriends
, getInfo, getLovedTracks, getNeighbours, getNewReleases
, getPastEvents, getPersonalTags, getPlaylists, getRecentStations
, getRecentTracks, getRecommendedArtists, getRecommendedEvents
, getShouts, getTopAlbums, getTopArtists, getTopTags
, getTopTracks, getWeeklyAlbumChart, getWeeklyArtistChart
, getWeeklyChartList, getWeeklyTrackChart, shout
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Lastfm.Request
-- | Get a list of tracks by a given artist scrobbled by this user
-- , including scrobble time. Can be limited to specific timeranges, defaults to all time.
--
Optional : ' startTimestamp ' , ' page ' , ' '
--
-- <>
getArtistTracks :: Request f (User -> Artist -> APIKey -> Ready)
getArtistTracks = api "user.getArtistTracks"
-- | Returns the tracks banned by the user
--
-- Optional: 'limit', 'page'
--
-- <>
getBannedTracks :: Request f (User -> APIKey -> Ready)
getBannedTracks = api "user.getBannedTracks"
-- | Get a list of upcoming events that this user is attending.
-- Easily integratable into calendars, using the ical standard (see 'more formats' section below).
--
-- Optional: 'page', 'festivalsonly', 'limit'
--
-- <>
getEvents :: Request f (User -> APIKey -> Ready)
getEvents = api "user.getEvents"
-- | Get a list of the user's friends on Last.fm.
--
-- Optional: 'recenttracks', 'limit', 'page'
--
-- <>
getFriends :: Request f (User -> APIKey -> Ready)
getFriends = api "user.getFriends"
-- | Get information about a user profile.
--
-- <>
getInfo :: Request f (User -> APIKey -> Ready)
getInfo = api "user.getInfo"
| Get the last 50 tracks loved by a user .
--
-- Optional: 'limit', 'page'
--
-- <>
getLovedTracks :: Request f (User -> APIKey -> Ready)
getLovedTracks = api "user.getLovedTracks"
-- | Get a list of a user's neighbours on Last.fm.
--
-- Optional: 'limit'
--
-- <>
getNeighbours :: Request f (User -> APIKey -> Ready)
getNeighbours = api "user.getNeighbours"
-- | Gets a list of forthcoming releases based on a user's musical taste.
--
Optional : ' userecs '
--
-- <>
getNewReleases :: Request f (User -> APIKey -> Ready)
getNewReleases = api "user.getNewReleases"
-- | Get a paginated list of all events a user has attended in the past.
--
-- Optional: 'page', 'limit'
--
-- <>
getPastEvents :: Request f (User -> APIKey -> Ready)
getPastEvents = api "user.getPastEvents"
-- | Get the user's personal tags
--
-- Optional: 'taggingtype', 'limit', 'page'
--
-- <>
getPersonalTags :: Request f (User -> Tag -> TaggingType -> APIKey -> Ready)
getPersonalTags = api "user.getPersonalTags"
| Get a list of a user 's playlists on Last.fm .
--
-- <>
getPlaylists :: Request f (User -> APIKey -> Ready)
getPlaylists = api "user.getPlaylists"
-- | Get a list of the recent Stations listened to by this user.
--
-- Optional: 'limit', 'page'
--
-- <>
getRecentStations :: Request f (User -> APIKey -> SessionKey -> Sign)
getRecentStations = api "user.getRecentStations"
-- | Get a list of the recent tracks listened to by this user.
-- Also includes the currently playing track with the nowplaying="true"
-- attribute if the user is currently listening.
--
-- Optional: 'limit', 'page', 'from', 'extended', 'to'
--
-- <>
getRecentTracks :: Request f (User -> APIKey -> Ready)
getRecentTracks = api "user.getRecentTracks"
-- | Get Last.fm artist recommendations for a user
--
-- Optional: 'page', 'limit'
--
-- <>
getRecommendedArtists :: Request f (APIKey -> SessionKey -> Sign)
getRecommendedArtists = api "user.getRecommendedArtists"
| Get a paginated list of all events recommended to a user by Last.fm , based on their listening profile .
--
-- Optional: 'limit', 'page', 'latitude', 'longitude', 'festivalsonly', 'country'
--
-- <>
getRecommendedEvents :: Request f (APIKey -> SessionKey -> Sign)
getRecommendedEvents = api "user.getRecommendedEvents"
| Get shouts for this user . Also available as an rss feed .
--
-- Optional: 'page', 'limit'
--
-- <>
getShouts :: Request f (User -> APIKey -> Ready)
getShouts = api "user.getShouts"
-- | Get the top albums listened to by a user.
-- You can stipulate a time period. Sends the overall chart by default.
--
-- Optional: 'period', 'limit', 'page'
--
-- <>
getTopAlbums :: Request f (User -> APIKey -> Ready)
getTopAlbums = api "user.getTopAlbums"
-- | Get the top artists listened to by a user.
-- You can stipulate a time period. Sends the overall chart by default.
--
-- Optional: 'period', 'limit', 'page'
--
-- <>
getTopArtists :: Request f (User -> APIKey -> Ready)
getTopArtists = api "user.getTopArtists"
-- | Get the top tags used by this user.
--
-- Optional: 'limit'
--
-- <>
getTopTags :: Request f (User -> APIKey -> Ready)
getTopTags = api "user.getTopTags"
-- | Get the top tracks listened to by a user.
-- You can stipulate a time period. Sends the overall chart by default.
--
-- Optional: 'period', 'limit', 'page'
--
-- <>
getTopTracks :: Request f (User -> APIKey -> Ready)
getTopTracks = api "user.getTopTracks"
-- | Get an album chart for a user profile, for a given date range.
-- If no date range is supplied, it will return the most recent album chart for this user.
--
-- Optional: 'from', 'to'
--
-- <>
getWeeklyAlbumChart :: Request f (User -> APIKey -> Ready)
getWeeklyAlbumChart = api "user.getWeeklyAlbumChart"
-- | Get an artist chart for a user profile, for a given date range.
-- If no date range is supplied, it will return the most recent artist chart for this user.
--
-- Optional: 'from', 'to'
--
-- <>
getWeeklyArtistChart :: Request f (User -> APIKey -> Ready)
getWeeklyArtistChart = api "user.getWeeklyArtistChart"
-- | Get a list of available charts for this user, expressed as
-- date ranges which can be sent to the chart services.
--
-- <>
getWeeklyChartList :: Request f (User -> APIKey -> Ready)
getWeeklyChartList = api "user.getWeeklyChartList"
-- | Get a track chart for a user profile, for a given date range.
-- If no date range is supplied, it will return the most recent track chart for this user.
--
-- Optional: 'from', 'to'
--
-- <>
getWeeklyTrackChart :: Request f (User -> APIKey -> Ready)
getWeeklyTrackChart = api "user.getWeeklyTrackChart"
-- | Shout on this user's shoutbox
--
-- <>
shout :: Request f (User -> Message -> APIKey -> SessionKey -> Sign)
shout = api "user.shout" <* post
| null | https://raw.githubusercontent.com/supki/liblastfm/754be163c4ce14c9b4819f1359b5f95a0f91a29d/src/Lastfm/User.hs | haskell | # LANGUAGE OverloadedStrings #
| Lastfm user API
This module is intended to be imported qualified:
@
import qualified Lastfm.User as User
@
| Get a list of tracks by a given artist scrobbled by this user
, including scrobble time. Can be limited to specific timeranges, defaults to all time.
<>
| Returns the tracks banned by the user
Optional: 'limit', 'page'
<>
| Get a list of upcoming events that this user is attending.
Easily integratable into calendars, using the ical standard (see 'more formats' section below).
Optional: 'page', 'festivalsonly', 'limit'
<>
| Get a list of the user's friends on Last.fm.
Optional: 'recenttracks', 'limit', 'page'
<>
| Get information about a user profile.
<>
Optional: 'limit', 'page'
<>
| Get a list of a user's neighbours on Last.fm.
Optional: 'limit'
<>
| Gets a list of forthcoming releases based on a user's musical taste.
<>
| Get a paginated list of all events a user has attended in the past.
Optional: 'page', 'limit'
<>
| Get the user's personal tags
Optional: 'taggingtype', 'limit', 'page'
<>
<>
| Get a list of the recent Stations listened to by this user.
Optional: 'limit', 'page'
<>
| Get a list of the recent tracks listened to by this user.
Also includes the currently playing track with the nowplaying="true"
attribute if the user is currently listening.
Optional: 'limit', 'page', 'from', 'extended', 'to'
<>
| Get Last.fm artist recommendations for a user
Optional: 'page', 'limit'
<>
Optional: 'limit', 'page', 'latitude', 'longitude', 'festivalsonly', 'country'
<>
Optional: 'page', 'limit'
<>
| Get the top albums listened to by a user.
You can stipulate a time period. Sends the overall chart by default.
Optional: 'period', 'limit', 'page'
<>
| Get the top artists listened to by a user.
You can stipulate a time period. Sends the overall chart by default.
Optional: 'period', 'limit', 'page'
<>
| Get the top tags used by this user.
Optional: 'limit'
<>
| Get the top tracks listened to by a user.
You can stipulate a time period. Sends the overall chart by default.
Optional: 'period', 'limit', 'page'
<>
| Get an album chart for a user profile, for a given date range.
If no date range is supplied, it will return the most recent album chart for this user.
Optional: 'from', 'to'
<>
| Get an artist chart for a user profile, for a given date range.
If no date range is supplied, it will return the most recent artist chart for this user.
Optional: 'from', 'to'
<>
| Get a list of available charts for this user, expressed as
date ranges which can be sent to the chart services.
<>
| Get a track chart for a user profile, for a given date range.
If no date range is supplied, it will return the most recent track chart for this user.
Optional: 'from', 'to'
<>
| Shout on this user's shoutbox
<> | # LANGUAGE CPP #
# LANGUAGE DataKinds #
module Lastfm.User
( getArtistTracks, getBannedTracks, getEvents, getFriends
, getInfo, getLovedTracks, getNeighbours, getNewReleases
, getPastEvents, getPersonalTags, getPlaylists, getRecentStations
, getRecentTracks, getRecommendedArtists, getRecommendedEvents
, getShouts, getTopAlbums, getTopArtists, getTopTags
, getTopTracks, getWeeklyAlbumChart, getWeeklyArtistChart
, getWeeklyChartList, getWeeklyTrackChart, shout
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Lastfm.Request
Optional : ' startTimestamp ' , ' page ' , ' '
getArtistTracks :: Request f (User -> Artist -> APIKey -> Ready)
getArtistTracks = api "user.getArtistTracks"
getBannedTracks :: Request f (User -> APIKey -> Ready)
getBannedTracks = api "user.getBannedTracks"
getEvents :: Request f (User -> APIKey -> Ready)
getEvents = api "user.getEvents"
getFriends :: Request f (User -> APIKey -> Ready)
getFriends = api "user.getFriends"
getInfo :: Request f (User -> APIKey -> Ready)
getInfo = api "user.getInfo"
| Get the last 50 tracks loved by a user .
getLovedTracks :: Request f (User -> APIKey -> Ready)
getLovedTracks = api "user.getLovedTracks"
getNeighbours :: Request f (User -> APIKey -> Ready)
getNeighbours = api "user.getNeighbours"
Optional : ' userecs '
getNewReleases :: Request f (User -> APIKey -> Ready)
getNewReleases = api "user.getNewReleases"
getPastEvents :: Request f (User -> APIKey -> Ready)
getPastEvents = api "user.getPastEvents"
getPersonalTags :: Request f (User -> Tag -> TaggingType -> APIKey -> Ready)
getPersonalTags = api "user.getPersonalTags"
| Get a list of a user 's playlists on Last.fm .
getPlaylists :: Request f (User -> APIKey -> Ready)
getPlaylists = api "user.getPlaylists"
getRecentStations :: Request f (User -> APIKey -> SessionKey -> Sign)
getRecentStations = api "user.getRecentStations"
getRecentTracks :: Request f (User -> APIKey -> Ready)
getRecentTracks = api "user.getRecentTracks"
getRecommendedArtists :: Request f (APIKey -> SessionKey -> Sign)
getRecommendedArtists = api "user.getRecommendedArtists"
| Get a paginated list of all events recommended to a user by Last.fm , based on their listening profile .
getRecommendedEvents :: Request f (APIKey -> SessionKey -> Sign)
getRecommendedEvents = api "user.getRecommendedEvents"
| Get shouts for this user . Also available as an rss feed .
getShouts :: Request f (User -> APIKey -> Ready)
getShouts = api "user.getShouts"
getTopAlbums :: Request f (User -> APIKey -> Ready)
getTopAlbums = api "user.getTopAlbums"
getTopArtists :: Request f (User -> APIKey -> Ready)
getTopArtists = api "user.getTopArtists"
getTopTags :: Request f (User -> APIKey -> Ready)
getTopTags = api "user.getTopTags"
getTopTracks :: Request f (User -> APIKey -> Ready)
getTopTracks = api "user.getTopTracks"
getWeeklyAlbumChart :: Request f (User -> APIKey -> Ready)
getWeeklyAlbumChart = api "user.getWeeklyAlbumChart"
getWeeklyArtistChart :: Request f (User -> APIKey -> Ready)
getWeeklyArtistChart = api "user.getWeeklyArtistChart"
getWeeklyChartList :: Request f (User -> APIKey -> Ready)
getWeeklyChartList = api "user.getWeeklyChartList"
getWeeklyTrackChart :: Request f (User -> APIKey -> Ready)
getWeeklyTrackChart = api "user.getWeeklyTrackChart"
shout :: Request f (User -> Message -> APIKey -> SessionKey -> Sign)
shout = api "user.shout" <* post
|
8e0bdf61a3b3a3f22021cbd9601864ba6abc028ab531db3df34850ce7c1b60a6 | johnlawrenceaspden/hobby-code | travellingsalesman.clj | On the Travelling of Salesmen
Four vertices , 1 , 2 , 3 , 4
(def vertexset #{1,2,3,4})
Leads to 6 edges in the complete graph
(def edgeset {[1 2] 2, [1 3] 4, [1 4] 5, [2 3] 2, [2 4] 3, [3 4] 4})
;; Which we can represent as a distance function
(defn make-d [edgeset]
(fn [a b] (or (edgeset [a b]) (edgeset [b a]))))
;; Have we got them all?
(for [a vertexset b (disj vertexset a)] [a b ((make-d edgeset) a b)])
- > ( [ 1 2 2 ] [ 1 3 4 ] [ 1 4 5 ] [ 2 1 2 ] [ 2 3 2 ] [ 2 4 3 ] [ 3 1 4 ] [ 3 2 2 ] [ 3 4 4 ] [ 4 1 5 ] [ 4 2 3 ] [ 4 3 4 ] )
;; A tour in such a graph is a permutation of the list of vertices
(defn scissor [a lst]
(for [i (range (inc (count lst)))] (concat (take i lst) (list a) (drop i lst))))
(defn perms [lst]
(if (empty? lst) '(())
(mapcat (partial scissor (first lst)) (perms (rest lst)))))
- > ( ( 1 2 3 4 ) ( 2 1 3 4 ) ( 2 3 1 4 ) ( 2 3 4 1 ) ( 1 3 2 4 ) ( 3 1 2 4 ) ( 3 2 1 4 ) ( 3 2 4 1 ) ( 1 3 4 2 ) ( 3 1 4 2 ) ( 3 4 1 2 ) ( 3 4 2 1 ) ( 1 2 4 3 ) ( 2 1 4 3 ) ( 2 4 1 3 ) ( 2 4 3 1 ) ( 1 4 2 3 ) ( 4 1 2 3 ) ( 4 2 1 3 ) ( 4 2 3 1 ) ( 1 4 3 2 ) ( 4 1 3 2 ) ( 4 3 1 2 ) ( 4 3 2 1 ) )
;; Which represents a traversal of selected edges in a cycle
(defn edgelist [tour]
(for [[a b] (cons (list (last tour) (first tour)) (partition 2 1 tour))] [a b]))
- > ( [ 4 1 ] [ 1 2 ] [ 2 3 ] [ 3 4 ] )
;; And such tours have an associated cost:
(defn make-cost [edgeset]
(let [d (make-d edgeset)]
(fn [tour]
(reduce + (for [[a b] (edgelist tour)] (d a b))))))
- > 13
- > ( 13 14 )
;; In this case it appears that 1 2 3 4 is a minimal cost tour.
Finding such a tour is known as the Travelling Salesman Problem .
;; Behold, I have solved the problem in the time it took to state it.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; There is a problem with this approach
(take 10 (map (comp count perms) (reductions conj #{} (range))))
( 1 1 2 6 24 120 720 5040 40320 362880 )
;; which is very like:
- > ( 1 2 6 24 120 720 5040 40320 362880 3628800 39916800 479001600 6227020800 87178291200 1307674368000 20922789888000 355687428096000 6402373705728000 121645100408832000 )
If we can calculate the cost of a tour a trillion times , then we might be able to solve this problem for a graph with 14 vertices .
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; There is a way to do marginally less miserably
The minimal tour must go through 1 .
If we knew the cheapest path from 1 to 2 that went through 3 and 4 ( once and once only )
and the cheapest path from 1 to 3 that went through 2 and 4 ( oaoo )
and the cheapest path from 1 to 4 that went through 2 and 3 ( o )
Then we 'd be done , because we 'd add the closing edge to each of these paths to make 3 separate tours
;; and then we'd pick the cheapest one.
;; So we need to summon the recursion fairies
;; And the fairies that we need to summon need to be able to answer questions of the following general form
' What , oh fairy , is the cheapest path from 1 to j passing exactly
;; once through every vertex of the set S, which is a subset of the
vertices of the graph which does not contain either 1 or j ? '
;; And the spell which summons such fairies is this spell
(defn cpath [j vset d]
(cond (empty? vset) (d 1 j)
(= (count vset) 1) (+ (d 1 (first vset)) (d (first vset) j))
:else (apply min
(for [ v vset]
(+ (d v j) (cpath v (disj vset v) d))))))
;; Given such an incantation, we can work out the lengths of various short excursions taking in various points of interest
(cpath 1 #{} (make-d edgeset)) ;-> nil
- > 2
- > 4
;; Including some which are nearly tours
- > 8
(cpath 3 #{2 4} (make-d edgeset)) ;-> 9
- > 11
;; Amazingly, when we create the final tours, they all end up having the same value
- > 13
- > 13
- > 13
So perhaps we only need to ask for one of them
(defn tsp [vertexset d]
(+ (cpath 2 (disj (disj vertexset 2) 1) d) (d 2 1)))
- > 13
Now , if you look carefully , you will find that , given a graph with n vertices , the first call
here makes , which each make , which each make n-4 subcalls , and so on
;; So that the running time of this problem also goes like the factorial
- > 5
"Elapsed time: 1.1352 msecs"
- > 6
"Elapsed time: 1.457029 msecs"
- > 7
"Elapsed time: 3.021475 msecs"
- > 8
"Elapsed time: 12.281457 msecs"
(time (tsp #{1 2 3 4 5 6 7 8 9} (fn [a b] 1))) ;-> 9
"Elapsed time: 55.585345 msecs"
- > 10
"Elapsed time: 416.957839 msecs"
- > 11
"Elapsed time: 3365.925792 msecs"
- > 12
"Elapsed time: 33854.042469 msecs"
(defn p2 [sq] (with-out-str (doseq [i sq] (printf "%2.2f " i))))
(defn successive-ratios [sq] (map (fn[[a b]] (float (/ b a))) (partition 2 1 sq)))
(p2 (successive-ratios '(1.13 1.45 3.02 12.2 55.5 416. 3365.1 33854.0)))
- > " 1.28 2.08 4.04 4.55 7.50 8.09 10.06 "
;; and yes, since you ask, I *did* fiddle these timings.
;; Now normally at this point, when we think of a clever recurrence
;; relation and implement it in a straightforward manner, memoization
;; will turn it from a superexponential horror into a clever dynamic
;; programming algorithm and it will run in O(n) or O(n^2) or
;; something neat like that
(defn cpath [j vset d]
(cond (empty? vset) (d 1 j)
(= (count vset) 1) (+ (d 1 (first vset)) (d (first vset) j))
:else (apply min
(for [ v vset]
(+ (d v j) (cpath v (disj vset v) d))))))
;; We memoize
(def cpath (memoize cpath))
;; And find:
(time (tsp #{1 2 3 4 5} (fn [a b] 1)))
"Elapsed time: 1.424132 msecs"
(time (tsp #{1 2 3 4 5 6} (fn [a b] 1)))
"Elapsed time: 2.55207 msecs"
(time (tsp #{1 2 3 4 5 6 7} (fn [a b] 1)))
"Elapsed time: 5.633396 msecs"
(time (tsp #{1 2 3 4 5 6 7 8} (fn [a b] 1)))
"Elapsed time: 15.057154 msecs"
(time (tsp #{1 2 3 4 5 6 7 8 9} (fn [a b] 1)))
"Elapsed time: 31.793358 msecs"
(time (tsp #{1 2 3 4 5 6 7 8 9 10} (fn [a b] 1)))
"Elapsed time: 73.710072 msecs"
(time (tsp #{1 2 3 4 5 6 7 8 9 10 11} (fn [a b] 1)))
"Elapsed time: 217.877989 msecs"
- > 12
"Elapsed time: 770.114412 msecs"
- > 13
"Elapsed time: 2395.936944 msecs"
- > 14
"Elapsed time: 8114.328645 msecs"
- > 15
"Elapsed time: 29191.135966 msecs"
- > 16
"Elapsed time: 125260.112064 msecs"
We 've got a bit quicker . If we 've got 30 seconds to throw at the problem then we can solve it for rather than for n=12
;; The growth in time take still looks superexponential, but the growth rate isn't quite as insane as it was.
(p2 (successive-ratios '(1.42 2.55 5.63 15. 31. 73. 217. 770. 2395. 8114. 29191. 125260.)))
- > " 1.80 2.21 2.66 2.07 2.35 2.97 3.55 3.11 3.39 3.60 4.29 "
If we compare the timings of the two methods where we 've measured them :
(p2 (map / '(1.42 2.55 5.63 15. 31. 73. 217. 770. 2395. 8114. 29191. 125260.) '(1.13 1.45 3.02 12.2 55.5 416. 3365.1 33854.0)))
- > " 1.26 1.76 1.86 1.23 0.56 0.18 0.06 0.02 "
;; It seems that an early slowdown (presumably caused by the effort of
;; looking up the previous answers in the memoization table) is being
;; repaid by big speedups in larger problems.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; In fact the growth rate for the memoized version looks greater than
it should be ( around 4 rather than around 2.22 )
;; A quick estimate of the number of subproblems to be solved gives
O(n^2 2^n ) , which is superexponential but which settles down to
;; roughly doubling with every extra vertex. In the range where the
;; problem gets tricky, every extra vertex should increase the number
of subproblems by a factor of around ~ 2.2
(def dynamic (map (fn[n] (* (* n n)(reduce * (repeat n 2)))) (iterate inc 1)))
;-> (0 2 16 72 256 800 2304 6272 16384 41472 102400 247808 589824 1384448 3211264 7372800 16777216 37879808 84934656 189267968 419430400 924844032 2030043136 4437573632 9663676416 20971520000 45365592064 ...)
- > " 8.00 4.50 3.56 3.13 2.88 2.72 2.61 2.53 2.47 2.42 2.38 2.35 2.32 2.30 2.28 2.26 2.24 2.23 2.22 "
;; Whereas the brute force search and unmemoized versions should go more like O(n!)
(def brute (reductions * (iterate inc 1)))
- > ( 1 2 6 24 120 720 5040 40320 362880 3628800 39916800 479001600 6227020800 87178291200 1307674368000 20922789888000 355687428096000 6402373705728000 121645100408832000 )
;; Which obviously:
- > " 2.00 3.00 4.00 5.00 6.00 7.00 8.00 9.00 10.00 11.00 12.00 13.00 14.00 15.00 16.00 17.00 18.00 19.00 20.00 "
And I can well believe that , give or take . ( actually in the 16 vertex problem you have 14 subproblems to solve , so ( n-2 ) ! really )
;; I wonder why my memoized version isn't doing a bit better.
Here , rather than using ( ( memoize cpath ) ) I 'm going to
;; make my memoization table a dynamic variable so that I can clear it
;; for every run, and watch how many entries get added in the course
;; of computation. There's an added benefit that the memoization table
;; will get garbage collected once the computation is over.
(def ^:dynamic *cpaths* nil)
(defn cpath [j vset d]
(if-let [result (find @*cpaths* [j vset])] (val result)
(let [result (cond (empty? vset) (d 1 j)
(= (count vset) 1) (+ (d 1 (first vset)) (d (first vset) j))
:else (apply min
(for [ v vset]
(+ (d v j) (cpath v (disj vset v) d)))))]
(swap! *cpaths* assoc [j vset] result)
result)))
;; This
(defn benchmarktsp [n]
(binding [*cpaths* (atom {})]
(let [start (System/nanoTime)
ret (tsp (set (range n)) (fn [a b] 1))
end (System/nanoTime)
count (count @*cpaths*)
msec (int (/ (- end start) 1000000.0))]
(str ret ","count "," msec))))
- > " 3,1,0 "
(benchmarktsp 4) ;-> "4,3,0"
- > " 5,10,0 "
- > " 6,29,1 "
(benchmarktsp 7) ;-> "7,76,4"
(benchmarktsp 8) ;-> "8,187,12"
(benchmarktsp 9) ;-> "9,442,31"
- > " 10,1017,75 "
(benchmarktsp 11) ;-> "11,2296,200"
(benchmarktsp 12) ;-> "12,5111,630"
" 13,11254,2114 "
" 14,24565,7326 "
(benchmarktsp 15) ; "15,53236,29532"
(benchmarktsp 16) ; "16,114675,121461"
(benchmarktsp 17) ;
;; Recurrence relation for the size of the memoization table seems to behave as expected
- > " 3.00 3.33 2.90 2.62 2.46 2.36 2.30 2.26 2.23 2.20 2.18 2.17 2.15 "
;; And yet the runtime is going up faster than that
- > " 4.00 3.00 2.58 2.42 2.67 3.15 3.36 3.47 4.03 4.11 "
;; The number of milliseconds per table entry is going up rapidly at the end
(p2 (map (comp float /) '(1,4,12,31,75,200,630,2114,7326,29532,121461) '(29 76 187 442 1017 2296 5111 11254 24565 53236 114675)))
- > " 0.03 0.05 0.06 0.07 0.07 0.09 0.12 0.19 0.30 0.55 1.06 "
| null | https://raw.githubusercontent.com/johnlawrenceaspden/hobby-code/48e2a89d28557994c72299962cd8e3ace6a75b2d/travellingsalesman.clj | clojure | Which we can represent as a distance function
Have we got them all?
A tour in such a graph is a permutation of the list of vertices
Which represents a traversal of selected edges in a cycle
And such tours have an associated cost:
In this case it appears that 1 2 3 4 is a minimal cost tour.
Behold, I have solved the problem in the time it took to state it.
There is a problem with this approach
which is very like:
There is a way to do marginally less miserably
and then we'd pick the cheapest one.
So we need to summon the recursion fairies
And the fairies that we need to summon need to be able to answer questions of the following general form
once through every vertex of the set S, which is a subset of the
And the spell which summons such fairies is this spell
Given such an incantation, we can work out the lengths of various short excursions taking in various points of interest
-> nil
Including some which are nearly tours
-> 9
Amazingly, when we create the final tours, they all end up having the same value
So that the running time of this problem also goes like the factorial
-> 9
and yes, since you ask, I *did* fiddle these timings.
Now normally at this point, when we think of a clever recurrence
relation and implement it in a straightforward manner, memoization
will turn it from a superexponential horror into a clever dynamic
programming algorithm and it will run in O(n) or O(n^2) or
something neat like that
We memoize
And find:
The growth in time take still looks superexponential, but the growth rate isn't quite as insane as it was.
It seems that an early slowdown (presumably caused by the effort of
looking up the previous answers in the memoization table) is being
repaid by big speedups in larger problems.
In fact the growth rate for the memoized version looks greater than
A quick estimate of the number of subproblems to be solved gives
roughly doubling with every extra vertex. In the range where the
problem gets tricky, every extra vertex should increase the number
-> (0 2 16 72 256 800 2304 6272 16384 41472 102400 247808 589824 1384448 3211264 7372800 16777216 37879808 84934656 189267968 419430400 924844032 2030043136 4437573632 9663676416 20971520000 45365592064 ...)
Whereas the brute force search and unmemoized versions should go more like O(n!)
Which obviously:
I wonder why my memoized version isn't doing a bit better.
make my memoization table a dynamic variable so that I can clear it
for every run, and watch how many entries get added in the course
of computation. There's an added benefit that the memoization table
will get garbage collected once the computation is over.
This
-> "4,3,0"
-> "7,76,4"
-> "8,187,12"
-> "9,442,31"
-> "11,2296,200"
-> "12,5111,630"
"15,53236,29532"
"16,114675,121461"
Recurrence relation for the size of the memoization table seems to behave as expected
And yet the runtime is going up faster than that
The number of milliseconds per table entry is going up rapidly at the end | On the Travelling of Salesmen
Four vertices , 1 , 2 , 3 , 4
(def vertexset #{1,2,3,4})
Leads to 6 edges in the complete graph
(def edgeset {[1 2] 2, [1 3] 4, [1 4] 5, [2 3] 2, [2 4] 3, [3 4] 4})
(defn make-d [edgeset]
(fn [a b] (or (edgeset [a b]) (edgeset [b a]))))
(for [a vertexset b (disj vertexset a)] [a b ((make-d edgeset) a b)])
- > ( [ 1 2 2 ] [ 1 3 4 ] [ 1 4 5 ] [ 2 1 2 ] [ 2 3 2 ] [ 2 4 3 ] [ 3 1 4 ] [ 3 2 2 ] [ 3 4 4 ] [ 4 1 5 ] [ 4 2 3 ] [ 4 3 4 ] )
(defn scissor [a lst]
(for [i (range (inc (count lst)))] (concat (take i lst) (list a) (drop i lst))))
(defn perms [lst]
(if (empty? lst) '(())
(mapcat (partial scissor (first lst)) (perms (rest lst)))))
- > ( ( 1 2 3 4 ) ( 2 1 3 4 ) ( 2 3 1 4 ) ( 2 3 4 1 ) ( 1 3 2 4 ) ( 3 1 2 4 ) ( 3 2 1 4 ) ( 3 2 4 1 ) ( 1 3 4 2 ) ( 3 1 4 2 ) ( 3 4 1 2 ) ( 3 4 2 1 ) ( 1 2 4 3 ) ( 2 1 4 3 ) ( 2 4 1 3 ) ( 2 4 3 1 ) ( 1 4 2 3 ) ( 4 1 2 3 ) ( 4 2 1 3 ) ( 4 2 3 1 ) ( 1 4 3 2 ) ( 4 1 3 2 ) ( 4 3 1 2 ) ( 4 3 2 1 ) )
(defn edgelist [tour]
(for [[a b] (cons (list (last tour) (first tour)) (partition 2 1 tour))] [a b]))
- > ( [ 4 1 ] [ 1 2 ] [ 2 3 ] [ 3 4 ] )
(defn make-cost [edgeset]
(let [d (make-d edgeset)]
(fn [tour]
(reduce + (for [[a b] (edgelist tour)] (d a b))))))
- > 13
- > ( 13 14 )
Finding such a tour is known as the Travelling Salesman Problem .
(take 10 (map (comp count perms) (reductions conj #{} (range))))
( 1 1 2 6 24 120 720 5040 40320 362880 )
- > ( 1 2 6 24 120 720 5040 40320 362880 3628800 39916800 479001600 6227020800 87178291200 1307674368000 20922789888000 355687428096000 6402373705728000 121645100408832000 )
If we can calculate the cost of a tour a trillion times , then we might be able to solve this problem for a graph with 14 vertices .
The minimal tour must go through 1 .
If we knew the cheapest path from 1 to 2 that went through 3 and 4 ( once and once only )
and the cheapest path from 1 to 3 that went through 2 and 4 ( oaoo )
and the cheapest path from 1 to 4 that went through 2 and 3 ( o )
Then we 'd be done , because we 'd add the closing edge to each of these paths to make 3 separate tours
' What , oh fairy , is the cheapest path from 1 to j passing exactly
vertices of the graph which does not contain either 1 or j ? '
(defn cpath
  "Cost of the cheapest path from vertex 1 to vertex j that visits every
  member of vset exactly once (vset must contain neither 1 nor j), where d
  is a symmetric distance function. Plain recursion: exponential time."
  [j vset d]
  (if (empty? vset)
    (d 1 j)
    (if (= 1 (count vset))
      (let [v (first vset)]
        (+ (d 1 v) (d v j)))
      (apply min
             (for [v vset]
               (+ (d v j) (cpath v (disj vset v) d)))))))
- > 2
- > 4
- > 8
- > 11
- > 13
- > 13
- > 13
So perhaps we only need to ask for one of them
(defn tsp
  "Length of the cheapest closed tour over vertexset with distance function d.
  The minimal tour passes through vertex 1, so it suffices to find the
  cheapest 1->2 path through all the remaining vertices and close it with
  the edge from 2 back to 1. Assumes vertexset contains 1 and 2 and d is
  symmetric."
  [vertexset d]
  ;; disj accepts multiple keys, so remove 1 and 2 in one call instead of
  ;; the nested (disj (disj vertexset 2) 1)
  (+ (cpath 2 (disj vertexset 1 2) d)
     (d 2 1)))
- > 13
Now , if you look carefully , you will find that , given a graph with n vertices , the first call
here makes , which each make , which each make n-4 subcalls , and so on
- > 5
"Elapsed time: 1.1352 msecs"
- > 6
"Elapsed time: 1.457029 msecs"
- > 7
"Elapsed time: 3.021475 msecs"
- > 8
"Elapsed time: 12.281457 msecs"
"Elapsed time: 55.585345 msecs"
- > 10
"Elapsed time: 416.957839 msecs"
- > 11
"Elapsed time: 3365.925792 msecs"
- > 12
"Elapsed time: 33854.042469 msecs"
(defn p2
  "Render a sequence of numbers as one string, each formatted to two decimal
  places and followed by a single space."
  [nums]
  (with-out-str
    (doseq [x nums]
      (printf "%2.2f " x))))

(defn successive-ratios
  "The ratio between each consecutive pair of a sequence, as floats."
  [nums]
  (map (fn [[prev cur]] (float (/ cur prev)))
       (partition 2 1 nums)))
(p2 (successive-ratios '(1.13 1.45 3.02 12.2 55.5 416. 3365.1 33854.0)))
- > " 1.28 2.08 4.04 4.55 7.50 8.09 10.06 "
(defn cpath
  "Cheapest-path cost from vertex 1 to j visiting every member of vset
  exactly once (vset contains neither 1 nor j); d is the distance function.
  Same recurrence as before -- the speedup comes from the memoize below."
  [j vset d]
  (cond (empty? vset) (d 1 j)
        (= (count vset) 1) (+ (d 1 (first vset)) (d (first vset) j))
        :else (apply min
                     (for [ v vset]
                       (+ (d v j) (cpath v (disj vset v) d))))))
;; Rebind cpath to a memoizing wrapper; the recursive calls above resolve
;; through the var, so they hit the cache too.
;; NOTE(review): the cache key includes the function d, so each fresh
;; (fn [a b] ...) gets its own entries, and the table is global -- it is
;; never cleared between runs.
(def cpath (memoize cpath))
(time (tsp #{1 2 3 4 5} (fn [a b] 1)))
"Elapsed time: 1.424132 msecs"
(time (tsp #{1 2 3 4 5 6} (fn [a b] 1)))
"Elapsed time: 2.55207 msecs"
(time (tsp #{1 2 3 4 5 6 7} (fn [a b] 1)))
"Elapsed time: 5.633396 msecs"
(time (tsp #{1 2 3 4 5 6 7 8} (fn [a b] 1)))
"Elapsed time: 15.057154 msecs"
(time (tsp #{1 2 3 4 5 6 7 8 9} (fn [a b] 1)))
"Elapsed time: 31.793358 msecs"
(time (tsp #{1 2 3 4 5 6 7 8 9 10} (fn [a b] 1)))
"Elapsed time: 73.710072 msecs"
(time (tsp #{1 2 3 4 5 6 7 8 9 10 11} (fn [a b] 1)))
"Elapsed time: 217.877989 msecs"
- > 12
"Elapsed time: 770.114412 msecs"
- > 13
"Elapsed time: 2395.936944 msecs"
- > 14
"Elapsed time: 8114.328645 msecs"
- > 15
"Elapsed time: 29191.135966 msecs"
- > 16
"Elapsed time: 125260.112064 msecs"
We 've got a bit quicker . If we 've got 30 seconds to throw at the problem then we can solve it for rather than for n=12
(p2 (successive-ratios '(1.42 2.55 5.63 15. 31. 73. 217. 770. 2395. 8114. 29191. 125260.)))
- > " 1.80 2.21 2.66 2.07 2.35 2.97 3.55 3.11 3.39 3.60 4.29 "
If we compare the timings of the two methods where we 've measured them :
(p2 (map / '(1.42 2.55 5.63 15. 31. 73. 217. 770. 2395. 8114. 29191. 125260.) '(1.13 1.45 3.02 12.2 55.5 416. 3365.1 33854.0)))
- > " 1.26 1.76 1.86 1.23 0.56 0.18 0.06 0.02 "
it should be higher (around 4 rather than around 2.22).
The dynamic programming algorithm is O(n^2 2^n), which is superexponential,
but which settles down to growing the number of subproblems by a factor of around ~2.2
(def dynamic (map (fn[n] (* (* n n)(reduce * (repeat n 2)))) (iterate inc 1)))
- > " 8.00 4.50 3.56 3.13 2.88 2.72 2.61 2.53 2.47 2.42 2.38 2.35 2.32 2.30 2.28 2.26 2.24 2.23 2.22 "
(def brute (reductions * (iterate inc 1)))
- > ( 1 2 6 24 120 720 5040 40320 362880 3628800 39916800 479001600 6227020800 87178291200 1307674368000 20922789888000 355687428096000 6402373705728000 121645100408832000 )
- > " 2.00 3.00 4.00 5.00 6.00 7.00 8.00 9.00 10.00 11.00 12.00 13.00 14.00 15.00 16.00 17.00 18.00 19.00 20.00 "
And I can well believe that , give or take . ( actually in the 16 vertex problem you have 14 subproblems to solve , so ( n-2 ) ! really )
Here , rather than using ( ( memoize cpath ) ) I 'm going to
(def ^:dynamic *cpaths* nil)
(defn cpath
  "As before, but memoized by hand: results are cached in the *cpaths* atom
  (a map from [j vset] to cost), so the table can be sized and inspected,
  and it is garbage-collected once the enclosing binding ends.
  Note the key is [j vset] -- unlike memoize, d is not part of the key."
  [j vset d]
  ;; find (rather than get) so a cached nil/false would still count as a hit
  (if-let [result (find @*cpaths* [j vset])] (val result)
    (let [result (cond (empty? vset) (d 1 j)
                       (= (count vset) 1) (+ (d 1 (first vset)) (d (first vset) j))
                       :else (apply min
                                    (for [ v vset]
                                      (+ (d v j) (cpath v (disj vset v) d)))))]
      (swap! *cpaths* assoc [j vset] result)
      result)))
(defn benchmarktsp
  "Run tsp on the complete unit-distance graph with n vertices, with a fresh
  memoization table bound to *cpaths*, and report a string of
  \"answer,table-size,milliseconds\"."
  [n]
  (binding [*cpaths* (atom {})]
    (let [start (System/nanoTime)
          ret (tsp (set (range n)) (fn [a b] 1))
          end (System/nanoTime)
          count (count @*cpaths*) ;; NOTE(review): shadows clojure.core/count below
          msec (int (/ (- end start) 1000000.0))]
      (str ret ","count "," msec))))
- > " 3,1,0 "
- > " 5,10,0 "
- > " 6,29,1 "
- > " 10,1017,75 "
" 13,11254,2114 "
" 14,24565,7326 "
- > " 3.00 3.33 2.90 2.62 2.46 2.36 2.30 2.26 2.23 2.20 2.18 2.17 2.15 "
- > " 4.00 3.00 2.58 2.42 2.67 3.15 3.36 3.47 4.03 4.11 "
(p2 (map (comp float /) '(1,4,12,31,75,200,630,2114,7326,29532,121461) '(29 76 187 442 1017 2296 5111 11254 24565 53236 114675)))
- > " 0.03 0.05 0.06 0.07 0.07 0.09 0.12 0.19 0.30 0.55 1.06 "
|
c5671550f892b916776760705dcc3149f8b8c565c667556b51d7e6264a501f57 | bvaugon/ocapic | thermostat.ml | (*************************************************************************)
(* *)
(* OCaPIC *)
(* *)
(* *)
This file is distributed under the terms of the CeCILL license .
(* See file ../../LICENSE-en. *)
(* *)
(*************************************************************************)
open Pic
(* LCD driver instance: 8-bit data bus wired to PORTB, with the three usual
   control lines (e = enable on LATC5, rs = register select on LATD3,
   rw = read/write on LATC4). *)
module Disp = Lcd.Connect (
  struct
    let bus_size = Lcd.Eight
    let e = LATC5
    let rs = LATD3
    let rw = LATC4
    let bus = PORTB
  end
)
(* A key of the keypad: a digit 0-9, or one of the '*' / '#' symbol keys. *)
type button = Number of int | Star | Sharp
(* Scan the 3-column keypad matrix: drive one column line (RC1..RC3) high
   while releasing the other two to inputs, then sample the four row lines
   (RC0, RE2, RE1, RE0) to see which key, if any, closes the circuit.
   Returns None when no key is pressed. *)
let get_button =
  (* "release" = tri-state the pin (make it an input);
     "set" = make it an output driven high. *)
  let release_rc1 () = set_bit TRISC1 in
  let release_rc2 () = set_bit TRISC2 in
  let release_rc3 () = set_bit TRISC3 in
  let set_rc1 () = clear_bit TRISC1; set_bit RC1 in
  let set_rc2 () = clear_bit TRISC2; set_bit RC2 in
  let set_rc3 () = clear_bit TRISC3; set_bit RC3 in
  fun () ->
    let result =
      (* Column driven by RC3: rows read keys 1, 4, 7, '*'. *)
      release_rc1 (); release_rc2 (); set_rc3 ();
      if test_bit RC0 then Some (Number 1) else
      if test_bit RE2 then Some (Number 4) else
      if test_bit RE1 then Some (Number 7) else
      if test_bit RE0 then Some Star else (
        (* Column driven by RC2: rows read keys 2, 5, 8, 0. *)
        release_rc1 (); release_rc3 (); set_rc2 ();
        if test_bit RC0 then Some (Number 2) else
        if test_bit RE2 then Some (Number 5) else
        if test_bit RE1 then Some (Number 8) else
        if test_bit RE0 then Some (Number 0) else (
          (* Column driven by RC1: rows read keys 3, 6, 9, '#'. *)
          release_rc2 (); release_rc3 (); set_rc1 ();
          if test_bit RC0 then Some (Number 3) else
          if test_bit RE2 then Some (Number 6) else
          if test_bit RE1 then Some (Number 9) else
          if test_bit RE0 then Some Sharp else
          None)) in
    (* Leave every column line released so no pin is left driving. *)
    release_rc1 (); release_rc2 (); release_rc3 ();
    result
(* Perform one blocking A/D conversion: program ADCON2/ADCON1/ADCON0 with
   fixed configuration values (channel selection is hard-coded in the
   ADCON0 write), busy-wait on the GO bit until the conversion completes,
   then assemble the result from the high byte (ADRESH, shifted left 8)
   and ADRES. *)
let read_temp () =
  write_reg ADCON2 0b10111110;
  write_reg ADCON1 0b00111101;
  write_reg ADCON0 0b00000111;
  while test_bit GO_NOT_DONE do () done;
  (read_reg ADRESH lsl 8) lor read_reg ADRES
(* Show the given reading as an integer at row 2, column 0 of the LCD. *)
let print_temp temp =
  Disp.moveto 2 0;
  Disp.print_int temp
(* Show the pressed key at row 1, column 0 of the LCD;
   a greeting when no key is down. *)
let print_but but =
  Disp.moveto 1 0;
  match but with
  | Some (Number n) -> Disp.print_int n
  | Some Star -> Disp.print_char '*'
  | Some Sharp -> Disp.print_char '#'
  | None -> Disp.print_string "Hello!"
(* Main polling loop: re-read the keypad and the ADC each iteration,
   redraw the display only when either value changed, and drive RA5 low
   while '#' is held (high otherwise). Sleeps 10 between iterations
   (units per OCaPIC's Sys.sleep -- TODO confirm). Never returns. *)
let rec loop old_but old_temp =
  let new_but = get_button () in
  let new_temp = read_temp () in
  if old_but <> new_but || old_temp <> new_temp then (
    Disp.clear ();
    print_but new_but;
    print_temp new_temp;
  );
  if new_but = Some Sharp then clear_bit RA5
  else set_bit RA5;
  Sys.sleep 10;
  loop new_but new_temp
(* Entry point: select the internal oscillator speed (IRCF bits) and enable
   the PLL, bring up the LCD with a startup banner, write ADCON1, make RA5
   an output (clear its TRIS bit), then enter the polling loop with no key
   pressed and a zero reading. *)
let () =
  set_bit IRCF1;
  set_bit IRCF0;
  set_bit PLLEN;
  Disp.init ();
  Disp.config ();
  Disp.print_string "Starting...";
  write_reg ADCON1 0x0F;
  clear_bit TRISA5;
  loop None 0
| null | https://raw.githubusercontent.com/bvaugon/ocapic/a14cd9ec3f5022aeb5fe2264d595d7e8f1ddf58a/tests/thermostat2/thermostat.ml | ocaml | ***********************************************************************
OCaPIC
See file ../../LICENSE-en.
*********************************************************************** |
This file is distributed under the terms of the CeCILL license .
open Pic
module Disp = Lcd.Connect (
struct
let bus_size = Lcd.Eight
let e = LATC5
let rs = LATD3
let rw = LATC4
let bus = PORTB
end
)
type button = Number of int | Star | Sharp
let get_button =
let release_rc1 () = set_bit TRISC1 in
let release_rc2 () = set_bit TRISC2 in
let release_rc3 () = set_bit TRISC3 in
let set_rc1 () = clear_bit TRISC1; set_bit RC1 in
let set_rc2 () = clear_bit TRISC2; set_bit RC2 in
let set_rc3 () = clear_bit TRISC3; set_bit RC3 in
fun () ->
let result =
release_rc1 (); release_rc2 (); set_rc3 ();
if test_bit RC0 then Some (Number 1) else
if test_bit RE2 then Some (Number 4) else
if test_bit RE1 then Some (Number 7) else
if test_bit RE0 then Some Star else (
release_rc1 (); release_rc3 (); set_rc2 ();
if test_bit RC0 then Some (Number 2) else
if test_bit RE2 then Some (Number 5) else
if test_bit RE1 then Some (Number 8) else
if test_bit RE0 then Some (Number 0) else (
release_rc2 (); release_rc3 (); set_rc1 ();
if test_bit RC0 then Some (Number 3) else
if test_bit RE2 then Some (Number 6) else
if test_bit RE1 then Some (Number 9) else
if test_bit RE0 then Some Sharp else
None)) in
release_rc1 (); release_rc2 (); release_rc3 ();
result
let read_temp () =
write_reg ADCON2 0b10111110;
write_reg ADCON1 0b00111101;
write_reg ADCON0 0b00000111;
while test_bit GO_NOT_DONE do () done;
(read_reg ADRESH lsl 8) lor read_reg ADRES
let print_temp temp =
Disp.moveto 2 0;
Disp.print_int temp
let print_but but =
Disp.moveto 1 0;
match but with
| Some (Number n) -> Disp.print_int n
| Some Star -> Disp.print_char '*'
| Some Sharp -> Disp.print_char '#'
| None -> Disp.print_string "Hello!"
let rec loop old_but old_temp =
let new_but = get_button () in
let new_temp = read_temp () in
if old_but <> new_but || old_temp <> new_temp then (
Disp.clear ();
print_but new_but;
print_temp new_temp;
);
if new_but = Some Sharp then clear_bit RA5
else set_bit RA5;
Sys.sleep 10;
loop new_but new_temp
let () =
set_bit IRCF1;
set_bit IRCF0;
set_bit PLLEN;
Disp.init ();
Disp.config ();
Disp.print_string "Starting...";
write_reg ADCON1 0x0F;
clear_bit TRISA5;
loop None 0
|
cc5f1a78b3a8433afff8cc20b3e304d4f93e9d302de450bd34a3dc0e3e10ece6 | binaryage/chromex | cookies.clj | (ns chromex.ext.cookies
"Use the chrome.cookies API to query and modify cookies, and to be notified when they change.
* available since Chrome 36
* "
(:refer-clojure :only [defmacro defn apply declare meta let partial])
(:require [chromex.wrapgen :refer [gen-wrap-helper]]
[chromex.callgen :refer [gen-call-helper gen-tap-all-events-call]]))
(declare api-table)
(declare gen-call)
-- functions --------------------------------------------------------------------------------------------------------------
(defmacro get
"Retrieves information about a single cookie. If more than one cookie of the same name exists for the given URL, the one
with the longest path will be returned. For cookies with the same path length, the cookie with the earliest creation time
will be returned.
|details| - Details to identify the cookie being retrieved.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [cookie] where:
|cookie| - Contains details about the cookie. This parameter is null if no such cookie was found.
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error.
#method-get."
([details] (gen-call :function ::get &form details)))
(defmacro get-all
"Retrieves all cookies from a single cookie store that match the given information. The cookies returned will be sorted,
with those with the longest path first. If multiple cookies have the same path length, those with the earliest creation
time will be first.
|details| - Information to filter the cookies being retrieved.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [cookies] where:
|cookies| - All the existing, unexpired cookies that match the given cookie info.
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error.
#method-getAll."
([details] (gen-call :function ::get-all &form details)))
(defmacro set
"Sets a cookie with the given cookie data; may overwrite equivalent cookies if they exist.
|details| - Details about the cookie being set.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [cookie] where:
|cookie| - Contains details about the cookie that's been set. If setting failed for any reason, this will be 'null', and
'runtime.lastError' will be set.
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error.
#method-set."
([details] (gen-call :function ::set &form details)))
(defmacro remove
"Deletes a cookie by name.
|details| - Information to identify the cookie to remove.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [details] where:
|details| - Contains details about the cookie that's been removed. If removal failed for any reason, this will be
'null', and 'runtime.lastError' will be set.
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error.
#method-remove."
([details] (gen-call :function ::remove &form details)))
(defmacro get-all-cookie-stores
"Lists all existing cookie stores.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [cookie-stores] where:
|cookie-stores| - All the existing cookie stores.
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error.
#method-getAllCookieStores."
([] (gen-call :function ::get-all-cookie-stores &form)))
; -- events -----------------------------------------------------------------------------------------------------------------
;
; docs: /#tapping-events
(defmacro tap-on-changed-events
"Fired when a cookie is set or removed. As a special case, note that updating a cookie's properties is implemented as a two
step process: the cookie to be updated is first removed entirely, generating a notification with 'cause' of 'overwrite' .
Afterwards, a new cookie is written with the updated values, generating a second notification with 'cause' 'explicit'.
Events will be put on the |channel| with signature [::on-changed [change-info]] where:
|change-info| - #property-onChanged-changeInfo.
Note: |args| will be passed as additional parameters into Chrome event's .addListener call.
#event-onChanged."
([channel & args] (apply gen-call :event ::on-changed &form channel args)))
; -- convenience ------------------------------------------------------------------------------------------------------------
(defmacro tap-all-events
"Taps all valid non-deprecated events in chromex.ext.cookies namespace."
[chan]
(gen-tap-all-events-call api-table (meta &form) chan))
; ---------------------------------------------------------------------------------------------------------------------------
; -- API TABLE --------------------------------------------------------------------------------------------------------------
; ---------------------------------------------------------------------------------------------------------------------------
(def api-table
{:namespace "chrome.cookies",
:since "36",
:functions
[{:id ::get,
:name "get",
:callback? true,
:params
[{:name "details", :type "object"}
{:name "callback",
:type :callback,
:callback {:params [{:name "cookie", :optional? true, :type "cookies.Cookie"}]}}]}
{:id ::get-all,
:name "getAll",
:callback? true,
:params
[{:name "details", :type "object"}
{:name "callback",
:type :callback,
:callback {:params [{:name "cookies", :type "[array-of-cookies.Cookies]"}]}}]}
{:id ::set,
:name "set",
:callback? true,
:params
[{:name "details", :type "object"}
{:name "callback",
:optional? true,
:type :callback,
:callback {:params [{:name "cookie", :optional? true, :type "cookies.Cookie"}]}}]}
{:id ::remove,
:name "remove",
:callback? true,
:params
[{:name "details", :type "object"}
{:name "callback",
:optional? true,
:type :callback,
:callback {:params [{:name "details", :optional? true, :type "object"}]}}]}
{:id ::get-all-cookie-stores,
:name "getAllCookieStores",
:callback? true,
:params
[{:name "callback",
:type :callback,
:callback {:params [{:name "cookie-stores", :type "[array-of-cookies.CookieStores]"}]}}]}],
:events [{:id ::on-changed, :name "onChanged", :params [{:name "change-info", :type "object"}]}]})
; -- helpers ----------------------------------------------------------------------------------------------------------------
; Code generation for the native API wrapper: expands into interop code for
; the item identified by [kind item-id], driven by this namespace's api-table.
(defmacro gen-wrap [kind item-id config & args]
  (apply gen-wrap-helper api-table kind item-id config args))

; Code generation for an API call-site, specialised to this api-table;
; used by the public macros above via (gen-call :function ::name ...).
(def gen-call (partial gen-call-helper api-table))
-- events -----------------------------------------------------------------------------------------------------------------
docs: /#tapping-events
-- convenience ------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------
-- API TABLE --------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------
-- helpers ----------------------------------------------------------------------------------------------------------------
code generation for native API wrapper
code generation for API call-site | (ns chromex.ext.cookies
"Use the chrome.cookies API to query and modify cookies, and to be notified when they change.
* available since Chrome 36
* "
(:refer-clojure :only [defmacro defn apply declare meta let partial])
(:require [chromex.wrapgen :refer [gen-wrap-helper]]
[chromex.callgen :refer [gen-call-helper gen-tap-all-events-call]]))
(declare api-table)
(declare gen-call)
-- functions --------------------------------------------------------------------------------------------------------------
(defmacro get
"Retrieves information about a single cookie. If more than one cookie of the same name exists for the given URL, the one
with the longest path will be returned. For cookies with the same path length, the cookie with the earliest creation time
will be returned.
|details| - Details to identify the cookie being retrieved.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [cookie] where:
|cookie| - Contains details about the cookie. This parameter is null if no such cookie was found.
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error.
#method-get."
([details] (gen-call :function ::get &form details)))
(defmacro get-all
"Retrieves all cookies from a single cookie store that match the given information. The cookies returned will be sorted,
with those with the longest path first. If multiple cookies have the same path length, those with the earliest creation
time will be first.
|details| - Information to filter the cookies being retrieved.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [cookies] where:
|cookies| - All the existing, unexpired cookies that match the given cookie info.
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error.
#method-getAll."
([details] (gen-call :function ::get-all &form details)))
(defmacro set
|details| - Details about the cookie being set.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [cookie] where:
|cookie| - Contains details about the cookie that's been set. If setting failed for any reason, this will be 'null', and
'runtime.lastError' will be set.
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error.
#method-set."
([details] (gen-call :function ::set &form details)))
(defmacro remove
"Deletes a cookie by name.
|details| - Information to identify the cookie to remove.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [details] where:
|details| - Contains details about the cookie that's been removed. If removal failed for any reason, this will be
'null', and 'runtime.lastError' will be set.
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error.
#method-remove."
([details] (gen-call :function ::remove &form details)))
(defmacro get-all-cookie-stores
"Lists all existing cookie stores.
This function returns a core.async channel of type `promise-chan` which eventually receives a result value.
Signature of the result value put on the channel is [cookie-stores] where:
|cookie-stores| - All the existing cookie stores.
In case of an error the channel closes without receiving any value and relevant error object can be obtained via
chromex.error/get-last-error.
#method-getAllCookieStores."
([] (gen-call :function ::get-all-cookie-stores &form)))
(defmacro tap-on-changed-events
"Fired when a cookie is set or removed. As a special case, note that updating a cookie's properties is implemented as a two
step process: the cookie to be updated is first removed entirely, generating a notification with 'cause' of 'overwrite' .
Afterwards, a new cookie is written with the updated values, generating a second notification with 'cause' 'explicit'.
Events will be put on the |channel| with signature [::on-changed [change-info]] where:
|change-info| - #property-onChanged-changeInfo.
Note: |args| will be passed as additional parameters into Chrome event's .addListener call.
#event-onChanged."
([channel & args] (apply gen-call :event ::on-changed &form channel args)))
(defmacro tap-all-events
"Taps all valid non-deprecated events in chromex.ext.cookies namespace."
[chan]
(gen-tap-all-events-call api-table (meta &form) chan))
(def api-table
{:namespace "chrome.cookies",
:since "36",
:functions
[{:id ::get,
:name "get",
:callback? true,
:params
[{:name "details", :type "object"}
{:name "callback",
:type :callback,
:callback {:params [{:name "cookie", :optional? true, :type "cookies.Cookie"}]}}]}
{:id ::get-all,
:name "getAll",
:callback? true,
:params
[{:name "details", :type "object"}
{:name "callback",
:type :callback,
:callback {:params [{:name "cookies", :type "[array-of-cookies.Cookies]"}]}}]}
{:id ::set,
:name "set",
:callback? true,
:params
[{:name "details", :type "object"}
{:name "callback",
:optional? true,
:type :callback,
:callback {:params [{:name "cookie", :optional? true, :type "cookies.Cookie"}]}}]}
{:id ::remove,
:name "remove",
:callback? true,
:params
[{:name "details", :type "object"}
{:name "callback",
:optional? true,
:type :callback,
:callback {:params [{:name "details", :optional? true, :type "object"}]}}]}
{:id ::get-all-cookie-stores,
:name "getAllCookieStores",
:callback? true,
:params
[{:name "callback",
:type :callback,
:callback {:params [{:name "cookie-stores", :type "[array-of-cookies.CookieStores]"}]}}]}],
:events [{:id ::on-changed, :name "onChanged", :params [{:name "change-info", :type "object"}]}]})
(defmacro gen-wrap [kind item-id config & args]
(apply gen-wrap-helper api-table kind item-id config args))
(def gen-call (partial gen-call-helper api-table)) |
d3f2c14c04ebd126984a503ff4a1476325e679281647b3acdda5e66768ede43a | avsm/platform | opamDirTrack.ml | (**************************************************************************)
(* *)
(* Copyright 2016 OCamlPro *)
(* *)
(* All rights reserved. This file is distributed under the terms of the *)
GNU Lesser General Public License version 2.1 , with the special
(* exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open OpamCompat
open OpamStd.Op
open OpamProcess.Job.Op
let log ?level fmt = OpamConsole.log ?level "TRACK" fmt
let slog = OpamConsole.slog
module SM = OpamStd.String.Map
type digest = string
let digest_of_string dg = dg
let string_of_digest dg = dg
(* What happened to one path between two scans. The digest carried by each
   constructor describes the item's state AFTER the change (a removal has
   no such state to record). *)
type change =
  | Added of digest
  | Removed
  | Contents_changed of digest
  | Perm_changed of digest
  | Kind_changed of digest
(* Human-readable noun for a change, used in user-facing listings. *)
let string_of_change c =
  match c with
  | Added _ -> "addition"
  | Removed -> "removal"
  | Contents_changed _ -> "modifications"
  | Perm_changed _ -> "permission change"
  | Kind_changed _ -> "kind change"
(* Render a change map as an itemized human-readable listing, one line per
   path, e.g. "Addition of lib/foo". *)
let to_string t =
  OpamStd.Format.itemize (fun (f, change) ->
      Printf.sprintf "%s of %s"
        (String.capitalize_ascii (string_of_change change)) f)
    (SM.bindings t)
(** uid, gid, perm *)
type perms = int * int * int
(* Kind-specific payload of a tracked filesystem entry: regular files carry
   a digest string, symlinks their target, special files two device ids. *)
type item_value =
  | File of string
  | Dir
  | Link of string
  | Special of (int * int)
(* A tracked entry: the (uid, gid, perm) triple plus the kind payload. *)
type item = perms * item_value
(* Hex content digest of file [f], cached per path in a private global hash
   table; each entry is stamped with the (size, mtime) seen at digest time.
   On a miss or a stale entry the file is re-read with Digest.file and the
   entry replaced.
   NOTE(review): the validity test uses [||] -- the cached digest is reused
   when EITHER the size OR the mtime still matches. The conservative check
   would be [&&] (both must match); confirm this is intentional. *)
let cached_digest =
  let item_cache = Hashtbl.create 749 in
  fun f size mtime ->
    try
      let csize, cmtime, digest = Hashtbl.find item_cache f in
      (* A stale entry falls through to the recompute path via Not_found. *)
      if csize = size || mtime = cmtime then Digest.to_hex digest
      else raise Not_found
    with Not_found ->
      let digest = Digest.file f in
      Hashtbl.replace item_cache f (size, mtime, digest);
      Digest.to_hex digest
(* Cheap pseudo-digest built from size and mtime only; the file contents
   are never read. The filename argument is ignored (kept for signature
   parity with cached_digest). *)
let quick_digest _f size mtime =
  let mtime_repr = string_of_float mtime in
  Printf.sprintf "S%dT%s" size mtime_repr
(* Digest of file [f]: a real content digest when [precise] (defaulting to
   the global precise_tracking configuration flag), otherwise the cheap
   size/mtime stamp from quick_digest. *)
let get_digest ?(precise=OpamCoreConfig.(!r.precise_tracking)) f size mtime =
  if precise then cached_digest f size mtime
  else quick_digest f size mtime
(* Stat [f] (lstat: symlinks are not followed) and build its tracking item:
   the ownership/permission triple plus a kind-specific payload -- content
   digest for regular files, link target for symlinks, device numbers for
   character/block/fifo/socket files.
   Raises Unix.Unix_error if the path cannot be stat'd. *)
let item_of_filename ?precise f : item =
  let stats = Unix.lstat f in
  Unix.(stats.st_uid, stats.st_gid, stats.st_perm),
  match stats.Unix.st_kind with
  | Unix.S_REG ->
    File (get_digest ?precise f stats.Unix.st_size stats.Unix.st_mtime)
  | Unix.S_DIR -> Dir
  | Unix.S_LNK -> Link (Unix.readlink f)
  | Unix.S_CHR | Unix.S_BLK | Unix.S_FIFO | Unix.S_SOCK ->
    Special Unix.(stats.st_dev, stats.st_rdev)
(* Serialise an item's identity as a short tagged string (kind prefix plus
   payload); the permission triple is deliberately not part of the digest. *)
let item_digest (_perms, value) =
  match value with
  | File d -> "F:" ^ d
  | Dir -> "D"
  | Link l -> "L:" ^ l
  | Special (a, b) -> Printf.sprintf "S:%d:%d" a b
(* A digest is "precise" unless it is a quick_digest stamp, which always
   begins with "F:S" (the "F:" file tag followed by "S<size>T<mtime>"). *)
let is_precise_digest d =
  not (OpamStd.String.starts_with ~prefix:"F:S" d)
(* Run the job [job_f] and report every change it makes under [dir].
   The tree is indexed recursively (relative path -> item) before and after
   the job; entries whose relative path is in [except] are skipped.
   Returns the job's result paired with the path -> change map.
   Scan errors (unreadable directories, files vanishing mid-scan) are
   logged and the offending entry skipped rather than aborting. *)
let track dir ?(except=OpamFilename.Base.Set.empty) job_f =
  let module SM = OpamStd.String.Map in
  (* Fold the contents of [prefix]/[dir] into [acc], recursing into
     subdirectories; keys are paths relative to the tracked root. *)
  let rec make_index acc prefix dir =
    let files =
      try Sys.readdir (Filename.concat prefix dir)
      with Sys_error _ as e ->
        log "Error at dir %s: %a" (Filename.concat prefix dir)
          (slog Printexc.to_string) e;
        [||]
    in
    Array.fold_left
      (fun acc f ->
         let rel = Filename.concat dir f in
         if OpamFilename.Base.(Set.mem (of_string rel) except) then acc else
         let f = Filename.concat prefix rel in
         try
           let item = item_of_filename f in
           let acc = SM.add rel item acc in
           match item with
           | _, Dir -> make_index acc prefix rel
           | _ -> acc
         with Unix.Unix_error _ as e ->
           log "Error at %s: %a" f (slog Printexc.to_string) e;
           acc)
      acc files
  in
  let str_dir = OpamFilename.Dir.to_string dir in
  let scan_timer = OpamConsole.timer () in
  let before = make_index SM.empty str_dir "" in
  log ~level:2 "before install: %a elements scanned in %.3fs"
    (slog @@ string_of_int @* SM.cardinal) before (scan_timer ());
  job_f () @@| fun result ->
  let scan_timer = OpamConsole.timer () in
  let after = make_index SM.empty str_dir "" in
  (* Classify each path: present only before -> Removed; only after ->
     Added; present in both -> compare contents first, then permissions,
     then whether the entry changed kind altogether. *)
  let diff =
    SM.merge (fun _ before after ->
        match before, after with
        | None, None -> assert false
        | Some _, None -> Some Removed
        | None, Some item -> Some (Added (item_digest item))
        | Some (perma, a), Some ((permb, b) as item) ->
          if a = b then
            if perma = permb then None
            else Some (Perm_changed (item_digest item))
          else
          match a, b with
          | File _, File _ | Link _, Link _
          | Dir, Dir | Special _, Special _ ->
            Some (Contents_changed (item_digest item))
          | _ -> Some (Kind_changed (item_digest item)))
      before after
  in
  log "after install: %a elements, %a added, scanned in %.3fs"
    (slog @@ string_of_int @* SM.cardinal) after
    (slog @@ string_of_int @* SM.cardinal @*
     SM.filter (fun _ -> function Added _ -> true | _ -> false))
    diff (scan_timer ());
  result, diff
(* Re-check [file] against a previously recorded [digest], using the same
   precision (quick vs precise) the recorded digest was made with.
   Returns [`Unchanged], [`Changed], or [`Removed] when the file can no
   longer be stat'd.
   Fix: the [try] must cover [item_of_filename], which is what raises
   Unix_error for a missing file -- previously the bindings sat outside the
   handler, so a removed file escaped as an exception and the [`Removed]
   branch was unreachable. *)
let check_digest file digest =
  try
    let precise = is_precise_digest digest in
    let it = item_of_filename ~precise file in
    if item_digest it = digest then `Unchanged else `Changed
  with Unix.Unix_error _ -> `Removed
(* Check a recorded change map against the current state of [prefix]: for
   each file recorded as added or (contents/kind) changed, report whether
   it is now unchanged, changed, or removed, in path order. Removals and
   permission changes are not re-checked. *)
let check prefix changes =
  let str_pfx = OpamFilename.Dir.to_string prefix in
  SM.fold (fun fname op acc ->
      let f = Filename.concat str_pfx fname in
      match op with
      | Added dg | Kind_changed dg | Contents_changed dg ->
        (OpamFilename.of_string f, check_digest f dg) :: acc
      | Perm_changed _ | Removed -> acc)
    changes []
  |> List.rev
(* Undo the recorded [changes] under [prefix]: delete files and directories
   whose addition (or kind change) was recorded. A file is only removed if
   its current digest still matches the recorded one, unless [force];
   directories are only removed when empty. With [dryrun] nothing is
   actually deleted. At the end, report: entries already gone, entries
   kept because they changed since, non-empty directories left in place,
   and change kinds that cannot be reverted at all. *)
let revert ?title ?(verbose=OpamConsole.verbose()) ?(force=false)
    ?(dryrun=false) prefix changes =
  let title = match title with
    | None -> ""
    | Some t -> t ^ ": "
  in
  let rmdir d = if not dryrun then OpamFilename.rmdir d in
  let rmfile f = if not dryrun then OpamFilename.remove f in
  let changes =
    (* Reverse the list so that dirnames come after the files they contain *)
    List.rev (OpamStd.String.Map.bindings changes)
  in
  (* One pass over the changes, accumulating the four report lists. *)
  let already, modified, nonempty, cannot =
    List.fold_left (fun (already,modified,nonempty,cannot as acc) (fname,op) ->
        let f = Filename.concat (OpamFilename.Dir.to_string prefix) fname in
        match op with
        | Added dg | Kind_changed dg ->
          (* Current kind and digest; None, None when the entry is gone. *)
          let cur_item_ct, cur_dg =
            try
              let precise = is_precise_digest dg in
              let item = item_of_filename ~precise f in
              Some (snd item), Some (item_digest item)
            with Unix.Unix_error _ -> None, None
          in
          if cur_dg = None then (fname::already, modified, nonempty, cannot)
          else if cur_dg <> Some dg && not force then
            (already, fname::modified, nonempty, cannot)
          else if cur_item_ct = Some Dir then
            let d = OpamFilename.Dir.of_string f in
            if OpamFilename.dir_is_empty d then
              (rmdir d; acc)
            else
            (* Record only the topmost non-empty directory of a subtree. *)
            let nonempty =
              if List.exists
                  (OpamStd.String.starts_with ~prefix:fname) nonempty
              then nonempty else fname::nonempty
            in
            (already, modified, nonempty, cannot)
          else
          let f = OpamFilename.of_string f in
          rmfile f;
          acc
        | Contents_changed dg ->
          if check_digest f dg = `Changed then
            (already, modified, nonempty, (op,fname)::cannot)
          else
            acc (* File has changed, assume the removal script reverted it *)
        | (Removed | Perm_changed _) ->
          (already, modified, nonempty, (op,fname)::cannot))
      ([], [], [], []) changes
  in
  if already <> [] then
    log ~level:2 "%sfiles %s were already removed" title
      (String.concat ", " (List.rev already));
  if modified <> [] && verbose then
    OpamConsole.warning "%snot removing files that changed since:\n%s" title
      (OpamStd.Format.itemize (fun s -> s) (List.rev modified));
  if nonempty <> [] && verbose then
    OpamConsole.note "%snot removing non-empty directories:\n%s" title
      (OpamStd.Format.itemize (fun s -> s) (List.rev nonempty));
  if cannot <> [] && verbose then
    OpamConsole.warning "%scannot revert:\n%s" title
      (OpamStd.Format.itemize
         (fun (op,f) -> string_of_change op ^" of "^ f)
         (List.rev cannot))
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/opam-client.2.0.5%2Bdune/src/core/opamDirTrack.ml | ocaml | ************************************************************************
Copyright 2016 OCamlPro
All rights reserved. This file is distributed under the terms of the
exception on linking described in the file LICENSE.
************************************************************************
* uid, gid, perm
Reverse the list so that dirnames come after the files they contain
File has changed, assume the removal script reverted it | GNU Lesser General Public License version 2.1 , with the special
open OpamCompat
open OpamStd.Op
open OpamProcess.Job.Op
let log ?level fmt = OpamConsole.log ?level "TRACK" fmt
let slog = OpamConsole.slog
module SM = OpamStd.String.Map
type digest = string
let digest_of_string dg = dg
let string_of_digest dg = dg
type change =
| Added of digest
| Removed
| Contents_changed of digest
| Perm_changed of digest
| Kind_changed of digest
type t = change SM.t
let string_of_change = function
| Added _ -> "addition"
| Removed -> "removal"
| Contents_changed _ -> "modifications"
| Perm_changed _ -> "permission change"
| Kind_changed _ -> "kind change"
let to_string t =
OpamStd.Format.itemize (fun (f, change) ->
Printf.sprintf "%s of %s"
(String.capitalize_ascii (string_of_change change)) f)
(SM.bindings t)
type perms = int * int * int
type item_value =
| File of string
| Dir
| Link of string
| Special of (int * int)
type item = perms * item_value
let cached_digest =
let item_cache = Hashtbl.create 749 in
fun f size mtime ->
try
let csize, cmtime, digest = Hashtbl.find item_cache f in
if csize = size || mtime = cmtime then Digest.to_hex digest
else raise Not_found
with Not_found ->
let digest = Digest.file f in
Hashtbl.replace item_cache f (size, mtime, digest);
Digest.to_hex digest
let quick_digest _f size mtime =
Printf.sprintf "S%dT%s" size (string_of_float mtime)
let get_digest ?(precise=OpamCoreConfig.(!r.precise_tracking)) f size mtime =
if precise then cached_digest f size mtime
else quick_digest f size mtime
let item_of_filename ?precise f : item =
let stats = Unix.lstat f in
Unix.(stats.st_uid, stats.st_gid, stats.st_perm),
match stats.Unix.st_kind with
| Unix.S_REG ->
File (get_digest ?precise f stats.Unix.st_size stats.Unix.st_mtime)
| Unix.S_DIR -> Dir
| Unix.S_LNK -> Link (Unix.readlink f)
| Unix.S_CHR | Unix.S_BLK | Unix.S_FIFO | Unix.S_SOCK ->
Special Unix.(stats.st_dev, stats.st_rdev)
let item_digest = function
| _perms, File d -> "F:" ^ d
| _perms, Dir -> "D"
| _perms, Link l -> "L:" ^ l
| _perms, Special (a,b) -> Printf.sprintf "S:%d:%d" a b
let is_precise_digest d =
not (OpamStd.String.starts_with ~prefix:"F:S" d)
let track dir ?(except=OpamFilename.Base.Set.empty) job_f =
let module SM = OpamStd.String.Map in
let rec make_index acc prefix dir =
let files =
try Sys.readdir (Filename.concat prefix dir)
with Sys_error _ as e ->
log "Error at dir %s: %a" (Filename.concat prefix dir)
(slog Printexc.to_string) e;
[||]
in
Array.fold_left
(fun acc f ->
let rel = Filename.concat dir f in
if OpamFilename.Base.(Set.mem (of_string rel) except) then acc else
let f = Filename.concat prefix rel in
try
let item = item_of_filename f in
let acc = SM.add rel item acc in
match item with
| _, Dir -> make_index acc prefix rel
| _ -> acc
with Unix.Unix_error _ as e ->
log "Error at %s: %a" f (slog Printexc.to_string) e;
acc)
acc files
in
let str_dir = OpamFilename.Dir.to_string dir in
let scan_timer = OpamConsole.timer () in
let before = make_index SM.empty str_dir "" in
log ~level:2 "before install: %a elements scanned in %.3fs"
(slog @@ string_of_int @* SM.cardinal) before (scan_timer ());
job_f () @@| fun result ->
let scan_timer = OpamConsole.timer () in
let after = make_index SM.empty str_dir "" in
let diff =
SM.merge (fun _ before after ->
match before, after with
| None, None -> assert false
| Some _, None -> Some Removed
| None, Some item -> Some (Added (item_digest item))
| Some (perma, a), Some ((permb, b) as item) ->
if a = b then
if perma = permb then None
else Some (Perm_changed (item_digest item))
else
match a, b with
| File _, File _ | Link _, Link _
| Dir, Dir | Special _, Special _ ->
Some (Contents_changed (item_digest item))
| _ -> Some (Kind_changed (item_digest item)))
before after
in
log "after install: %a elements, %a added, scanned in %.3fs"
(slog @@ string_of_int @* SM.cardinal) after
(slog @@ string_of_int @* SM.cardinal @*
SM.filter (fun _ -> function Added _ -> true | _ -> false))
diff (scan_timer ());
result, diff
let check_digest file digest =
let precise = is_precise_digest digest in
let it = item_of_filename ~precise file in
try if item_digest it = digest then `Unchanged else `Changed
with Unix.Unix_error _ -> `Removed
let check prefix changes =
let str_pfx = OpamFilename.Dir.to_string prefix in
SM.fold (fun fname op acc ->
let f = Filename.concat str_pfx fname in
match op with
| Added dg | Kind_changed dg | Contents_changed dg ->
(OpamFilename.of_string f, check_digest f dg) :: acc
| Perm_changed _ | Removed -> acc)
changes []
|> List.rev
let revert ?title ?(verbose=OpamConsole.verbose()) ?(force=false)
?(dryrun=false) prefix changes =
let title = match title with
| None -> ""
| Some t -> t ^ ": "
in
let rmdir d = if not dryrun then OpamFilename.rmdir d in
let rmfile f = if not dryrun then OpamFilename.remove f in
let changes =
List.rev (OpamStd.String.Map.bindings changes)
in
let already, modified, nonempty, cannot =
List.fold_left (fun (already,modified,nonempty,cannot as acc) (fname,op) ->
let f = Filename.concat (OpamFilename.Dir.to_string prefix) fname in
match op with
| Added dg | Kind_changed dg ->
let cur_item_ct, cur_dg =
try
let precise = is_precise_digest dg in
let item = item_of_filename ~precise f in
Some (snd item), Some (item_digest item)
with Unix.Unix_error _ -> None, None
in
if cur_dg = None then (fname::already, modified, nonempty, cannot)
else if cur_dg <> Some dg && not force then
(already, fname::modified, nonempty, cannot)
else if cur_item_ct = Some Dir then
let d = OpamFilename.Dir.of_string f in
if OpamFilename.dir_is_empty d then
(rmdir d; acc)
else
let nonempty =
if List.exists
(OpamStd.String.starts_with ~prefix:fname) nonempty
then nonempty else fname::nonempty
in
(already, modified, nonempty, cannot)
else
let f = OpamFilename.of_string f in
rmfile f;
acc
| Contents_changed dg ->
if check_digest f dg = `Changed then
(already, modified, nonempty, (op,fname)::cannot)
else
| (Removed | Perm_changed _) ->
(already, modified, nonempty, (op,fname)::cannot))
([], [], [], []) changes
in
if already <> [] then
log ~level:2 "%sfiles %s were already removed" title
(String.concat ", " (List.rev already));
if modified <> [] && verbose then
OpamConsole.warning "%snot removing files that changed since:\n%s" title
(OpamStd.Format.itemize (fun s -> s) (List.rev modified));
if nonempty <> [] && verbose then
OpamConsole.note "%snot removing non-empty directories:\n%s" title
(OpamStd.Format.itemize (fun s -> s) (List.rev nonempty));
if cannot <> [] && verbose then
OpamConsole.warning "%scannot revert:\n%s" title
(OpamStd.Format.itemize
(fun (op,f) -> string_of_change op ^" of "^ f)
(List.rev cannot))
|
27952a9f1c8766a1cf041d88c6a3f9c75843612d6ccc08dd7c27ef7b7d307e32 | dparis/gen-phzr | debug.cljs | (ns phzr.utils.debug
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser])
(:refer-clojure :exclude [key]))
(defn ->Debug
"A collection of methods for displaying debug information about game objects.
If your game is running in WebGL then Debug will create a Sprite that is placed at the top of the Stage display list and bind a canvas texture
to it, which must be uploaded every frame. Be advised: this is very expensive, especially in browsers like Firefox. So please only enable Debug
in WebGL mode if you really need it (or your desktop can cope with it well) and disable it for production!
If your game is using a Canvas renderer then the debug information is literally drawn on the top of the active game canvas and no Sprite is used.
Parameters:
* game (Phaser.Game) - A reference to the currently running game."
([game]
(js/Phaser.Utils.Debug. (clj->phaser game))))
(defn a-star-
"Debug method to draw the last calculated path by AStar
Parameters:
* astar (Phaser.Plugin.AStar) - The AStar plugin that you want to debug.
* x (number) - X position on camera for debug display.
* y (number) - Y position on camera for debug display.
* color (string) - Color to stroke the path line.
Returns: void - "
([astar x y color]
(phaser->clj
(.AStar js/Phaser.Utils.Debug
(clj->phaser astar)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn body
"Render a Sprites Physics body if it has one set. The body is rendered as a filled or stroked rectangle.
This only works for Arcade Physics, Ninja Physics (AABB and Circle only) and Box2D Physics bodies.
To display a P2 Physics body you should enable debug mode on the body when creating it.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite) - The Sprite who's body will be rendered.
* color (string) {optional} - Color of the debug rectangle to be rendered. The format is a CSS color string such as '#ff0000' or 'rgba(255,0,0,0.5)'.
* filled (boolean) {optional} - Render the body as a filled rectangle (true) or a stroked rectangle (false)"
([debug sprite]
(phaser->clj
(.body debug
(clj->phaser sprite))))
([debug sprite color]
(phaser->clj
(.body debug
(clj->phaser sprite)
(clj->phaser color))))
([debug sprite color filled]
(phaser->clj
(.body debug
(clj->phaser sprite)
(clj->phaser color)
(clj->phaser filled)))))
(defn body-info
"Render a Sprites Physic Body information.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite) - The sprite to be rendered.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug sprite x y]
(phaser->clj
(.bodyInfo debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y))))
([debug sprite x y color]
(phaser->clj
(.bodyInfo debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn box-2d-body
"Renders 'debug draw' data for the given Box2D body.
This uses the standard debug drawing feature of Box2D, so colors will be decided by the Box2D engine.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite) - The sprite whos body will be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug sprite]
(phaser->clj
(.box2dBody debug
(clj->phaser sprite))))
([debug sprite color]
(phaser->clj
(.box2dBody debug
(clj->phaser sprite)
(clj->phaser color)))))
(defn box-2d-world
"Renders 'debug draw' data for the Box2D world if it exists.
This uses the standard debug drawing feature of Box2D, so colors will be decided by
the Box2D engine."
([debug]
(phaser->clj
(.box2dWorld debug))))
(defn camera-info
"Render camera information including dimensions and location.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* camera (Phaser.Camera) - The Phaser.Camera to show the debug information for.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug camera x y]
(phaser->clj
(.cameraInfo debug
(clj->phaser camera)
(clj->phaser x)
(clj->phaser y))))
([debug camera x y color]
(phaser->clj
(.cameraInfo debug
(clj->phaser camera)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn geom
"Renders a Phaser geometry object including Rectangle, Circle, Point or Line.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* object (Phaser.Rectangle | Phaser.Circle | Phaser.Point | Phaser.Line) - The geometry object to render.
* color (string) {optional} - Color of the debug info to be rendered (format is css color string).
* filled (boolean) {optional} - Render the objected as a filled (default, true) or a stroked (false)
* force-type (number) {optional} - Force rendering of a specific type. If 0 no type will be forced, otherwise 1 = Rectangle, 2 = Circle, 3 = Point and 4 = Line."
([debug object]
(phaser->clj
(.geom debug
(clj->phaser object))))
([debug object color]
(phaser->clj
(.geom debug
(clj->phaser object)
(clj->phaser color))))
([debug object color filled]
(phaser->clj
(.geom debug
(clj->phaser object)
(clj->phaser color)
(clj->phaser filled))))
([debug object color filled force-type]
(phaser->clj
(.geom debug
(clj->phaser object)
(clj->phaser color)
(clj->phaser filled)
(clj->phaser force-type)))))
(defn input-info
"Render debug information about the Input object.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug x y]
(phaser->clj
(.inputInfo debug
(clj->phaser x)
(clj->phaser y))))
([debug x y color]
(phaser->clj
(.inputInfo debug
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn key
"Renders Phaser.Key object information.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* key (Phaser.Key) - The Key to render the information for.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug key x y]
(phaser->clj
(.key debug
(clj->phaser key)
(clj->phaser x)
(clj->phaser y))))
([debug key x y color]
(phaser->clj
(.key debug
(clj->phaser key)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn line-info
"Renders Line information in the given color.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* line (Phaser.Line) - The Line to display the data for.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug line x y]
(phaser->clj
(.lineInfo debug
(clj->phaser line)
(clj->phaser x)
(clj->phaser y))))
([debug line x y color]
(phaser->clj
(.lineInfo debug
(clj->phaser line)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn pixel
"Renders a single pixel at the given size.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* x (number) - X position of the pixel to be rendered.
* y (number) - Y position of the pixel to be rendered.
* color (string) {optional} - Color of the pixel (format is css color string).
* size (number) {optional} - The 'size' to render the pixel at."
([debug x y]
(phaser->clj
(.pixel debug
(clj->phaser x)
(clj->phaser y))))
([debug x y color]
(phaser->clj
(.pixel debug
(clj->phaser x)
(clj->phaser y)
(clj->phaser color))))
([debug x y color size]
(phaser->clj
(.pixel debug
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)
(clj->phaser size)))))
(defn pointer
"Renders the Pointer.circle object onto the stage in green if down or red if up along with debug text.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* pointer (Phaser.Pointer) - The Pointer you wish to display.
* hide-if-up (boolean) {optional} - Doesn't render the circle if the pointer is up.
* down-color (string) {optional} - The color the circle is rendered in if down.
* up-color (string) {optional} - The color the circle is rendered in if up (and hideIfUp is false).
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug pointer]
(phaser->clj
(.pointer debug
(clj->phaser pointer))))
([debug pointer hide-if-up]
(phaser->clj
(.pointer debug
(clj->phaser pointer)
(clj->phaser hide-if-up))))
([debug pointer hide-if-up down-color]
(phaser->clj
(.pointer debug
(clj->phaser pointer)
(clj->phaser hide-if-up)
(clj->phaser down-color))))
([debug pointer hide-if-up down-color up-color]
(phaser->clj
(.pointer debug
(clj->phaser pointer)
(clj->phaser hide-if-up)
(clj->phaser down-color)
(clj->phaser up-color))))
([debug pointer hide-if-up down-color up-color color]
(phaser->clj
(.pointer debug
(clj->phaser pointer)
(clj->phaser hide-if-up)
(clj->phaser down-color)
(clj->phaser up-color)
(clj->phaser color)))))
(defn pre-update
"Internal method that clears the canvas (if a Sprite) ready for a new debug session."
([debug]
(phaser->clj
(.preUpdate debug))))
(defn quad-tree
"Visually renders a QuadTree to the display.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* quadtree (Phaser.QuadTree) - The quadtree to render.
* color (string) - The color of the lines in the quadtree."
([debug quadtree color]
(phaser->clj
(.quadTree debug
(clj->phaser quadtree)
(clj->phaser color)))))
(defn rectangle
"Renders a Rectangle.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* object (Phaser.Rectangle | object) - The geometry object to render.
* color (string) {optional} - Color of the debug info to be rendered (format is css color string).
* filled (boolean) {optional} - Render the objected as a filled (default, true) or a stroked (false)"
([debug object]
(phaser->clj
(.rectangle debug
(clj->phaser object))))
([debug object color]
(phaser->clj
(.rectangle debug
(clj->phaser object)
(clj->phaser color))))
([debug object color filled]
(phaser->clj
(.rectangle debug
(clj->phaser object)
(clj->phaser color)
(clj->phaser filled)))))
(defn reset
"Clears the Debug canvas."
([debug]
(phaser->clj
(.reset debug))))
(defn rope-segments
"Renders the Rope's segments. Note: This is really expensive as it has to calculate new segments every time you call it
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* rope (Phaser.Rope) - The rope to display the segments of.
* color (string) {optional} - Color of the debug info to be rendered (format is css color string).
* filled (boolean) {optional} - Render the rectangle as a fillRect (default, true) or a strokeRect (false)"
([debug rope]
(phaser->clj
(.ropeSegments debug
(clj->phaser rope))))
([debug rope color]
(phaser->clj
(.ropeSegments debug
(clj->phaser rope)
(clj->phaser color))))
([debug rope color filled]
(phaser->clj
(.ropeSegments debug
(clj->phaser rope)
(clj->phaser color)
(clj->phaser filled)))))
(defn sound-info
"Render Sound information, including decoded state, duration, volume and more.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sound (Phaser.Sound) - The sound object to debug.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug sound x y]
(phaser->clj
(.soundInfo debug
(clj->phaser sound)
(clj->phaser x)
(clj->phaser y))))
([debug sound x y color]
(phaser->clj
(.soundInfo debug
(clj->phaser sound)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn sprite-bounds
"Renders the Sprites bounds. Note: This is really expensive as it has to calculate the bounds every time you call it!
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite | Phaser.Image) - The sprite to display the bounds of.
* color (string) {optional} - Color of the debug info to be rendered (format is css color string).
* filled (boolean) {optional} - Render the rectangle as a fillRect (default, true) or a strokeRect (false)"
([debug sprite]
(phaser->clj
(.spriteBounds debug
(clj->phaser sprite))))
([debug sprite color]
(phaser->clj
(.spriteBounds debug
(clj->phaser sprite)
(clj->phaser color))))
([debug sprite color filled]
(phaser->clj
(.spriteBounds debug
(clj->phaser sprite)
(clj->phaser color)
(clj->phaser filled)))))
(defn sprite-coords
"Renders the sprite coordinates in local, positional and world space.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite | Phaser.Image) - The sprite to display the coordinates for.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug sprite x y]
(phaser->clj
(.spriteCoords debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y))))
([debug sprite x y color]
(phaser->clj
(.spriteCoords debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn sprite-info
"Render debug infos (including name, bounds info, position and some other properties) about the Sprite.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite) - The Sprite to display the information of.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug sprite x y]
(phaser->clj
(.spriteInfo debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y))))
([debug sprite x y color]
(phaser->clj
(.spriteInfo debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn sprite-input-info
"Render Sprite Input Debug information.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite | Phaser.Image) - The sprite to display the input data for.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug sprite x y]
(phaser->clj
(.spriteInputInfo debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y))))
([debug sprite x y color]
(phaser->clj
(.spriteInputInfo debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn text
"Render a string of text.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* text (string) - The line of text to draw.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - Color of the debug info to be rendered (format is css color string).
* font (string) {optional} - The font of text to draw."
([debug text x y]
(phaser->clj
(.text debug
(clj->phaser text)
(clj->phaser x)
(clj->phaser y))))
([debug text x y color]
(phaser->clj
(.text debug
(clj->phaser text)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color))))
([debug text x y color font]
(phaser->clj
(.text debug
(clj->phaser text)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)
(clj->phaser font)))))
(defn timer
"Render Timer information.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* timer (Phaser.Timer) - The Phaser.Timer to show the debug information for.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug timer x y]
(phaser->clj
(.timer debug
(clj->phaser timer)
(clj->phaser x)
(clj->phaser y))))
([debug timer x y color]
(phaser->clj
(.timer debug
(clj->phaser timer)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color))))) | null | https://raw.githubusercontent.com/dparis/gen-phzr/e4c7b272e225ac343718dc15fc84f5f0dce68023/out/utils/debug.cljs | clojure | (ns phzr.utils.debug
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser])
(:refer-clojure :exclude [key]))
(defn ->Debug
  "A collection of methods for displaying debug information about game objects.
  If your game is running in WebGL then Debug will create a Sprite that is placed at the top of the Stage display list and bind a canvas texture
  to it, which must be uploaded every frame. Be advised: this is very expensive, especially in browsers like Firefox. So please only enable Debug
  in WebGL mode if you really need it (or your desktop can cope with it well) and disable it for production!
  If your game is using a Canvas renderer then the debug information is literally drawn on the top of the active game canvas and no Sprite is used.

  Parameters:
    * game (Phaser.Game) - A reference to the currently running game."
  [game]
  ;; Construct the underlying Phaser Debug object directly via the `new` special form.
  (new js/Phaser.Utils.Debug (clj->phaser game)))
(defn a-star-
  "Debug method to draw the last calculated path by AStar

  Parameters:
    * astar (Phaser.Plugin.AStar) - The AStar plugin that you want to debug.
    * x (number) - X position on camera for debug display.
    * y (number) - Y position on camera for debug display.
    * color (string) - Color to stroke the path line.

  Returns: void - "
  [astar x y color]
  ;; Static-style call on the Debug class itself, mirroring the JS API.
  (-> (.AStar js/Phaser.Utils.Debug
              (clj->phaser astar) (clj->phaser x) (clj->phaser y) (clj->phaser color))
      (phaser->clj)))
(defn body
  "Render a Sprites Physics body if it has one set. The body is rendered as a filled or stroked rectangle.
  This only works for Arcade Physics, Ninja Physics (AABB and Circle only) and Box2D Physics bodies.
  To display a P2 Physics body you should enable debug mode on the body when creating it.

  Parameters:
    * debug (Phaser.Utils.Debug) - Targeted instance for method
    * sprite (Phaser.Sprite) - The Sprite who's body will be rendered.
    * color (string) {optional} - Color of the debug rectangle to be rendered. The format is a CSS color string such as '#ff0000' or 'rgba(255,0,0,0.5)'.
    * filled (boolean) {optional} - Render the body as a filled rectangle (true) or a stroked rectangle (false)"
  ([debug sprite]
   (-> (.body debug (clj->phaser sprite))
       (phaser->clj)))
  ([debug sprite color]
   (-> (.body debug (clj->phaser sprite) (clj->phaser color))
       (phaser->clj)))
  ([debug sprite color filled]
   (-> (.body debug (clj->phaser sprite) (clj->phaser color) (clj->phaser filled))
       (phaser->clj))))
(defn body-info
  "Render a Sprites Physic Body information.

  Parameters:
    * debug (Phaser.Utils.Debug) - Targeted instance for method
    * sprite (Phaser.Sprite) - The sprite to be rendered.
    * x (number) - X position of the debug info to be rendered.
    * y (number) - Y position of the debug info to be rendered.
    * color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
  ([debug sprite x y]
   (-> (.bodyInfo debug (clj->phaser sprite) (clj->phaser x) (clj->phaser y))
       (phaser->clj)))
  ([debug sprite x y color]
   (-> (.bodyInfo debug (clj->phaser sprite) (clj->phaser x) (clj->phaser y) (clj->phaser color))
       (phaser->clj))))
(defn box-2d-body
  "Renders 'debug draw' data for the given Box2D body.
  This uses the standard debug drawing feature of Box2D, so colors will be decided by the Box2D engine.

  Parameters:
    * debug (Phaser.Utils.Debug) - Targeted instance for method
    * sprite (Phaser.Sprite) - The sprite whos body will be rendered.
    * color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
  ([debug sprite]
   (-> (.box2dBody debug (clj->phaser sprite))
       (phaser->clj)))
  ([debug sprite color]
   (-> (.box2dBody debug (clj->phaser sprite) (clj->phaser color))
       (phaser->clj))))
(defn box-2d-world
  "Renders 'debug draw' data for the Box2D world if it exists.
  This uses the standard debug drawing feature of Box2D, so colors will be decided by
  the Box2D engine."
  [debug]
  (-> debug .box2dWorld phaser->clj))
(defn camera-info
  "Render camera information including dimensions and location.

  Parameters:
    * debug (Phaser.Utils.Debug) - Targeted instance for method
    * camera (Phaser.Camera) - The Phaser.Camera to show the debug information for.
    * x (number) - X position of the debug info to be rendered.
    * y (number) - Y position of the debug info to be rendered.
    * color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
  ([debug camera x y]
   (-> (.cameraInfo debug (clj->phaser camera) (clj->phaser x) (clj->phaser y))
       (phaser->clj)))
  ([debug camera x y color]
   (-> (.cameraInfo debug (clj->phaser camera) (clj->phaser x) (clj->phaser y) (clj->phaser color))
       (phaser->clj))))
(defn geom
  "Renders a Phaser geometry object including Rectangle, Circle, Point or Line.

  Parameters:
    * debug (Phaser.Utils.Debug) - Targeted instance for method
    * object (Phaser.Rectangle | Phaser.Circle | Phaser.Point | Phaser.Line) - The geometry object to render.
    * color (string) {optional} - Color of the debug info to be rendered (format is css color string).
    * filled (boolean) {optional} - Render the objected as a filled (default, true) or a stroked (false)
    * force-type (number) {optional} - Force rendering of a specific type. If 0 no type will be forced, otherwise 1 = Rectangle, 2 = Circle, 3 = Point and 4 = Line."
  ([debug object]
   (-> (.geom debug (clj->phaser object))
       (phaser->clj)))
  ([debug object color]
   (-> (.geom debug (clj->phaser object) (clj->phaser color))
       (phaser->clj)))
  ([debug object color filled]
   (-> (.geom debug (clj->phaser object) (clj->phaser color) (clj->phaser filled))
       (phaser->clj)))
  ([debug object color filled force-type]
   (-> (.geom debug (clj->phaser object) (clj->phaser color) (clj->phaser filled) (clj->phaser force-type))
       (phaser->clj))))
(defn input-info
  "Render debug information about the Input object.

  Parameters:
    * debug (Phaser.Utils.Debug) - Targeted instance for method
    * x (number) - X position of the debug info to be rendered.
    * y (number) - Y position of the debug info to be rendered.
    * color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
  ([debug x y]
   (-> (.inputInfo debug (clj->phaser x) (clj->phaser y))
       (phaser->clj)))
  ([debug x y color]
   (-> (.inputInfo debug (clj->phaser x) (clj->phaser y) (clj->phaser color))
       (phaser->clj))))
(defn key
  "Renders Phaser.Key object information.

  Parameters:
    * debug (Phaser.Utils.Debug) - Targeted instance for method
    * key (Phaser.Key) - The Key to render the information for.
    * x (number) - X position of the debug info to be rendered.
    * y (number) - Y position of the debug info to be rendered.
    * color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
  ([debug key x y]
   (-> (.key debug (clj->phaser key) (clj->phaser x) (clj->phaser y))
       (phaser->clj)))
  ([debug key x y color]
   (-> (.key debug (clj->phaser key) (clj->phaser x) (clj->phaser y) (clj->phaser color))
       (phaser->clj))))
(defn line-info
  "Renders Line information in the given color.

  Parameters:
    * debug (Phaser.Utils.Debug) - Targeted instance for method
    * line (Phaser.Line) - The Line to display the data for.
    * x (number) - X position of the debug info to be rendered.
    * y (number) - Y position of the debug info to be rendered.
    * color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
  ([debug line x y]
   (-> (.lineInfo debug (clj->phaser line) (clj->phaser x) (clj->phaser y))
       (phaser->clj)))
  ([debug line x y color]
   (-> (.lineInfo debug (clj->phaser line) (clj->phaser x) (clj->phaser y) (clj->phaser color))
       (phaser->clj))))
(defn pixel
  "Renders a single pixel at the given size.

  Parameters:
    * debug (Phaser.Utils.Debug) - Targeted instance for method
    * x (number) - X position of the pixel to be rendered.
    * y (number) - Y position of the pixel to be rendered.
    * color (string) {optional} - Color of the pixel (format is css color string).
    * size (number) {optional} - The 'size' to render the pixel at."
  ([debug x y]
   (-> (.pixel debug (clj->phaser x) (clj->phaser y))
       (phaser->clj)))
  ([debug x y color]
   (-> (.pixel debug (clj->phaser x) (clj->phaser y) (clj->phaser color))
       (phaser->clj)))
  ([debug x y color size]
   (-> (.pixel debug (clj->phaser x) (clj->phaser y) (clj->phaser color) (clj->phaser size))
       (phaser->clj))))
(defn pointer
  "Renders the Pointer.circle object onto the stage in green if down or red if up along with debug text.

  Parameters:
    * debug (Phaser.Utils.Debug) - Targeted instance for method
    * pointer (Phaser.Pointer) - The Pointer you wish to display.
    * hide-if-up (boolean) {optional} - Doesn't render the circle if the pointer is up.
    * down-color (string) {optional} - The color the circle is rendered in if down.
    * up-color (string) {optional} - The color the circle is rendered in if up (and hideIfUp is false).
    * color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
  ([debug pointer]
   (-> (.pointer debug (clj->phaser pointer))
       (phaser->clj)))
  ([debug pointer hide-if-up]
   (-> (.pointer debug (clj->phaser pointer) (clj->phaser hide-if-up))
       (phaser->clj)))
  ([debug pointer hide-if-up down-color]
   (-> (.pointer debug (clj->phaser pointer) (clj->phaser hide-if-up) (clj->phaser down-color))
       (phaser->clj)))
  ([debug pointer hide-if-up down-color up-color]
   (-> (.pointer debug (clj->phaser pointer) (clj->phaser hide-if-up) (clj->phaser down-color) (clj->phaser up-color))
       (phaser->clj)))
  ([debug pointer hide-if-up down-color up-color color]
   (-> (.pointer debug (clj->phaser pointer) (clj->phaser hide-if-up) (clj->phaser down-color) (clj->phaser up-color) (clj->phaser color))
       (phaser->clj))))
(defn pre-update
"Internal method that clears the canvas (if a Sprite) ready for a new debug session."
([debug]
(phaser->clj
(.preUpdate debug))))
(defn quad-tree
"Visually renders a QuadTree to the display.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* quadtree (Phaser.QuadTree) - The quadtree to render.
* color (string) - The color of the lines in the quadtree."
([debug quadtree color]
(phaser->clj
(.quadTree debug
(clj->phaser quadtree)
(clj->phaser color)))))
(defn rectangle
"Renders a Rectangle.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* object (Phaser.Rectangle | object) - The geometry object to render.
* color (string) {optional} - Color of the debug info to be rendered (format is css color string).
* filled (boolean) {optional} - Render the objected as a filled (default, true) or a stroked (false)"
([debug object]
(phaser->clj
(.rectangle debug
(clj->phaser object))))
([debug object color]
(phaser->clj
(.rectangle debug
(clj->phaser object)
(clj->phaser color))))
([debug object color filled]
(phaser->clj
(.rectangle debug
(clj->phaser object)
(clj->phaser color)
(clj->phaser filled)))))
(defn reset
"Clears the Debug canvas."
([debug]
(phaser->clj
(.reset debug))))
(defn rope-segments
"Renders the Rope's segments. Note: This is really expensive as it has to calculate new segments every time you call it
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* rope (Phaser.Rope) - The rope to display the segments of.
* color (string) {optional} - Color of the debug info to be rendered (format is css color string).
* filled (boolean) {optional} - Render the rectangle as a fillRect (default, true) or a strokeRect (false)"
([debug rope]
(phaser->clj
(.ropeSegments debug
(clj->phaser rope))))
([debug rope color]
(phaser->clj
(.ropeSegments debug
(clj->phaser rope)
(clj->phaser color))))
([debug rope color filled]
(phaser->clj
(.ropeSegments debug
(clj->phaser rope)
(clj->phaser color)
(clj->phaser filled)))))
(defn sound-info
"Render Sound information, including decoded state, duration, volume and more.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sound (Phaser.Sound) - The sound object to debug.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug sound x y]
(phaser->clj
(.soundInfo debug
(clj->phaser sound)
(clj->phaser x)
(clj->phaser y))))
([debug sound x y color]
(phaser->clj
(.soundInfo debug
(clj->phaser sound)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn sprite-bounds
"Renders the Sprites bounds. Note: This is really expensive as it has to calculate the bounds every time you call it!
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite | Phaser.Image) - The sprite to display the bounds of.
* color (string) {optional} - Color of the debug info to be rendered (format is css color string).
* filled (boolean) {optional} - Render the rectangle as a fillRect (default, true) or a strokeRect (false)"
([debug sprite]
(phaser->clj
(.spriteBounds debug
(clj->phaser sprite))))
([debug sprite color]
(phaser->clj
(.spriteBounds debug
(clj->phaser sprite)
(clj->phaser color))))
([debug sprite color filled]
(phaser->clj
(.spriteBounds debug
(clj->phaser sprite)
(clj->phaser color)
(clj->phaser filled)))))
(defn sprite-coords
"Renders the sprite coordinates in local, positional and world space.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite | Phaser.Image) - The sprite to display the coordinates for.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug sprite x y]
(phaser->clj
(.spriteCoords debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y))))
([debug sprite x y color]
(phaser->clj
(.spriteCoords debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn sprite-info
"Render debug infos (including name, bounds info, position and some other properties) about the Sprite.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite) - The Sprite to display the information of.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug sprite x y]
(phaser->clj
(.spriteInfo debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y))))
([debug sprite x y color]
(phaser->clj
(.spriteInfo debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn sprite-input-info
"Render Sprite Input Debug information.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* sprite (Phaser.Sprite | Phaser.Image) - The sprite to display the input data for.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug sprite x y]
(phaser->clj
(.spriteInputInfo debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y))))
([debug sprite x y color]
(phaser->clj
(.spriteInputInfo debug
(clj->phaser sprite)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)))))
(defn text
"Render a string of text.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* text (string) - The line of text to draw.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - Color of the debug info to be rendered (format is css color string).
* font (string) {optional} - The font of text to draw."
([debug text x y]
(phaser->clj
(.text debug
(clj->phaser text)
(clj->phaser x)
(clj->phaser y))))
([debug text x y color]
(phaser->clj
(.text debug
(clj->phaser text)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color))))
([debug text x y color font]
(phaser->clj
(.text debug
(clj->phaser text)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color)
(clj->phaser font)))))
(defn timer
"Render Timer information.
Parameters:
* debug (Phaser.Utils.Debug) - Targeted instance for method
* timer (Phaser.Timer) - The Phaser.Timer to show the debug information for.
* x (number) - X position of the debug info to be rendered.
* y (number) - Y position of the debug info to be rendered.
* color (string) {optional} - color of the debug info to be rendered. (format is css color string)."
([debug timer x y]
(phaser->clj
(.timer debug
(clj->phaser timer)
(clj->phaser x)
(clj->phaser y))))
([debug timer x y color]
(phaser->clj
(.timer debug
(clj->phaser timer)
(clj->phaser x)
(clj->phaser y)
(clj->phaser color))))) | |
714f1e15ecfeb7f99324ea0366bb6bbfb57ae8a9d4f72a159c3f10bf511503e4 | techascent/tvm-clj | te.clj | (ns tvm-clj.impl.fns.te
(:require [tvm-clj.impl.tvm-ns-fns :as tvm-ns-fns]))
(tvm-ns-fns/export-tvm-functions "te") | null | https://raw.githubusercontent.com/techascent/tvm-clj/1088845bd613b4ba14b00381ffe3cdbd3d8b639e/src/tvm_clj/impl/fns/te.clj | clojure | (ns tvm-clj.impl.fns.te
(:require [tvm-clj.impl.tvm-ns-fns :as tvm-ns-fns]))
(tvm-ns-fns/export-tvm-functions "te") | |
d18f3b1e1c5f6d3c07fce4145bad047fdf4524bda8481f91b5054fc4092ecca4 | mirage/mirage-clock | mclock.mli |
* Copyright ( c ) 2015 .
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2015 Daniel C. Bünzli.
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
include Mirage_clock.MCLOCK
* Monotonic clock for Unix using MirageOS interfaces
Clock returning monotonic time since an arbitrary point . To be used for eg .
profiling .
Clock returning monotonic time since an arbitrary point. To be used for eg.
profiling. *)
| null | https://raw.githubusercontent.com/mirage/mirage-clock/74a02c9df3770bcd9d704c39cc6582b11f80e551/unix/mclock.mli | ocaml |
* Copyright ( c ) 2015 .
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2015 Daniel C. Bünzli.
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
include Mirage_clock.MCLOCK
* Monotonic clock for Unix using MirageOS interfaces
Clock returning monotonic time since an arbitrary point . To be used for eg .
profiling .
Clock returning monotonic time since an arbitrary point. To be used for eg.
profiling. *)
| |
1cb2416e88f06026edc17ee8eacebb67b9cdde69bc3e20ff79d5f26f792c335e | tweag/ormolu | foreign-export-out.hs | foreign export ccall foo :: Int -> IO Int
-- | 'foreignTomFun' is a very important thing
foreign export ccall "tomography"
foreignTomFun ::
StablePtr Storage {- Storage is bad -} -> TomForegin
foreign export {- We can't use capi here -} ccall "dynamic"
export_nullaryMeth :: (IO HRESULT) -> IO (Ptr ())
| null | https://raw.githubusercontent.com/tweag/ormolu/34bdf62429768f24b70d0f8ba7730fc4d8ae73ba/data/examples/declaration/foreign/foreign-export-out.hs | haskell | | 'foreignTomFun' is a very important thing
Storage is bad
We can't use capi here | foreign export ccall foo :: Int -> IO Int
foreign export ccall "tomography"
foreignTomFun ::
export_nullaryMeth :: (IO HRESULT) -> IO (Ptr ())
|
6cbe03b635981205de8fd677c13eddb0fcf3d1b45d09f95aaf544beb231b8400 | clash-lang/clash-compiler | NewType.hs | # LANGUAGE DataKinds #
Issue # 359 :
module NewType where
import Clash.Prelude
newtype Circuit a b c d = Circuit ((a,b) -> (c, d))
topEntity
:: Clock System
-> Reset System
-> Circuit (Signal System Int) () () (Signal System Int)
topEntity c r = withClockReset c r $ Circuit $ \(i,()) -> ((), register 0 i)
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/8e461a910f2f37c900705a0847a9b533bce4d2ea/tests/shouldwork/SynthesisAttributes/NewType.hs | haskell | # LANGUAGE DataKinds #
Issue # 359 :
module NewType where
import Clash.Prelude
newtype Circuit a b c d = Circuit ((a,b) -> (c, d))
topEntity
:: Clock System
-> Reset System
-> Circuit (Signal System Int) () () (Signal System Int)
topEntity c r = withClockReset c r $ Circuit $ \(i,()) -> ((), register 0 i)
| |
a227d810457685ae1591bee17df983eae0fc5d55e89c1b38adc4906ffc494674 | talex5/mirage-trace-viewer | mtv_render.ml | Copyright ( C ) 2014 ,
module type CANVAS = sig
type context
type text_extents = {
x_bearing : float;
y_bearing : float;
width : float;
height : float;
x_advance : float;
y_advance : float;
}
val set_line_width : context -> float -> unit
val set_source_rgb : context -> r:float -> g:float -> b:float -> unit
val set_source_rgba : context -> r:float -> g:float -> b:float -> a:float -> unit
( Cairo needs to know the r , , b too )
val set_source_alpha : context -> r:float -> g:float -> b:float -> float -> unit
val move_to : context -> x:float -> y:float -> unit
val line_to : context -> x:float -> y:float -> unit
val rectangle : context -> x:float -> y:float -> w:float -> h:float -> unit
val stroke : context -> unit
val stroke_preserve : context -> unit
val fill : context -> unit
val text_extents : context -> string -> text_extents
val paint_text : context -> ?clip_area:(float * float) -> x:float -> y:float -> string -> unit
val paint : ?alpha:float -> context -> unit
end
(* Find a place to put the label for the next stat line, ideally close to y. *)
let insert_label y stat_labels =
let rec aux (y:float) = function
| [] -> y, [y]
| y2 :: ys when y +. 16. < y2 -> y, (y :: y2 :: ys)
| y2 :: ys ->
let y, ys = aux (max y (y2 +. 16.)) ys in
y, (y2 :: ys) in
let y, new_stats = aux y !stat_labels in
stat_labels := new_stats;
y
module Make (C : CANVAS) = struct
let arrow_width = 4.
let arrow_height = 10.
let thin cr = C.set_line_width cr 1.0
let thread_label cr =
C.set_source_rgb cr ~r:0.0 ~g:0.0 ~b:0.0
let type_label cr =
C.set_source_rgb cr ~r:0.5 ~g:0.5 ~b:0.5
let counter_line_width = 5.0
let counter_shadow cr =
C.set_source_rgb cr ~r:0.0 ~g:0.0 ~b:0.0;
C.set_line_width cr 5.0
let counter_line i cr =
C.set_line_width cr 3.0;
match i mod 4 with
| 0 -> C.set_source_rgb cr ~r:1.0 ~g:0.4 ~b:0.4
| 1 -> C.set_source_rgb cr ~r:1.0 ~g:0.5 ~b:0.0
| 2 -> C.set_source_rgb cr ~r:0.4 ~g:0.8 ~b:0.8
| _ -> C.set_source_rgb cr ~r:0.8 ~g:0.4 ~b:1.0
let anonymous_thread cr =
C.set_line_width cr 2.0;
C.set_source_rgb cr ~r:0.6 ~g:0.6 ~b:0.6
let highlight cr =
C.set_source_rgb cr ~r:1.0 ~g:1.0 ~b:0.0
let named_thread cr =
C.set_line_width cr 2.0;
C.set_source_rgb cr ~r:0.2 ~g:0.2 ~b:0.2
let failed cr =
C.set_line_width cr 2.0;
C.set_source_rgb cr ~r:0.8 ~g:0.0 ~b:0.0
let activation cr =
C.set_line_width cr 3.0;
C.set_source_rgb cr ~r:1.0 ~g:1.0 ~b:1.0
let line v cr time src recv =
C.move_to cr ~x:(Mtv_view.x_of_time v time) ~y:(Mtv_view.y_of_thread v src);
C.line_to cr ~x:(Mtv_view.x_of_time v time) ~y:(Mtv_view.y_of_thread v recv);
C.stroke cr
let draw_arrow_head_v cr ~x ~y ~arrow_head_y =
C.line_to cr ~x ~y:arrow_head_y;
C.stroke cr;
C.move_to cr ~x ~y;
C.line_to cr ~x:(x +. arrow_width) ~y:arrow_head_y;
C.line_to cr ~x:(x -. arrow_width) ~y:arrow_head_y;
C.fill cr
let draw_arrow_head_h cr ~x ~y ~arrow_head_x =
C.line_to cr ~x:arrow_head_x ~y;
C.stroke cr;
C.move_to cr ~x ~y;
C.line_to cr ~x:arrow_head_x ~y:(y +. arrow_width);
C.line_to cr ~x:arrow_head_x ~y:(y -. arrow_width);
C.fill cr
let arrow v cr src src_time recv recv_time (r, g, b) =
let width = Mtv_view.width_of_timespan v (recv_time -. src_time) in
let alpha = 1.0 -. (min 1.0 (width /. 6000.)) in
if alpha > 0.01 then (
C.set_source_alpha cr ~r ~g ~b alpha;
if Mtv_thread.id src <> -1 && Mtv_thread.id src <> Mtv_thread.id recv then (
let src_x = Mtv_view.clip_x_of_time v src_time in
let src_y = Mtv_view.y_of_thread v src in
let recv_y = Mtv_view.y_of_thread v recv in
C.move_to cr ~x:src_x ~y:src_y;
let x = Mtv_view.clip_x_of_time v recv_time in
let d = recv_y -. src_y in
if d < -.arrow_height then draw_arrow_head_v cr ~x ~y:recv_y ~arrow_head_y:(recv_y +. arrow_height)
else if d > arrow_height then draw_arrow_head_v cr ~x ~y:recv_y ~arrow_head_y:(recv_y -. arrow_height)
else draw_arrow_head_h cr ~x ~y:recv_y ~arrow_head_x:(x -. arrow_height)
)
)
let draw_grid v cr area_start_x area_end_x =
C.set_line_width cr 1.0;
C.set_source_rgb cr ~r:0.7 ~g:0.7 ~b:0.7;
let grid_step = Mtv_view.grid_step v in
let top = 0.0 in
let bottom = Mtv_view.view_height v in
let area_start_time = Mtv_view.time_of_x v area_start_x in
let grid_start_x = floor (area_start_time /. grid_step) *. grid_step |> Mtv_view.x_of_time v in
let grid_step_x = Mtv_view.width_of_timespan v grid_step in
let rec draw x =
if x < area_end_x then (
C.move_to cr ~x:x ~y:top;
C.line_to cr ~x:x ~y:bottom;
C.stroke cr;
draw (x +. grid_step_x)
) in
draw grid_start_x;
C.set_source_rgb cr ~r:0.4 ~g:0.4 ~b:0.4;
let msg =
if grid_step >= 1.0 then Printf.sprintf "Each grid division: %.0f s" grid_step
else if grid_step >= 0.001 then Printf.sprintf "Each grid division: %.0f ms" (grid_step *. 1000.)
else if grid_step >= 0.000_001 then Printf.sprintf "Each grid division: %.0f us" (grid_step *. 1_000_000.)
else if grid_step >= 0.000_000_001 then Printf.sprintf "Each grid division: %.0f ns" (grid_step *. 1_000_000_000.)
else Printf.sprintf "Each grid division: %.2g s" grid_step in
let extents = C.text_extents cr msg in
let y = bottom -. C.(extents.height +. extents.y_bearing) -. 2.0 in
C.paint_text cr ~x:4.0 ~y msg
let draw_mark cr x y =
C.move_to cr ~x ~y;
C.line_to cr ~x ~y:(y +. 6.);
C.stroke cr
(** Draw [msg] in the area (min_x, max_x) and ideally centred at [x]. *)
let draw_label cr ~v ~y ~min_x ~max_x x msg =
let text_width = C.((text_extents cr msg).x_advance) in
let x =
x -. (text_width /. 2.) (* Desired start for centred text *)
|> min (max_x -. text_width)
|> max min_x in
if x +. text_width > max_x then (
(* Doesn't fit. Draw as much as we can. *)
C.paint_text cr ~x:min_x ~y ~clip_area:(max_x -. x, Mtv_view.view_height v) msg;
max_x
) else (
(* Show label on left margin if the thread starts off-screen *)
let x =
if x < 4.0 then min 4.0 (max_x -. text_width)
else x in
C.paint_text cr ~x ~y msg;
x +. text_width
)
let rec draw_labels cr ~v ~y ~min_x ~max_x = function
| [] -> ()
| [(time, msg)] ->
let x = Mtv_view.clip_x_of_time v time in
let _end : float = draw_label cr ~v ~y ~min_x ~max_x x msg in
draw_mark cr x y;
()
| (t1, msg1) :: (((t2, _msg2) :: _) as rest) ->
let x1 = Mtv_view.clip_x_of_time v t1 in
let x2 = Mtv_view.clip_x_of_time v t2 in
let min_x = draw_label cr ~v ~y ~min_x ~max_x:x2 x1 msg1 in
draw_mark cr x1 y;
draw_labels cr ~v ~y ~min_x ~max_x rest
let render v cr ~expose_area =
let vat = Mtv_view.vat v in
let top_thread = Mtv_thread.top_thread vat in
let ((expose_min_x, expose_min_y), (expose_max_x, expose_max_y)) = expose_area in
(* Note: switching drawing colours is really slow with HTML canvas, so we try to group by colour. *)
C.set_source_rgb cr ~r:0.9 ~g:0.9 ~b:0.9;
C.paint cr;
let region_labels = ref [] in
(* When the system thread is "active", the system is idle. *)
C.set_source_rgb cr ~r:0.8 ~g:0.8 ~b:0.8;
Mtv_thread.activations top_thread |> List.iter (fun (start_time, end_time) ->
let start_x = Mtv_view.clip_x_of_time v start_time in
let end_x = Mtv_view.clip_x_of_time v end_time in
if end_x >= expose_min_x && start_x < expose_max_x then (
C.rectangle cr ~x:start_x ~y:expose_min_y ~w:(end_x -. start_x) ~h:expose_max_y;
C.fill cr;
if end_x -. start_x > 16. then region_labels := (start_x, end_x, "sleeping") :: !region_labels
)
);
C.set_source_rgb cr ~r:0.7 ~g:0.6 ~b:0.6;
Mtv_thread.gc_periods vat |> List.iter (fun (start_time, end_time) ->
let start_x = Mtv_view.clip_x_of_time v start_time in
let end_x = Mtv_view.clip_x_of_time v end_time in
if end_x >= expose_min_x && start_x < expose_max_x then (
C.rectangle cr ~x:start_x ~y:expose_min_y ~w:(end_x -. start_x) ~h:expose_max_y;
C.fill cr;
if end_x -. start_x > 16. then region_labels := (start_x, end_x, "GC") :: !region_labels
)
);
C.set_source_rgb cr ~r:1.0 ~g:1.0 ~b:1.0;
!region_labels |> List.iter (fun (min_x, max_x, label) ->
let x = (min_x +. max_x) /. 2. in
draw_label cr ~v ~y:14.0 ~min_x ~max_x x label |> ignore
);
draw_grid v cr expose_min_x expose_max_x;
(* Draw the thread lines. *)
let failed_thread_lines = ref [] in
let draw_thread_line start_x end_x y =
C.move_to cr ~x:start_x ~y;
C.line_to cr ~x:end_x ~y;
C.stroke cr in
let visible_t_min = Mtv_view.time_of_x v expose_min_x in
let visible_t_max = Mtv_view.time_of_x v expose_max_x in
let visible_threads = Mtv_view.visible_threads v (visible_t_min, visible_t_max) in
let highlights = Mtv_view.highlights v in
let short_highlights = ref [] in
if not (Mtv_view.ThreadSet.is_empty highlights) then (
highlight cr;
visible_threads |> Mtv_layout.IT.IntervalSet.iter (fun i ->
let t = i.Interval_tree.Interval.value in
if Mtv_view.ThreadSet.mem t highlights then (
let start_x = Mtv_view.clip_x_of_time v (Mtv_thread.start_time t) in
let end_x = Mtv_view.clip_x_of_time v (Mtv_thread.end_time t) in
let y = Mtv_view.y_of_thread v t in
if end_x -. start_x < 32.0 then short_highlights := (start_x, end_x, y) :: !short_highlights
else (
C.rectangle cr ~x:start_x ~y:(y -. 4.0) ~w:(end_x -. start_x) ~h:8.0;
C.fill cr;
);
match Mtv_thread.becomes t with
| Some child when Mtv_thread.y child <> Mtv_thread.y t && Mtv_view.ThreadSet.mem child highlights ->
let h = Mtv_view.y_of_thread v child -. y in
C.rectangle cr ~x:(end_x -. 4.0) ~y:(y -. 4.0) ~w:8.0 ~h:(h +. 8.0);
C.fill cr
| _ -> ()
)
)
);
named_thread cr;
visible_threads |> Mtv_layout.IT.IntervalSet.iter (fun i ->
let t = i.Interval_tree.Interval.value in
let start_x = Mtv_view.clip_x_of_time v (Mtv_thread.start_time t) in
let end_x = Mtv_view.clip_x_of_time v (Mtv_thread.end_time t) in
let y = Mtv_view.y_of_thread v t in
if Mtv_thread.failure t = None then draw_thread_line start_x end_x y
else failed_thread_lines := (start_x, end_x, y) :: !failed_thread_lines;
Mtv_thread.creates t |> List.iter (fun child ->
let child_start_time = Mtv_thread.start_time child in
if Mtv_thread.show_creation child then
line v cr child_start_time t child
);
match Mtv_thread.becomes t with
| Some child when Mtv_thread.y child <> Mtv_thread.y t ->
line v cr (Mtv_thread.end_time t) t child
| None when not (Mtv_thread.resolved t) && end_x -. start_x > 4.0 ->
C.move_to cr ~x:end_x ~y;
C.line_to cr ~x:(end_x -. 6.) ~y:(y -. 4.);
C.line_to cr ~x:(end_x -. 6.) ~y:(y +. 4.);
C.fill cr;
| _ -> ()
);
activation cr;
visible_threads |> Mtv_layout.IT.IntervalSet.iter (fun i ->
let t = i.Interval_tree.Interval.value in
let y = Mtv_view.y_of_thread v t in
Mtv_thread.activations t |> List.iter (fun (start_time, end_time) ->
C.move_to cr ~x:(max expose_min_x (Mtv_view.clip_x_of_time v start_time)) ~y;
C.line_to cr ~x:(min expose_max_x (Mtv_view.clip_x_of_time v end_time)) ~y;
C.stroke cr;
)
);
(* Arrows that are only just off screen can still be visible, so extend the
* window slightly. Once we get wider than a screen width, they become invisible anyway. *)
let view_timespace = Mtv_view.timespan_of_width v (Mtv_view.view_width v) in
let vis_arrows_min = visible_t_min -. view_timespace in
let vis_arrows_max = visible_t_max +. view_timespace in
thin cr;
let c = (0.8, 0.8, 0.4) in
begin let r, g, b = c in C.set_source_rgb cr ~r ~g ~b end;
Mtv_view.iter_interactions v vis_arrows_min vis_arrows_max (fun (t, start_time, op, other, end_time) ->
match op with
| Mtv_thread.Try_read -> arrow v cr t start_time other end_time c
| _ -> ()
);
let c = (0.0, 0.0, 1.0) in
begin let r, g, b = c in C.set_source_rgb cr ~r ~g ~b end;
Mtv_view.iter_interactions v vis_arrows_min vis_arrows_max (fun (t, start_time, op, other, end_time) ->
match op with
| Mtv_thread.Read when Mtv_thread.failure other = None -> arrow v cr other end_time t start_time c
| _ -> ()
);
let c = (1.0, 0.0, 0.0) in
begin let r, g, b = c in C.set_source_rgb cr ~r ~g ~b end;
Mtv_view.iter_interactions v vis_arrows_min vis_arrows_max (fun (t, start_time, op, other, end_time) ->
match op with
| Mtv_thread.Read when Mtv_thread.failure other <> None -> arrow v cr other end_time t start_time c
| _ -> ()
);
let c = (0.0, 0.5, 0.0) in
begin let r, g, b = c in C.set_source_rgb cr ~r ~g ~b end;
Mtv_view.iter_interactions v vis_arrows_min vis_arrows_max (fun (t, start_time, op, other, end_time) ->
match op with
| Mtv_thread.Resolve when Mtv_thread.id t <> -1 -> arrow v cr t start_time other end_time c
| _ -> ()
);
let c = (1.0, 0.6, 0.0) in
begin let r, g, b = c in C.set_source_rgb cr ~r ~g ~b end;
Mtv_view.iter_interactions v vis_arrows_min vis_arrows_max (fun (t, start_time, op, other, end_time) ->
match op with
| Mtv_thread.Signal -> arrow v cr t start_time other end_time c
| _ -> ()
);
let text_visible t =
let vert_dist = Mtv_view.dist_from_focus v t in
vert_dist > -.2000. && vert_dist < 2000. in
thread_label cr;
visible_threads |> Mtv_layout.IT.IntervalSet.iter (fun i ->
let t = i.Interval_tree.Interval.value in
let start_x = Mtv_view.x_of_start v t +. 2. in
let end_x = Mtv_view.x_of_end v t in
let thread_width = end_x -. start_x in
if thread_width > 16. && text_visible t then (
let y = Mtv_view.y_of_thread v t -. 3.0 in
let end_x =
match Mtv_thread.becomes t with
| Some child when Mtv_thread.y child = Mtv_thread.y t -> Mtv_view.x_of_start v child
| _ -> end_x in
draw_labels cr ~v ~y ~min_x:start_x ~max_x:(min end_x (Mtv_view.view_width v)) (Mtv_thread.labels t)
)
);
let text_visible t =
let vert_dist = Mtv_view.dist_from_focus v t in
vert_dist > -.1000. && vert_dist < 1000. in
type_label cr;
visible_threads |> Mtv_layout.IT.IntervalSet.iter (fun i ->
let t = i.Interval_tree.Interval.value in
let start_x = Mtv_view.x_of_start v t +. 2. in
let end_x = Mtv_view.x_of_end v t in
let thread_width = end_x -. start_x in
if thread_width > 16. && text_visible t then (
let y = Mtv_view.y_of_thread v t +. 10.0 in
let end_x =
match Mtv_thread.becomes t with
| Some child when Mtv_thread.y child = Mtv_thread.y t -> Mtv_view.x_of_start v child
| _ -> end_x in
draw_label cr ~v ~y ~min_x:start_x ~max_x:end_x start_x (Mtv_thread.thread_type t)
|> ignore;
)
);
failed cr;
!failed_thread_lines |> List.iter (fun (start_x, end_x, y) ->
draw_thread_line start_x end_x y;
C.move_to cr ~x:end_x ~y:(y -. 8.);
C.line_to cr ~x:end_x ~y:(y +. 8.);
C.stroke cr;
);
if Mtv_view.show_metrics v then (
let stat_labels = ref [] in
Mtv_thread.counters vat |> List.iteri (fun counter_i counter ->
let open Mtv_counter in
if counter.shown then (
let range = counter.scale.max -. counter.scale.min in
let v_scale = (Mtv_view.view_height v -. (2.0 *. counter_line_width)) /. range in
let v_offset = Mtv_view.view_height v +. (v_scale *. counter.scale.min) -. counter_line_width in
let y_of_value value = v_offset -. v_scale *. value in
let values = counter.values in
let i = Mtv_sorted_array.count_before (fun (time, _v) -> time >= Mtv_view.view_start_time v) values in
let first_visible = max (i - 1) 0 in
let first_value =
if i = 0 then 0.0
else (snd values.(first_visible)) in
let y = ref (y_of_value first_value) in
C.move_to cr ~x:0.0 ~y:!y;
begin try
for i = first_visible to Array.length values - 1 do
let time, value = Array.get values i in
let x = Mtv_view.clip_x_of_time v time in
C.line_to cr ~x ~y:!y;
if x > Mtv_view.view_width v then raise Exit;
let new_y = y_of_value value in
C.line_to cr ~x ~y:new_y;
y := new_y;
done
with Exit -> () end;
C.line_to cr ~x:(Mtv_view.view_width v) ~y:!y;
counter_shadow cr;
C.stroke_preserve cr;
counter_line counter_i cr;
C.stroke cr;
let y = insert_label (max 16. (!y -. 2.)) stat_labels in
let max_x = Mtv_view.view_width v in
draw_label cr ~v ~y ~min_x:0.0 ~max_x max_x counter.name |> ignore
)
);
);
(* Draw these on top of everything else so they can still be seen *)
if !short_highlights <> [] then (
highlight cr;
!short_highlights |> List.iter (fun (start_x, end_x, y) ->
C.rectangle cr ~x:(start_x -. 4.0) ~y:(y -. 4.0) ~w:(end_x +. 4.0 -. start_x) ~h:8.0;
C.fill cr;
)
);
end
| null | https://raw.githubusercontent.com/talex5/mirage-trace-viewer/f3b5abcf013e7601638e1c4f036f35f9c15caeec/lib/mtv_render.ml | ocaml | Find a place to put the label for the next stat line, ideally close to y.
* Draw [msg] in the area (min_x, max_x) and ideally centred at [x].
Desired start for centred text
Doesn't fit. Draw as much as we can.
Show label on left margin if the thread starts off-screen
Note: switching drawing colours is really slow with HTML canvas, so we try to group by colour.
When the system thread is "active", the system is idle.
Draw the thread lines.
Arrows that are only just off screen can still be visible, so extend the
* window slightly. Once we get wider than a screen width, they become invisible anyway.
Draw these on top of everything else so they can still be seen | Copyright ( C ) 2014 ,
module type CANVAS = sig
type context
type text_extents = {
x_bearing : float;
y_bearing : float;
width : float;
height : float;
x_advance : float;
y_advance : float;
}
val set_line_width : context -> float -> unit
val set_source_rgb : context -> r:float -> g:float -> b:float -> unit
val set_source_rgba : context -> r:float -> g:float -> b:float -> a:float -> unit
( Cairo needs to know the r , , b too )
val set_source_alpha : context -> r:float -> g:float -> b:float -> float -> unit
val move_to : context -> x:float -> y:float -> unit
val line_to : context -> x:float -> y:float -> unit
val rectangle : context -> x:float -> y:float -> w:float -> h:float -> unit
val stroke : context -> unit
val stroke_preserve : context -> unit
val fill : context -> unit
val text_extents : context -> string -> text_extents
val paint_text : context -> ?clip_area:(float * float) -> x:float -> y:float -> string -> unit
val paint : ?alpha:float -> context -> unit
end
let insert_label y stat_labels =
let rec aux (y:float) = function
| [] -> y, [y]
| y2 :: ys when y +. 16. < y2 -> y, (y :: y2 :: ys)
| y2 :: ys ->
let y, ys = aux (max y (y2 +. 16.)) ys in
y, (y2 :: ys) in
let y, new_stats = aux y !stat_labels in
stat_labels := new_stats;
y
module Make (C : CANVAS) = struct
let arrow_width = 4.
let arrow_height = 10.
let thin cr = C.set_line_width cr 1.0
let thread_label cr =
C.set_source_rgb cr ~r:0.0 ~g:0.0 ~b:0.0
let type_label cr =
C.set_source_rgb cr ~r:0.5 ~g:0.5 ~b:0.5
let counter_line_width = 5.0
let counter_shadow cr =
C.set_source_rgb cr ~r:0.0 ~g:0.0 ~b:0.0;
C.set_line_width cr 5.0
let counter_line i cr =
C.set_line_width cr 3.0;
match i mod 4 with
| 0 -> C.set_source_rgb cr ~r:1.0 ~g:0.4 ~b:0.4
| 1 -> C.set_source_rgb cr ~r:1.0 ~g:0.5 ~b:0.0
| 2 -> C.set_source_rgb cr ~r:0.4 ~g:0.8 ~b:0.8
| _ -> C.set_source_rgb cr ~r:0.8 ~g:0.4 ~b:1.0
let anonymous_thread cr =
C.set_line_width cr 2.0;
C.set_source_rgb cr ~r:0.6 ~g:0.6 ~b:0.6
let highlight cr =
C.set_source_rgb cr ~r:1.0 ~g:1.0 ~b:0.0
let named_thread cr =
C.set_line_width cr 2.0;
C.set_source_rgb cr ~r:0.2 ~g:0.2 ~b:0.2
let failed cr =
C.set_line_width cr 2.0;
C.set_source_rgb cr ~r:0.8 ~g:0.0 ~b:0.0
let activation cr =
C.set_line_width cr 3.0;
C.set_source_rgb cr ~r:1.0 ~g:1.0 ~b:1.0
let line v cr time src recv =
C.move_to cr ~x:(Mtv_view.x_of_time v time) ~y:(Mtv_view.y_of_thread v src);
C.line_to cr ~x:(Mtv_view.x_of_time v time) ~y:(Mtv_view.y_of_thread v recv);
C.stroke cr
let draw_arrow_head_v cr ~x ~y ~arrow_head_y =
C.line_to cr ~x ~y:arrow_head_y;
C.stroke cr;
C.move_to cr ~x ~y;
C.line_to cr ~x:(x +. arrow_width) ~y:arrow_head_y;
C.line_to cr ~x:(x -. arrow_width) ~y:arrow_head_y;
C.fill cr
let draw_arrow_head_h cr ~x ~y ~arrow_head_x =
C.line_to cr ~x:arrow_head_x ~y;
C.stroke cr;
C.move_to cr ~x ~y;
C.line_to cr ~x:arrow_head_x ~y:(y +. arrow_width);
C.line_to cr ~x:arrow_head_x ~y:(y -. arrow_width);
C.fill cr
let arrow v cr src src_time recv recv_time (r, g, b) =
let width = Mtv_view.width_of_timespan v (recv_time -. src_time) in
let alpha = 1.0 -. (min 1.0 (width /. 6000.)) in
if alpha > 0.01 then (
C.set_source_alpha cr ~r ~g ~b alpha;
if Mtv_thread.id src <> -1 && Mtv_thread.id src <> Mtv_thread.id recv then (
let src_x = Mtv_view.clip_x_of_time v src_time in
let src_y = Mtv_view.y_of_thread v src in
let recv_y = Mtv_view.y_of_thread v recv in
C.move_to cr ~x:src_x ~y:src_y;
let x = Mtv_view.clip_x_of_time v recv_time in
let d = recv_y -. src_y in
if d < -.arrow_height then draw_arrow_head_v cr ~x ~y:recv_y ~arrow_head_y:(recv_y +. arrow_height)
else if d > arrow_height then draw_arrow_head_v cr ~x ~y:recv_y ~arrow_head_y:(recv_y -. arrow_height)
else draw_arrow_head_h cr ~x ~y:recv_y ~arrow_head_x:(x -. arrow_height)
)
)
let draw_grid v cr area_start_x area_end_x =
C.set_line_width cr 1.0;
C.set_source_rgb cr ~r:0.7 ~g:0.7 ~b:0.7;
let grid_step = Mtv_view.grid_step v in
let top = 0.0 in
let bottom = Mtv_view.view_height v in
let area_start_time = Mtv_view.time_of_x v area_start_x in
let grid_start_x = floor (area_start_time /. grid_step) *. grid_step |> Mtv_view.x_of_time v in
let grid_step_x = Mtv_view.width_of_timespan v grid_step in
let rec draw x =
if x < area_end_x then (
C.move_to cr ~x:x ~y:top;
C.line_to cr ~x:x ~y:bottom;
C.stroke cr;
draw (x +. grid_step_x)
) in
draw grid_start_x;
C.set_source_rgb cr ~r:0.4 ~g:0.4 ~b:0.4;
let msg =
if grid_step >= 1.0 then Printf.sprintf "Each grid division: %.0f s" grid_step
else if grid_step >= 0.001 then Printf.sprintf "Each grid division: %.0f ms" (grid_step *. 1000.)
else if grid_step >= 0.000_001 then Printf.sprintf "Each grid division: %.0f us" (grid_step *. 1_000_000.)
else if grid_step >= 0.000_000_001 then Printf.sprintf "Each grid division: %.0f ns" (grid_step *. 1_000_000_000.)
else Printf.sprintf "Each grid division: %.2g s" grid_step in
let extents = C.text_extents cr msg in
let y = bottom -. C.(extents.height +. extents.y_bearing) -. 2.0 in
C.paint_text cr ~x:4.0 ~y msg
let draw_mark cr x y =
C.move_to cr ~x ~y;
C.line_to cr ~x ~y:(y +. 6.);
C.stroke cr
let draw_label cr ~v ~y ~min_x ~max_x x msg =
let text_width = C.((text_extents cr msg).x_advance) in
let x =
|> min (max_x -. text_width)
|> max min_x in
if x +. text_width > max_x then (
C.paint_text cr ~x:min_x ~y ~clip_area:(max_x -. x, Mtv_view.view_height v) msg;
max_x
) else (
let x =
if x < 4.0 then min 4.0 (max_x -. text_width)
else x in
C.paint_text cr ~x ~y msg;
x +. text_width
)
let rec draw_labels cr ~v ~y ~min_x ~max_x = function
| [] -> ()
| [(time, msg)] ->
let x = Mtv_view.clip_x_of_time v time in
let _end : float = draw_label cr ~v ~y ~min_x ~max_x x msg in
draw_mark cr x y;
()
| (t1, msg1) :: (((t2, _msg2) :: _) as rest) ->
let x1 = Mtv_view.clip_x_of_time v t1 in
let x2 = Mtv_view.clip_x_of_time v t2 in
let min_x = draw_label cr ~v ~y ~min_x ~max_x:x2 x1 msg1 in
draw_mark cr x1 y;
draw_labels cr ~v ~y ~min_x ~max_x rest
let render v cr ~expose_area =
let vat = Mtv_view.vat v in
let top_thread = Mtv_thread.top_thread vat in
let ((expose_min_x, expose_min_y), (expose_max_x, expose_max_y)) = expose_area in
C.set_source_rgb cr ~r:0.9 ~g:0.9 ~b:0.9;
C.paint cr;
let region_labels = ref [] in
C.set_source_rgb cr ~r:0.8 ~g:0.8 ~b:0.8;
Mtv_thread.activations top_thread |> List.iter (fun (start_time, end_time) ->
let start_x = Mtv_view.clip_x_of_time v start_time in
let end_x = Mtv_view.clip_x_of_time v end_time in
if end_x >= expose_min_x && start_x < expose_max_x then (
C.rectangle cr ~x:start_x ~y:expose_min_y ~w:(end_x -. start_x) ~h:expose_max_y;
C.fill cr;
if end_x -. start_x > 16. then region_labels := (start_x, end_x, "sleeping") :: !region_labels
)
);
C.set_source_rgb cr ~r:0.7 ~g:0.6 ~b:0.6;
Mtv_thread.gc_periods vat |> List.iter (fun (start_time, end_time) ->
let start_x = Mtv_view.clip_x_of_time v start_time in
let end_x = Mtv_view.clip_x_of_time v end_time in
if end_x >= expose_min_x && start_x < expose_max_x then (
C.rectangle cr ~x:start_x ~y:expose_min_y ~w:(end_x -. start_x) ~h:expose_max_y;
C.fill cr;
if end_x -. start_x > 16. then region_labels := (start_x, end_x, "GC") :: !region_labels
)
);
C.set_source_rgb cr ~r:1.0 ~g:1.0 ~b:1.0;
!region_labels |> List.iter (fun (min_x, max_x, label) ->
let x = (min_x +. max_x) /. 2. in
draw_label cr ~v ~y:14.0 ~min_x ~max_x x label |> ignore
);
draw_grid v cr expose_min_x expose_max_x;
let failed_thread_lines = ref [] in
let draw_thread_line start_x end_x y =
C.move_to cr ~x:start_x ~y;
C.line_to cr ~x:end_x ~y;
C.stroke cr in
let visible_t_min = Mtv_view.time_of_x v expose_min_x in
let visible_t_max = Mtv_view.time_of_x v expose_max_x in
let visible_threads = Mtv_view.visible_threads v (visible_t_min, visible_t_max) in
let highlights = Mtv_view.highlights v in
let short_highlights = ref [] in
if not (Mtv_view.ThreadSet.is_empty highlights) then (
highlight cr;
visible_threads |> Mtv_layout.IT.IntervalSet.iter (fun i ->
let t = i.Interval_tree.Interval.value in
if Mtv_view.ThreadSet.mem t highlights then (
let start_x = Mtv_view.clip_x_of_time v (Mtv_thread.start_time t) in
let end_x = Mtv_view.clip_x_of_time v (Mtv_thread.end_time t) in
let y = Mtv_view.y_of_thread v t in
if end_x -. start_x < 32.0 then short_highlights := (start_x, end_x, y) :: !short_highlights
else (
C.rectangle cr ~x:start_x ~y:(y -. 4.0) ~w:(end_x -. start_x) ~h:8.0;
C.fill cr;
);
match Mtv_thread.becomes t with
| Some child when Mtv_thread.y child <> Mtv_thread.y t && Mtv_view.ThreadSet.mem child highlights ->
let h = Mtv_view.y_of_thread v child -. y in
C.rectangle cr ~x:(end_x -. 4.0) ~y:(y -. 4.0) ~w:8.0 ~h:(h +. 8.0);
C.fill cr
| _ -> ()
)
)
);
named_thread cr;
visible_threads |> Mtv_layout.IT.IntervalSet.iter (fun i ->
let t = i.Interval_tree.Interval.value in
let start_x = Mtv_view.clip_x_of_time v (Mtv_thread.start_time t) in
let end_x = Mtv_view.clip_x_of_time v (Mtv_thread.end_time t) in
let y = Mtv_view.y_of_thread v t in
if Mtv_thread.failure t = None then draw_thread_line start_x end_x y
else failed_thread_lines := (start_x, end_x, y) :: !failed_thread_lines;
Mtv_thread.creates t |> List.iter (fun child ->
let child_start_time = Mtv_thread.start_time child in
if Mtv_thread.show_creation child then
line v cr child_start_time t child
);
match Mtv_thread.becomes t with
| Some child when Mtv_thread.y child <> Mtv_thread.y t ->
line v cr (Mtv_thread.end_time t) t child
| None when not (Mtv_thread.resolved t) && end_x -. start_x > 4.0 ->
C.move_to cr ~x:end_x ~y;
C.line_to cr ~x:(end_x -. 6.) ~y:(y -. 4.);
C.line_to cr ~x:(end_x -. 6.) ~y:(y +. 4.);
C.fill cr;
| _ -> ()
);
activation cr;
visible_threads |> Mtv_layout.IT.IntervalSet.iter (fun i ->
let t = i.Interval_tree.Interval.value in
let y = Mtv_view.y_of_thread v t in
Mtv_thread.activations t |> List.iter (fun (start_time, end_time) ->
C.move_to cr ~x:(max expose_min_x (Mtv_view.clip_x_of_time v start_time)) ~y;
C.line_to cr ~x:(min expose_max_x (Mtv_view.clip_x_of_time v end_time)) ~y;
C.stroke cr;
)
);
let view_timespace = Mtv_view.timespan_of_width v (Mtv_view.view_width v) in
let vis_arrows_min = visible_t_min -. view_timespace in
let vis_arrows_max = visible_t_max +. view_timespace in
thin cr;
let c = (0.8, 0.8, 0.4) in
begin let r, g, b = c in C.set_source_rgb cr ~r ~g ~b end;
Mtv_view.iter_interactions v vis_arrows_min vis_arrows_max (fun (t, start_time, op, other, end_time) ->
match op with
| Mtv_thread.Try_read -> arrow v cr t start_time other end_time c
| _ -> ()
);
let c = (0.0, 0.0, 1.0) in
begin let r, g, b = c in C.set_source_rgb cr ~r ~g ~b end;
Mtv_view.iter_interactions v vis_arrows_min vis_arrows_max (fun (t, start_time, op, other, end_time) ->
match op with
| Mtv_thread.Read when Mtv_thread.failure other = None -> arrow v cr other end_time t start_time c
| _ -> ()
);
let c = (1.0, 0.0, 0.0) in
begin let r, g, b = c in C.set_source_rgb cr ~r ~g ~b end;
Mtv_view.iter_interactions v vis_arrows_min vis_arrows_max (fun (t, start_time, op, other, end_time) ->
match op with
| Mtv_thread.Read when Mtv_thread.failure other <> None -> arrow v cr other end_time t start_time c
| _ -> ()
);
let c = (0.0, 0.5, 0.0) in
begin let r, g, b = c in C.set_source_rgb cr ~r ~g ~b end;
Mtv_view.iter_interactions v vis_arrows_min vis_arrows_max (fun (t, start_time, op, other, end_time) ->
match op with
| Mtv_thread.Resolve when Mtv_thread.id t <> -1 -> arrow v cr t start_time other end_time c
| _ -> ()
);
let c = (1.0, 0.6, 0.0) in
begin let r, g, b = c in C.set_source_rgb cr ~r ~g ~b end;
Mtv_view.iter_interactions v vis_arrows_min vis_arrows_max (fun (t, start_time, op, other, end_time) ->
match op with
| Mtv_thread.Signal -> arrow v cr t start_time other end_time c
| _ -> ()
);
let text_visible t =
let vert_dist = Mtv_view.dist_from_focus v t in
vert_dist > -.2000. && vert_dist < 2000. in
thread_label cr;
visible_threads |> Mtv_layout.IT.IntervalSet.iter (fun i ->
let t = i.Interval_tree.Interval.value in
let start_x = Mtv_view.x_of_start v t +. 2. in
let end_x = Mtv_view.x_of_end v t in
let thread_width = end_x -. start_x in
if thread_width > 16. && text_visible t then (
let y = Mtv_view.y_of_thread v t -. 3.0 in
let end_x =
match Mtv_thread.becomes t with
| Some child when Mtv_thread.y child = Mtv_thread.y t -> Mtv_view.x_of_start v child
| _ -> end_x in
draw_labels cr ~v ~y ~min_x:start_x ~max_x:(min end_x (Mtv_view.view_width v)) (Mtv_thread.labels t)
)
);
let text_visible t =
let vert_dist = Mtv_view.dist_from_focus v t in
vert_dist > -.1000. && vert_dist < 1000. in
type_label cr;
visible_threads |> Mtv_layout.IT.IntervalSet.iter (fun i ->
let t = i.Interval_tree.Interval.value in
let start_x = Mtv_view.x_of_start v t +. 2. in
let end_x = Mtv_view.x_of_end v t in
let thread_width = end_x -. start_x in
if thread_width > 16. && text_visible t then (
let y = Mtv_view.y_of_thread v t +. 10.0 in
let end_x =
match Mtv_thread.becomes t with
| Some child when Mtv_thread.y child = Mtv_thread.y t -> Mtv_view.x_of_start v child
| _ -> end_x in
draw_label cr ~v ~y ~min_x:start_x ~max_x:end_x start_x (Mtv_thread.thread_type t)
|> ignore;
)
);
failed cr;
!failed_thread_lines |> List.iter (fun (start_x, end_x, y) ->
draw_thread_line start_x end_x y;
C.move_to cr ~x:end_x ~y:(y -. 8.);
C.line_to cr ~x:end_x ~y:(y +. 8.);
C.stroke cr;
);
if Mtv_view.show_metrics v then (
let stat_labels = ref [] in
Mtv_thread.counters vat |> List.iteri (fun counter_i counter ->
let open Mtv_counter in
if counter.shown then (
let range = counter.scale.max -. counter.scale.min in
let v_scale = (Mtv_view.view_height v -. (2.0 *. counter_line_width)) /. range in
let v_offset = Mtv_view.view_height v +. (v_scale *. counter.scale.min) -. counter_line_width in
let y_of_value value = v_offset -. v_scale *. value in
let values = counter.values in
let i = Mtv_sorted_array.count_before (fun (time, _v) -> time >= Mtv_view.view_start_time v) values in
let first_visible = max (i - 1) 0 in
let first_value =
if i = 0 then 0.0
else (snd values.(first_visible)) in
let y = ref (y_of_value first_value) in
C.move_to cr ~x:0.0 ~y:!y;
begin try
for i = first_visible to Array.length values - 1 do
let time, value = Array.get values i in
let x = Mtv_view.clip_x_of_time v time in
C.line_to cr ~x ~y:!y;
if x > Mtv_view.view_width v then raise Exit;
let new_y = y_of_value value in
C.line_to cr ~x ~y:new_y;
y := new_y;
done
with Exit -> () end;
C.line_to cr ~x:(Mtv_view.view_width v) ~y:!y;
counter_shadow cr;
C.stroke_preserve cr;
counter_line counter_i cr;
C.stroke cr;
let y = insert_label (max 16. (!y -. 2.)) stat_labels in
let max_x = Mtv_view.view_width v in
draw_label cr ~v ~y ~min_x:0.0 ~max_x max_x counter.name |> ignore
)
);
);
if !short_highlights <> [] then (
highlight cr;
!short_highlights |> List.iter (fun (start_x, end_x, y) ->
C.rectangle cr ~x:(start_x -. 4.0) ~y:(y -. 4.0) ~w:(end_x +. 4.0 -. start_x) ~h:8.0;
C.fill cr;
)
);
end
|
066b4585ceefaabe602cc09c4b28d178793c5fda5e3d1ad5d1b371aeb5835eec | nklein/cl-reactive | apply-t.lisp | ;;;; apply-t.lisp
(in-package #:cl-reactive/tests)
(nst:def-test-group apply-tests ()
(nst:def-test simple-apply-test (:equal 5)
(signal-let ((sig-x 3 :type integer)
(sig-y 4 :type integer))
(flet ((hypotenuse (a b)
(values (round (sqrt (+ (* a a) (* b b)))))))
(with-signal-values ((h (signal-apply #'hypotenuse
(list sig-x sig-y))))
h))))
(nst:def-test apply-documentation-test (:equal "Yes")
(signal-let (sig-x)
(documentation (signal-apply #'+ (list sig-x) :documentation "Yes") t))))
| null | https://raw.githubusercontent.com/nklein/cl-reactive/e322391f553989add18e6755e810351085c28197/src/apply-t.lisp | lisp | apply-t.lisp |
(in-package #:cl-reactive/tests)
(nst:def-test-group apply-tests ()
(nst:def-test simple-apply-test (:equal 5)
(signal-let ((sig-x 3 :type integer)
(sig-y 4 :type integer))
(flet ((hypotenuse (a b)
(values (round (sqrt (+ (* a a) (* b b)))))))
(with-signal-values ((h (signal-apply #'hypotenuse
(list sig-x sig-y))))
h))))
(nst:def-test apply-documentation-test (:equal "Yes")
(signal-let (sig-x)
(documentation (signal-apply #'+ (list sig-x) :documentation "Yes") t))))
|
fcac4608d54fc8236251cbaa60ac0867e8e19c57f77da9f2912da74704f97698 | nasa/Common-Metadata-Repository | caching.clj | (ns cmr.authz.components.caching
(:require
[clojure.core.cache :as cache]
[clojure.java.io :as io]
[cmr.authz.components.config :as config]
[com.stuartsierra.component :as component]
[taoensso.timbre :as log]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Support / utility Data & Functions ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn load-cache
[system]
(if-let [sys system]
(if-let [filename (config/cache-dumpfile system)]
(try
(read-string
(slurp filename))
(catch Exception _ nil)))))
(defn dump-cache
[system cache-data]
(let [dumpfile (config/cache-dumpfile system)]
(io/make-parents dumpfile)
(spit
dumpfile
(prn-str cache-data))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Caching Component API ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn create-cache
([system]
(create-cache system
(merge (config/cache-init system)
(load-cache system))))
([system init-items]
(let [ttl (config/cache-ttl-ms system)
threshold (config/cache-lru-threshold system)
cache (-> init-items
(cache/ttl-cache-factory :ttl ttl)
(cache/lru-cache-factory :threshold threshold))]
(log/debug "Creating TTL Cache with time-to-live of" ttl)
(log/debug "Composing with LRU cache with threshold (item count)" threshold)
(log/trace "Starting value:" init-items)
cache)))
(defn get-cache
[system]
(get-in system [:auth-caching :cache]))
(defn evict
[system item-key]
(swap! (get-cache system) cache/evict item-key))
(defn evict-all
[system]
(reset! (get-cache system) (create-cache system (config/cache-init system))))
(defn lookup
([system item-key]
(cache/lookup @(get-cache system) item-key))
([system item-key value-fn]
(let [ch @(get-cache system)]
(if (cache/has? ch item-key)
(do
(log/debug "Cache has key; skipping value function ...")
(log/trace "Key:" item-key)
(cache/hit ch item-key))
(when-let [value (value-fn)]
(log/debug "Cache miss; calling value function ...")
(log/trace "Key:" item-key)
(log/trace "Value missed:" value)
(when-not (or (nil? value) (empty? value))
(swap! (get-cache system) #(cache/miss % item-key value))))))
(lookup system item-key)))
(defn lookup-all
[system]
@(get-cache system))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Component Lifecycle Implementation ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defrecord AuthzCaching [cache])
(defn start
[this]
(log/info "Starting authz caching component ...")
(let [cache (atom (create-cache this))]
(log/debug "Started authz caching component.")
(assoc this :cache cache)))
(defn stop
[this]
(log/info "Stopping authz caching component ...")
(if-let [cache-ref (:cache this)]
(if-let [cache @cache-ref]
(dump-cache this cache)))
(log/debug "Stopped authz caching component.")
(assoc this :cache nil))
(def lifecycle-behaviour
{:start start
:stop stop})
(extend AuthzCaching
component/Lifecycle
lifecycle-behaviour)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Component Constructor ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn create-component
""
[]
(map->AuthzCaching {}))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/63001cf021d32d61030b1dcadd8b253e4a221662/other/cmr-exchange/authz/src/cmr/authz/components/caching.clj | clojure |
; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ;
Caching Component API ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Component Lifecycle Implementation ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Component Constructor ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
| (ns cmr.authz.components.caching
(:require
[clojure.core.cache :as cache]
[clojure.java.io :as io]
[cmr.authz.components.config :as config]
[com.stuartsierra.component :as component]
[taoensso.timbre :as log]))
(defn load-cache
[system]
(if-let [sys system]
(if-let [filename (config/cache-dumpfile system)]
(try
(read-string
(slurp filename))
(catch Exception _ nil)))))
(defn dump-cache
[system cache-data]
(let [dumpfile (config/cache-dumpfile system)]
(io/make-parents dumpfile)
(spit
dumpfile
(prn-str cache-data))))
(defn create-cache
([system]
(create-cache system
(merge (config/cache-init system)
(load-cache system))))
([system init-items]
(let [ttl (config/cache-ttl-ms system)
threshold (config/cache-lru-threshold system)
cache (-> init-items
(cache/ttl-cache-factory :ttl ttl)
(cache/lru-cache-factory :threshold threshold))]
(log/debug "Creating TTL Cache with time-to-live of" ttl)
(log/debug "Composing with LRU cache with threshold (item count)" threshold)
(log/trace "Starting value:" init-items)
cache)))
(defn get-cache
[system]
(get-in system [:auth-caching :cache]))
(defn evict
[system item-key]
(swap! (get-cache system) cache/evict item-key))
(defn evict-all
[system]
(reset! (get-cache system) (create-cache system (config/cache-init system))))
(defn lookup
([system item-key]
(cache/lookup @(get-cache system) item-key))
([system item-key value-fn]
(let [ch @(get-cache system)]
(if (cache/has? ch item-key)
(do
(log/debug "Cache has key; skipping value function ...")
(log/trace "Key:" item-key)
(cache/hit ch item-key))
(when-let [value (value-fn)]
(log/debug "Cache miss; calling value function ...")
(log/trace "Key:" item-key)
(log/trace "Value missed:" value)
(when-not (or (nil? value) (empty? value))
(swap! (get-cache system) #(cache/miss % item-key value))))))
(lookup system item-key)))
(defn lookup-all
[system]
@(get-cache system))
(defrecord AuthzCaching [cache])
(defn start
[this]
(log/info "Starting authz caching component ...")
(let [cache (atom (create-cache this))]
(log/debug "Started authz caching component.")
(assoc this :cache cache)))
(defn stop
[this]
(log/info "Stopping authz caching component ...")
(if-let [cache-ref (:cache this)]
(if-let [cache @cache-ref]
(dump-cache this cache)))
(log/debug "Stopped authz caching component.")
(assoc this :cache nil))
(def lifecycle-behaviour
{:start start
:stop stop})
(extend AuthzCaching
component/Lifecycle
lifecycle-behaviour)
(defn create-component
""
[]
(map->AuthzCaching {}))
|
8e4a8d61de45b5283a0d452efd1e669ec0e140f5c2e7d1ec133788d79467bbc0 | tisnik/clojure-examples | project.clj | ;
( C ) Copyright 2021
;
; All rights reserved. This program and the accompanying materials
; are made available under the terms of the Eclipse Public License v1.0
; which accompanies this distribution, and is available at
-v10.html
;
; Contributors:
;
(defproject vector1 "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[net.mikera/core.matrix "0.34.0"]]
:plugins [[lein-codox "0.10.7"]
[test2junit "1.1.0"]
[ lein - test - out " 0.3.1 " ]
[lein-cloverage "1.0.7-SNAPSHOT"]
[lein-kibit "0.1.8"]
[lein-clean-m2 "0.1.2"]
[lein-marginalia "0.9.1"]]
:main ^:skip-aot vector1.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/1350d206ec6702248a560a6c9569d2128de2da60/vector1/project.clj | clojure |
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
which accompanies this distribution, and is available at
Contributors:
| ( C ) Copyright 2021
-v10.html
(defproject vector1 "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[net.mikera/core.matrix "0.34.0"]]
:plugins [[lein-codox "0.10.7"]
[test2junit "1.1.0"]
[ lein - test - out " 0.3.1 " ]
[lein-cloverage "1.0.7-SNAPSHOT"]
[lein-kibit "0.1.8"]
[lein-clean-m2 "0.1.2"]
[lein-marginalia "0.9.1"]]
:main ^:skip-aot vector1.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
|
270572aee44263ae320719d18647d76a997908a41aca819a825433bf839c035d | takikawa/tr-pfds | leftist.rkt | #lang typed/racket
(provide (rename-out [heap-map map]
[heap-ormap ormap] [heap-andmap andmap])
fold filter remove Heap
heap merge insert find-min/max delete-min/max sorted-list
build-heap)
(struct: (A) Tree ([rank : Integer]
[elem : A]
[left : (IntHeap A)]
[right : (IntHeap A)]))
(define-type (IntHeap A) (U Null (Tree A)))
(struct: (A) LeftistHeap ([comparer : (A A -> Boolean)]
[heap : (IntHeap A)]))
(define-type (Heap A) (LeftistHeap A))
;; An empty heap
(define empty null)
;; Returns the rank of the heap
(: rank : (All (A) ((IntHeap A) -> Integer)))
(define (rank lheap)
(if (null? lheap) 0 (Tree-rank lheap)))
(: make-lheap : (All (A) (A (IntHeap A) (IntHeap A) -> (IntHeap A))))
(define (make-lheap elem heap1 heap2)
(let ([rank1 (rank heap1)]
[rank2 (rank heap2)])
(if (>= rank1 rank2)
(Tree (add1 rank2) elem heap1 heap2)
(Tree (add1 rank1) elem heap2 heap1))))
;; Checks for empty heap
(: empty? : (All (A) ((LeftistHeap A) -> Boolean)))
(define (empty? lheap)
(null? (LeftistHeap-heap lheap)))
;; Inserts an element into the heap
(: insert : (All (A) (A (LeftistHeap A) -> (LeftistHeap A))))
(define (insert elem lheap)
(let ([comparer (LeftistHeap-comparer lheap)])
(LeftistHeap comparer
(in-merge (Tree 1 elem empty empty)
(LeftistHeap-heap lheap)
comparer))))
Merges two heaps .
(: merge : (All (A) ((LeftistHeap A) (LeftistHeap A) -> (LeftistHeap A))))
(define (merge heap1 heap2)
(let ([comparer (LeftistHeap-comparer heap1)])
(LeftistHeap comparer
(in-merge (LeftistHeap-heap heap1)
(LeftistHeap-heap heap2)
comparer))))
;; Helper for merge
(: in-merge :
(All (A) ((IntHeap A) (IntHeap A) (A A -> Boolean) -> (IntHeap A))))
(define (in-merge heap1 heap2 comparer)
(cond
[(null? heap2) heap1]
[(null? heap1) heap2]
[else (in-merge-helper heap1 heap2 comparer)]))
(: in-merge-helper :
(All (A) ((Tree A) (Tree A) (A A -> Boolean) -> (IntHeap A))))
(define (in-merge-helper tree1 tree2 comparer)
(let ([tr1-elm (Tree-elem tree1)]
[tr2-elm (Tree-elem tree2)]
[tr1-lft (Tree-left tree1)]
[tr2-lft (Tree-left tree2)]
[tr1-rgt (Tree-right tree1)]
[tr2-rgt (Tree-right tree2)])
(if (comparer tr1-elm tr2-elm)
(make-lheap tr1-elm tr1-lft
(in-merge tr1-rgt tree2 comparer))
(make-lheap tr2-elm tr2-lft
(in-merge tree1 tr2-rgt comparer)))))
Returns min or element of the heap
(: find-min/max : (All (A) ((LeftistHeap A) -> A)))
(define (find-min/max lheap)
(let ([heap (LeftistHeap-heap lheap)])
(if (null? heap)
(error 'find-min/max "given heap is empty")
(Tree-elem heap))))
Deletes min or element of the heap
(: delete-min/max : (All (A) ((LeftistHeap A) -> (LeftistHeap A))))
(define (delete-min/max lheap)
(let ([heap (LeftistHeap-heap lheap)]
[comparer (LeftistHeap-comparer lheap)])
(if (null? heap)
(error 'delete-min/max "given heap is empty")
(LeftistHeap comparer
(in-merge (Tree-left heap)
(Tree-right heap)
comparer)))))
;; Returns a sorted list
(: sorted-list : (All (A) ((LeftistHeap A) -> (Listof A))))
(define (sorted-list lheap)
(if (null? (LeftistHeap-heap lheap))
null
(cons (find-min/max lheap) (sorted-list (delete-min/max lheap)))))
Heap constructor
(: heap : (All (A) ((A A -> Boolean) A * -> (LeftistHeap A))))
(define (heap comparer . lst)
(let ([first ((inst LeftistHeap A) comparer empty)])
(foldl (inst insert A) first lst)))
;; similar to list filter function
(: filter : (All (A) ((A -> Boolean) (LeftistHeap A) -> (LeftistHeap A))))
(define (filter func hep)
(: inner : (All (A) ((A -> Boolean) (LeftistHeap A) (LeftistHeap A) -> (LeftistHeap A))))
(define (inner func hep accum)
(if (empty? hep)
accum
(let ([head (find-min/max hep)]
[tail (delete-min/max hep)])
(if (func head)
(inner func tail (insert head accum))
(inner func tail accum)))))
(inner func hep ((inst LeftistHeap A) (LeftistHeap-comparer hep) empty)))
;; similar to list remove function
(: remove : (All (A) ((A -> Boolean) (LeftistHeap A) -> (LeftistHeap A))))
(define (remove func hep)
(: inner : (All (A) ((A -> Boolean) (LeftistHeap A) (LeftistHeap A) -> (LeftistHeap A))))
(define (inner func hep accum)
(if (empty? hep)
accum
(let ([head (find-min/max hep)]
[tail (delete-min/max hep)])
(if (func head)
(inner func tail accum)
(inner func tail (insert head accum))))))
(inner func hep ((inst LeftistHeap A) (LeftistHeap-comparer hep) empty)))
;; similar to list map function. apply is expensive so using case-lambda
;; in order to saperate the more common case
(: heap-map :
(All (A C B ...)
(case-lambda
((C C -> Boolean) (A -> C) (Heap A) -> (Heap C))
((C C -> Boolean)
(A B ... B -> C) (Heap A) (Heap B) ... B -> (Heap C)))))
(define heap-map
(pcase-lambda: (A C B ...)
[([comp : (C C -> Boolean)]
[func : (A -> C)]
[heap : (Heap A)])
(map-single ((inst LeftistHeap C) comp empty) func heap)]
[([comp : (C C -> Boolean)]
[func : (A B ... B -> C)]
[heap : (Heap A)] . [heaps : (Heap B) ... B])
(apply map-multiple
((inst LeftistHeap C) comp empty)
func heap heaps)]))
(: map-single : (All (A C) ((Heap C) (A -> C) (Heap A) -> (Heap C))))
(define (map-single accum func heap)
(if (empty? heap)
accum
(map-single (insert (func (find-min/max heap)) accum) func
(delete-min/max heap))))
(: map-multiple :
(All (A C B ...)
((Heap C) (A B ... B -> C) (Heap A) (Heap B) ... B -> (Heap C))))
(define (map-multiple accum func heap . heaps)
(if (or (empty? heap) (ormap empty? heaps))
accum
(apply map-multiple
(insert (apply func (find-min/max heap) (map find-min/max heaps))
accum)
func
(delete-min/max heap)
(map delete-min/max heaps))))
;; similar to list foldr or foldl
(: fold :
(All (A C B ...)
(case-lambda ((C A -> C) C (Heap A) -> C)
((C A B ... B -> C) C (Heap A) (Heap B) ... B
-> C))))
(define fold
(pcase-lambda: (A C B ...)
[([func : (C A -> C)]
[base : C]
[heap : (Heap A)])
(if (empty? heap)
base
(fold func (func base (find-min/max heap))
(delete-min/max heap)))]
[([func : (C A B ... B -> C)]
[base : C]
[heap : (Heap A)] . [heaps : (Heap B) ... B])
(if (or (empty? heap) (ormap empty? heaps))
base
(apply fold
func
(apply func base (find-min/max heap)
(map find-min/max heaps))
(delete-min/max heap)
(map delete-min/max heaps)))]))
;; Similar to build-list
(: build-heap : (All (A) (Natural (Natural -> A) (A A -> Boolean)
-> (Heap A))))
(define (build-heap size func comparer)
(let: loop : (Heap A) ([n : Natural size])
(if (zero? n)
((inst LeftistHeap A) comparer empty)
(let ([nsub1 (sub1 n)])
(insert (func nsub1) (loop nsub1))))))
similar to list andmap function
(: heap-andmap :
(All (A B ...)
(case-lambda ((A -> Boolean) (Heap A) -> Boolean)
((A B ... B -> Boolean) (Heap A) (Heap B) ... B
-> Boolean))))
(define heap-andmap
(pcase-lambda: (A B ... )
[([func : (A -> Boolean)]
[deque : (Heap A)])
(or (empty? deque)
(and (func (find-min/max deque))
(heap-andmap func (delete-min/max deque))))]
[([func : (A B ... B -> Boolean)]
[deque : (Heap A)] . [deques : (Heap B) ... B])
(or (empty? deque) (ormap empty? deques)
(and (apply func (find-min/max deque)
(map find-min/max deques))
(apply heap-andmap func (delete-min/max deque)
(map delete-min/max deques))))]))
;; Similar to ormap
(: heap-ormap :
(All (A B ...)
(case-lambda ((A -> Boolean) (Heap A) -> Boolean)
((A B ... B -> Boolean) (Heap A) (Heap B) ... B
-> Boolean))))
(define heap-ormap
(pcase-lambda: (A B ... )
[([func : (A -> Boolean)]
[deque : (Heap A)])
(and (not (empty? deque))
(or (func (find-min/max deque))
(heap-ormap func (delete-min/max deque))))]
[([func : (A B ... B -> Boolean)]
[deque : (Heap A)] . [deques : (Heap B) ... B])
(and (not (or (empty? deque) (ormap empty? deques)))
(or (apply func (find-min/max deque)
(map find-min/max deques))
(apply heap-ormap func (delete-min/max deque)
(map delete-min/max deques))))])) | null | https://raw.githubusercontent.com/takikawa/tr-pfds/a08810bdfc760bb9ed68d08ea222a59135d9a203/pfds/heap/leftist.rkt | racket | An empty heap
Returns the rank of the heap
Checks for empty heap
Inserts an element into the heap
Helper for merge
Returns a sorted list
similar to list filter function
similar to list remove function
similar to list map function. apply is expensive so using case-lambda
in order to saperate the more common case
similar to list foldr or foldl
Similar to build-list
Similar to ormap | #lang typed/racket
(provide (rename-out [heap-map map]
[heap-ormap ormap] [heap-andmap andmap])
fold filter remove Heap
heap merge insert find-min/max delete-min/max sorted-list
build-heap)
(struct: (A) Tree ([rank : Integer]
[elem : A]
[left : (IntHeap A)]
[right : (IntHeap A)]))
(define-type (IntHeap A) (U Null (Tree A)))
(struct: (A) LeftistHeap ([comparer : (A A -> Boolean)]
[heap : (IntHeap A)]))
(define-type (Heap A) (LeftistHeap A))
(define empty null)
(: rank : (All (A) ((IntHeap A) -> Integer)))
(define (rank lheap)
(if (null? lheap) 0 (Tree-rank lheap)))
(: make-lheap : (All (A) (A (IntHeap A) (IntHeap A) -> (IntHeap A))))
(define (make-lheap elem heap1 heap2)
(let ([rank1 (rank heap1)]
[rank2 (rank heap2)])
(if (>= rank1 rank2)
(Tree (add1 rank2) elem heap1 heap2)
(Tree (add1 rank1) elem heap2 heap1))))
(: empty? : (All (A) ((LeftistHeap A) -> Boolean)))
(define (empty? lheap)
(null? (LeftistHeap-heap lheap)))
(: insert : (All (A) (A (LeftistHeap A) -> (LeftistHeap A))))
(define (insert elem lheap)
(let ([comparer (LeftistHeap-comparer lheap)])
(LeftistHeap comparer
(in-merge (Tree 1 elem empty empty)
(LeftistHeap-heap lheap)
comparer))))
Merges two heaps .
(: merge : (All (A) ((LeftistHeap A) (LeftistHeap A) -> (LeftistHeap A))))
(define (merge heap1 heap2)
(let ([comparer (LeftistHeap-comparer heap1)])
(LeftistHeap comparer
(in-merge (LeftistHeap-heap heap1)
(LeftistHeap-heap heap2)
comparer))))
(: in-merge :
(All (A) ((IntHeap A) (IntHeap A) (A A -> Boolean) -> (IntHeap A))))
(define (in-merge heap1 heap2 comparer)
(cond
[(null? heap2) heap1]
[(null? heap1) heap2]
[else (in-merge-helper heap1 heap2 comparer)]))
(: in-merge-helper :
(All (A) ((Tree A) (Tree A) (A A -> Boolean) -> (IntHeap A))))
(define (in-merge-helper tree1 tree2 comparer)
(let ([tr1-elm (Tree-elem tree1)]
[tr2-elm (Tree-elem tree2)]
[tr1-lft (Tree-left tree1)]
[tr2-lft (Tree-left tree2)]
[tr1-rgt (Tree-right tree1)]
[tr2-rgt (Tree-right tree2)])
(if (comparer tr1-elm tr2-elm)
(make-lheap tr1-elm tr1-lft
(in-merge tr1-rgt tree2 comparer))
(make-lheap tr2-elm tr2-lft
(in-merge tree1 tr2-rgt comparer)))))
Returns min or element of the heap
(: find-min/max : (All (A) ((LeftistHeap A) -> A)))
(define (find-min/max lheap)
(let ([heap (LeftistHeap-heap lheap)])
(if (null? heap)
(error 'find-min/max "given heap is empty")
(Tree-elem heap))))
Deletes min or element of the heap
(: delete-min/max : (All (A) ((LeftistHeap A) -> (LeftistHeap A))))
(define (delete-min/max lheap)
(let ([heap (LeftistHeap-heap lheap)]
[comparer (LeftistHeap-comparer lheap)])
(if (null? heap)
(error 'delete-min/max "given heap is empty")
(LeftistHeap comparer
(in-merge (Tree-left heap)
(Tree-right heap)
comparer)))))
(: sorted-list : (All (A) ((LeftistHeap A) -> (Listof A))))
(define (sorted-list lheap)
(if (null? (LeftistHeap-heap lheap))
null
(cons (find-min/max lheap) (sorted-list (delete-min/max lheap)))))
Heap constructor
(: heap : (All (A) ((A A -> Boolean) A * -> (LeftistHeap A))))
(define (heap comparer . lst)
(let ([first ((inst LeftistHeap A) comparer empty)])
(foldl (inst insert A) first lst)))
(: filter : (All (A) ((A -> Boolean) (LeftistHeap A) -> (LeftistHeap A))))
(define (filter func hep)
(: inner : (All (A) ((A -> Boolean) (LeftistHeap A) (LeftistHeap A) -> (LeftistHeap A))))
(define (inner func hep accum)
(if (empty? hep)
accum
(let ([head (find-min/max hep)]
[tail (delete-min/max hep)])
(if (func head)
(inner func tail (insert head accum))
(inner func tail accum)))))
(inner func hep ((inst LeftistHeap A) (LeftistHeap-comparer hep) empty)))
(: remove : (All (A) ((A -> Boolean) (LeftistHeap A) -> (LeftistHeap A))))
(define (remove func hep)
(: inner : (All (A) ((A -> Boolean) (LeftistHeap A) (LeftistHeap A) -> (LeftistHeap A))))
(define (inner func hep accum)
(if (empty? hep)
accum
(let ([head (find-min/max hep)]
[tail (delete-min/max hep)])
(if (func head)
(inner func tail accum)
(inner func tail (insert head accum))))))
(inner func hep ((inst LeftistHeap A) (LeftistHeap-comparer hep) empty)))
;; Map over one or more heaps, producing a new heap ordered by comp.
;; With several input heaps, func is applied to the parallel
;; minima/maxima; iteration stops at the shortest heap.
(: heap-map :
   (All (A C B ...)
        (case-lambda
          ((C C -> Boolean) (A -> C) (Heap A) -> (Heap C))
          ((C C -> Boolean)
           (A B ... B -> C) (Heap A) (Heap B) ... B -> (Heap C)))))
(define heap-map
  (pcase-lambda: (A C B ...)
                 [([comp : (C C -> Boolean)]
                   [func : (A -> C)]
                   [heap : (Heap A)])
                  (map-single ((inst LeftistHeap C) comp empty) func heap)]
                 [([comp : (C C -> Boolean)]
                   [func : (A B ... B -> C)]
                   [heap : (Heap A)] . [heaps : (Heap B) ... B])
                  (apply map-multiple
                         ((inst LeftistHeap C) comp empty)
                         func heap heaps)]))
;; Helper for heap-map over a single heap: folds the source heap's
;; elements (in comparer order) through func into accum.
(: map-single : (All (A C) ((Heap C) (A -> C) (Heap A) -> (Heap C))))
(define (map-single accum func heap)
  (if (empty? heap)
      accum
      (map-single (insert (func (find-min/max heap)) accum) func
                  (delete-min/max heap))))

;; Helper for heap-map over several heaps: applies func to the parallel
;; heads of all heaps, stopping as soon as any heap is exhausted.
(: map-multiple :
   (All (A C B ...)
        ((Heap C) (A B ... B -> C) (Heap A) (Heap B) ... B -> (Heap C))))
(define (map-multiple accum func heap . heaps)
  (if (or (empty? heap) (ormap empty? heaps))
      accum
      (apply map-multiple
             (insert (apply func (find-min/max heap) (map find-min/max heaps))
                     accum)
             func
             (delete-min/max heap)
             (map delete-min/max heaps))))
;; Left fold over one or more heaps, consuming elements in comparer
;; order. With several heaps, func receives the parallel heads and
;; folding stops as soon as any heap is empty.
(: fold :
   (All (A C B ...)
        (case-lambda ((C A -> C) C (Heap A) -> C)
                     ((C A B ... B -> C) C (Heap A) (Heap B) ... B
                                         -> C))))
(define fold
  (pcase-lambda: (A C B ...)
                 [([func : (C A -> C)]
                   [base : C]
                   [heap : (Heap A)])
                  (if (empty? heap)
                      base
                      (fold func (func base (find-min/max heap))
                            (delete-min/max heap)))]
                 [([func : (C A B ... B -> C)]
                   [base : C]
                   [heap : (Heap A)] . [heaps : (Heap B) ... B])
                  (if (or (empty? heap) (ormap empty? heaps))
                      base
                      (apply fold
                             func
                             (apply func base (find-min/max heap)
                                    (map find-min/max heaps))
                             (delete-min/max heap)
                             (map delete-min/max heaps)))]))
;; Build a heap of `size` elements ordered by comparer, where element i
;; is (func i) for each i in [0, size).
(: build-heap : (All (A) (Natural (Natural -> A) (A A -> Boolean)
                                  -> (Heap A))))
(define (build-heap size func comparer)
  (let: loop : (Heap A) ([n : Natural size])
        (if (zero? n)
            ((inst LeftistHeap A) comparer empty)
            (let ([nsub1 (sub1 n)])
              (insert (func nsub1) (loop nsub1))))))
;; Similar to the list `andmap` function: #t iff func holds for every
;; element (or for every parallel tuple of heads when given several
;; heaps). Empty input is vacuously true. (The parameters are named
;; `deque`, but they are heaps.)
(: heap-andmap :
   (All (A B ...)
        (case-lambda ((A -> Boolean) (Heap A) -> Boolean)
                     ((A B ... B -> Boolean) (Heap A) (Heap B) ... B
                                             -> Boolean))))
(define heap-andmap
  (pcase-lambda: (A B ... )
                 [([func : (A -> Boolean)]
                   [deque : (Heap A)])
                  (or (empty? deque)
                      (and (func (find-min/max deque))
                           (heap-andmap func (delete-min/max deque))))]
                 [([func : (A B ... B -> Boolean)]
                   [deque : (Heap A)] . [deques : (Heap B) ... B])
                  (or (empty? deque) (ormap empty? deques)
                      (and (apply func (find-min/max deque)
                                  (map find-min/max deques))
                           (apply heap-andmap func (delete-min/max deque)
                                  (map delete-min/max deques))))]))
;; Similar to the list `ormap` function: #t iff func holds for at least
;; one element (or one parallel tuple of heads). Empty input yields #f.
;; (The parameters are named `deque`, but they are heaps.)
(: heap-ormap :
   (All (A B ...)
        (case-lambda ((A -> Boolean) (Heap A) -> Boolean)
                     ((A B ... B -> Boolean) (Heap A) (Heap B) ... B
                                             -> Boolean))))
(define heap-ormap
  (pcase-lambda: (A B ... )
                 [([func : (A -> Boolean)]
                   [deque : (Heap A)])
                  (and (not (empty? deque))
                       (or (func (find-min/max deque))
                           (heap-ormap func (delete-min/max deque))))]
                 [([func : (A B ... B -> Boolean)]
                   [deque : (Heap A)] . [deques : (Heap B) ... B])
                  (and (not (or (empty? deque) (ormap empty? deques)))
                       (or (apply func (find-min/max deque)
                                  (map find-min/max deques))
                           (apply heap-ormap func (delete-min/max deque)
                                  (map delete-min/max deques))))]))
b68350f5288e7d4cc8da2587a691847bf8eca5bd8ca9c379069948356eae3637 | spell-music/csound-expression | Layout.hs | -- | The functions from this module specify the geometry
-- of the GUI-elements. They tell where to render the elements.
--
-- Every element is rectangular. To know where to place the element is
-- to know the parameters of the bounding rectangle. All rectangles are
-- relative and automatically aligned.
--
-- We have two functions for grouping. They construct horizontal and vertical
-- groups of the elements. Within the group we can change the relative size
-- of the rectangles (by scaling one side of the rectangle). In place of rectangle
-- we can put an empty space.
module Csound.Control.Gui.Layout (
hor, ver, space, sca, horSca, verSca, grid,
padding, margin, ScaleFactor, resizeGui,
) where
import Csound.Typed.Gui
-- | Lays out the widgets in a grid. The first argument is the number of
-- widgets per row; the final row is padded with blank cells so that every
-- row contains the same number of cells.
--
-- Raises an error when the column count is not positive. (The previous
-- implementation looped forever on @grid 0 ws@, because @splitAt 0@
-- never consumes input, and divided by zero in the padding computation.)
--
-- > grid 3 widgets
grid :: Int -> [Gui] -> Gui
grid columnSize guis
  | columnSize <= 0 =
      error "Csound.Control.Gui.Layout.grid: column count must be positive"
  | otherwise = ver (map hor (rows guis))
  where
    -- Split the widgets into rows of columnSize, padding the last row.
    rows xs = case splitAt columnSize xs of
      (row, [])   -> [row ++ replicate (missing (length row)) space]
      (row, rest) -> row : rows rest
    -- Number of blank cells needed to complete a row of the given length.
    missing len = case len `mod` columnSize of
      0 -> 0
      n -> columnSize - n
| null | https://raw.githubusercontent.com/spell-music/csound-expression/29c1611172153347b16d0b6b133e4db61a7218d5/csound-expression/src/Csound/Control/Gui/Layout.hs | haskell | | The functions from this module specify the geometry
of the GUI-elements. They tell where to render the elements.
Every element is rectangular. To know where to place the element is
to know the parameters of the bounding rectangle. All rectangles are
relative and automatically aligned.
groups of the elements. Within the group we can change the relative size
we can put an empty space.
| We have two functions for grouping . They construct horizontal and vertical
of the rectangles ( by scaling one side of the rectangle ) . In place of rectangle
module Csound.Control.Gui.Layout (
hor, ver, space, sca, horSca, verSca, grid,
padding, margin, ScaleFactor, resizeGui,
) where
import Csound.Typed.Gui
| Layouts the widgets in grid . The first argument is the number of widgets in the row .
> grid widgets
grid :: Int -> [Gui] -> Gui
grid columnSize guis = ver $ fmap hor $ splitList columnSize guis
where
splitList n xs = case splitAt n xs of
(res, []) -> [res ++ spaceTail xs]
(as,rest) -> as : splitList n rest
spaceTail xs = replicate n space
where n = getMissingToEven (length xs)
getMissingToEven total = case total `mod` columnSize of
0 -> 0
n -> columnSize - n
|
9c8d54e27ebc2ed0a83efe7de17a0be5b18e3f069f631582181180f2202f7fbe | onaio/hatti | chart_test.cljs | (ns hatti.views.chart-test
(:require-macros [cljs.test :refer (is deftest testing)]
[dommy.core :refer [sel sel1]])
(:require [cljs.test :as t]
[cljs.core.async :refer [<! chan put!]]
[dommy.core :as dommy]
[hatti.shared :as shared]
[hatti.views :refer [chart-page]]
[hatti.views.chart]
[hatti.test-utils :refer [new-container! texts]]
[hatti.ona.forms :as f]
[om.core :as om :include-macros true]
[hatti.shared-test :refer
[fat-form no-data small-fat-data data-gen]]))
;; CHART COMPONENT HELPERS

;; A small form (the first four questions) used by the chart tests below.
(def chart-form (take 4 fat-form))

;; Stub for the chart-fetching function: immediately delivers an empty
;; response body on a core.async channel, so tests make no network calls.
(def chart-get-mock #(let [c (chan)] (put! c {:body nil}) c))
(defn- chart-container
  "Returns a container in which a chart component has been rendered.
  The `form` arg is passed to the component as its flat form."
  [form]
  (let [cont (new-container!)
        arg {:shared {:flat-form form
                      :event-chan (chan)}
             ;; use the stubbed fetcher so rendering needs no backend
             :opts {:chart-get chart-get-mock}
             :target cont}
        _ (om/root chart-page shared/app-state arg)]
    cont))
;; COMPONENT TESTS

(deftest charts-render-properly
  (let [container (chart-container chart-form)
        ;; questions shown in the menu: meta fields plus the form's own
        sel1qs (into (f/meta-fields chart-form :with-submission-details? true)
                     chart-form)
        stringqs (filter f/text? chart-form)]
    (testing "chart-chooser menu renders properly"
      (is (every? (-> container (sel :li.submenu-list) texts set)
                  (map :label chart-form)))
      ;; First icon is a clock, for submission time
      (is (= "fa fa-clock-o"
             (-> container (sel1 :ul) (sel :i.fa) first (dommy/attr :class))))
      ;; Rest of the items should be horizontal chart
      (is (= (rest (map #(-> % f/get-icon last :class) sel1qs))
             (->> (-> container (sel1 :ul) (sel :i.fa) rest)
                  (map #(dommy/attr % :class))))))
    (testing "string questions are unclickable on the chart menu"
      (let [stringq-texts (map :label stringqs)
            num-stringqs (count stringqs)
            ;; map of link text -> href for every anchor in the menu
            txt->href (into {} (map #(vector (dommy/text %)
                                             (dommy/attr % :href))
                                    (sel container :a)))]
        ;; string questions should render with no href at all
        (is (= (repeat num-stringqs nil)
               (map val (select-keys txt->href stringq-texts))))))
    (testing "chart-container displays submission time chart initially"
      (is (= 1 (count (sel container :div.chart-holder))))
      (is (= "Submission Time" (dommy/text (sel1 container :h3.chart-name)))))))
(deftest charts-renders-correct-language
  (let [chart-form [{:type "select one" :name "foo" :full-name "foo"
                     :label {:French "Oui?" :English "Yes?"}}]
        ;; set the app's current language before rendering
        _ (swap! shared/app-state assoc-in [:languages]
                 {:all [:English :French] :current :French})
        container (chart-container chart-form)]
    (testing "chart menu renders in current language when set"
      (is (contains? (-> container (sel :li.submenu-list) texts set) "Oui?")))))
| null | https://raw.githubusercontent.com/onaio/hatti/7dc23c1c60b9fc46e1ff5b023eb6c794ebd04773/test/hatti/views/chart_test.cljs | clojure | CHART COMPONENT HELPERS
COMPONENT TESTS
Rest of the items should be horizontal chart | (ns hatti.views.chart-test
(:require-macros [cljs.test :refer (is deftest testing)]
[dommy.core :refer [sel sel1]])
(:require [cljs.test :as t]
[cljs.core.async :refer [<! chan put!]]
[dommy.core :as dommy]
[hatti.shared :as shared]
[hatti.views :refer [chart-page]]
[hatti.views.chart]
[hatti.test-utils :refer [new-container! texts]]
[hatti.ona.forms :as f]
[om.core :as om :include-macros true]
[hatti.shared-test :refer
[fat-form no-data small-fat-data data-gen]]))
(def chart-form (take 4 fat-form))
(def chart-get-mock #(let [c (chan)] (put! c {:body nil}) c))
(defn- chart-container
"Returns a container in which a chart component has been rendered.
`data` arg is directly passed into the component as its cursor."
[form]
(let [cont (new-container!)
arg {:shared {:flat-form form
:event-chan (chan)}
:opts {:chart-get chart-get-mock}
:target cont}
_ (om/root chart-page shared/app-state arg)]
cont))
(deftest charts-render-properly
(let [container (chart-container chart-form)
sel1qs (into (f/meta-fields chart-form :with-submission-details? true)
chart-form)
stringqs (filter f/text? chart-form)]
(testing "chart-chooser menu renders properly"
(is (every? (-> container (sel :li.submenu-list) texts set)
(map :label chart-form)))
First icon is a clock , for submission time
(is (= "fa fa-clock-o"
(-> container (sel1 :ul) (sel :i.fa) first (dommy/attr :class))))
(is (= (rest (map #(-> % f/get-icon last :class) sel1qs))
(->> (-> container (sel1 :ul) (sel :i.fa) rest)
(map #(dommy/attr % :class))))))
(testing "string questions are unclickable on the chart menu"
(let [stringq-texts (map :label stringqs)
num-stringqs (count stringqs)
txt->href (into {} (map #(vector (dommy/text %)
(dommy/attr % :href))
(sel container :a)))]
(is (= (repeat num-stringqs nil)
(map val (select-keys txt->href stringq-texts))))))
(testing "chart-container displays submission time chart initially"
(is (= 1 (count (sel container :div.chart-holder))))
(is (= "Submission Time" (dommy/text (sel1 container :h3.chart-name)))))))
(deftest charts-renders-correct-language
(let [chart-form [{:type "select one" :name "foo" :full-name "foo"
:label {:French "Oui?" :English "Yes?"}}]
_ (swap! shared/app-state assoc-in [:languages]
{:all [:English :French] :current :French})
container (chart-container chart-form)]
(testing "chart menu renders in current language when set"
(is (contains? (-> container (sel :li.submenu-list) texts set) "Oui?")))))
|
37495fdabee787c5f5b267a3bdeeb0f2ddf943b46915689e4cc00bafe0fc7971 | na4zagin3/satyrographos | template_docMake_en.ml |
   SPDX-License-Identifier: CC0-1.0
*)
(* Registry identifier of this Satyrographos template. *)
let name = "[experimental]doc-make@en"
(* local.satyh: per-document SATySFi helper definitions — a framed
   block, a "display boxes" block, and a sample math command. *)
let local_satyh_template =
  "local.satyh",
  {|% This is a file for local function/command definitions
@require: code
@require: math

let-block ctx +frame content =
  let pads = (10pt, 10pt, 10pt, 10pt) in
  let decoset = VDecoSet.simple-frame-stroke 1pt (Color.gray 0.75) in
  block-frame-breakable ctx pads decoset (fun ctx -> read-block ctx content)

let-block ctx +display-boxes content code =
  read-block (ctx |> set-paragraph-margin 12pt 0pt) '<+frame(content);>
    +++ read-block (ctx |> set-paragraph-margin 0pt 12pt) '<+code(code);>

% Define a math command
let-math \factorial x =
  ${#x \mathpunct{\mathrm-token!(`!`)}}
|}
let main_saty_template =
"main.saty",
{|% This is the document file
% Class package
@require: stdjabook
% Standard packages
@require: annot
@require: code
@require: math
@require: itemize
% Third-party packages
@require: fss/fss
@require: fss/fonts
@require: fss/style
% Local package
@import: local
document (|
title = {Test Document};
author = {Your Name};
show-title = true;
show-toc = false;
|) '<
+p {
This template is for \SATySFi; 0.0.5.
As \SATySFi; is not yet murture,
please be warned that \font-style[italic]{you may experience some breaking changes}.
}
+p {
There are online resources, so Please check out!
\listing{
* \href(``){`demo.saty`} is a brief introduction to \SATySFi;.
* Please join \href(`-Wiki#satsysfi-slack`){\emph{SATySFi Slack}}!
}%
}
+p {
As you see, `+p { ... }` represents a paragraph.
Technically speaking, `+p` is a block command applied to an inline text object `{ ... }`.
}
+p {
An inline equation is represented by a math object `${ ... }`. E.g., ${x^2 - x + 1}.
}
+p {
Basic math commands resemble those in \LaTeX;. E.g., ${f: A \to \mathbb{R}}.
}
+p {
Unlike math commands or \LaTeX; commands, a text command needs argument terminator “`;`” if the last argument is neither `{ ... }` (i.e., an inline text) or `< ... >` (i.e., a block text): \emph{emph} vs. \code(`code`);.
}
+p({
Each text command takes parenthesized arguments or block/inline texts.
E.g., \emph{abc} vs. \emph({abc});.
});
+p {
You can get a displayed equation by applying `\eqn` command to a math object. E.g.,
\eqn(${
\int_{M} d\alpha = \int_{\partial M}\alpha.
});%
Similarly, you can get a code example with `\d-code` command.
\d-code(```
\eqn(${
\int_{M} d\alpha = \int_{\partial M}\alpha
});
```);%
}
+p {
`\math-list` takes a list of math objects.
\math-list[
${\delta_{ij} = \cases![
(${1}, {${i = j}});
(${0}, {otherwise});
]};
${\epsilon_{a_{1}a_{2}\cdots a_{n}} =
\lower{\prod}{1\leq i\leq j\leq n}
\mathop{\mathrm{sgn}}\paren{a_{j} - a_{i}}
};
];%
`\align` takes a list of lists of math objects.
\align[
[ ${\pi};
${=\paren{
\frac{2\sqrt{2}}{99^{2}}\upper{\lower{\sum}{n=0}}{\infty}
\frac{
\factorial{\paren{4n}}
\paren{1103 + 26390n}
}{
\paren{4^{n} 99^{n} \factorial{n}}^{4}
}
}^{-1}
};
];
[ ${};
${=\paren{
\int_{-\infty}^{\infty}
e^{
-x^2
}
\mathrm{d}x
}^{ 2 }
};
];
];%
}
+section{Sections} <
+p {
A new section is created by
\code(`+section{Section title} < block commands... >`);.
}
+subsection{Subsection} <
+p {
There’s `+subsection` command too.
}
>
>
+section{Packages} <
+p {
You can import standard/third-party packages with `@require` directive:
}
+code (`
@require: math
`);
+p {
`@import` directive will import a package from the relative path to this file.
}
+code (`
% This directive imports local.satyh file
@import: ./local
`);
>
>
|}
(* Satyristes: the Satyrographos build description — names the build
   target ("main"), its build command, and its library dependencies. *)
let satyristes_template =
  "Satyristes",
  {|(lang "0.0.3")

(doc
  (name "main")
  (build ((make)))
  (dependencies
   (;; Standard library
    dist
    ;; Third-party library
    fss
    )))
|}
(* .gitignore for the generated project: OMake caches, Satyrographos
   dependency files, SATySFi aux files, and the built PDF. *)
let gitignore_template =
  ".gitignore",
  {|# OMake
*.omc
.omakedb.lock

# Satyristes
*.deps

# SATySFi
*.satysfi-aux

# Generated files
main.pdf
|}
(* Makefile driving the build: compiles main.saty to main.pdf via
   Satyrographos and generates a dependency file for incremental
   rebuilds. Note that make recipe lines must be indented with tabs. *)
let makefile_template =
  "Makefile",
  {|.PHONY: all
all: doc

# SATySFi/Satyrographos rules
%.pdf: %.saty
	satyrographos satysfi -- -o $@ $<
%.pdf.deps: %.saty
	satyrographos util deps -r -p --depfile $@ --mode pdf -o "$(basename $@)" $<

# User rules
doc: main.pdf
-include main.pdf.deps
|}
(* README for the generated project. NOTE(review): "@@library@@" looks
   like a placeholder substituted at template instantiation — confirm
   against the template expansion code. *)
let readme_template =
  "README.md",
  {|# @@library@@

A great document.

## How to compile?

Run `satyrographos build`.
|}
(* All (filename, contents) pairs emitted by this template. *)
let files = [
  main_saty_template;
  local_satyh_template;
  satyristes_template;
  gitignore_template;
  makefile_template;
  readme_template;
]
(* The exported template: registry name plus a human-readable label
   and the list of files to generate. *)
let template =
  name, ("Document with Makefile (en)", files)
| null | https://raw.githubusercontent.com/na4zagin3/satyrographos/9dbccf05138510c977a67c859bbbb48755470c7f/src/template/template_docMake_en.ml | ocaml |
SPDX - License - Identifier : CC0 - 1.0
SPDX-License-Identifier: CC0-1.0
*)
let name = "[experimental]doc-make@en"
let local_satyh_template =
"local.satyh",
{|% This is a file for local function/command definitions
@require: code
@require: math
let-block ctx +frame content =
let pads = (10pt, 10pt, 10pt, 10pt) in
let decoset = VDecoSet.simple-frame-stroke 1pt (Color.gray 0.75) in
block-frame-breakable ctx pads decoset (fun ctx -> read-block ctx content)
let-block ctx +display-boxes content code =
read-block (ctx |> set-paragraph-margin 12pt 0pt) '<+frame(content);>
+++ read-block (ctx |> set-paragraph-margin 0pt 12pt) '<+code(code);>
% Define a math command
let-math \factorial x =
${#x \mathpunct{\mathrm-token!(`!`)}}
|}
let main_saty_template =
"main.saty",
{|% This is the document file
% Class package
@require: stdjabook
% Standard packages
@require: annot
@require: code
@require: math
@require: itemize
% Third-party packages
@require: fss/fss
@require: fss/fonts
@require: fss/style
% Local package
@import: local
document (|
title = {Test Document};
author = {Your Name};
show-title = true;
show-toc = false;
|) '<
+p {
This template is for \SATySFi; 0.0.5.
As \SATySFi; is not yet murture,
please be warned that \font-style[italic]{you may experience some breaking changes}.
}
+p {
There are online resources, so Please check out!
\listing{
* \href(``){`demo.saty`} is a brief introduction to \SATySFi;.
* Please join \href(`-Wiki#satsysfi-slack`){\emph{SATySFi Slack}}!
}%
}
+p {
As you see, `+p { ... }` represents a paragraph.
Technically speaking, `+p` is a block command applied to an inline text object `{ ... }`.
}
+p {
An inline equation is represented by a math object `${ ... }`. E.g., ${x^2 - x + 1}.
}
+p {
Basic math commands resemble those in \LaTeX;. E.g., ${f: A \to \mathbb{R}}.
}
+p {
Unlike math commands or \LaTeX; commands, a text command needs argument terminator “`;`” if the last argument is neither `{ ... }` (i.e., an inline text) or `< ... >` (i.e., a block text): \emph{emph} vs. \code(`code`);.
}
+p({
Each text command takes parenthesized arguments or block/inline texts.
E.g., \emph{abc} vs. \emph({abc});.
});
+p {
You can get a displayed equation by applying `\eqn` command to a math object. E.g.,
\eqn(${
\int_{M} d\alpha = \int_{\partial M}\alpha.
});%
Similarly, you can get a code example with `\d-code` command.
\d-code(```
\eqn(${
\int_{M} d\alpha = \int_{\partial M}\alpha
});
```);%
}
+p {
`\math-list` takes a list of math objects.
\math-list[
${\delta_{ij} = \cases![
(${1}, {${i = j}});
(${0}, {otherwise});
]};
${\epsilon_{a_{1}a_{2}\cdots a_{n}} =
\lower{\prod}{1\leq i\leq j\leq n}
\mathop{\mathrm{sgn}}\paren{a_{j} - a_{i}}
};
];%
`\align` takes a list of lists of math objects.
\align[
[ ${\pi};
${=\paren{
\frac{2\sqrt{2}}{99^{2}}\upper{\lower{\sum}{n=0}}{\infty}
\frac{
\factorial{\paren{4n}}
\paren{1103 + 26390n}
}{
\paren{4^{n} 99^{n} \factorial{n}}^{4}
}
}^{-1}
};
];
[ ${};
${=\paren{
\int_{-\infty}^{\infty}
e^{
-x^2
}
\mathrm{d}x
}^{ 2 }
};
];
];%
}
+section{Sections} <
+p {
A new section is created by
\code(`+section{Section title} < block commands... >`);.
}
+subsection{Subsection} <
+p {
There’s `+subsection` command too.
}
>
>
+section{Packages} <
+p {
You can import standard/third-party packages with `@require` directive:
}
+code (`
@require: math
`);
+p {
`@import` directive will import a package from the relative path to this file.
}
+code (`
% This directive imports local.satyh file
@import: ./local
`);
>
>
|}
let satyristes_template =
"Satyristes",
{|(lang "0.0.3")
(doc
(name "main")
(build ((make)))
(dependencies
(;; Standard library
dist
;; Third-party library
fss
)))
|}
let gitignore_template =
".gitignore",
{|# OMake
*.omc
.omakedb.lock
# Satyristes
*.deps
# SATySFi
*.satysfi-aux
# Generated files
main.pdf
|}
let makefile_template =
"Makefile",
{|.PHONY: all
all: doc
# SATySFi/Satyrographos rules
%.pdf: %.saty
satyrographos satysfi -- -o $@ $<
%.pdf.deps: %.saty
satyrographos util deps -r -p --depfile $@ --mode pdf -o "$(basename $@)" $<
# User rules
doc: main.pdf
-include main.pdf.deps
|}
let readme_template =
"README.md",
{|# @@library@@
A great document.
## How to compile?
Run `satyrographos build`.
|}
let files = [
main_saty_template;
local_satyh_template;
satyristes_template;
gitignore_template;
makefile_template;
readme_template;
]
let template =
name, ("Document with Makefile (en)", files)
| |
f226231447a7c9daeb279a4698ee0f2dfe2b14ce7c1c830bad8e762cdf744e02 | default-kramer/fission-flare | graphics.rkt | #lang racket/gui
(provide frame->pict state->pict cell-size)
(require pict
"util.rkt"
"setup-panel.rkt"
"occupant-picts.rkt"
"../core.rkt")
;; Using `scale` or `rotate` sometimes introduces unwanted artifacts.
;; So instead, this parameter controls the desired pixel count (height
;; and width) of a single cell:
(define cell-size (make-parameter 30))
;; The viewmodel contains the state and all extra information that we
;; need to draw. Commonly abbreviated `vm` in this file.
(struct viewmodel (state
                   ;; burst-factor is used to scale the size of blanks
                   ;; being bursted. Should be a number between 0 and 1
                   ;; or #f when nothing is bursting.
                   burst-factor
                   ;; fast-blinker is a boolean value that should be #t
                   ;; for a few frames followed by #f for a few frames,
                   ;; repeating endlessly. This can be used to control,
                   ;; for example, the energy gauge to blink when it is
                   ;; critically low.
                   fast-blinker
                   ;; extra-occs : a hash of (Loc -> Occ) mappings to be
                   ;; drawn as if they were still on the grid. Used for
                   ;; the "ripple destruction" effect.
                   extra-occs
                   ;; seconds-remaining : the number of seconds remaining
                   ;; for time attack, or #f.
                   seconds-remaining
                   ;; frame-debug-str : any string (or #f), shown in a
                   ;; corner of the grid for debugging.
                   frame-debug-str
                   ) #:transparent)
;; Shared color palette for the renderer.
(define black (make-color 0 0 0))
(define white (make-color 255 255 255))
(define red (make-color 255 0 0))
(define dark-gray (make-color 55 55 55))
(define transparent (make-color 0 0 0 0))
;; translucent black overlay, used to dim picts
(define mask (make-color 0 0 0 0.7))
(define light-orange (make-color 255 180 60))
(define dark-orange (make-color 255 120 0))
(define green (make-color 30 255 30))
;; Caching is very important.
;; I don't know if freezing improves efficiency, but it seems to introduce
;; anti-aliasing which looks better IMO.
;; Update - but freezing looks horrible when Windows DPI is not 100%
;;
;; occ->pict : memoized rendering of an occupant key (as produced by
;; occupant-key below) at the current cell-size. The cache key includes
;; the size, so changing cell-size never returns a stale pict.
(define occ->pict
  (let ([cache (make-hash)])
    (lambda (occupant-key)
      (let* ([size (cell-size)]
             [cache-key (cons `(#:size ,size) occupant-key)])
        (or (hash-ref cache cache-key #f)
            (let ([result (occupant-key->pict occupant-key size)]
                  #;[result (freeze result)])
              (hash-set! cache cache-key result)
              result))))))
;; occupant-key : viewmodel occupant -> s-expression describing how the
;; occupant should be drawn. Keys are plain data so occ->pict can cache
;; on them. A colorless (blank) catalyst during a burst is wrapped in
;; `(scale ,factor ...) so it shrinks as the burst animation progresses.
(define (occupant-key vm occ)
  (cond
    [(not occ) '(#f)]
    [(ground? occ) '(ground)]
    [(fuel? occ) `(fuel ,(occupant-color occ))]
    [(catalyst? occ)
     (let* ([color (occupant-color occ)]
            ;; only blanks (color = #f) participate in the burst effect
            [factor (and (not color)
                         (viewmodel-burst-factor vm))]
            [raw `(catalyst ,(occupant-color occ) ,(catalyst-direction occ))])
       (if factor
           `(scale ,factor ,raw)
           raw))]
    [(contaminant? occ) `(contaminant ,(occupant-color occ))]
    [else (fail "unexpected occupant" occ)]))
;; Render an occupant (no mask variant).
(define (occupant->pict vm occ)
  (occupant->pict/maskable vm occ #f))

;; Render an occupant; when mask? is truthy, request the masked variant
;; of the pict by wrapping the cache key in 'mask.
(define (occupant->pict/maskable vm occ mask?)
  (define key (occupant-key vm occ))
  (occ->pict (if mask? (list 'mask key) key)))
(define/contract (grid-pict vm)
(-> viewmodel? pict?)
(define state (viewmodel-state vm))
(define top-line-height 10)
(define width (state-width state))
(define height (state-height state))
(define preview-fall-state
(car (state-apply state (list (action:plummet)))))
(define (make-pict loc)
(define occ (or (hash-ref (viewmodel-extra-occs vm) loc #f)
(state-get state loc)))
(define preview-occ (and preview-fall-state
(not occ)
(state-get preview-fall-state loc)))
(occupant->pict/maskable vm (or occ preview-occ) preview-occ))
(define (make-row y)
(apply hc-append (for/list ([x (in-range width)])
(make-pict (make-loc x y)))))
(define grid-pict
(let* ([picts (for/list ([y (in-range (sub1 height) -1 -1)])
(make-row y))]
[width (pict-width (car picts))]
[top-line (filled-rectangle width top-line-height #:color white #:draw-border? #f)]
[picts (match picts
[(list a b more ...)
(list* a b top-line more)])]
[grid-pict (apply vc-append picts)])
grid-pict))
(define (make-text str)
(define t (text str (list white 'bold) (* 2 (cell-size))))
(cc-superimpose
(filled-rectangle (pict-width t) (pict-height t) #:color mask)
t))
(case (state-game-over? state)
[(#f) grid-pict]
[(win) (cc-superimpose grid-pict (make-text "Win"))]
[(time-expired) (cc-superimpose grid-pict (make-text "Time"))]
[else (cc-superimpose grid-pict (make-text "Lose"))]))
(define/contract (queue->pict vm queue)
(-> viewmodel? (listof (cons/c occupant? occupant?)) pict?)
(let* ([padding 20]
[main-pict
(apply vc-append
(for/list ([pair queue])
(vc-append (blank padding)
(hc-append (occupant->pict vm (car pair))
(occupant->pict vm (cdr pair))))))]
[w (+ (pict-width main-pict) (* 2 padding))]
[h (+ (pict-height main-pict) padding)])
(ct-superimpose
(filled-rectangle w h #:color "gray" #:draw-border? #f)
main-pict)))
;; Draw the penalty-countdown bar: a colored bar whose width shrinks as
;; `val` approaches `maxval`, with the numeric value right-aligned on
;; top. The bar is light orange while val > 0 and dark orange otherwise.
(define (clock-pict2 val maxval [w 400] [h 80])
  ;; keep the width ratio within [0, 1]
  (define (clamp x)
    (max 0 (min x 1)))
  (define text-size (floor (/ (* h 3) 4)))
  (define clock-ratio (clamp (1 . - . (val . / . maxval))))
  (define clock-color (if (val . > . 0) light-orange dark-orange))
  (lc-superimpose
   (filled-rectangle (* clock-ratio w) h #:color clock-color #:draw-border? #f)
   (rc-superimpose
    (blank w h)
    (text (~a val) '(bold) text-size))
   ;; just for the border:
   (rectangle w h)))

;; Convenience wrapper: renders clock-pict2 from the state's penalty
;; countdown against the configured penalty resistance.
(define (clock-pict state [w 400] [h 80])
  (let* ([settings (state-settings state)]
         [resistance (game-settings-penalty:resistance settings)]
         [ps (state-penalty-state state)]
         [countdown (penalty-state-countdown ps)])
    (clock-pict2 countdown resistance w h)))
;; Center PICT on a solid COLOR rectangle of the same dimensions.
(define (add-background pict color)
  (define bg
    (filled-rectangle (pict-width pict) (pict-height pict) #:color color))
  (cc-superimpose bg pict))
;; pad : surround a pict with blank space. `spec` is either a 4-element
;; list (top right bottom left) or a single number applied to all four
;; sides. Any non-4-element-list value falls into the uniform case.
(define (pad pict spec)
  (define-values (top right bottom left)
    (match spec
      [(list a b c d)
       (values a b c d)]
      [x (values x x x x)]))
  ;; round the padded box up to whole (exact) pixels
  (let ([w (inexact->exact (ceiling (+ left right (pict-width pict))))]
        [h (inexact->exact (ceiling (+ top bottom (pict-height pict))))])
    (lt-superimpose (blank w h)
                    (translate pict left top))))
(define (battery-pict percent [blink? #f])
(define size (cell-size))
(define (size* a [size size])
(floor (* a size)))
(let* ([thickness (size* 0.25)]
[w (size* 3)]
[h (* w 2)]
[knob-w size]
[knob-h (size* 0.3)]
[knob-x (/ (- w knob-w) 2)]
[bar-width (- w thickness thickness (/ w 5))]
[bar-x-offset (/ (- w bar-width) 2)]
[bar-count 5]
[bar-h (floor (/ h (* bar-count 1.5)))]
[bar-y-offset (+ bar-h thickness)])
(dc (lambda (dc dx dy)
(define old-pen (send dc get-pen))
(define old-brush (send dc get-brush))
(send dc set-pen (new pen% [width thickness] [color black]))
(send dc draw-rounded-rectangle (+ dx thickness) (+ dy thickness knob-h) w h)
(send dc set-brush (new brush% [color black]))
(send dc draw-rounded-rectangle (+ dx thickness knob-x) (+ dy thickness) knob-w knob-h)
(when (not blink?)
(let ([color (if (< percent 0.4)
light-orange
green)])
(send dc set-pen (new pen% [width 1] [color color]))
(send dc set-brush (new brush% [color color]))
(for ([i (in-range bar-count)])
(let* ([percent-per-bar (/ 1 bar-count)]
[range-bottom (- 1 (* (add1 i) percent-per-bar))]
[excess (- percent range-bottom)]
[foo (min 1 (max 0 (/ excess percent-per-bar)))]
[bar-h-adjusted (* foo bar-h)]
[bar-h-missing (- bar-h bar-h-adjusted)])
(send dc draw-rectangle
(+ dx thickness bar-x-offset)
(+ dy bar-h-missing (* (add1 i) bar-y-offset))
bar-width
bar-h-adjusted)))))
(send dc set-pen old-pen)
(send dc set-brush old-brush))
(+ w thickness thickness)
(+ h thickness thickness knob-h))))
;; Battery icon with an "<energy> kW" label beneath it. When the blinker
;; is on and the charge is critically low (<= 15%), blink? suppresses
;; the battery bars, producing a blinking effect. The fill level passed
;; to battery-pict is floored at 0.1 so the icon never looks empty.
(define (gauge-pict energy max-energy blinker)
  (let* ([percent (/ energy max-energy)]
         [critical-level 0.15]
         [blink? (and blinker (<= percent critical-level))]
         [battery (battery-pict (max 0.1 percent) blink?)]
         [size (cell-size)]
         [label (text (format "~a kW" energy) '(bold) size)])
    (vr-append battery label)))
;; Format a (possibly fractional) number of seconds as "M:SS.FF" —
;; minutes, zero-padded seconds, and hundredths. E.g. 75.5 -> "1:15.50".
(define (seconds->string total-seconds)
  ;; str : truncate to an exact integer, optionally left-padded with 0s
  (define (str num [padding #f])
    (let ([num (inexact->exact (truncate num))])
      (if padding
          (~a num #:min-width padding #:left-pad-string "0" #:align 'right)
          (~a num))))
  (let*-values ([(seconds)
                 (truncate total-seconds)]
                [(fraction)
                 (- total-seconds seconds)]
                [(minutes seconds)
                 (quotient/remainder seconds 60)])
    (format "~a:~a.~a"
            (str minutes)
            (str seconds 2)
            (str (* fraction 100) 2))))
;; Time-attack display: the remaining time above the energy figure.
;; The time turns red under 20 seconds, blinks (by rendering in the
;; transparent color) under 10 seconds when blinker? is on, and settles
;; on black once it reaches 0.
(define (time-attack-pict energy seconds-remaining [blinker? #f])
  (let* ([size (cell-size)]
         [color (if (< seconds-remaining 20)
                    red
                    black)]
         [color (if (and blinker? (< seconds-remaining 10))
                    transparent
                    color)]
         [color (if (<= seconds-remaining 0)
                    black
                    color)])
    (vr-append
     (text (seconds->string seconds-remaining)
           (list 'bold color)
           size)
     (blank 10 10)
     (text (format "~a kW" energy) '(bold) size))))
;; Render combo explanations as right-aligned "heading number" lines,
;; each followed by a slightly smaller detail line. Empty list yields
;; an empty pict.
(define (combo-pict explanations size)
  (match explanations
    [(list (list heading number detail) more ...)
     (vr-append
      (text (format "~a ~a" heading number) '() size)
      (text detail '() (- size 2))
      (combo-pict more size))]
    [(list)
     (blank 0 0)]))
(define (stats-pict state)
(let* ([stats (state-stats state)]
[spawn-count (stats-spawn-count stats)]
[spawn-energy (stats-spawn-energy stats)]
[waiting-frames (stats-waiting-frames stats)]
[waiting-energy (stats-waiting-energy stats)]
[size 12]
[spacer (blank (* size 11) size)]
[txt (lambda (str . args)
(text (apply format (cons str args)) '() size))]
[current-combo (state-current-combo state)]
[combo (or current-combo (state-previous-combo state))]
[combo-name (if current-combo "Current" "Previous")]
[explanations (if combo
(combo-explanations combo)
(list))])
(vr-append
(txt "catalysts: ~a" (stats-spawn-count stats))
(txt "energy cost: ~a" (stats-spawn-energy stats))
spacer
(txt "waiting frames: ~a" (stats-waiting-frames stats))
(txt "energy cost: ~a" (stats-waiting-energy stats))
spacer
(txt "~a Combo:" combo-name)
(combo-pict explanations size)
)))
(define (viewmodel->pict vm show-clock? show-queue?)
(define state (viewmodel-state vm))
(define pgrid
(let ([main (add-background (grid-pict vm) black)]
[str (viewmodel-frame-debug-str vm)])
(if str
(lt-superimpose main
(text str (list white) 12))
main)))
(define pleft
(if show-clock?
(let ([w (pict-width pgrid)]
[h (* (cell-size) 2)])
(vc-append pgrid
(clock-pict state w h)))
pgrid))
(define pmain
(if show-queue?
(let ([spawns (state-next-spawns state 5)]
[vm (struct-copy viewmodel vm
[burst-factor #f])])
(ht-append pleft
(pad (queue->pict vm spawns) '(0 20 20 20))))
pleft))
(let* ([energy (state-energy state)]
[max-energy (state-max-energy state)]
[blinker (viewmodel-fast-blinker vm)]
[seconds-remaining (viewmodel-seconds-remaining vm)]
[left (stats-pict state)]
[left (cond
[seconds-remaining
(vr-append (pad (time-attack-pict energy seconds-remaining blinker) '(0 0 35 0))
left)]
[max-energy
(vr-append (pad (gauge-pict energy max-energy blinker) '(0 0 35 0))
left)]
[else left])]
[left (pad left '(0 12 0 0))])
(ht-append left pmain)))
;; Render a state outside of animation: no burst shrinking, no blinker,
;; no extra occupants, no time-attack clock. Useful for static views.
(define (state->pict state [show-clock? #t] [show-queue? #t])
  (let* ([burst-factor #f]
         [fast-blinker #f]
         [extra-occs (hash)]
         [vm (viewmodel state burst-factor fast-blinker extra-occs #f #f)])
    (viewmodel->pict vm show-clock? show-queue?)))
;; Render an animation frame. Derives all animation inputs of the
;; viewmodel from the frame: during the 'bursting phase, once
;; burst-delay frames have elapsed, burst-factor shrinks linearly from
;; 1 toward 0; the fast blinker toggles every blink-rate frames.
(define (frame->pict frame)
  (let* ([state (frame-state frame)]
         [info (frame-info frame)]
         [kind (cadr info)]
         [timing (frame-timing frame)]
         [frames-needed (timing-bursting timing)]
         [counter (frame-counter frame)]
         [frames-elapsed (- counter (car info))]
         ;; how many frames before we start shrinking the blanks
         [burst-delay 6]
         [burst-factor (and (equal? 'bursting kind)
                            (> frames-elapsed burst-delay)
                            (- 1 (/ (- frames-elapsed burst-delay)
                                    (- frames-needed burst-delay))))]
         [blink-rate 3]
         [fast-blinker (= 0 (modulo (quotient counter blink-rate) 2))]
         [extra-occs (frame-extra-occs frame)]
         [time-remaining (frame-time-remaining frame)]
         ;; never show a negative countdown
         [time-remaining (and time-remaining (max 0 time-remaining))]
         [debug-str (format "~a ~a" kind counter)]
         [vm (viewmodel state burst-factor fast-blinker extra-occs time-remaining debug-str)])
    (viewmodel->pict vm #t #t)))
;; Just for testing / debugging: one pict of every occupant variety
;; (empty, ground, each fuel/contaminant color, and each catalyst
;; color/direction combination).
(define (build-picts)
  (flatten (list (occ->pict '(#f))
                 (occ->pict '(ground))
                 (for/list ([color '(r y b)])
                   (occ->pict `(fuel ,color)))
                 (for/list ([color '(r y b)])
                   (occ->pict `(contaminant ,color)))
                 (for/list ([color '(r y b #f)]
                            #:when #t
                            [dir '(#f u r d l)])
                   (occ->pict `(catalyst ,color ,dir))))))

(module+ test
  ;; make sure no error is thrown at any size, including 0
  (for ([i (in-range 30)])
    (parameterize ([cell-size i])
      (void (build-picts)))))
(module+ main
(define (make-state level)
(let ([settings (build-settings 'standard level #f)])
(make-initial-state settings)))
(clock-pict2 1300 700)
(clock-pict2 -100 800)
(state->pict (make-state 10))
; The following is useful for debugging drawing problems.
; Also try changing the border to black instead of the occupant's color.
(parameterize ([cell-size 100])
(for/list ([dir '(#f u r d l)])
(cc-superimpose (filled-rectangle (cell-size) (cell-size) #:color "green" #:draw-border? #f)
(occ->pict `(catalyst r ,dir)))))
(flatten (build-picts))
(ht-append (occ->pict '(catalyst b r))
(occ->pict '(catalyst r l)))
)
| null | https://raw.githubusercontent.com/default-kramer/fission-flare/80d8e0649a6088912da257a59ffd82b361acab98/src/ui/graphics.rkt | racket | Using `scale` or `rotate` sometimes introduces unwanted artifacts.
So instead, this parameter controls the desired pixel count (height and width)
of a single cell:
The viewmodel contains the state and all extra information that we need to draw.
Burst factor is used to scale the size of blanks being bursted.
Fast Blinker is a boolean value that should be #t for a few frames
followed by #f for a few frames, repeating endlessly.
This can be used to control, for example, the energy gauge
to blink when it is critically low.
extra-occs : a hash of (Loc -> Occ) mappings to be drawn as if they
were still on the grid. Used for the "ripple destruction" effect.
frame-debug-str : any string
Caching is very important.
I don't know if freezing improves efficiency, but it seems to introduce
anti-aliasing which looks better IMO.
[result (freeze result)])
just for the border:
how many frames before we start shrinking the blanks
Just for testing / debugging
make sure no error is thrown at any size, including 0
The following is useful for debugging drawing problems.
Also try changing the border to black instead of the occupant's color. | #lang racket/gui
(provide frame->pict state->pict cell-size)
(require pict
"util.rkt"
"setup-panel.rkt"
"occupant-picts.rkt"
"../core.rkt")
(define cell-size (make-parameter 30))
Commonly abbreviated ` vm ` in this file .
(struct viewmodel (state
Should be a number between 0 and 1 or # f
burst-factor
fast-blinker
extra-occs
seconds - remaining : the number of seconds remaining for time attack or # f
seconds-remaining
frame-debug-str
) #:transparent)
(define black (make-color 0 0 0))
(define white (make-color 255 255 255))
(define red (make-color 255 0 0))
(define dark-gray (make-color 55 55 55))
(define transparent (make-color 0 0 0 0))
(define mask (make-color 0 0 0 0.7))
(define light-orange (make-color 255 180 60))
(define dark-orange (make-color 255 120 0))
(define green (make-color 30 255 30))
Update - but freezing looks horrible when Windows DPI is not 100 %
(define occ->pict
(let ([cache (make-hash)])
(lambda (occupant-key)
(let* ([size (cell-size)]
[cache-key (cons `(#:size ,size) occupant-key)])
(or (hash-ref cache cache-key #f)
(let ([result (occupant-key->pict occupant-key size)]
(hash-set! cache cache-key result)
result))))))
(define (occupant-key vm occ)
(cond
[(not occ) '(#f)]
[(ground? occ) '(ground)]
[(fuel? occ) `(fuel ,(occupant-color occ))]
[(catalyst? occ)
(let* ([color (occupant-color occ)]
[factor (and (not color)
(viewmodel-burst-factor vm))]
[raw `(catalyst ,(occupant-color occ) ,(catalyst-direction occ))])
(if factor
`(scale ,factor ,raw)
raw))]
[(contaminant? occ) `(contaminant ,(occupant-color occ))]
[else (fail "unexpected occupant" occ)]))
(define (occupant->pict vm occ)
(occ->pict (occupant-key vm occ)))
(define (occupant->pict/maskable vm occ mask?)
(let ([o (occupant-key vm occ)])
(occ->pict (if mask?
(list 'mask o)
o))))
(define/contract (grid-pict vm)
(-> viewmodel? pict?)
(define state (viewmodel-state vm))
(define top-line-height 10)
(define width (state-width state))
(define height (state-height state))
(define preview-fall-state
(car (state-apply state (list (action:plummet)))))
(define (make-pict loc)
(define occ (or (hash-ref (viewmodel-extra-occs vm) loc #f)
(state-get state loc)))
(define preview-occ (and preview-fall-state
(not occ)
(state-get preview-fall-state loc)))
(occupant->pict/maskable vm (or occ preview-occ) preview-occ))
(define (make-row y)
(apply hc-append (for/list ([x (in-range width)])
(make-pict (make-loc x y)))))
(define grid-pict
(let* ([picts (for/list ([y (in-range (sub1 height) -1 -1)])
(make-row y))]
[width (pict-width (car picts))]
[top-line (filled-rectangle width top-line-height #:color white #:draw-border? #f)]
[picts (match picts
[(list a b more ...)
(list* a b top-line more)])]
[grid-pict (apply vc-append picts)])
grid-pict))
(define (make-text str)
(define t (text str (list white 'bold) (* 2 (cell-size))))
(cc-superimpose
(filled-rectangle (pict-width t) (pict-height t) #:color mask)
t))
(case (state-game-over? state)
[(#f) grid-pict]
[(win) (cc-superimpose grid-pict (make-text "Win"))]
[(time-expired) (cc-superimpose grid-pict (make-text "Time"))]
[else (cc-superimpose grid-pict (make-text "Lose"))]))
(define/contract (queue->pict vm queue)
(-> viewmodel? (listof (cons/c occupant? occupant?)) pict?)
(let* ([padding 20]
[main-pict
(apply vc-append
(for/list ([pair queue])
(vc-append (blank padding)
(hc-append (occupant->pict vm (car pair))
(occupant->pict vm (cdr pair))))))]
[w (+ (pict-width main-pict) (* 2 padding))]
[h (+ (pict-height main-pict) padding)])
(ct-superimpose
(filled-rectangle w h #:color "gray" #:draw-border? #f)
main-pict)))
(define (clock-pict2 val maxval [w 400] [h 80])
(define (clamp x)
(max 0 (min x 1)))
(define text-size (floor (/ (* h 3) 4)))
(define clock-ratio (clamp (1 . - . (val . / . maxval))))
(define clock-color (if (val . > . 0) light-orange dark-orange))
(lc-superimpose
(filled-rectangle (* clock-ratio w) h #:color clock-color #:draw-border? #f)
(rc-superimpose
(blank w h)
(text (~a val) '(bold) text-size))
(rectangle w h)))
(define (clock-pict state [w 400] [h 80])
(let* ([settings (state-settings state)]
[resistance (game-settings-penalty:resistance settings)]
[ps (state-penalty-state state)]
[countdown (penalty-state-countdown ps)])
(clock-pict2 countdown resistance w h)))
(define (add-background pict color)
(cc-superimpose (filled-rectangle (pict-width pict) (pict-height pict) #:color color)
pict))
(define (pad pict spec)
(define-values (top right bottom left)
(match spec
[(list a b c d)
(values a b c d)]
[x (values x x x x)]))
(let ([w (inexact->exact (ceiling (+ left right (pict-width pict))))]
[h (inexact->exact (ceiling (+ top bottom (pict-height pict))))])
(lt-superimpose (blank w h)
(translate pict left top))))
(define (battery-pict percent [blink? #f])
(define size (cell-size))
(define (size* a [size size])
(floor (* a size)))
(let* ([thickness (size* 0.25)]
[w (size* 3)]
[h (* w 2)]
[knob-w size]
[knob-h (size* 0.3)]
[knob-x (/ (- w knob-w) 2)]
[bar-width (- w thickness thickness (/ w 5))]
[bar-x-offset (/ (- w bar-width) 2)]
[bar-count 5]
[bar-h (floor (/ h (* bar-count 1.5)))]
[bar-y-offset (+ bar-h thickness)])
(dc (lambda (dc dx dy)
(define old-pen (send dc get-pen))
(define old-brush (send dc get-brush))
(send dc set-pen (new pen% [width thickness] [color black]))
(send dc draw-rounded-rectangle (+ dx thickness) (+ dy thickness knob-h) w h)
(send dc set-brush (new brush% [color black]))
(send dc draw-rounded-rectangle (+ dx thickness knob-x) (+ dy thickness) knob-w knob-h)
(when (not blink?)
(let ([color (if (< percent 0.4)
light-orange
green)])
(send dc set-pen (new pen% [width 1] [color color]))
(send dc set-brush (new brush% [color color]))
(for ([i (in-range bar-count)])
(let* ([percent-per-bar (/ 1 bar-count)]
[range-bottom (- 1 (* (add1 i) percent-per-bar))]
[excess (- percent range-bottom)]
[foo (min 1 (max 0 (/ excess percent-per-bar)))]
[bar-h-adjusted (* foo bar-h)]
[bar-h-missing (- bar-h bar-h-adjusted)])
(send dc draw-rectangle
(+ dx thickness bar-x-offset)
(+ dy bar-h-missing (* (add1 i) bar-y-offset))
bar-width
bar-h-adjusted)))))
(send dc set-pen old-pen)
(send dc set-brush old-brush))
(+ w thickness thickness)
(+ h thickness thickness knob-h))))
(define (gauge-pict energy max-energy blinker)
(let* ([percent (/ energy max-energy)]
[critical-level 0.15]
[blink? (and blinker (<= percent critical-level))]
[battery (battery-pict (max 0.1 percent) blink?)]
[size (cell-size)]
[label (text (format "~a kW" energy) '(bold) size)])
(vr-append battery label)))
(define (seconds->string total-seconds)
(define (str num [padding #f])
(let ([num (inexact->exact (truncate num))])
(if padding
(~a num #:min-width padding #:left-pad-string "0" #:align 'right)
(~a num))))
(let*-values ([(seconds)
(truncate total-seconds)]
[(fraction)
(- total-seconds seconds)]
[(minutes seconds)
(quotient/remainder seconds 60)])
(format "~a:~a.~a"
(str minutes)
(str seconds 2)
(str (* fraction 100) 2))))
(define (time-attack-pict energy seconds-remaining [blinker? #f])
(let* ([size (cell-size)]
[color (if (< seconds-remaining 20)
red
black)]
[color (if (and blinker? (< seconds-remaining 10))
transparent
color)]
[color (if (<= seconds-remaining 0)
black
color)])
(vr-append
(text (seconds->string seconds-remaining)
(list 'bold color)
size)
(blank 10 10)
(text (format "~a kW" energy) '(bold) size))))
(define (combo-pict explanations size)
(match explanations
[(list (list heading number detail) more ...)
(vr-append
(text (format "~a ~a" heading number) '() size)
(text detail '() (- size 2))
(combo-pict more size))]
[(list)
(blank 0 0)]))
(define (stats-pict state)
(let* ([stats (state-stats state)]
[spawn-count (stats-spawn-count stats)]
[spawn-energy (stats-spawn-energy stats)]
[waiting-frames (stats-waiting-frames stats)]
[waiting-energy (stats-waiting-energy stats)]
[size 12]
[spacer (blank (* size 11) size)]
[txt (lambda (str . args)
(text (apply format (cons str args)) '() size))]
[current-combo (state-current-combo state)]
[combo (or current-combo (state-previous-combo state))]
[combo-name (if current-combo "Current" "Previous")]
[explanations (if combo
(combo-explanations combo)
(list))])
(vr-append
(txt "catalysts: ~a" (stats-spawn-count stats))
(txt "energy cost: ~a" (stats-spawn-energy stats))
spacer
(txt "waiting frames: ~a" (stats-waiting-frames stats))
(txt "energy cost: ~a" (stats-waiting-energy stats))
spacer
(txt "~a Combo:" combo-name)
(combo-pict explanations size)
)))
(define (viewmodel->pict vm show-clock? show-queue?)
(define state (viewmodel-state vm))
(define pgrid
(let ([main (add-background (grid-pict vm) black)]
[str (viewmodel-frame-debug-str vm)])
(if str
(lt-superimpose main
(text str (list white) 12))
main)))
(define pleft
(if show-clock?
(let ([w (pict-width pgrid)]
[h (* (cell-size) 2)])
(vc-append pgrid
(clock-pict state w h)))
pgrid))
(define pmain
(if show-queue?
(let ([spawns (state-next-spawns state 5)]
[vm (struct-copy viewmodel vm
[burst-factor #f])])
(ht-append pleft
(pad (queue->pict vm spawns) '(0 20 20 20))))
pleft))
(let* ([energy (state-energy state)]
[max-energy (state-max-energy state)]
[blinker (viewmodel-fast-blinker vm)]
[seconds-remaining (viewmodel-seconds-remaining vm)]
[left (stats-pict state)]
[left (cond
[seconds-remaining
(vr-append (pad (time-attack-pict energy seconds-remaining blinker) '(0 0 35 0))
left)]
[max-energy
(vr-append (pad (gauge-pict energy max-energy blinker) '(0 0 35 0))
left)]
[else left])]
[left (pad left '(0 12 0 0))])
(ht-append left pmain)))
(define (state->pict state [show-clock? #t] [show-queue? #t])
(let* ([burst-factor #f]
[fast-blinker #f]
[extra-occs (hash)]
[vm (viewmodel state burst-factor fast-blinker extra-occs #f #f)])
(viewmodel->pict vm show-clock? show-queue?)))
(define (frame->pict frame)
(let* ([state (frame-state frame)]
[info (frame-info frame)]
[kind (cadr info)]
[timing (frame-timing frame)]
[frames-needed (timing-bursting timing)]
[counter (frame-counter frame)]
[frames-elapsed (- counter (car info))]
[burst-factor (and (equal? 'bursting kind)
(> frames-elapsed burst-delay)
(- 1 (/ (- frames-elapsed burst-delay)
(- frames-needed burst-delay))))]
[blink-rate 3]
[fast-blinker (= 0 (modulo (quotient counter blink-rate) 2))]
[extra-occs (frame-extra-occs frame)]
[time-remaining (frame-time-remaining frame)]
[time-remaining (and time-remaining (max 0 time-remaining))]
[debug-str (format "~a ~a" kind counter)]
[vm (viewmodel state burst-factor fast-blinker extra-occs time-remaining debug-str)])
(viewmodel->pict vm #t #t)))
(define (build-picts)
(flatten (list (occ->pict '(#f))
(occ->pict '(ground))
(for/list ([color '(r y b)])
(occ->pict `(fuel ,color)))
(for/list ([color '(r y b)])
(occ->pict `(contaminant ,color)))
(for/list ([color '(r y b #f)]
#:when #t
[dir '(#f u r d l)])
(occ->pict `(catalyst ,color ,dir))))))
(module+ test
(for ([i (in-range 30)])
(parameterize ([cell-size i])
(void (build-picts)))))
(module+ main
(define (make-state level)
(let ([settings (build-settings 'standard level #f)])
(make-initial-state settings)))
(clock-pict2 1300 700)
(clock-pict2 -100 800)
(state->pict (make-state 10))
(parameterize ([cell-size 100])
(for/list ([dir '(#f u r d l)])
(cc-superimpose (filled-rectangle (cell-size) (cell-size) #:color "green" #:draw-border? #f)
(occ->pict `(catalyst r ,dir)))))
(flatten (build-picts))
(ht-append (occ->pict '(catalyst b r))
(occ->pict '(catalyst r l)))
)
|
8ae22c76f9ef81d8657fc143a8024c5d01516c175729afd4bfb86758dea91253 | SnootyMonkey/Falkland-CMS | collection_resource.clj | (ns fcms.resources.collection-resource
"Collection resources are either taxonomies or items, and are stored in a particular collection."
(:require [clojure.set :refer (intersection)]
[clojure.string :refer (blank?)]
[clojure.walk :refer (keywordize-keys)]
[com.ashafa.clutch :as clutch]
[fcms.resources.common :as common]
[fcms.resources.collection :as collection]
[fcms.lib.slugify :refer (slugify)]
[defun.core :refer (defun)]))
(def reserved-properties
"Properties that can't be specified during a create and are ignored during an update."
(conj common/reserved-properties :collection :categories))
(def retained-properties
"Properties that are retained during an update even if they aren't in the updated property set."
(conj common/retained-properties :collection :categories))
(defn allow-category-reserved-properties []
(vec (remove #(= :categories %) reserved-properties)))
(defn- get-resource-with-db [coll-id coll-slug slug type]
(when-let [resource (common/resource-doc coll-id slug type)]
(common/resource-from-db coll-slug resource)))
(defn get-resource
"Given the slug of the collection containing the resource and the slug of the resource,
return the resource as a map, or return :bad-collection if there's no collection with that slug, or
nil if there is no resource with that slug."
[coll-slug slug type]
(collection/with-collection coll-slug
(get-resource-with-db (:id collection) coll-slug slug type)))
(defun valid-new-resource
"Given the slug of the collection, the name of the resource, a map of a potential new resource,
and a retrieval function for the resource type, check if the everything is in order to create
the new resource.
Ensure the collection exists or return :bad-collection.
Ensure the name of the resource is specified or return :no-name.
Ensure the slug is valid and doesn't already exist if it's specified,
or return :invalid-slug or :slug-conflict respectively.
:property-conflict is returned if a property is included in the map of properties that is in
the reserved-properties set."
([coll-slug resource-name type reserved-properties] (valid-new-resource coll-slug resource-name reserved-properties type {}))
([coll-slug :guard #(collection/get-collection %) resource-name type reserved-properties props]
(cond
(or (nil? resource-name) (blank? resource-name)) :no-name
(not-empty (intersection (set (keys (keywordize-keys props))) reserved-properties)) :property-conflict
(not (:slug props)) true
(not (common/valid-slug? (:slug props))) :invalid-slug
(nil? (get-resource coll-slug (:slug props) type)) true
:else :slug-conflict))
([_ _ _ _ _] :bad-collection))
(defn create-resource
"Create a new resource in the collection specified by its slug, using the specified
resource name, resource type and an optional map of properties.
If :slug is included in the properties it will be used as the resource's slug, otherwise
the slug will be created from the name.
:slug-conflict is returned if a :slug is included in the properties and a resource already exists
in the collection with that slug.
:invalid-slug is returned if a :slug is included in the properties and it's not valid.
:property-conflict is returned if a property is included in the map of properties that is
in the reserved-properties set."
([coll-slug resource-name type reserved-properties] (create-resource coll-slug resource-name type reserved-properties {}))
([coll-slug resource-name type reserved-properties properties]
(let [props (keywordize-keys properties)
validity (valid-new-resource coll-slug resource-name type reserved-properties props)]
(if (true? validity)
(collection/with-collection coll-slug
(let [slug (common/unique-slug (:id collection) (or (:slug props) (slugify resource-name)))]
(when-let [resource (common/create-resource-with-db
(merge props {:slug slug :collection (:id collection) :name resource-name}) type)]
(common/resource-from-db coll-slug resource))))
validity))))
(defn delete-resource
"Given the slug of the collection containing the resource and the slug of the resource,
delete the resource, or return :bad-collection if there's no collection with that slug, or
the provided bad resource keyword if there is no resource with that slug."
[coll-slug slug type]
(if-let [coll-id (:id (collection/get-collection coll-slug))]
(if-let [resource (clutch/with-db (common/db) (common/resource-doc coll-id slug type))]
(common/delete-resource resource)
(keyword (str "bad-" (name type))))
:bad-collection))
(defn valid-resource-update
"Given the slug of the collection, the slug of the resource,
and a map of updated properties for the resource,
check if the everything is in order to update the resource.
Ensure the collection exists or return :bad-collection.
Ensure the resource exists or return :bad-<resource>.
Ensure no reserved properties are used or return :property-conflict.
If a new slug is provided in the properties, ensure it is
valid or return :invalid-slug and ensure it is unused or
return :slug-conflict. If no item slug is specified in
the properties it will be retain its current slug."
[coll-slug slug type reserved-properties {provided-slug :slug :as props}]
(let [coll-id (:id (collection/get-collection coll-slug))
resource-id (:id (get-resource coll-slug slug type))]
(cond
(nil? coll-id) :bad-collection
(nil? resource-id) (keyword (str "bad-" (name type)))
(not-empty (intersection (set (keys (keywordize-keys props))) reserved-properties)) :property-conflict
(not provided-slug) true
(not (common/valid-slug? provided-slug)) :invalid-slug
(= slug provided-slug) true
:else (if (nil? (get-resource coll-slug provided-slug type)) true :slug-conflict))))
(defn update-resource
"Update a resource retaining it's manufactured properties and replacing the rest with the provided properties"
[coll-slug slug type properties]
(collection/with-collection coll-slug
(if-let [resource (common/resource-doc (:id collection) slug type)]
(let [props (keywordize-keys properties)
retained-props (select-keys (:data resource) (conj (:retained props) :version))
updated-props (apply dissoc (:updated props) (:reserved props))
new-props (merge retained-props updated-props)]
(common/resource-from-db coll-slug (common/update-resource-with-db resource new-props))
(get-resource-with-db (:id collection) coll-slug (:slug new-props) type))
(keyword (str "bad-" (name type))))))
(defn all-resources
"Given the slug of the collection, return all the resources it contains of the type specified
as a sequence of maps, or return :bad-collection if there's no collection with that slug."
[coll-slug type]
(collection/with-collection coll-slug
(when-let [results (common/doc-from-view-with-db type :all-slugs-by-coll-id (:id collection))]
(vec (map #(common/resource-from-db coll-slug (:doc %)) results))))) | null | https://raw.githubusercontent.com/SnootyMonkey/Falkland-CMS/bd653c23dd458609b652dfac3f0f2f11526f00d1/src/fcms/resources/collection_resource.clj | clojure | (ns fcms.resources.collection-resource
"Collection resources are either taxonomies or items, and are stored in a particular collection."
(:require [clojure.set :refer (intersection)]
[clojure.string :refer (blank?)]
[clojure.walk :refer (keywordize-keys)]
[com.ashafa.clutch :as clutch]
[fcms.resources.common :as common]
[fcms.resources.collection :as collection]
[fcms.lib.slugify :refer (slugify)]
[defun.core :refer (defun)]))
(def reserved-properties
"Properties that can't be specified during a create and are ignored during an update."
(conj common/reserved-properties :collection :categories))
(def retained-properties
"Properties that are retained during an update even if they aren't in the updated property set."
(conj common/retained-properties :collection :categories))
(defn allow-category-reserved-properties []
(vec (remove #(= :categories %) reserved-properties)))
(defn- get-resource-with-db [coll-id coll-slug slug type]
(when-let [resource (common/resource-doc coll-id slug type)]
(common/resource-from-db coll-slug resource)))
(defn get-resource
"Given the slug of the collection containing the resource and the slug of the resource,
return the resource as a map, or return :bad-collection if there's no collection with that slug, or
nil if there is no resource with that slug."
[coll-slug slug type]
(collection/with-collection coll-slug
(get-resource-with-db (:id collection) coll-slug slug type)))
(defun valid-new-resource
"Given the slug of the collection, the name of the resource, a map of a potential new resource,
and a retrieval function for the resource type, check if the everything is in order to create
the new resource.
Ensure the collection exists or return :bad-collection.
Ensure the name of the resource is specified or return :no-name.
Ensure the slug is valid and doesn't already exist if it's specified,
or return :invalid-slug or :slug-conflict respectively.
:property-conflict is returned if a property is included in the map of properties that is in
the reserved-properties set."
([coll-slug resource-name type reserved-properties] (valid-new-resource coll-slug resource-name reserved-properties type {}))
([coll-slug :guard #(collection/get-collection %) resource-name type reserved-properties props]
(cond
(or (nil? resource-name) (blank? resource-name)) :no-name
(not-empty (intersection (set (keys (keywordize-keys props))) reserved-properties)) :property-conflict
(not (:slug props)) true
(not (common/valid-slug? (:slug props))) :invalid-slug
(nil? (get-resource coll-slug (:slug props) type)) true
:else :slug-conflict))
([_ _ _ _ _] :bad-collection))
(defn create-resource
"Create a new resource in the collection specified by its slug, using the specified
resource name, resource type and an optional map of properties.
If :slug is included in the properties it will be used as the resource's slug, otherwise
the slug will be created from the name.
:slug-conflict is returned if a :slug is included in the properties and a resource already exists
in the collection with that slug.
:invalid-slug is returned if a :slug is included in the properties and it's not valid.
:property-conflict is returned if a property is included in the map of properties that is
in the reserved-properties set."
([coll-slug resource-name type reserved-properties] (create-resource coll-slug resource-name type reserved-properties {}))
([coll-slug resource-name type reserved-properties properties]
(let [props (keywordize-keys properties)
validity (valid-new-resource coll-slug resource-name type reserved-properties props)]
(if (true? validity)
(collection/with-collection coll-slug
(let [slug (common/unique-slug (:id collection) (or (:slug props) (slugify resource-name)))]
(when-let [resource (common/create-resource-with-db
(merge props {:slug slug :collection (:id collection) :name resource-name}) type)]
(common/resource-from-db coll-slug resource))))
validity))))
(defn delete-resource
"Given the slug of the collection containing the resource and the slug of the resource,
delete the resource, or return :bad-collection if there's no collection with that slug, or
the provided bad resource keyword if there is no resource with that slug."
[coll-slug slug type]
(if-let [coll-id (:id (collection/get-collection coll-slug))]
(if-let [resource (clutch/with-db (common/db) (common/resource-doc coll-id slug type))]
(common/delete-resource resource)
(keyword (str "bad-" (name type))))
:bad-collection))
(defn valid-resource-update
"Given the slug of the collection, the slug of the resource,
and a map of updated properties for the resource,
check if the everything is in order to update the resource.
Ensure the collection exists or return :bad-collection.
Ensure the resource exists or return :bad-<resource>.
Ensure no reserved properties are used or return :property-conflict.
If a new slug is provided in the properties, ensure it is
valid or return :invalid-slug and ensure it is unused or
return :slug-conflict. If no item slug is specified in
the properties it will be retain its current slug."
[coll-slug slug type reserved-properties {provided-slug :slug :as props}]
(let [coll-id (:id (collection/get-collection coll-slug))
resource-id (:id (get-resource coll-slug slug type))]
(cond
(nil? coll-id) :bad-collection
(nil? resource-id) (keyword (str "bad-" (name type)))
(not-empty (intersection (set (keys (keywordize-keys props))) reserved-properties)) :property-conflict
(not provided-slug) true
(not (common/valid-slug? provided-slug)) :invalid-slug
(= slug provided-slug) true
:else (if (nil? (get-resource coll-slug provided-slug type)) true :slug-conflict))))
(defn update-resource
"Update a resource retaining it's manufactured properties and replacing the rest with the provided properties"
[coll-slug slug type properties]
(collection/with-collection coll-slug
(if-let [resource (common/resource-doc (:id collection) slug type)]
(let [props (keywordize-keys properties)
retained-props (select-keys (:data resource) (conj (:retained props) :version))
updated-props (apply dissoc (:updated props) (:reserved props))
new-props (merge retained-props updated-props)]
(common/resource-from-db coll-slug (common/update-resource-with-db resource new-props))
(get-resource-with-db (:id collection) coll-slug (:slug new-props) type))
(keyword (str "bad-" (name type))))))
(defn all-resources
"Given the slug of the collection, return all the resources it contains of the type specified
as a sequence of maps, or return :bad-collection if there's no collection with that slug."
[coll-slug type]
(collection/with-collection coll-slug
(when-let [results (common/doc-from-view-with-db type :all-slugs-by-coll-id (:id collection))]
(vec (map #(common/resource-from-db coll-slug (:doc %)) results))))) | |
8c8436c6160fc5afda58fb3f27c39e2212fd4ed53e335f33296cec91ed8b05a6 | morgenthum/lambda-heights | Main.hs | # LANGUAGE TemplateHaskell #
module Main where
import Data.FileEmbed
import qualified LambdaHeights.Game as Game
import LambdaHeights.Types.Config
import qualified SDL.Font as SDLF
main :: IO ()
main = do
Game.init
config <- Config <$> highSchoolUSASansFont 28 <*> retroGamingFont 11
Game.start config
Game.destroy
highSchoolUSASansFont :: Int -> IO SDLF.Font
highSchoolUSASansFont = SDLF.decode $(embedFile "../fonts/HighSchoolUSASans.ttf")
retroGamingFont :: Int -> IO SDLF.Font
retroGamingFont = SDLF.decode $(embedFile "../fonts/retro_gaming.ttf")
| null | https://raw.githubusercontent.com/morgenthum/lambda-heights/0a86ead23e8c223ba2672fa314666a06eb669fe2/lambda-heights-app/src/Main.hs | haskell | # LANGUAGE TemplateHaskell #
module Main where
import Data.FileEmbed
import qualified LambdaHeights.Game as Game
import LambdaHeights.Types.Config
import qualified SDL.Font as SDLF
main :: IO ()
main = do
Game.init
config <- Config <$> highSchoolUSASansFont 28 <*> retroGamingFont 11
Game.start config
Game.destroy
highSchoolUSASansFont :: Int -> IO SDLF.Font
highSchoolUSASansFont = SDLF.decode $(embedFile "../fonts/HighSchoolUSASans.ttf")
retroGamingFont :: Int -> IO SDLF.Font
retroGamingFont = SDLF.decode $(embedFile "../fonts/retro_gaming.ttf")
| |
a29b008244a11acfd53c490a1bfcb9a27c06afe3e0a97f7d5a5f89fee739fec0 | chlorinejs/chlorine | js.clj | (ns chlorine.js
(:require [clojure.string :as str]
[hiccup.core]
[clojure.walk])
(:use [chlorine.reader]
[slingshot.slingshot]
[pathetic.core :only [normalize url-normalize]]
[chlorine.util
:only [url? resource-path? to-resource unzip assert-args
*cwd* *paths* get-dir find-in-paths
re? replace-map]]))
(defn ->camelCase [^String method-name]
(str/replace method-name #"-(\w)"
#(str/upper-case (second %1))))
(def ^:dynamic *print-pretty* false)
(def ^:dynamic *object-member* false)
(defmacro with-pretty-print [& body]
`(binding [*print-pretty* true]
~@body))
(def ^:dynamic *indent* 0)
(defmacro with-indent [[& increment] & body]
`(binding [*indent* (+ *indent* (or ~increment 4))]
~@body))
(def ^:dynamic *in-block-exp?* false)
(defmacro with-block [& body]
`(binding [*in-block-exp?* true]
~@body))
(defmacro with-bracket-block [& body]
`(with-parens ["{" "}"]
(with-block
(with-indent [] ~@body))
(newline-indent)))
(defn newline-indent []
(if *print-pretty*
(do
(newline)
(print (apply str (repeat *indent* " "))))
(print " ")))
(defmacro with-parens [[& [left right]] & body]
`(do
(print (or ~left "("))
~@body
(print (or ~right ")"))))
(def ^:dynamic *inline-if* false)
(def ^:dynamic *quoted* false)
(def ^:dynamic *in-fn-toplevel* true)
(def ^:dynamic *unique-return-expr* false)
Chlorinejs transforms Clojure code / data to Javascript equivalents .
;;
;; Normal data such as strings, numbers, keywords, symbols, vectors, (quoted)
;; lists are transformed by associated emitters of their type.
;;
;; Functions, macros and special forms (including javascript native ones)
share the same looks : they are unquoted lists whose first element is the
form name and require one more step : looking up by the names to detect
;; their types.
(defn detect-form
"Detects macro/function/special form names from expressions
for further processing. Used as dispatch function for chlorine.js/emit, the
most hardworking multi-method in chlorine library."
[expr]
(let [expr (if (and (coll? expr) (seq expr)) (first expr) expr)]
(if (symbol? expr) (name expr) :default)))
(defn normalize-dot-form
"Normalizes dot forms or new-object forms by removing \".\" from their
beginnings or endings."
[form]
(cond (and (.startsWith (name form) ".")
(< 1 (count (name form))))
(symbol (subs (name form) 1))
(and (.endsWith (name form) ".")
(< 1 (count (name form))))
(symbol (apply str (drop-last (str form))))
:default
form))
(declare emit-str)
(declare emit-symbol)
(declare tojs')
(defn sym->property
"Transforms symbol or keyword into object's property access form."
[s]
(binding [*quoted* true]
(emit-str
(if (member-form? s)
(symbol (subs (name s) 1))
s))))
(defmulti emit
"Receives forms, emits javascript expressions."
detect-form)
(defn emit-delimited
"Emit sequences with delimiters. Useful to emit javascript arrays,
function arguments etc."
[delimiter args & [emitter]]
(when-not (empty? args)
((or emitter emit) (first args))
(doseq [arg (rest args)]
(print delimiter)
((or emitter emit) arg))))
several functions to emit Clojure data of
;; map, set, vector, regexp, symbol and keyword types
(defn emit-map
"Clojure maps are emitted to javascript key/value objects.
Keys can only be strings. Keywords and quoted symbols don't really make
sense in Chlorinejs and that's why they are emitted to plain strings."
[expr]
(with-parens ["{" "}"]
(binding [*inline-if* true]
(emit-delimited
","
(seq expr)
(fn [[key val]]
(cond
(keyword? key)
(emit-symbol key)
(or (string? key)
(number? key))
(emit key)
:default
(throw+ {:known-error true
:msg
(str "Error emitting this map `"
expr "`:\n"
"Invalid map key: `" key "`.\n"
"Valid keys are elements which can be"
" converted to strings.")
:causes [expr key]}))
(print " : ")
(emit val))))))
(defn emit-set
"Clojure sets are emitted to javascript key/value objects whose
values are all `true`. These 'sets' objects as javascript nature will have
distinct elements (the keys) which can be checked by `contains?` (javascript's
`in`). Please remember, all set elements are coerced to strings by javascript.
That means, both `(contains? 5 {:a 1 \"5\" 2})` and
`(contains? \"5\" {:b 3 5 4} will return true."
[expr]
(emit `(hash-set ~@(seq expr))))
(defn emit-vector
"Clojure vectors and quoted lists are emitted as javascript arrays."
[expr]
(with-parens ["[" "]"]
(binding [*inline-if* true]
(emit-delimited "," (seq expr)))))
(defn emit-re [expr]
(let [[_ flags pattern] (re-find #"^(?:\(\?([idmsux]*)\))?(.*)" (str expr))]
(print (str \/ (.replaceAll (re-matcher #"/" pattern) "\\\\/") \/ flags))))
;; Symbols are Chlorine's amazing pieces. We have a wide range of valid
characters for Chlorine just like Clojure . You can use Lisp - style naming
;; conventions such as "?" endings for predicate functions.
;; You can tell ChlorineJS to emit a symbol as if it's an other one by
;; using aliases
(def ^:dynamic *aliases*
(ref '{;; `int` and `boolean` are reserved symbols in js.
They 're also function names in Clojure and Chlorine core
;; library.
int int*
boolean boolean*
Chlorine uses a Clojure - like syntax of ` ( require ... ) `
;; to load nodejs/browserify. It's implemented as macro which
;; expands to the lower level `require*`. `require*` in turn
;; emitted as javascript `require()`
require* require
}))
;; Because javascript doesn't allow such characters, the function
` chlorine.util/replace-map ` will be used to replace all Clojure - only
;; characters to javascript-friendly ones.
;; The mapping used to do the replacements
(def ^:dynamic *symbol-map*
(array-map
"?" "_p"
"*" "__"
"'" "_q"
"!" "_s"
"+" "_plus"
"/" "_divide"
"=" "_eq"
"->" "-to-"
"<-" "-from-"
">" "-gt-"
"<" "-lt-"
">=" "-ge-"
"=<" "-le-"
"#" "_h"
"%" "$P100$"
"&" "-and-"
))
(defn hyphen->underscore
  "Replaces every hyphen in `s` with an underscore (javascript
  identifiers cannot contain `-`)."
  [s]
  (str/replace s "-" "_"))
;; You can also specify "reserved symbols", which are NOT affected by
;; `replace-map`.
;; For example: [;;#"^\$.*" #"^\.\$.*"]
(def ^:dynamic *reserved-symbols* [])
(def ^:dynamic *core-symbols* #{})
(def ^:dynamic *core-symbols-in-use* (ref #{}))
(defn emit-symbol
  "Emits Clojure symbols to javascript ones. If the symbol is quoted, emits its
  name as a string. Does some replacements with characters not supported by
  javascript if the symbol isn't marked as reserved ones."
  [expr]
  (let [sym-name (name expr)]
    (print
     (let [output-string
           ;; reserved symbols and object members (set by the `.` emitter)
           ;; are printed verbatim; everything else goes through alias
           ;; lookup and the js-friendly renaming pipeline
           (if (or (reserved-symbol? *reserved-symbols* sym-name)
                   *object-member*)
             sym-name
             (-> (or (get @*aliases* (symbol sym-name))
                     sym-name)
                 ;; Clojure-only characters (?, !, ->, ...) first,
                 ;; then camelCase the words, then `-` -> `_`
                 (replace-map *symbol-map*)
                 ->camelCase
                 hyphen->underscore))
           output-sym (symbol output-string)]
       ;; record the first use of each core-library symbol in the
       ;; *core-symbols-in-use* ref; presumably consumed later to decide
       ;; which core definitions to include -- confirm against callers
       (if (and (contains? *core-symbols* output-sym)
                (not (contains? @*core-symbols-in-use* output-sym)))
         (dosync (alter *core-symbols-in-use*
                        conj output-sym)))
       ;; quoted symbols (and keywords via emit-keyword) become js strings
       (if *quoted*
         (format "'%s'" output-string)
         output-string)))))
(defn emit-keyword
"Emits Clojure keywords. Uses emit-symbol as backend."
[expr]
(binding [*quoted* true]
(emit-symbol expr)))
;; Some Chlorine forms are converted directly to javascript native
;; operators: unary and infix ones.
;; Unary operators in Chlorine: "!"
;; Infix operators are consist of
;; - `instance?` special form
;; - and/or macros
;; - some low-level math operators such as plus (+*), minus (-*), multiple (**)
;; and remainder (rem)
;; - low-level comparator === which can't work on vectors and maps
;; - binary operators
;;
;; Please use high-level functions from Chlorine's core library instead of
;; low-level ones"
(defn emit-unary-operator
[op arg]
(print (name op))
(emit arg))
(defn emit-infix-operator
  "Emits a javascript infix-operator expression. The Chlorine operator name
  is translated to its javascript spelling via the table below; names not in
  the table are emitted verbatim. The whole expression is wrapped in
  parentheses so operator precedence never leaks to the surrounding code."
  [op & args]
  (let [clj->js {"instance?" "instanceof"
                 "and" "&&"
                 "or" "||"
                 "=*" "==="
                 "+*" "+"
                 "-*" "-"
                 "**" "*"
                 "js-divide" "/"
                 "rem" "%"
                 "bit-and" "&"
                 "bit-or" "|"
                 "bit-xor" "^"
                 "bit-not" "~"
                 "bit-shift-left" "<<"
                 "bit-shift-right" ">>"
                 "bit-shift-right-zero-fill" ">>>"}
        ;; unknown operators fall back to their own name
        js-op (get clj->js (name op) (name op))]
    (binding [*unique-return-expr* false
              *in-fn-toplevel* false]
      (with-parens []
        ;; operands separated by " <op> "
        (emit-delimited (str " " js-op " ") args)))))
(defn property->member
  "Removes the leading `-` of a property name (e.g. '-foo -> 'foo),
  turning property-access syntax into a plain member name."
  [property]
  (-> property
      name
      (subs 1)
      symbol))
(defn emit-function-call
"Emits a function call by simply emitting the function name and its arguments
in parentheses."
[fun & args]
(emit fun)
(with-parens []
(with-indent [] (emit-delimited ", " args))))
(defn emit-invoke-function
"Like emit-function-call, but wraps the function in parentheses. Used to
emit function calls where function is not a symbol but an other form instead."
[fun & args]
(with-parens [] (emit fun))
(with-parens [] (emit-delimited "," args)))
;; All Clojure forms return something (even nil). Javascript is imperative
;; and its forms may or may not return values. Javascript function bodies
;; require a manual `return` keyword.
;;
;; That's why we create this dynamic var with initial value `false`,
;; change its value to `true` where necessary and "consume" `true` values
;; (print "return" and set the var back to `false`)
(def ^:dynamic *return-expr* false)
(defmacro with-return-expr
"Consumes *return-expr* `true` states or sets it to a new value."
[[& [new-val]] & body]
`(binding [*return-expr* (if *return-expr*
(do
(print "return ")
false)
(or ~new-val false))]
~@body))
(defn emit-function-form
"Emits function forms such as: unary and infix operator calls,
applying keyword on a map, method calls, creating new object calls,
and normal function calls."
[form]
(binding [*inline-if* true
*unique-return-expr* false
*in-fn-toplevel* false]
(let [[fun & args] form]
(cond
;; those are not normal function calls
(unary-operator? fun) (apply emit-unary-operator form)
(infix-operator? fun) (apply emit-infix-operator form)
(keyword? fun)
(let [[map & default] args]
(emit `(get ~map ~fun ~@default)))
(member-form? fun)
(let [[object & margs] args]
(emit `(. ~object ~(normalize-dot-form fun) ~@margs)))
(new-object? fun)
(emit
`(new ~(normalize-dot-form fun)
~@args))
;; Normal function calls:
;; - Ensures caller are in parentheses by using `emit-invoke-function`
;; instead of `emit-function-call` in case the caller is not simply
;; a symbol.
(coll? fun) (apply emit-invoke-function form)
true (apply emit-function-call form)))))
(defn emit-statement
  "Emits an expression with trailing `;` and `newline-indent` if necessary.
  Any failure is rethrown as a slingshot map with this expression appended
  to its :causes chain, so errors report the full form nesting."
  [expr]
  (try+
    (binding [*inline-if* false]
      ;; forms that produce no output (per will-output-nothing?) are still
      ;; emitted for their side effects but get no statement decoration --
      ;; presumably compile-time-only forms; confirm against the predicate
      (if (will-output-nothing? expr)
        (emit expr)
        (do
          (newline-indent)
          (emit expr)
          ;; some forms (per require-no-trailing?) must not end with `;`
          (when-not (require-no-trailing? expr)
            (print ";")))))
    ;; structured (slingshot map) errors: extend the cause chain
    (catch map? e
      (throw+ (merge e
                     {:causes (conj (or (:causes e) [])
                                    expr)})))
    ;; anything else: wrap into the common error-map shape
    (catch Throwable e
      (throw+ {:known-error false
               :msg (.getMessage e)
               :causes [expr]
               :trace e}))))
(defn emit-statements [exprs]
(doseq [expr exprs]
(emit-statement expr)))
(defn emit-statements-with-return
"Emits statements with the manual `return` added in front of the
last expression. If the last expression in `nil`, ignores it."
[exprs]
(binding [*return-expr* false]
(doseq [expr (butlast exprs)]
(emit-statement expr)))
(when (not= 'nil (last exprs))
(emit-statement (last exprs))))
;; Lispers love macros. In fact, making a macro means writing a function
;; that returns some code. Chlorine macros are nearly Clojure ones:
;; you can write the function (body) using all Clojure expressions,
;; even ones from external Clojure libraries (if you've already loaded them).
;; The only difference is that the generated code is treated as Chlorine one.
;; When defined, new macros are added to a ref holding a map. The map keys
;; are macro names while the values are the macro functions (the one that
;; generates code).
(def ^:dynamic *macros* (ref {}))
(defn macro?
"Checks if a macro with that name is defined."
[n] (and (symbol? n) (contains? @*macros* (name n))))
(defn get-macro
"Gets the macro function by its name in order to generate code."
[n] (and (symbol? n) (get @*macros* (name n))))
(defn undef-macro
"Removes a macro from known macro list."
[n]
(when (macro? n)
(when *print-pretty* (println "// undefining macro" n))
(dosync (alter *macros* dissoc (name n)))))
(defmethod emit "defmacro" [[_ mname & mdeclrs]]
(try+
(let [mdeclrs (if (string? (first mdeclrs))
(rest mdeclrs)
mdeclrs)]
(when *print-pretty* (println "// defining macro" mname))
(dosync
(alter *macros*
assoc
(name mname)
(eval `(clojure.core/fn ~@mdeclrs)))))
(catch Throwable e
(throw+ {:known-error true
:msg (str "Error defining macro `" mname "`:\n"
(.getMessage e))
:causes [`(~_ ~mname ~@mdeclrs)]
:trace e})))
;; returns `nil` because output are consumed by `with-out-str`
nil)
(defn borrow-macros
  "Many Clojure macros work the same in Chlorine. Use this function to reuse
  them instead of rewriting."
  [& syms]
  (doseq [sym syms]
    (dosync
     (alter *macros* conj
            {(name sym)
             (try+
              ;; the stored fn invokes the real Clojure macro's expander;
              ;; macro fns take [&form &env] as their first two (here
              ;; ignored, passed as nil) arguments
              (fn [& args#]
                (apply (resolve sym) (concat [nil nil] args#)))
              ;; NOTE(review): this try+ only guards *creating* the fn
              ;; (which cannot throw); `resolve` runs later at expansion
              ;; time, so failures surface there uncaught -- confirm intent
              (catch Throwable e
                (throw+ {:known-error true
                         :msg (str "Error borrowing macro `" sym "`:\n"
                                   (.getMessage e))
                         :causes [`(borrow-macros ~sym)]
                         :trace e})))}))))
(defn expand-macro-1
"Gets and executes macro function, returns the Chlorine code."
[form]
(if (seq? form)
(let [[mac-name & args] form]
(if-let [mac (get-macro mac-name)]
(try+
(apply mac args)
(catch Throwable e
(throw+ {:known-error true
:msg (str "Error expanding macro `" form "`:\n"
(.getMessage e))
:causes [form]
:trace e})))
form))
form))
(defn expand-macro
  "Repeatedly calls expand-macro-1 on form until it no longer
  represents a macro form, then returns it. Note neither
  expand-macro-1 nor expand-macro expand macros in subforms."
  [form]
  (loop [current form]
    (let [expanded (expand-macro-1 current)]
      (if (identical? expanded current)
        current
        (recur expanded)))))
(defn emit-macro-expansion
"Gets and executes macro function, emits the result as Chlorine code."
[form]
(emit (expand-macro-1 form)))
(defn emit-docstring
  "Prints a docstring as a javascript block comment (/* ... */) when
  pretty-printing is on; a no-op otherwise. Continuation lines are
  aligned under the opening marker."
  [docstring]
  (when *print-pretty*
    (let [[first-line & more-lines] (str/split-lines docstring)]
      (newline-indent)
      (print (str "/* " first-line))
      (doseq [line more-lines]
        (newline-indent)
        (print (str "   " line)))
      (print " */"))))
;; ChlorineJS produces a lot of temporary javascript symbols. To ensure
;; all these symbols are unique, we use this counter
(def ^:dynamic *temp-sym-count* nil)
(defn tempsym
  "Generates a unique temporary symbol (_temp_N) by bumping the
  *temp-sym-count* ref inside a transaction."
  []
  (dosync
   (symbol (str "_temp_" (alter *temp-sym-count* inc)))))
;; Chlorine supports the following Clojure binding forms:
;; - Basic binding with just a single symbol
;; - Destructuring binding with sequences or maps
(defn- emit-simple-binding [vname val]
(emit (if (ignorable-arg? vname) (tempsym) vname))
(print " = ")
(binding [*inline-if* true]
(emit val)))
(declare emit-var-bindings
emit-destructured-seq-binding
emit-destructured-map-binding)
(defn- emit-binding [vname val]
(binding [*inline-if* true]
(let [emitter (cond
(vector? vname) emit-destructured-seq-binding
(map? vname) emit-destructured-map-binding
:else emit-simple-binding)]
(emitter vname val))))
;; Note on choice of get/get* in destructuring:
;; - destructuring seq use `get*` for faster array access
;; - destructuring map use `get` function which works correctly
;; on maps and supports default value when not found.
(defn- emit-destructured-seq-binding [vvec val]
(let [temp (tempsym)]
(print (str temp " = "))
(emit val)
(loop [vseq vvec, i 0, seen-rest? false]
(when (seq vseq)
(let [vname (first vseq)
vval (second vseq)]
(print ", ")
(condp = vname
'& (cond
seen-rest?
(throw+
{:known-error true
:msg (str "Unsupported binding form `" vvec "`:\n"
"only `:as` can follow `&`")
:causes [vvec]})
(not (symbol? vval))
(throw+
{:known-error true
:msg
(str "Unsupported binding form `" vvec "`:\n"
"`&` must be followed by exactly one symbol")
:causes [vvec]})
:else
(do (emit-binding vval `(.slice ~temp ~i))
(recur (nnext vseq) (inc i) true)))
:as (cond
(not= (count (nnext vseq)) 0)
(throw+
{:known-error true
:msg (str "Unsupported binding form `" vvec "`:\n"
"nothing may follow after `:as <binding>`")
:causes [vvec]})
(not (symbol? vval))
(throw+
{:known-error true
:msg (str "Unsupported binding form, `" vvec "`:\n"
"`:as` must be followed by a symbol")
:causes [vvec]})
:else
(emit-binding vval temp))
(do (emit-binding vname `(get* ~temp ~i))
(recur (next vseq) (inc i) seen-rest?))))))))
(defn- emit-destructured-map-binding
  "Emits a javascript `var` fragment for a Clojure map-destructuring form.
  The matched value is bound to a temp (or the :as name) and each entry is
  emitted as a `get` on it, honoring :or defaults and :keys/:strs/:syms."
  [vmap val]
  (let [temp (or (:as vmap) (tempsym))
        defaults (get vmap :or)
        ;; expand :keys/:strs/:syms shorthand into {sym (keyword sym)} pairs
        keysmap (reduce #(assoc %1 %2 (keyword %2))
                        {}
                        (mapcat vmap [:keys :strs :syms]))
        ;; strip the special keys, keep explicit {binding lookup-key} pairs
        vmap (merge (dissoc vmap :as :or :keys :strs :syms) keysmap)]
    (print (str temp " = "))
    (emit val)
    (doseq [[vname vkey] vmap]
      (print ", ")
      (cond
        ;; reject anything that isn't binding-form -> keyword/number/binding
        (not (and (binding-form? vname)
                  (or (some #(% vkey) #{keyword? number? binding-form?}))))
        (throw+
         {:known-error true
          :msg (str "Unsupported binding form `" vmap "`:\n"
                    "binding symbols must be followed by keywords or numbers")
          :causes [vmap]})
        :else
        ;; `get` (not get*) so :or defaults work uniformly on maps
        (if-let [[_ default] (find defaults vname)]
          (emit-binding vname `(get ~temp ~vkey ~default))
          (emit-binding vname `(get ~temp ~vkey)))))))
(defn- emit-var-bindings [bindings]
(binding [*return-expr* false]
(emit-delimited
", "
(partition 2 bindings)
(fn [[vname val]]
(emit-binding vname val)))))
(defn- emit-function
  ;; Emits a javascript `function` from a Chlorine fn declaration of shape
  ;; ([name?] [docstring?] argvec & body). Plain symbol args map 1:1 to js
  ;; parameters; any destructuring form, binding special or ignorable arg
  ;; forces an arity-less js function that destructures `arguments` itself.
  [fdecl]
  (let [[fname fdecl] (if (symbol? (first fdecl))
                        [(first fdecl) (rest fdecl)]
                        [nil fdecl])
        docstring (if (string? (first fdecl))
                    (first fdecl)
                    nil)
        fdecl (if (string? (first fdecl))
                (rest fdecl)
                fdecl)
        args (first fdecl)
        ;; "destructuring args?": true when args can't be plain js params
        dargs? (or (some destructuring-form? args)
                   (some binding-special? args)
                   (some ignorable-arg? args))
        body (rest fdecl)]
    (assert-args fn
                 (vector? args) "a vector for its bindings")
    (if dargs?
      (do
        (print "function ")
        (if fname (do (emit-symbol fname) (print " ")))
        (print "() {")
        (with-indent []
          (newline-indent)
          (print "var ")
          ;; turn the live `arguments` object into a real array, then
          ;; destructure it with the regular binding emitter
          (emit-binding args '(Array.prototype.slice.call arguments))
          (print ";")))
      (do
        (print "function ")
        (if fname (do (emit-symbol fname) (print " ")))
        (print "(")
        (binding [*return-expr* false] (emit-delimited ", " args))
        (print ") {")))
    (with-indent []
      (when docstring
        (emit-docstring docstring))
      ;; a fn body must `return` its last expression
      (binding [*return-expr* true
                *unique-return-expr* (when (= 1 (count body)) true)
                *in-fn-toplevel* false]
        (emit-statements-with-return body)
        ))
    (newline-indent)
    (print "}")))
;; We define local vars with `def`
(defmethod emit "def" [[_ name value]]
(print "var ")
(emit-symbol name)
(print " = ")
(binding [*inline-if* true]
(emit value)))
(defmethod emit "alias" [[_ sym other]]
(when *print-pretty* (println "// alias" sym "as" other))
(dosync
(alter *aliases* assoc sym other)))
;; Macro expansions are useful in REPL.
;; macroexpand-1 and macroexpand work like in Clojure except:
;; - they're special forms, not functions and receive unquoted Chlorine forms
;;   instead of quoted ones like in Clojure.
;; - they print out code as strings because javascript is not a Lisp.
;; - namespaces don't make sense in ChlorineJS so they're automatically removed.
(defn remove-namespaces
  "Strips the namespace part of every symbol in forms, walking the whole
  tree with clojure.walk/postwalk. Non-symbol nodes pass through."
  [forms]
  (clojure.walk/postwalk
   (fn [node]
     (if (symbol? node)
       (-> node name symbol)
       node))
   forms))
(defmethod emit "macroexpand-1" [[_ form]]
(emit (pr-str (remove-namespaces (expand-macro-1 form)))))
(defmethod emit "macroexpand" [[_ form]]
(emit (pr-str (remove-namespaces (expand-macro form)))))
;; Low-level function form. Please use `fn` and `defn` macros instead
(defmethod emit "fn*" [[_ & fdecl]]
(with-return-expr []
(with-block (emit-function fdecl))))
;; Javascript's `if` expressions don't return values directly [1]. That's
;; opposite to Clojure/Chlorine where returning something is a must
;; (functional programming means efficiency!)
;; Just keep writing `(if exprs)` as usual, and ChlorineJS will determine
;; whether an `if` expression should return a value *directly* or not.
;; If 'yes', outputs the "inline" syntax as following:
;; `{{test}} ? {{consequent}}: {{alternate}}`
;;
;; [1]: a `return` inside a javascript `if` returns from the enclosing
;; function, not from the `if` itself
(defn emit-inline-if
[test consequent alternate]
(with-return-expr []
(with-parens []
(emit test)
(print " ? ")
(emit consequent)
(print " : ")
(emit alternate))))
;; If 'no', traditional javascript `if` will be used instead.
(defn emit-block-if [test consequent alternate]
(print "if (")
(binding [*return-expr* false
*inline-if* true]
(emit test))
(print ") ")
(with-bracket-block
(with-indent []
(emit-statement consequent)))
;; alternate might be `0`, which js equates as `nil`
(when-not (or (nil? alternate)
(= '(clojure.core/cond)
alternate))
(print " else ")
(with-bracket-block
(with-indent []
(emit-statement alternate)))))
(defmethod emit "if" [[_ test consequent & [alternate]]]
;; emit consequent directly without printing checks
;; used to optimize `cond` macro output
(if (and *inline-if* consequent)
(emit-inline-if test consequent alternate)
(emit-block-if test consequent alternate)))
;; Clojure/ChlorineJS `(case ...)` syntax will output
;; javascript `switch ... case` equivalent.
(defn emit-case
  ;; Emits `(case e clauses...)` as a javascript `switch` statement.
  ;; `clauses` are test/result pairs; a trailing odd clause becomes the
  ;; `default:` branch.
  [e clauses]
  (binding [*unique-return-expr* false
            *in-fn-toplevel* false]
    (let [pairs (partition 2 clauses)]
      (print "switch (")
      (binding [*return-expr* false]
        (emit e))
      (print ") {")
      (doseq [[k v] pairs]
        (with-indent []
          (newline-indent)
          (print "case " )
          (binding [*return-expr* false]
            (emit k))
          (print ":")
          (with-block
            (with-indent []
              (emit-statement v)
              (newline-indent)
              ;; in return position the emitted `return` already exits the
              ;; switch; otherwise an explicit break prevents fall-through
              (when-not *return-expr*
                (print "break;")))))))
    ;; odd clause count == a trailing default expression
    (when (odd? (count clauses))
      (with-indent []
        (newline-indent)
        (print "default:")
        (with-block
          (with-indent []
            (emit-statement (last clauses)))))))
  (newline-indent)
  (print "}"))
(defmethod emit "case" [[_ e & clauses]]
(if *inline-if*
(do
(print "(function(){")
(binding [*return-expr* true]
(with-indent []
(newline-indent)
(emit-case e clauses)))
(print "})()"))
(emit-case e clauses)))
(defmethod emit "do" [[_ & exprs]]
(if *inline-if*
(do
(print "(function(){")
(binding [*return-expr* true]
(with-indent []
(newline-indent)
(emit-statements-with-return exprs)))
(print "})()"))
(emit-statements-with-return exprs)))
;; `let` is a Clojure fundamental form that provides lexical bindings
;; of data structures to symbols.
;; The binding is available only within the lexical context of the let.
;;
;; Chlorine implements the same behavior of `let` by wrapping the body
;; inside a function in most cases.
(defmethod emit "let" [[_ bindings & exprs]]
  ;; Emits `(let bindings body...)` using one of two strategies:
  ;;  - emit-var-decls: a bare `var a = ..., b = ...;` followed by the body
  ;;    statements (only valid where we own the whole fn body)
  ;;  - emit-let-fun: wraps decls + body in an IIFE so the bindings stay
  ;;    lexically scoped and the whole form yields a value
  (let [emit-var-decls (fn []
                         (print "var ")
                         (binding [*return-expr* false]
                           (with-block (emit-var-bindings bindings))
                           (print ";"))
                         (emit-statements-with-return exprs))
        emit-let-fun (fn []
                       (print "(function () {")
                       (with-indent []
                         (newline-indent)
                         (binding [*return-expr* true]
                           (emit-var-decls)))
                       (newline-indent)
                       (print " })()"))]
    (cond
      ;; value position inside a larger expression: must be an IIFE
      *inline-if*
      (with-return-expr []
        (emit-let-fun))
      ;; sole expression of a fn body: safe to inline plain `var`s
      *unique-return-expr* ;; *in-fn-toplevel*
      (binding [*unique-return-expr* false]
        (emit-var-decls))
      ;; a value must be returned: IIFE again
      *return-expr*
      (with-return-expr []
        (emit-let-fun))
      ;; statement position: IIFE executed for effect
      :default
      (do (emit-let-fun)
          (print ";")))))
;; "Leaky" versions of `let` that don't wrap anything inside a function.
(defmethod emit "let!" [[_ & bindings]]
(binding [*return-expr* false]
(with-block (emit-var-bindings bindings))
(print ";")))
(defmethod emit "let*" [[_ & bindings]]
(print "var ")
(binding [*return-expr* false]
(with-block (emit-var-bindings bindings))
(print ";")))
(defmethod emit "new" [[_ class & args]]
(with-return-expr []
(binding [*inline-if* true]
(print "new ")
(emit class)
(with-parens [] (emit-delimited "," args)))))
(defmethod emit "delete" [[_ item]]
(with-return-expr []
(binding [*inline-if* true]
(print "delete ")
(emit item))))
(defmethod emit "return" [[_ value]]
(print "return ")
(emit value))
;; Low-level form to directly access object properties/array indexes.
;; Use `get` (in core library) which support default value when not found
;; instead
(defmethod emit "get*" [[_ map key]]
(with-return-expr []
(emit map)
(if (keyword? key)
(do (print ".")
(binding [*quoted* false]
(emit-symbol key)))
(do (print "[")
(emit key)
(print "]")))))
(defmethod emit "." [[_ object key & args]]
(with-return-expr []
(emit object)
(print ".")
(cond
(symbol? key)
(if (.startsWith (name key) "-")
(binding [*object-member* true]
(emit (property->member key)))
(do (binding [*object-member* true]
(emit key))
(with-parens []
(with-indent [] (emit-delimited ", " args)))))
(coll? key)
(do (binding [*object-member* true]
(emit (first key)))
(with-parens []
(with-indent [] (emit-delimited ", " (rest key))))))))
(defmethod emit "set!" [[_ & apairs]]
(binding [*return-expr* false
*in-fn-toplevel* false
*unique-return-expr* false
*inline-if* true]
(let [apairs (partition 2 apairs)]
(emit-delimited " = " (first apairs))
(doseq [apair (rest apairs)]
(print ";")
(newline-indent)
(emit-delimited " = " apair)))))
(defmethod emit "try" [[_ expr & clauses]]
(print "try ")
(with-bracket-block
(emit-statement expr))
(doseq [[clause & body] clauses]
(case clause
catch (let [[evar expr] body]
(with-block
(print " catch (")
(emit-symbol evar)
(print ") ")
(with-bracket-block
(emit-statement expr))))
finally (with-block
(print " finally ")
(with-bracket-block
(doseq [expr body] (emit-statement expr)))))))
(def ^:dynamic *loop-vars* nil)
(defmethod emit "loop" [[_ bindings & body]]
(let [emit-for-block (fn []
(print "for (var ")
(binding [*return-expr* false
*in-block-exp?* false]
(emit-var-bindings bindings))
(print "; true;) {")
(with-indent []
(binding [*loop-vars* (first (unzip bindings))]
(emit-statements-with-return body))
(newline-indent)
(print "break;"))
(newline-indent)
(print "}"))]
(if (or *in-fn-toplevel* *unique-return-expr*)
(binding [*unique-return-expr* false
*in-fn-toplevel* false]
(emit-for-block))
(with-return-expr []
(print "(function () {")
(binding [*return-expr* true]
(with-indent []
(newline-indent)
(emit-for-block))
(newline-indent))
(print "}).call(this)")))))
(defmethod emit "recur" [[_ & args]]
  ;; `recur` targets the enclosing `loop` (emitted as a js `for`): first
  ;; evaluate all new values into one temp array, then assign them back to
  ;; the loop vars (so updates don't observe each other mid-rebind), then
  ;; `continue` the loop.
  (binding [*return-expr* false]
    (let [tmp (tempsym)]
      (print "var" (emit-str tmp) "= ")
      (emit-vector args)
      (println ";")
      (emit-statements (map (fn [lvar i] `(set! ~lvar (get* ~tmp ~i)))
                            *loop-vars*
                            (range (count *loop-vars*))))))
  (newline-indent)
  (print "continue"))
(defmethod emit "dokeys" [[_ [lvar hash] & body]]
(binding [*return-expr* false]
(print "for (var ")
(emit lvar)
(print " in ")
(emit hash)
(print ") ")
(with-bracket-block
(emit-statements body))))
(defmethod emit "while" [[_ test & body]]
(binding [*return-expr* false]
(print "while (")
(emit test)
(print ") ")
(with-bracket-block
(emit-statements body))))
(defmethod emit "do-while" [[_ test & body]]
(binding [*return-expr* false]
(print "do ")
(with-bracket-block
(emit-statements body))
(print " while (")
(emit test)
(print ")")))
(defmethod emit "dofor" [[_ [init-bindings test update] & body]]
(let [init (if (vector? init-bindings)
`(let* ~@init-bindings)
init-bindings)]
(binding [*return-expr* false]
(print "for (")
(emit-statements [init test update])
(print ") ")
(with-bracket-block
(emit-statements body)))))
(defmethod emit "inline" [[_ js]]
(with-return-expr []
(print js)))
(defmethod emit "quote" [[_ expr]]
(binding [*quoted* true]
(emit expr)))
(defmethod emit "throw" [[_ expr]]
(binding [*return-expr* false]
(print "throw ")
(emit expr)))
(defmethod emit :default [expr]
  ;; Dispatch fallthrough: either a macro call (expand it first) or plain
  ;; data, routed to the emitter for its type.
  (if (and (coll? expr) (not *quoted*) (macro? (first expr)))
    (emit-macro-expansion expr)
    (with-return-expr []
      (cond
        (map? expr) (emit-map expr)
        (set? expr) (emit-set expr)
        (vector? expr) (emit-vector expr)
        (re? expr) (emit-re expr)
        (keyword? expr) (emit-keyword expr)
        (string? expr) (pr expr)
        (symbol? expr) (emit-symbol expr)
        (char? expr) (print (format "'%c'" expr))
        ;; quoted lists behave like vectors (js arrays)
        (and *quoted* (coll? expr)) (emit-vector expr)
        ;; unquoted list: a function/operator/method call
        (coll? expr) (emit-function-form expr)
        ;; nil compiles to js undefined
        (nil? expr) (print "void(0)")
        true (print expr)))))
(defn emit-str [expr]
(binding [*return-expr* false
*inline-if* true]
(with-out-str (emit expr))))
(defn js-emit [expr] (emit expr))
(defmacro js
"Translate the Clojure subset `exprs' to a string of javascript
code."
[& exprs]
(let [exprs# `(quote ~exprs)]
`(binding [*temp-sym-count* (ref 999)]
(with-out-str
(if (< 1 (count ~exprs#))
(emit-statements ~exprs#)
(js-emit (first ~exprs#)))))))
(defmacro js-let
"Bind Clojure environment values to named vars of a cljs block, and
translate the Clojure subset `exprs' to a string of javascript code."
[bindings & exprs]
(let [form# 'fn*
[formals# actuals#] (unzip bindings)]
`(with-out-str
(emit-statement (list '(~form# ~(vec formals#) ~@exprs) ~@actuals#)))))
(defmacro let-js
"Bind Clojure environment values to named vars of a quoted cljs block, and
translate the Clojure subset `exprs' to a string of javascript code."
[bindings quoted-expr]
(let [body# `(let ~bindings ~quoted-expr)]
`(with-out-str
(js-emit ~body#))))
(declare raw-script)
;; Chlorine doesn't support an official way to modularize code like Clojure
;; with namespaces. Instead, Chlorine provides a basic syntax to load code
;; from other files into the current file as if they are one. This can be
;; done with `load-file`
(defmethod emit "load-file" [[_ & files]]
  ;; Compiles each listed Chlorine file with tojs' and splices its
  ;; javascript output into the current compilation unit, with
  ;; begin/end marker comments when pretty-printing is on.
  ;; NOTE(review): removed the stray `(print (str (apply 'files)))`
  ;; debug line -- `(apply 'files)` is a runtime arity error
  ;; (apply requires at least two arguments).
  (doseq [file files]
    (when *print-pretty* (println "// <-- Starts loading file: " file))
    (if-let [content (tojs' file)]
      (print (str content)))
    (when *print-pretty* (println "// Ends loading file: " file " -->"))))
;; Sometimes you only want to load macros from an outside file and print out
;; nothing. Use `load-file-macros` then
(defmethod emit "load-file-macros" [[_ & files]]
(doseq [file files]
(when *print-pretty* (println "// Loads macros from file: " file))
(tojs' file)))
;; Inlines raw javascript from files instead of Chlorine ones.
(defmethod emit "load-js" [[_ & files]]
(doseq [file files]
(when *print-pretty* (println "// <-- Starts Javascipt file: " file))
(if-let [content (raw-script file)]
(print (str content)))
(when *print-pretty* (println "// Ends Javascript file: " file " -->"))))
(defn raw-script
  "Reads raw javascript files (looked up via *paths*) and returns their
  contents concatenated as one string. Throws a slingshot error map when
  a file cannot be found."
  [& scripts]
  (with-out-str
    (doseq [script scripts
            :let [file (find-in-paths script)
                  dir (get-dir file)]]
      (binding [*cwd* dir]
        (when (nil? file)
          ;; FIX: the original map literal spliced `script` and "`" in as
          ;; extra key/value entries; they must be concatenated into :msg.
          (throw+ {:known-error true
                   :msg (str "File not found `" script "`")
                   :causes [script]}))
        (let [f (if (resource-path? file)
                  (to-resource file)
                  file)]
          (print (slurp f)))))))
(defn tojs'
  "The low-level, stateful way to compile Chlorine source files. This function
  varies depending on states such as macros, temporary symbol count etc.
  Reads forms one by one from each file and emits them as javascript
  statements; wraps reader/emitter failures into slingshot error maps
  carrying a :causes chain."
  [& scripts]
  (with-out-str
    (doseq [script scripts
            :let [file (find-in-paths script)
                  dir (get-dir file)]]
      (binding [*cwd* dir]
        (try+
          (when (nil? file)
            ;; FIX: the original map literal spliced `script` and "`" in as
            ;; extra key/value entries; they must be concatenated into :msg.
            (throw+ {:known-error true
                     :msg (str "File not found `" script "`")
                     :causes [script]}))
          (let [f (if (resource-path? file)
                    (to-resource file)
                    file)]
            (with-open [in (sexp-reader f)]
              ;; read until the :eof sentinel; emit-statement prints as a
              ;; side effect (captured by with-out-str)
              (loop [expr (read in false :eof)]
                (when (not= expr :eof)
                  (when-let [s (emit-statement expr)]
                    (print s))
                  (recur (read in false :eof))))))
          ;; structured errors: append this file to the cause chain
          (catch map? e
            (throw+ (merge e
                           {:causes (conj (or (:causes e) [])
                                          file)})))
          (catch RuntimeException e
            (if (= (.getMessage e) "EOF while reading")
              (throw+ {:known-error true
                       :msg (str "EOF while reading file "
                                 file "\n"
                                 "Maybe you've got mismatched parentheses,"
                                 " brackets or braces.")
                       :causes [file]
                       :trace e})
              (throw+ {:known-error false
                       :msg (.getMessage e)
                       :causes [file]
                       :trace e}))))))))
(defn tojs
"The top-level, stateless way to compile Chlorine source files.
Loads and compiles a list of cl2 scripts into javascript, and
returns them in a string. This function starts its own temporary symbol count
and macro memory."
[& scripts]
(binding [*temp-sym-count* (ref 999)
*macros* (ref {})]
(apply tojs' scripts)))
| null | https://raw.githubusercontent.com/chlorinejs/chlorine/9983349b47434b366b8096e938cc137f1390ab9e/src/chlorine/js.clj | clojure |
Normal data such as strings, numbers, keywords, symbols, vectors, (quoted)
lists are transformed by associated emitters of their type.
Functions, macros and special forms (including javascript native ones)
their types.
map, set, vector, regexp, symbol and keyword types
Symbols are Chlorine's amazing pieces. We have a wide range of valid
conventions such as "?" endings for predicate functions.
You can tell ChlorineJS to emit a symbol as if it's an other one by
using aliases
`int` and `boolean` are reserved symbols in js.
library.
to load nodejs/browserify. It's implemented as macro which
expands to the lower level `require*`. `require*` in turn
emitted as javascript `require()`
Because javascript doesn't allow such characters, the function
characters to javascript-friendly ones.
The mapping used to do the replacements
You can also specify "reserved symbols", which are NOT affected by
`replace-map`.
For example: [;;#"^\$.*" #"^\.\$.*"]
Some Chlorine forms are converted directly to javascript native
operators: unary and infix ones.
Infix operators are consist of
- `instance?` special form
- and/or macros
- some low-level math operators such as plus (+*), minus (-*), multiple (**)
and remainder (rem)
- low-level comparator === which can't work on vectors and maps
- binary operators
low-level ones"
All Clojure forms return something (even nil). Javascript is imperative
and its forms may or may not return values. Javascript function bodies
require a manual `return` keyword.
That's why we create this dynamic var with initial value `false`,
change its value to `true` where necessary and "consume" `true` values
(print "return" and set the var back to `false`)
those are not normal function calls
Normal function calls:
- Ensures caller are in parentheses by using `emit-invoke-function`
instead of `emit-function-call` in case the caller is not simply
a symbol.
Lispers love macros. In fact, making a macro means writing a function
The only difference is that the generated code is treated as Chlorine one.
When defined, new macros are added to a ref holding a map. The map keys
are macro names while the values are the macro functions (the one that
generates code).
returns `nil` because output are consumed by `with-out-str`
ChlorineJS produces a lot of temporary javascript symbols. To ensure
all these symbols are unique, we use this counter
- Basic binding with just a single symbol
- Destructuring binding with sequences or maps
Note on choice of get/get* in destructuring:
- destructuring seq use `get*` for faster array access
- destructuring map use `get` function which works correctly
on maps and supports default value when not found.
We define local vars with `def`
- they're special forms, not functions and receive unquoted Chlorine forms
- they print out code as strings because javascript is not a Lisp.
- namespaces don't make sense in ChlorineJS so they're automatically removed.
Low-level function form. Please use `fn` and `defn` macros instead
(functional programming means efficiency!)
whether an `if` expression should return a value *directly* or not.
If 'yes', outputs the "inline" syntax as following:
`{{test}} ? {{consequent}}: {{alternate}}`
function but itself
If 'no', traditional javascript `if` will be used instead.
alternate might be `0`, which js equates as `nil`
emit consequent directly without printing checks
used to optimize `cond` macro output
javascript `switch ... case` equivalent.
of data structures to symbols.
The binding is available only within the lexical context of the let.
Chlorine implements the same behavior of `let` by wrapping the body
inside a function in most cases.
*in-fn-toplevel*
"Leaky" versions of `let` that don't wrap anything inside a function.
Low-level form to directly access object properties/array indexes.
Use `get` (in core library) which support default value when not found
instead
with namespaces. Instead, Chlorine provides a basic syntax to load code
from other files into the current file as if they are one. This can be
done with `load-file`
Sometimes you only want to load macros from an outside file and print out
nothing. Use `load-file-macros` then | (ns chlorine.js
(:require [clojure.string :as str]
[hiccup.core]
[clojure.walk])
(:use [chlorine.reader]
[slingshot.slingshot]
[pathetic.core :only [normalize url-normalize]]
[chlorine.util
:only [url? resource-path? to-resource unzip assert-args
*cwd* *paths* get-dir find-in-paths
re? replace-map]]))
(defn ->camelCase
  "Converts a hyphenated name to camelCase: every `-x` becomes `X`
  (e.g. \"foo-bar-baz\" -> \"fooBarBaz\")."
  [^String method-name]
  (str/replace method-name
               #"-(\w)"
               (fn [[_whole letter]] (str/upper-case letter))))
(def ^:dynamic *print-pretty* false)
(def ^:dynamic *object-member* false)
(defmacro with-pretty-print [& body]
`(binding [*print-pretty* true]
~@body))
(def ^:dynamic *indent* 0)
(defmacro with-indent [[& increment] & body]
`(binding [*indent* (+ *indent* (or ~increment 4))]
~@body))
(def ^:dynamic *in-block-exp?* false)
(defmacro with-block [& body]
`(binding [*in-block-exp?* true]
~@body))
(defmacro with-bracket-block [& body]
`(with-parens ["{" "}"]
(with-block
(with-indent [] ~@body))
(newline-indent)))
(defn newline-indent []
(if *print-pretty*
(do
(newline)
(print (apply str (repeat *indent* " "))))
(print " ")))
(defmacro with-parens [[& [left right]] & body]
`(do
(print (or ~left "("))
~@body
(print (or ~right ")"))))
(def ^:dynamic *inline-if* false)
(def ^:dynamic *quoted* false)
(def ^:dynamic *in-fn-toplevel* true)
(def ^:dynamic *unique-return-expr* false)
;; Chlorinejs transforms Clojure code/data to Javascript equivalents.
;; Functions, macros and special forms (including javascript native ones)
;; share the same looks: they are unquoted lists whose first element is the
;; form name and require one more step: looking up by the names to detect
(defn detect-form
  "Detects macro/function/special form names from expressions
  for further processing. Used as dispatch function for chlorine.js/emit:
  non-empty collections dispatch on their head symbol's name, bare
  symbols on their own name, everything else on :default."
  [expr]
  (let [head (if (and (coll? expr) (seq expr))
               (first expr)
               expr)]
    (if (symbol? head)
      (name head)
      :default)))
(defn normalize-dot-form
  "Normalizes dot forms or new-object forms by stripping the \".\" from
  their beginning (method call, '.foo -> 'foo) or end (constructor call,
  'Date. -> 'Date). Single-character \".\" and plain symbols pass through."
  [form]
  (let [n (name form)]
    (cond
      (and (.startsWith n ".") (< 1 (count n)))
      (symbol (subs n 1))

      (and (.endsWith n ".") (< 1 (count n)))
      (let [s (str form)]
        (symbol (subs s 0 (dec (count s)))))

      :default
      form)))
(declare emit-str)
(declare emit-symbol)
(declare tojs')
(defn sym->property
"Transforms symbol or keyword into object's property access form."
[s]
(binding [*quoted* true]
(emit-str
(if (member-form? s)
(symbol (subs (name s) 1))
s))))
(defmulti emit
"Receives forms, emits javascript expressions."
detect-form)
(defn emit-delimited
"Emit sequences with delimiters. Useful to emit javascript arrays,
function arguments etc."
[delimiter args & [emitter]]
(when-not (empty? args)
((or emitter emit) (first args))
(doseq [arg (rest args)]
(print delimiter)
((or emitter emit) arg))))
several functions to emit Clojure data of
(defn emit-map
"Clojure maps are emitted to javascript key/value objects.
Keys can only be strings. Keywords and quoted symbols don't really make
sense in Chlorinejs and that's why they are emitted to plain strings."
[expr]
(with-parens ["{" "}"]
(binding [*inline-if* true]
(emit-delimited
","
(seq expr)
(fn [[key val]]
(cond
(keyword? key)
(emit-symbol key)
(or (string? key)
(number? key))
(emit key)
:default
(throw+ {:known-error true
:msg
(str "Error emitting this map `"
expr "`:\n"
"Invalid map key: `" key "`.\n"
"Valid keys are elements which can be"
" converted to strings.")
:causes [expr key]}))
(print " : ")
(emit val))))))
(defn emit-set
"Clojure sets are emitted to javascript key/value objects whose
values are all `true`. These 'sets' objects as javascript nature will have
distinct elements (the keys) which can be checked by `contains?` (javascript's
`in`). Please remember, all set elements are coerced to strings by javascript.
That means, both `(contains? 5 {:a 1 \"5\" 2})` and
`(contains? \"5\" {:b 3 5 4} will return true."
[expr]
(emit `(hash-set ~@(seq expr))))
(defn emit-vector
"Clojure vectors and quoted lists are emitted as javascript arrays."
[expr]
(with-parens ["[" "]"]
(binding [*inline-if* true]
(emit-delimited "," (seq expr)))))
(defn emit-re [expr]
(let [[_ flags pattern] (re-find #"^(?:\(\?([idmsux]*)\))?(.*)" (str expr))]
(print (str \/ (.replaceAll (re-matcher #"/" pattern) "\\\\/") \/ flags))))
characters for Chlorine just like Clojure . You can use Lisp - style naming
(def ^:dynamic *aliases*
They 're also function names in Clojure and Chlorine core
int int*
boolean boolean*
Chlorine uses a Clojure - like syntax of ` ( require ... ) `
require* require
}))
` chlorine.util/replace-map ` will be used to replace all Clojure - only
(def ^:dynamic *symbol-map*
(array-map
"?" "_p"
"*" "__"
"'" "_q"
"!" "_s"
"+" "_plus"
"/" "_divide"
"=" "_eq"
"->" "-to-"
"<-" "-from-"
">" "-gt-"
"<" "-lt-"
">=" "-ge-"
"=<" "-le-"
"#" "_h"
"%" "$P100$"
"&" "-and-"
))
(defn hyphen->underscore [s]
(str/escape s {\- \_}))
(def ^:dynamic *reserved-symbols* [])
(def ^:dynamic *core-symbols* #{})
(def ^:dynamic *core-symbols-in-use* (ref #{}))
(defn emit-symbol
"Emits Clojure symbols to javascript ones. If the symbol is quoted, emits its
name as a string. Does some replacements with characters not supported by
javascript if the symbol isn't marked as reserved ones."
[expr]
(let [sym-name (name expr)]
(print
(let [output-string
(if (or (reserved-symbol? *reserved-symbols* sym-name)
*object-member*)
sym-name
(-> (or (get @*aliases* (symbol sym-name))
sym-name)
(replace-map *symbol-map*)
->camelCase
hyphen->underscore))
output-sym (symbol output-string)]
(if (and (contains? *core-symbols* output-sym)
(not (contains? @*core-symbols-in-use* output-sym)))
(dosync (alter *core-symbols-in-use*
conj output-sym)))
(if *quoted*
(format "'%s'" output-string)
output-string)))))
(defn emit-keyword
"Emits Clojure keywords. Uses emit-symbol as backend."
[expr]
(binding [*quoted* true]
(emit-symbol expr)))
Unary operators in Chlorine : " ! "
Please use high - level functions from Chlorine 's core library instead of
(defn emit-unary-operator
[op arg]
(print (name op))
(emit arg))
(defn emit-infix-operator
[op & args]
(let [clj->js {"instance?" "instanceof"
"and" "&&"
"or" "||"
"=*" "==="
"+*" "+"
"-*" "-"
"**" "*"
"js-divide" "/"
"rem" "%"
"bit-and" "&"
"bit-or" "|"
"bit-xor" "^"
"bit-not" "~"
"bit-shift-left" "<<"
"bit-shift-right" ">>"
"bit-shift-right-zero-fill" ">>>"}
js-op (get clj->js (name op) (name op))]
(binding [*unique-return-expr* false
*in-fn-toplevel* false]
(with-parens []
(emit-delimited (str " " js-op " ") args)))))
(defn property->member
"Removes `-` prefix in a property name to bring it a member look."
[property]
(symbol (subs (name property) 1)))
(defn emit-function-call
"Emits a function call by simply emitting the function name and its arguments
in parentheses."
[fun & args]
(emit fun)
(with-parens []
(with-indent [] (emit-delimited ", " args))))
(defn emit-invoke-function
"Like emit-function-call, but wraps the function in parentheses. Used to
emit function calls where function is not a symbol but an other form instead."
[fun & args]
(with-parens [] (emit fun))
(with-parens [] (emit-delimited "," args)))
(def ^:dynamic *return-expr* false)
(defmacro with-return-expr
"Consumes *return-expr* `true` states or sets it to a new value."
[[& [new-val]] & body]
`(binding [*return-expr* (if *return-expr*
(do
(print "return ")
false)
(or ~new-val false))]
~@body))
(defn emit-function-form
"Emits function forms such as: unary and infix operator calls,
applying keyword on a map, method calls, creating new object calls,
and normal function calls."
[form]
(binding [*inline-if* true
*unique-return-expr* false
*in-fn-toplevel* false]
(let [[fun & args] form]
(cond
(unary-operator? fun) (apply emit-unary-operator form)
(infix-operator? fun) (apply emit-infix-operator form)
(keyword? fun)
(let [[map & default] args]
(emit `(get ~map ~fun ~@default)))
(member-form? fun)
(let [[object & margs] args]
(emit `(. ~object ~(normalize-dot-form fun) ~@margs)))
(new-object? fun)
(emit
`(new ~(normalize-dot-form fun)
~@args))
(coll? fun) (apply emit-invoke-function form)
true (apply emit-function-call form)))))
(defn emit-statement
"Emits an expression with trailing `;` and `newline-indent` if necessary."
[expr]
(try+
(binding [*inline-if* false]
(if (will-output-nothing? expr)
(emit expr)
(do
(newline-indent)
(emit expr)
(when-not (require-no-trailing? expr)
(print ";")))))
(catch map? e
(throw+ (merge e
{:causes (conj (or (:causes e) [])
expr)})))
(catch Throwable e
(throw+ {:known-error false
:msg (.getMessage e)
:causes [expr]
:trace e}))))
(defn emit-statements [exprs]
(doseq [expr exprs]
(emit-statement expr)))
(defn emit-statements-with-return
"Emits statements with the manual `return` added in front of the
last expression. If the last expression in `nil`, ignores it."
[exprs]
(binding [*return-expr* false]
(doseq [expr (butlast exprs)]
(emit-statement expr)))
(when (not= 'nil (last exprs))
(emit-statement (last exprs))))
that returns some code . Chlorine macros are nearly Clojure ones :
you can write the function ( by ) using all Clojure expressions ,
even ones from external Clojure libraries ( if you 've already loaded them ) .
(def ^:dynamic *macros* (ref {}))
(defn macro?
"Checks if a macro with that name is defined."
[n] (and (symbol? n) (contains? @*macros* (name n))))
(defn get-macro
"Gets the macro function by its name in order to generate code."
[n] (and (symbol? n) (get @*macros* (name n))))
(defn undef-macro
"Removes a macro from known macro list."
[n]
(when (macro? n)
(when *print-pretty* (println "// undefining macro" n))
(dosync (alter *macros* dissoc (name n)))))
(defmethod emit "defmacro" [[_ mname & mdeclrs]]
(try+
(let [mdeclrs (if (string? (first mdeclrs))
(rest mdeclrs)
mdeclrs)]
(when *print-pretty* (println "// defining macro" mname))
(dosync
(alter *macros*
assoc
(name mname)
(eval `(clojure.core/fn ~@mdeclrs)))))
(catch Throwable e
(throw+ {:known-error true
:msg (str "Error defining macro `" mname "`:\n"
(.getMessage e))
:causes [`(~_ ~mname ~@mdeclrs)]
:trace e})))
nil)
(defn borrow-macros
"Many Clojure macros work the same in Chlorine. Use this function to reuse
them instead of rewriting."
[& syms]
(doseq [sym syms]
(dosync
(alter *macros* conj
{(name sym)
(try+
(fn [& args#]
(apply (resolve sym) (concat [nil nil] args#)))
(catch Throwable e
(throw+ {:known-error true
:msg (str "Error borrowing macro `" sym "`:\n"
(.getMessage e))
:causes [`(borrow-macros ~sym)]
:trace e})))}))))
(defn expand-macro-1
"Gets and executes macro function, returns the Chlorine code."
[form]
(if (seq? form)
(let [[mac-name & args] form]
(if-let [mac (get-macro mac-name)]
(try+
(apply mac args)
(catch Throwable e
(throw+ {:known-error true
:msg (str "Error expanding macro `" form "`:\n"
(.getMessage e))
:causes [form]
:trace e})))
form))
form))
(defn expand-macro
"Repeatedly calls expand-macro-1 on form until it no longer
represents a macro form, then returns it. Note neither
expand-macro-1 nor expand-macro expand macros in subforms."
[form]
(let [ex (expand-macro-1 form)]
(if (identical? ex form)
form
(expand-macro ex))))
(defn emit-macro-expansion
"Gets and executes macro function, emits the result as Chlorine code."
[form]
(emit (expand-macro-1 form)))
(defn emit-docstring
"Prints docstrings as javascript comments."
[docstring]
(when *print-pretty*
(let [lines (str/split-lines docstring)]
(newline-indent)
(print (str "/* " (first lines)))
(doseq [line (rest lines)]
(newline-indent)
(print (str " " line)))
(print " */"))))
(def ^:dynamic *temp-sym-count* nil)
(defn tempsym
"Generates an unique temporary symbol."
[]
(dosync
(ref-set *temp-sym-count* (+ 1 @*temp-sym-count*))
(symbol (str "_temp_" @*temp-sym-count*))))
Chlorine supports the following Clojure binding forms :
(defn- emit-simple-binding [vname val]
(emit (if (ignorable-arg? vname) (tempsym) vname))
(print " = ")
(binding [*inline-if* true]
(emit val)))
(declare emit-var-bindings
emit-destructured-seq-binding
emit-destructured-map-binding)
(defn- emit-binding [vname val]
(binding [*inline-if* true]
(let [emitter (cond
(vector? vname) emit-destructured-seq-binding
(map? vname) emit-destructured-map-binding
:else emit-simple-binding)]
(emitter vname val))))
(defn- emit-destructured-seq-binding [vvec val]
(let [temp (tempsym)]
(print (str temp " = "))
(emit val)
(loop [vseq vvec, i 0, seen-rest? false]
(when (seq vseq)
(let [vname (first vseq)
vval (second vseq)]
(print ", ")
(condp = vname
'& (cond
seen-rest?
(throw+
{:known-error true
:msg (str "Unsupported binding form `" vvec "`:\n"
"only `:as` can follow `&`")
:causes [vvec]})
(not (symbol? vval))
(throw+
{:known-error true
:msg
(str "Unsupported binding form `" vvec "`:\n"
"`&` must be followed by exactly one symbol")
:causes [vvec]})
:else
(do (emit-binding vval `(.slice ~temp ~i))
(recur (nnext vseq) (inc i) true)))
:as (cond
(not= (count (nnext vseq)) 0)
(throw+
{:known-error true
:msg (str "Unsupported binding form `" vvec "`:\n"
"nothing may follow after `:as <binding>`")
:causes [vvec]})
(not (symbol? vval))
(throw+
{:known-error true
:msg (str "Unsupported binding form, `" vvec "`:\n"
"`:as` must be followed by a symbol")
:causes [vvec]})
:else
(emit-binding vval temp))
(do (emit-binding vname `(get* ~temp ~i))
(recur (next vseq) (inc i) seen-rest?))))))))
(defn- emit-destructured-map-binding [vmap val]
(let [temp (or (:as vmap) (tempsym))
defaults (get vmap :or)
keysmap (reduce #(assoc %1 %2 (keyword %2))
{}
(mapcat vmap [:keys :strs :syms]))
vmap (merge (dissoc vmap :as :or :keys :strs :syms) keysmap)]
(print (str temp " = "))
(emit val)
(doseq [[vname vkey] vmap]
(print ", ")
(cond
(not (and (binding-form? vname)
(or (some #(% vkey) #{keyword? number? binding-form?}))))
(throw+
{:known-error true
:msg (str "Unsupported binding form `" vmap "`:\n"
"binding symbols must be followed by keywords or numbers")
:causes [vmap]})
:else
(if-let [[_ default] (find defaults vname)]
(emit-binding vname `(get ~temp ~vkey ~default))
(emit-binding vname `(get ~temp ~vkey)))))))
(defn- emit-var-bindings [bindings]
(binding [*return-expr* false]
(emit-delimited
", "
(partition 2 bindings)
(fn [[vname val]]
(emit-binding vname val)))))
(defn- emit-function [fdecl]
(let [[fname fdecl] (if (symbol? (first fdecl))
[(first fdecl) (rest fdecl)]
[nil fdecl])
docstring (if (string? (first fdecl))
(first fdecl)
nil)
fdecl (if (string? (first fdecl))
(rest fdecl)
fdecl)
args (first fdecl)
dargs? (or (some destructuring-form? args)
(some binding-special? args)
(some ignorable-arg? args))
body (rest fdecl)]
(assert-args fn
(vector? args) "a vector for its bindings")
(if dargs?
(do
(print "function ")
(if fname (do (emit-symbol fname) (print " ")))
(print "() {")
(with-indent []
(newline-indent)
(print "var ")
(emit-binding args '(Array.prototype.slice.call arguments))
(print ";")))
(do
(print "function ")
(if fname (do (emit-symbol fname) (print " ")))
(print "(")
(binding [*return-expr* false] (emit-delimited ", " args))
(print ") {")))
(with-indent []
(when docstring
(emit-docstring docstring))
(binding [*return-expr* true
*unique-return-expr* (when (= 1 (count body)) true)
*in-fn-toplevel* false]
(emit-statements-with-return body)
))
(newline-indent)
(print "}")))
(defmethod emit "def" [[_ name value]]
(print "var ")
(emit-symbol name)
(print " = ")
(binding [*inline-if* true]
(emit value)))
(defmethod emit "alias" [[_ sym other]]
(when *print-pretty* (println "// alias" sym "as" other))
(dosync
(alter *aliases* assoc sym other)))
Macro expansions are useful in REPL .
macroexpand-1 and macroexpand work the like in Clojure except :
instead of quoted ones like in Clojure .
(defn remove-namespaces
"Removes all namespaces in forms using clojure.walk/postwalk."
[forms]
(clojure.walk/postwalk
(fn [x] (if (symbol? x) (symbol (name x)) x))
forms))
(defmethod emit "macroexpand-1" [[_ form]]
(emit (pr-str (remove-namespaces (expand-macro-1 form)))))
(defmethod emit "macroexpand" [[_ form]]
(emit (pr-str (remove-namespaces (expand-macro form)))))
(defmethod emit "fn*" [[_ & fdecl]]
(with-return-expr []
(with-block (emit-function fdecl))))
Javascript 's ` if ` expressions do n't return values directly [ 1 ] . That 's
opposite to Clojure / Chlorine where returning something is a must
Just keep writing ` ( if exprs ) ` as usual , and ChlorineJS will determine
[ 1 ] : Javascript 's ` if ` with ` return ` in it is for the upper
(defn emit-inline-if
[test consequent alternate]
(with-return-expr []
(with-parens []
(emit test)
(print " ? ")
(emit consequent)
(print " : ")
(emit alternate))))
(defn emit-block-if [test consequent alternate]
(print "if (")
(binding [*return-expr* false
*inline-if* true]
(emit test))
(print ") ")
(with-bracket-block
(with-indent []
(emit-statement consequent)))
(when-not (or (nil? alternate)
(= '(clojure.core/cond)
alternate))
(print " else ")
(with-bracket-block
(with-indent []
(emit-statement alternate)))))
(defmethod emit "if" [[_ test consequent & [alternate]]]
(if (and *inline-if* consequent)
(emit-inline-if test consequent alternate)
(emit-block-if test consequent alternate)))
Clojure / ChlorineJS ` ( case ... ) ` syntax will output
(defn emit-case [e clauses]
(binding [*unique-return-expr* false
*in-fn-toplevel* false]
(let [pairs (partition 2 clauses)]
(print "switch (")
(binding [*return-expr* false]
(emit e))
(print ") {")
(doseq [[k v] pairs]
(with-indent []
(newline-indent)
(print "case " )
(binding [*return-expr* false]
(emit k))
(print ":")
(with-block
(with-indent []
(emit-statement v)
(newline-indent)
(when-not *return-expr*
(print "break;")))))))
(when (odd? (count clauses))
(with-indent []
(newline-indent)
(print "default:")
(with-block
(with-indent []
(emit-statement (last clauses)))))))
(newline-indent)
(print "}"))
(defmethod emit "case" [[_ e & clauses]]
(if *inline-if*
(do
(print "(function(){")
(binding [*return-expr* true]
(with-indent []
(newline-indent)
(emit-case e clauses)))
(print "})()"))
(emit-case e clauses)))
(defmethod emit "do" [[_ & exprs]]
(if *inline-if*
(do
(print "(function(){")
(binding [*return-expr* true]
(with-indent []
(newline-indent)
(emit-statements-with-return exprs)))
(print "})()"))
(emit-statements-with-return exprs)))
` let ` is a Clojure fundamental form that provides lexical bindings
(defmethod emit "let" [[_ bindings & exprs]]
(let [emit-var-decls (fn []
(print "var ")
(binding [*return-expr* false]
(with-block (emit-var-bindings bindings))
(print ";"))
(emit-statements-with-return exprs))
emit-let-fun (fn []
(print "(function () {")
(with-indent []
(newline-indent)
(binding [*return-expr* true]
(emit-var-decls)))
(newline-indent)
(print " })()"))]
(cond
*inline-if*
(with-return-expr []
(emit-let-fun))
(binding [*unique-return-expr* false]
(emit-var-decls))
*return-expr*
(with-return-expr []
(emit-let-fun))
:default
(do (emit-let-fun)
(print ";")))))
(defmethod emit "let!" [[_ & bindings]]
(binding [*return-expr* false]
(with-block (emit-var-bindings bindings))
(print ";")))
(defmethod emit "let*" [[_ & bindings]]
(print "var ")
(binding [*return-expr* false]
(with-block (emit-var-bindings bindings))
(print ";")))
(defmethod emit "new" [[_ class & args]]
(with-return-expr []
(binding [*inline-if* true]
(print "new ")
(emit class)
(with-parens [] (emit-delimited "," args)))))
(defmethod emit "delete" [[_ item]]
(with-return-expr []
(binding [*inline-if* true]
(print "delete ")
(emit item))))
(defmethod emit "return" [[_ value]]
(print "return ")
(emit value))
(defmethod emit "get*" [[_ map key]]
(with-return-expr []
(emit map)
(if (keyword? key)
(do (print ".")
(binding [*quoted* false]
(emit-symbol key)))
(do (print "[")
(emit key)
(print "]")))))
(defmethod emit "." [[_ object key & args]]
(with-return-expr []
(emit object)
(print ".")
(cond
(symbol? key)
(if (.startsWith (name key) "-")
(binding [*object-member* true]
(emit (property->member key)))
(do (binding [*object-member* true]
(emit key))
(with-parens []
(with-indent [] (emit-delimited ", " args)))))
(coll? key)
(do (binding [*object-member* true]
(emit (first key)))
(with-parens []
(with-indent [] (emit-delimited ", " (rest key))))))))
(defmethod emit "set!" [[_ & apairs]]
(binding [*return-expr* false
*in-fn-toplevel* false
*unique-return-expr* false
*inline-if* true]
(let [apairs (partition 2 apairs)]
(emit-delimited " = " (first apairs))
(doseq [apair (rest apairs)]
(print ";")
(newline-indent)
(emit-delimited " = " apair)))))
(defmethod emit "try" [[_ expr & clauses]]
(print "try ")
(with-bracket-block
(emit-statement expr))
(doseq [[clause & body] clauses]
(case clause
catch (let [[evar expr] body]
(with-block
(print " catch (")
(emit-symbol evar)
(print ") ")
(with-bracket-block
(emit-statement expr))))
finally (with-block
(print " finally ")
(with-bracket-block
(doseq [expr body] (emit-statement expr)))))))
(def ^:dynamic *loop-vars* nil)
(defmethod emit "loop" [[_ bindings & body]]
(let [emit-for-block (fn []
(print "for (var ")
(binding [*return-expr* false
*in-block-exp?* false]
(emit-var-bindings bindings))
(print "; true;) {")
(with-indent []
(binding [*loop-vars* (first (unzip bindings))]
(emit-statements-with-return body))
(newline-indent)
(print "break;"))
(newline-indent)
(print "}"))]
(if (or *in-fn-toplevel* *unique-return-expr*)
(binding [*unique-return-expr* false
*in-fn-toplevel* false]
(emit-for-block))
(with-return-expr []
(print "(function () {")
(binding [*return-expr* true]
(with-indent []
(newline-indent)
(emit-for-block))
(newline-indent))
(print "}).call(this)")))))
(defmethod emit "recur" [[_ & args]]
(binding [*return-expr* false]
(let [tmp (tempsym)]
(print "var" (emit-str tmp) "= ")
(emit-vector args)
(println ";")
(emit-statements (map (fn [lvar i] `(set! ~lvar (get* ~tmp ~i)))
*loop-vars*
(range (count *loop-vars*))))))
(newline-indent)
(print "continue"))
(defmethod emit "dokeys" [[_ [lvar hash] & body]]
(binding [*return-expr* false]
(print "for (var ")
(emit lvar)
(print " in ")
(emit hash)
(print ") ")
(with-bracket-block
(emit-statements body))))
(defmethod emit "while" [[_ test & body]]
(binding [*return-expr* false]
(print "while (")
(emit test)
(print ") ")
(with-bracket-block
(emit-statements body))))
(defmethod emit "do-while" [[_ test & body]]
(binding [*return-expr* false]
(print "do ")
(with-bracket-block
(emit-statements body))
(print " while (")
(emit test)
(print ")")))
(defmethod emit "dofor" [[_ [init-bindings test update] & body]]
(let [init (if (vector? init-bindings)
`(let* ~@init-bindings)
init-bindings)]
(binding [*return-expr* false]
(print "for (")
(emit-statements [init test update])
(print ") ")
(with-bracket-block
(emit-statements body)))))
(defmethod emit "inline" [[_ js]]
(with-return-expr []
(print js)))
(defmethod emit "quote" [[_ expr]]
(binding [*quoted* true]
(emit expr)))
(defmethod emit "throw" [[_ expr]]
(binding [*return-expr* false]
(print "throw ")
(emit expr)))
(defmethod emit :default [expr]
(if (and (coll? expr) (not *quoted*) (macro? (first expr)))
(emit-macro-expansion expr)
(with-return-expr []
(cond
(map? expr) (emit-map expr)
(set? expr) (emit-set expr)
(vector? expr) (emit-vector expr)
(re? expr) (emit-re expr)
(keyword? expr) (emit-keyword expr)
(string? expr) (pr expr)
(symbol? expr) (emit-symbol expr)
(char? expr) (print (format "'%c'" expr))
(and *quoted* (coll? expr)) (emit-vector expr)
(coll? expr) (emit-function-form expr)
(nil? expr) (print "void(0)")
true (print expr)))))
(defn emit-str [expr]
(binding [*return-expr* false
*inline-if* true]
(with-out-str (emit expr))))
(defn js-emit [expr] (emit expr))
(defmacro js
"Translate the Clojure subset `exprs' to a string of javascript
code."
[& exprs]
(let [exprs# `(quote ~exprs)]
`(binding [*temp-sym-count* (ref 999)]
(with-out-str
(if (< 1 (count ~exprs#))
(emit-statements ~exprs#)
(js-emit (first ~exprs#)))))))
(defmacro js-let
"Bind Clojure environment values to named vars of a cljs block, and
translate the Clojure subset `exprs' to a string of javascript code."
[bindings & exprs]
(let [form# 'fn*
[formals# actuals#] (unzip bindings)]
`(with-out-str
(emit-statement (list '(~form# ~(vec formals#) ~@exprs) ~@actuals#)))))
(defmacro let-js
"Bind Clojure environment values to named vars of a quoted cljs block, and
translate the Clojure subset `exprs' to a string of javascript code."
[bindings quoted-expr]
(let [body# `(let ~bindings ~quoted-expr)]
`(with-out-str
(js-emit ~body#))))
(declare raw-script)
Chlorine does n't support an official way to modularize code like Clojure
(defmethod emit "load-file" [[_ & files]]
( print ( str ( apply ' files ) ) )
(doseq [file files]
(when *print-pretty* (println "// <-- Starts loading file: " file))
(if-let [content (tojs' file)]
(print (str content)))
(when *print-pretty* (println "// Ends loading file: " file " -->"))))
(defmethod emit "load-file-macros" [[_ & files]]
(doseq [file files]
(when *print-pretty* (println "// Loads macros from file: " file))
(tojs' file)))
Inlines raw javascript from files instead of Chlorine ones .
(defmethod emit "load-js" [[_ & files]]
(doseq [file files]
(when *print-pretty* (println "// <-- Starts Javascipt file: " file))
(if-let [content (raw-script file)]
(print (str content)))
(when *print-pretty* (println "// Ends Javascript file: " file " -->"))))
(defn raw-script [& scripts]
(with-out-str
(doseq [script scripts
:let [file (find-in-paths script)
dir (get-dir file)]]
(binding [*cwd* dir]
(if (nil? file) (throw+ {:known-error true
:msg
"File not found `" script "`"
:causes [script]}))
(let [f (if (resource-path? file)
(to-resource file)
file)]
(print (slurp f)))))))
(defn tojs'
"The low-level, stateful way to compile Chlorine source files. This function
varies depending on states such as macros, temporary symbol count etc."
[& scripts]
(with-out-str
(doseq [script scripts
:let [file (find-in-paths script)
dir (get-dir file)]]
(binding [*cwd* dir]
(try+
(if (nil? file) (throw+ {:known-error true
:msg
"File not found `" script "`"
:causes [script]}))
(let [f (if (resource-path? file)
(to-resource file)
file)]
(with-open [in (sexp-reader f)]
(loop [expr (read in false :eof)]
(when (not= expr :eof)
(when-let [s (emit-statement expr)]
(print s))
(recur (read in false :eof))))))
(catch map? e
(throw+ (merge e
{:causes (conj (or (:causes e) [])
file)})))
(catch RuntimeException e
(if (= (.getMessage e) "EOF while reading")
(throw+ {:known-error true
:msg (str "EOF while reading file "
file "\n"
"Maybe you've got mismatched parentheses,"
" brackets or braces.")
:causes [file]
:trace e})
(throw+ {:known-error false
:msg (.getMessage e)
:causes [file]
:trace e})))
)))))
(defn tojs
"The top-level, stateless way to compile Chlorine source files.
Loads and compiles a list of cl2 scripts into javascript, and
returns them in a string. This function starts its own temporary symbol count
and macro memory."
[& scripts]
(binding [*temp-sym-count* (ref 999)
*macros* (ref {})]
(apply tojs' scripts)))
|
59890e03de48c9987afa7916a81d3fc6961d79a11bbc301fe92c2616de0bcd51 | AdaCore/why3 | pp.ml | (********************************************************************)
(* *)
The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2022 -- Inria - CNRS - Paris - Saclay University
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
General Public License version 2.1 , with the special exception
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
i $ I d : pp.ml , v 1.22 2009 - 10 - 19 11:55:33 bobot Exp $ i
(*s Pretty-print library *)
open Format
type 'a pp = formatter -> 'a -> unit
let print_string = pp_print_string
let print_option f fmt = function
| None -> ()
| Some x -> f fmt x
let print_option_or_default default f fmt = function
| None -> pp_print_string fmt default
| Some x -> f fmt x
let rec print_list_pre sep print fmt = function
| [] -> ()
| x :: r -> sep fmt (); print fmt x; print_list_pre sep print fmt r
let rec print_list_suf sep print fmt = function
| [] -> ()
| x :: r -> print fmt x; sep fmt (); print_list_suf sep print fmt r
let print_list sep print fmt = function
| [] -> ()
| [x] -> print fmt x
| x :: r -> print fmt x; print_list_pre sep print fmt r
let print_list_or_default default sep print fmt = function
| [] -> pp_print_string fmt default
| l -> print_list sep print fmt l
let print_list_par sep pr fmt l =
print_list sep (fun fmt x -> fprintf fmt "(%a)" pr x) fmt l
let print_list_delim ~start ~stop ~sep pr fmt = function
| [] -> ()
| l -> fprintf fmt "%a%a%a" start () (print_list sep pr) l stop ()
let print_list_next sep print fmt = function
| [] -> ()
| [x] -> print true fmt x
| x :: r ->
print true fmt x; sep fmt ();
print_list sep (print false) fmt r
let print_iter1 iter sep print fmt l =
let first = ref true in
iter (fun x ->
if !first
then first := false
else sep fmt ();
print fmt x ) l
let print_iter2 iter sep1 sep2 print1 print2 fmt l =
let first = ref true in
iter (fun x y ->
if !first
then first := false
else sep1 fmt ();
print1 fmt x;sep2 fmt (); print2 fmt y) l
let print_iter22 iter sep print fmt l =
let first = ref true in
iter (fun x y ->
if !first
then first := false
else sep fmt ();
print fmt x y) l
let print_pair_delim start sep stop pr1 pr2 fmt (a,b) =
fprintf fmt "%a%a%a%a%a" start () pr1 a sep () pr2 b stop ()
type formatted = (unit, unit, unit, unit, unit, unit) format6
let empty_formatted : formatted = ""
let dot fmt () = fprintf fmt ".@ "
let comma fmt () = fprintf fmt ",@ "
let star fmt () = fprintf fmt "*@ "
let simple_comma fmt () = pp_print_string fmt ", "
let underscore fmt () = pp_print_string fmt "_"
let slash fmt () = pp_print_string fmt "/"
let semi fmt () = fprintf fmt ";@ "
let colon fmt () = fprintf fmt ":@ "
let space fmt () = fprintf fmt "@ "
let alt fmt () = fprintf fmt "|@ "
let alt2 fmt () = fprintf fmt "@ | "
let equal fmt () = fprintf fmt "@ =@ "
let newline fmt () = fprintf fmt "@\n"
let newline2 fmt () = fprintf fmt "@\n@\n"
let arrow fmt () = fprintf fmt "@ -> "
let lbrace fmt () = pp_print_string fmt "{"
let rbrace fmt () = pp_print_string fmt "}"
let lsquare fmt () = pp_print_string fmt "["
let rsquare fmt () = pp_print_string fmt "]"
let lparen fmt () = pp_print_string fmt "("
let rparen fmt () = pp_print_string fmt ")"
let lchevron fmt () = pp_print_string fmt "<"
let rchevron fmt () = pp_print_string fmt ">"
let nothing _fmt _ = ()
let string = pp_print_string
let float = pp_print_float
let int = pp_print_int
let bool = pp_print_bool
let constant_string s fmt () = string fmt s
let formatted fmt x = Format.fprintf fmt "%( %)" x
let constant_formatted f fmt () = formatted fmt f
let print0 fmt () = pp_print_string fmt "\000"
let add_flush sep fmt x = sep fmt x; pp_print_flush fmt ()
let asd f fmt x = fprintf fmt "\"%a\"" f x
let print_pair pr1 = print_pair_delim lparen comma rparen pr1
let hov n f fmt x = pp_open_hovbox fmt n; f fmt x; pp_close_box fmt ()
let indent n f fmt x =
for _i = 0 to n do
pp_print_char fmt ' '
done;
hov 0 f fmt x
let open_formatter ?(margin=78) cout =
let fmt = formatter_of_out_channel cout in
pp_set_margin fmt margin;
pp_open_box fmt 0;
fmt
let close_formatter fmt =
pp_close_box fmt ();
pp_print_flush fmt ()
let open_file_and_formatter ?(margin=78) f =
let cout = open_out f in
let fmt = open_formatter ~margin cout in
cout,fmt
let close_file_and_formatter (cout,fmt) =
close_formatter fmt;
close_out cout
let print_in_file_no_close ?(margin=78) p f =
let cout,fmt = open_file_and_formatter ~margin f in
p fmt;
close_formatter fmt;
cout
let print_in_file ?(margin=78) p f =
let cout = print_in_file_no_close ~margin p f in
close_out cout
(* With optional separation *)
let rec print_list_opt sep print fmt = function
| [] -> false
| [x] -> print fmt x
| x :: r ->
let notempty1 = print fmt x in
if notempty1 then sep fmt ();
let notempty2 = print_list_opt sep print fmt r in
notempty1 || notempty2
let string_of ?max_boxes p x =
let b = Buffer.create 100 in
let fmt = formatter_of_buffer b in
Opt.iter (fun x ->
Format.pp_set_ellipsis_text fmt "...";
Format.pp_set_max_boxes fmt x) max_boxes;
fprintf fmt "%a@?" p x;
Buffer.contents b
let wnl fmt =
let out , flush,_newline , spaces =
pp_get_all_formatter_output_functions fmt ( ) in
pp_set_all_formatter_output_functions fmt
~out ~flush ~newline:(fun ( ) - > spaces 1 ) ~spaces
let out,flush,_newline,spaces =
pp_get_all_formatter_output_functions fmt () in
pp_set_all_formatter_output_functions fmt
~out ~flush ~newline:(fun () -> spaces 1) ~spaces
*)
let o = pp_get_formatter_out_functions fmt () in
pp_set_formatter_out_functions fmt
{ o with out_newline = (fun () -> o.out_spaces 1) }
let string_of_wnl p x =
let b = Buffer.create 100 in
let fmt = formatter_of_buffer b in
wnl fmt;
fprintf fmt "%a@?" p x;
Buffer.contents b
let sprintf = asprintf
let sprintf_wnl p =
let b = Buffer.create 100 in
let fmt = formatter_of_buffer b in
wnl fmt;
kfprintf (fun fmt -> Format.pp_print_flush fmt (); Buffer.contents b) fmt p
let html_char fmt c =
match c with
| '\"' -> pp_print_string fmt """
| '\'' -> pp_print_string fmt "'"
| '<' -> pp_print_string fmt "<"
| '>' -> pp_print_string fmt ">"
| '&' -> pp_print_string fmt "&"
| c -> pp_print_char fmt c
let html_string fmt s =
for i=0 to String.length s - 1 do
html_char fmt (String.get s i)
done
module Ansi =
struct
let set_column fmt n = fprintf fmt "\027[%iG" n
end
type formatter = Format.formatter
| null | https://raw.githubusercontent.com/AdaCore/why3/be1023970d48869285e68f12d32858c3383958e0/src/util/pp.ml | ocaml | ******************************************************************
This software is distributed under the terms of the GNU Lesser
on linking described in file LICENSE.
******************************************************************
s Pretty-print library
With optional separation | The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2022 -- Inria - CNRS - Paris - Saclay University
General Public License version 2.1 , with the special exception
i $ I d : pp.ml , v 1.22 2009 - 10 - 19 11:55:33 bobot Exp $ i
open Format
type 'a pp = formatter -> 'a -> unit
let print_string = pp_print_string
let print_option f fmt = function
| None -> ()
| Some x -> f fmt x
let print_option_or_default default f fmt = function
| None -> pp_print_string fmt default
| Some x -> f fmt x
let rec print_list_pre sep print fmt = function
| [] -> ()
| x :: r -> sep fmt (); print fmt x; print_list_pre sep print fmt r
let rec print_list_suf sep print fmt = function
| [] -> ()
| x :: r -> print fmt x; sep fmt (); print_list_suf sep print fmt r
let print_list sep print fmt = function
| [] -> ()
| [x] -> print fmt x
| x :: r -> print fmt x; print_list_pre sep print fmt r
let print_list_or_default default sep print fmt = function
| [] -> pp_print_string fmt default
| l -> print_list sep print fmt l
let print_list_par sep pr fmt l =
print_list sep (fun fmt x -> fprintf fmt "(%a)" pr x) fmt l
let print_list_delim ~start ~stop ~sep pr fmt = function
| [] -> ()
| l -> fprintf fmt "%a%a%a" start () (print_list sep pr) l stop ()
let print_list_next sep print fmt = function
| [] -> ()
| [x] -> print true fmt x
| x :: r ->
print true fmt x; sep fmt ();
print_list sep (print false) fmt r
let print_iter1 iter sep print fmt l =
let first = ref true in
iter (fun x ->
if !first
then first := false
else sep fmt ();
print fmt x ) l
let print_iter2 iter sep1 sep2 print1 print2 fmt l =
let first = ref true in
iter (fun x y ->
if !first
then first := false
else sep1 fmt ();
print1 fmt x;sep2 fmt (); print2 fmt y) l
let print_iter22 iter sep print fmt l =
let first = ref true in
iter (fun x y ->
if !first
then first := false
else sep fmt ();
print fmt x y) l
let print_pair_delim start sep stop pr1 pr2 fmt (a,b) =
fprintf fmt "%a%a%a%a%a" start () pr1 a sep () pr2 b stop ()
type formatted = (unit, unit, unit, unit, unit, unit) format6
let empty_formatted : formatted = ""
let dot fmt () = fprintf fmt ".@ "
let comma fmt () = fprintf fmt ",@ "
let star fmt () = fprintf fmt "*@ "
let simple_comma fmt () = pp_print_string fmt ", "
let underscore fmt () = pp_print_string fmt "_"
let slash fmt () = pp_print_string fmt "/"
let semi fmt () = fprintf fmt ";@ "
let colon fmt () = fprintf fmt ":@ "
let space fmt () = fprintf fmt "@ "
let alt fmt () = fprintf fmt "|@ "
let alt2 fmt () = fprintf fmt "@ | "
let equal fmt () = fprintf fmt "@ =@ "
let newline fmt () = fprintf fmt "@\n"
let newline2 fmt () = fprintf fmt "@\n@\n"
let arrow fmt () = fprintf fmt "@ -> "
let lbrace fmt () = pp_print_string fmt "{"
let rbrace fmt () = pp_print_string fmt "}"
let lsquare fmt () = pp_print_string fmt "["
let rsquare fmt () = pp_print_string fmt "]"
let lparen fmt () = pp_print_string fmt "("
let rparen fmt () = pp_print_string fmt ")"
let lchevron fmt () = pp_print_string fmt "<"
let rchevron fmt () = pp_print_string fmt ">"
let nothing _fmt _ = ()
let string = pp_print_string
let float = pp_print_float
let int = pp_print_int
let bool = pp_print_bool
let constant_string s fmt () = string fmt s
let formatted fmt x = Format.fprintf fmt "%( %)" x
let constant_formatted f fmt () = formatted fmt f
let print0 fmt () = pp_print_string fmt "\000"
let add_flush sep fmt x = sep fmt x; pp_print_flush fmt ()
let asd f fmt x = fprintf fmt "\"%a\"" f x
let print_pair pr1 = print_pair_delim lparen comma rparen pr1
let hov n f fmt x = pp_open_hovbox fmt n; f fmt x; pp_close_box fmt ()
let indent n f fmt x =
for _i = 0 to n do
pp_print_char fmt ' '
done;
hov 0 f fmt x
let open_formatter ?(margin=78) cout =
let fmt = formatter_of_out_channel cout in
pp_set_margin fmt margin;
pp_open_box fmt 0;
fmt
let close_formatter fmt =
pp_close_box fmt ();
pp_print_flush fmt ()
let open_file_and_formatter ?(margin=78) f =
let cout = open_out f in
let fmt = open_formatter ~margin cout in
cout,fmt
let close_file_and_formatter (cout,fmt) =
close_formatter fmt;
close_out cout
let print_in_file_no_close ?(margin=78) p f =
let cout,fmt = open_file_and_formatter ~margin f in
p fmt;
close_formatter fmt;
cout
let print_in_file ?(margin=78) p f =
let cout = print_in_file_no_close ~margin p f in
close_out cout
let rec print_list_opt sep print fmt = function
| [] -> false
| [x] -> print fmt x
| x :: r ->
let notempty1 = print fmt x in
if notempty1 then sep fmt ();
let notempty2 = print_list_opt sep print fmt r in
notempty1 || notempty2
let string_of ?max_boxes p x =
let b = Buffer.create 100 in
let fmt = formatter_of_buffer b in
Opt.iter (fun x ->
Format.pp_set_ellipsis_text fmt "...";
Format.pp_set_max_boxes fmt x) max_boxes;
fprintf fmt "%a@?" p x;
Buffer.contents b
let wnl fmt =
let out , flush,_newline , spaces =
pp_get_all_formatter_output_functions fmt ( ) in
pp_set_all_formatter_output_functions fmt
~out ~flush ~newline:(fun ( ) - > spaces 1 ) ~spaces
let out,flush,_newline,spaces =
pp_get_all_formatter_output_functions fmt () in
pp_set_all_formatter_output_functions fmt
~out ~flush ~newline:(fun () -> spaces 1) ~spaces
*)
let o = pp_get_formatter_out_functions fmt () in
pp_set_formatter_out_functions fmt
{ o with out_newline = (fun () -> o.out_spaces 1) }
let string_of_wnl p x =
let b = Buffer.create 100 in
let fmt = formatter_of_buffer b in
wnl fmt;
fprintf fmt "%a@?" p x;
Buffer.contents b
let sprintf = asprintf
let sprintf_wnl p =
let b = Buffer.create 100 in
let fmt = formatter_of_buffer b in
wnl fmt;
kfprintf (fun fmt -> Format.pp_print_flush fmt (); Buffer.contents b) fmt p
let html_char fmt c =
match c with
| '\"' -> pp_print_string fmt """
| '\'' -> pp_print_string fmt "'"
| '<' -> pp_print_string fmt "<"
| '>' -> pp_print_string fmt ">"
| '&' -> pp_print_string fmt "&"
| c -> pp_print_char fmt c
let html_string fmt s =
for i=0 to String.length s - 1 do
html_char fmt (String.get s i)
done
module Ansi =
struct
let set_column fmt n = fprintf fmt "\027[%iG" n
end
type formatter = Format.formatter
|
6f42a2f3f6901f25d91a140a0f29573e4de4b6d083efdab1c31eae82a20f5d99 | smeruelo/mooc-ocaml | w4_5.1_writing_map.ml | type 'a tree =
| Node of 'a tree * 'a * 'a tree
| Leaf of 'a;;
let wrap l =
List.map (fun x -> [x]) l
let rec tree_map f = function
| Node (lt, x, rt) -> Node (tree_map f lt, f x, tree_map f rt)
| Leaf x -> Leaf (f x)
| null | https://raw.githubusercontent.com/smeruelo/mooc-ocaml/8e2efb1632ec9dd381489a08465d5341a6c727c9/week4/w4_5.1_writing_map.ml | ocaml | type 'a tree =
| Node of 'a tree * 'a * 'a tree
| Leaf of 'a;;
let wrap l =
List.map (fun x -> [x]) l
let rec tree_map f = function
| Node (lt, x, rt) -> Node (tree_map f lt, f x, tree_map f rt)
| Leaf x -> Leaf (f x)
| |
733417c381d658be71b64e2db3e12bb28289c86881986ef52764f6068b07b638 | totakke/cavia | test_util.clj | (ns cavia.test-util
(:require [clojure.java.io :as io]))
(defmacro with-out-null
[& body]
`(binding [*out* (io/writer "/dev/null")
*err* (io/writer "/dev/null")]
~@body))
(def temp-dir (.getPath (io/file (System/getProperty "java.io.tmpdir") "cavia-test")))
(defn prepare-cache! []
(.mkdir (io/file temp-dir)))
(defn clean-cache! []
(let [dir (io/file temp-dir)]
(when (.exists dir)
(doseq [f (seq (.list dir))]
(.delete (io/file (str temp-dir "/" f))))
(.delete dir))))
| null | https://raw.githubusercontent.com/totakke/cavia/a72e739b2d78adf01c5ac32a1c487d29b24b482f/test/cavia/test_util.clj | clojure | (ns cavia.test-util
(:require [clojure.java.io :as io]))
(defmacro with-out-null
[& body]
`(binding [*out* (io/writer "/dev/null")
*err* (io/writer "/dev/null")]
~@body))
(def temp-dir (.getPath (io/file (System/getProperty "java.io.tmpdir") "cavia-test")))
(defn prepare-cache! []
(.mkdir (io/file temp-dir)))
(defn clean-cache! []
(let [dir (io/file temp-dir)]
(when (.exists dir)
(doseq [f (seq (.list dir))]
(.delete (io/file (str temp-dir "/" f))))
(.delete dir))))
| |
7d3b4abdcd4a8cb0fff4250e7a2e5934951152d79623d00e846bf9d38e37929f | dparis/gen-phzr | filter_texture.cljs | (ns phzr.pixi.filter-texture
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser]))
(defn ->FilterTexture
"
Parameters:
* gl (WebGLContext) - the current WebGL drawing context
* width (Number) - the horizontal range of the filter
* height (Number) - the vertical range of the filter
* scale-mode (Number) - See PIXI.scaleModes for possible values"
([gl width height scale-mode]
(js/PIXI.FilterTexture. (clj->phaser gl)
(clj->phaser width)
(clj->phaser height)
(clj->phaser scale-mode))))
(defn clear
"Clears the filter texture."
([filter-texture]
(phaser->clj
(.clear filter-texture))))
(defn destroy
"Destroys the filter texture."
([filter-texture]
(phaser->clj
(.destroy filter-texture))))
(defn resize
"Resizes the texture to the specified width and height
Parameters:
* filter-texture (PIXI.FilterTexture) - Targeted instance for method
* width (Number) - the new width of the texture
* height (Number) - the new height of the texture"
([filter-texture width height]
(phaser->clj
(.resize filter-texture
(clj->phaser width)
(clj->phaser height))))) | null | https://raw.githubusercontent.com/dparis/gen-phzr/e4c7b272e225ac343718dc15fc84f5f0dce68023/out/pixi/filter_texture.cljs | clojure | (ns phzr.pixi.filter-texture
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser]))
(defn ->FilterTexture
"
Parameters:
* gl (WebGLContext) - the current WebGL drawing context
* width (Number) - the horizontal range of the filter
* height (Number) - the vertical range of the filter
* scale-mode (Number) - See PIXI.scaleModes for possible values"
([gl width height scale-mode]
(js/PIXI.FilterTexture. (clj->phaser gl)
(clj->phaser width)
(clj->phaser height)
(clj->phaser scale-mode))))
(defn clear
"Clears the filter texture."
([filter-texture]
(phaser->clj
(.clear filter-texture))))
(defn destroy
"Destroys the filter texture."
([filter-texture]
(phaser->clj
(.destroy filter-texture))))
(defn resize
"Resizes the texture to the specified width and height
Parameters:
* filter-texture (PIXI.FilterTexture) - Targeted instance for method
* width (Number) - the new width of the texture
* height (Number) - the new height of the texture"
([filter-texture width height]
(phaser->clj
(.resize filter-texture
(clj->phaser width)
(clj->phaser height))))) | |
0f9eb78f6b1cc408a495904bd549829dda335a0c84425025a589d7a4285ff82f | khibino/haskell-relational-record | join.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE TemplateHaskell #
import Data.Int
import Data.Time
import Database.Relational.Query
import Database.Relational.Query.TH
import Database.Relational.Query.Monad.Trans.Ordering (Orderings)
import Person (Person (Person), person)
import Birthday (Birthday, birthday)
import qualified Person
import qualified Birthday
personAndBirthday :: Relation () (Person, Birthday)
personAndBirthday = relation $ do
p <- query person -- Join product accumulated
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
return $ p >< b
personAndBirthdayL :: Relation () (Person, Maybe Birthday)
personAndBirthdayL = relation $ do
p <- query person
b <- queryMaybe birthday -- Maybe not match
on $ just (p ! Person.name') .=. b ?! Birthday.name'
return $ p >< b
sameBirthdayHeisei' :: Relation () (Day, Int64)
sameBirthdayHeisei' = aggregateRelation $ do
p <- query person
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
wheres $ b ! Birthday.day' .>=. value (fromGregorian 1989 1 8)
gbd <- groupBy $ b ! Birthday.day'
having $ count (p ! Person.name') .>. value (1 :: Int64)
return $ gbd >< count (p ! Person.name')
sameBirthdayHeisei :: Relation () (Day, Int64)
sameBirthdayHeisei = aggregateRelation $ do
p <- query person
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
let birthDay = b ! Birthday.day'
wheres $ birthDay .>=. value (fromGregorian 1989 1 8)
gbd <- groupBy birthDay
let personCount = count $ p ! Person.name'
having $ personCount .>. value 1
return $ gbd >< personCount
birthdayHeiseiDesc :: Relation () (Day, Int64)
birthdayHeiseiDesc = aggregateRelation $ do
p <- query person
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
let birthDay = b ! Birthday.day'
wheres $ birthDay .>=. value (fromGregorian 1989 1 8)
gbd <- groupBy birthDay
let personCount = count $ p ! Person.name'
orderBy personCount Desc
return $ gbd >< personCount
personAndBirthdayO :: Relation () (Person, Birthday)
personAndBirthdayO = relation $ do
p <- query person
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
orderBy (b ! Birthday.day') Asc -- Specify ordering key
orderBy (p ! Person.name') Asc
return $ p >< b
specifyPerson :: Relation String (Person, Birthday)
specifyPerson = relation' $ do
pb <- query personAndBirthday
(ph, ()) <- placeholder (\ph' -> wheres $ pb ! fst' ! Person.name' .=. ph')
return (ph, pb)
data PersonAndBirthday =
PersonAndBirthday
{ pbPerson :: Person
, pbBirthday :: Birthday
}
$(makeRelationalRecordDefault ''PersonAndBirthday)
personAndBirthdayT :: Relation () PersonAndBirthday
personAndBirthdayT = relation $ do
p <- query person
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
return $ PersonAndBirthday |$| p |*| b -- Build record phantom type
Birthday.day ' : : Pi Birthday Day
uncurryPB :: Pi (Person, Birthday) PersonAndBirthday
uncurryPB = PersonAndBirthday |$| fst' |*| snd'
personAndBirthdayP :: Relation Person PersonAndBirthday
personAndBirthdayP = relation' $ do
p <- query person
b <- query birthday
(ph, ()) <- placeholder (\ph' -> on $ p .=. ph')
return $ (ph, PersonAndBirthday |$| p |*| b)
personAndBirthdayP2 :: Relation Person PersonAndBirthday
personAndBirthdayP2 = relation' $ do
p <- query person
b <- query birthday
(ph0, ()) <- placeholder (\ph0' -> on $ p ! Person.name' .=. ph0')
(ph1, ()) <- placeholder (\ph1' -> on $ p ! Person.age' .=. ph1')
(ph2, ()) <- placeholder (\ph2' -> on $ p ! Person.family' .=. ph2')
return (Person |$| ph0 |*| ph1 |*| ph2, PersonAndBirthday |$| p |*| b)
agesOfFamilies :: Relation () (String, Maybe Int32)
agesOfFamilies = aggregateRelation $ do
p <- query person
gFam <- groupBy $ p ! Person.family' -- Specify grouping key
return $ gFam >< sum' (p ! Person.age') -- Aggregated results
agesOfFamiliesO :: Relation () (String, Maybe Int32)
agesOfFamiliesO = aggregateRelation $ do
p <- query person
gFam <- groupBy $ p ! Person.family'
let s = sum' (p ! Person.age')
orderBy s Desc -- Only aggregated value is allowed to pass
orderBy gFam Asc
return $ gFam >< s
ageRankOfFamilies :: Relation () ((Int64, String), Int32)
ageRankOfFamilies = relation $ do
p <- query person
return $
rank `over` do
Monad to build window
orderBy (p ! Person.age') Desc
><
p ! Person.family'
><
p ! Person.age'
nonsense :: Relation () (Person, Birthday)
nonsense = personAndBirthday `union` personAndBirthdayO
heiseiBirthday :: MonadRestrict Flat m
=> Projection Flat Birthday -> m ()
heiseiBirthday b = wheres $ b ! Birthday.day' .>=. value (fromGregorian 1989 1 8)
orderByName :: Monad m
=> Projection c Person
-> Orderings c m ()
orderByName p = orderBy (p ! Person.name') Asc
| null | https://raw.githubusercontent.com/khibino/haskell-relational-record/759b3d7cea207e64d2bd1cf195125182f73d2a52/doc/slide/PostgreSQL-Unconference-201512/join.hs | haskell | Join product accumulated
Maybe not match
Specify ordering key
Build record phantom type
Specify grouping key
Aggregated results
Only aggregated value is allowed to pass | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE TemplateHaskell #
import Data.Int
import Data.Time
import Database.Relational.Query
import Database.Relational.Query.TH
import Database.Relational.Query.Monad.Trans.Ordering (Orderings)
import Person (Person (Person), person)
import Birthday (Birthday, birthday)
import qualified Person
import qualified Birthday
personAndBirthday :: Relation () (Person, Birthday)
personAndBirthday = relation $ do
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
return $ p >< b
personAndBirthdayL :: Relation () (Person, Maybe Birthday)
personAndBirthdayL = relation $ do
p <- query person
on $ just (p ! Person.name') .=. b ?! Birthday.name'
return $ p >< b
sameBirthdayHeisei' :: Relation () (Day, Int64)
sameBirthdayHeisei' = aggregateRelation $ do
p <- query person
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
wheres $ b ! Birthday.day' .>=. value (fromGregorian 1989 1 8)
gbd <- groupBy $ b ! Birthday.day'
having $ count (p ! Person.name') .>. value (1 :: Int64)
return $ gbd >< count (p ! Person.name')
sameBirthdayHeisei :: Relation () (Day, Int64)
sameBirthdayHeisei = aggregateRelation $ do
p <- query person
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
let birthDay = b ! Birthday.day'
wheres $ birthDay .>=. value (fromGregorian 1989 1 8)
gbd <- groupBy birthDay
let personCount = count $ p ! Person.name'
having $ personCount .>. value 1
return $ gbd >< personCount
birthdayHeiseiDesc :: Relation () (Day, Int64)
birthdayHeiseiDesc = aggregateRelation $ do
p <- query person
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
let birthDay = b ! Birthday.day'
wheres $ birthDay .>=. value (fromGregorian 1989 1 8)
gbd <- groupBy birthDay
let personCount = count $ p ! Person.name'
orderBy personCount Desc
return $ gbd >< personCount
personAndBirthdayO :: Relation () (Person, Birthday)
personAndBirthdayO = relation $ do
p <- query person
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
orderBy (p ! Person.name') Asc
return $ p >< b
specifyPerson :: Relation String (Person, Birthday)
specifyPerson = relation' $ do
pb <- query personAndBirthday
(ph, ()) <- placeholder (\ph' -> wheres $ pb ! fst' ! Person.name' .=. ph')
return (ph, pb)
data PersonAndBirthday =
PersonAndBirthday
{ pbPerson :: Person
, pbBirthday :: Birthday
}
$(makeRelationalRecordDefault ''PersonAndBirthday)
personAndBirthdayT :: Relation () PersonAndBirthday
personAndBirthdayT = relation $ do
p <- query person
b <- query birthday
on $ p ! Person.name' .=. b ! Birthday.name'
Birthday.day ' : : Pi Birthday Day
uncurryPB :: Pi (Person, Birthday) PersonAndBirthday
uncurryPB = PersonAndBirthday |$| fst' |*| snd'
personAndBirthdayP :: Relation Person PersonAndBirthday
personAndBirthdayP = relation' $ do
p <- query person
b <- query birthday
(ph, ()) <- placeholder (\ph' -> on $ p .=. ph')
return $ (ph, PersonAndBirthday |$| p |*| b)
personAndBirthdayP2 :: Relation Person PersonAndBirthday
personAndBirthdayP2 = relation' $ do
p <- query person
b <- query birthday
(ph0, ()) <- placeholder (\ph0' -> on $ p ! Person.name' .=. ph0')
(ph1, ()) <- placeholder (\ph1' -> on $ p ! Person.age' .=. ph1')
(ph2, ()) <- placeholder (\ph2' -> on $ p ! Person.family' .=. ph2')
return (Person |$| ph0 |*| ph1 |*| ph2, PersonAndBirthday |$| p |*| b)
agesOfFamilies :: Relation () (String, Maybe Int32)
agesOfFamilies = aggregateRelation $ do
p <- query person
agesOfFamiliesO :: Relation () (String, Maybe Int32)
agesOfFamiliesO = aggregateRelation $ do
p <- query person
gFam <- groupBy $ p ! Person.family'
let s = sum' (p ! Person.age')
orderBy gFam Asc
return $ gFam >< s
ageRankOfFamilies :: Relation () ((Int64, String), Int32)
ageRankOfFamilies = relation $ do
p <- query person
return $
rank `over` do
Monad to build window
orderBy (p ! Person.age') Desc
><
p ! Person.family'
><
p ! Person.age'
nonsense :: Relation () (Person, Birthday)
nonsense = personAndBirthday `union` personAndBirthdayO
heiseiBirthday :: MonadRestrict Flat m
=> Projection Flat Birthday -> m ()
heiseiBirthday b = wheres $ b ! Birthday.day' .>=. value (fromGregorian 1989 1 8)
orderByName :: Monad m
=> Projection c Person
-> Orderings c m ()
orderByName p = orderBy (p ! Person.name') Asc
|
97824fd62cb48d5622f9f7d8779496a51b232d53878e333fb034ef1caac61e6e | 0install/0install | command.mli | Copyright ( C ) 2018 , the README file for details , or visit .
* See the README file for details, or visit .
*)
(** <command> elements *)
open Support
open Support.Common
* [ build_command sels reqs env ] is the argv for the command to run to execute [ sels ] as requested by [ ] .
In --dry - run mode , do n't complain if the target does n't exist .
In --dry-run mode, don't complain if the target doesn't exist. *)
val build_command :
?main:string ->
?dry_run:bool ->
(Selections.impl * filepath option) Selections.RoleMap.t ->
Selections.requirements ->
Env.t ->
string list
| null | https://raw.githubusercontent.com/0install/0install/22eebdbe51a9f46cda29eed3e9e02e37e36b2d18/src/zeroinstall/command.mli | ocaml | * <command> elements | Copyright ( C ) 2018 , the README file for details , or visit .
* See the README file for details, or visit .
*)
open Support
open Support.Common
* [ build_command sels reqs env ] is the argv for the command to run to execute [ sels ] as requested by [ ] .
In --dry - run mode , do n't complain if the target does n't exist .
In --dry-run mode, don't complain if the target doesn't exist. *)
val build_command :
?main:string ->
?dry_run:bool ->
(Selections.impl * filepath option) Selections.RoleMap.t ->
Selections.requirements ->
Env.t ->
string list
|
e1da5bfa5ecc6ad5c3e0567f708a351672ee1f1fc3fe0f0b2095c063ce79b9ad | donaldsonjw/bigloo | idea.scm | (module __crypto-idea
(import __crypto-block-ciphers
__crypto-util))
(define *block-size* 8)
(define (key->decrypt-param key)
(inverse-keys (key-expansion key)))
(register-cipher! 'idea
(instantiate::Block-Cipher
(name "IDEA (International Data Encryption Algorithm)")
(block-size *block-size*)
(preferred-key-length 16)
(encrypt! encrypt-block)
(decrypt! encrypt-block)
(key->encrypt-param key-expansion)
(key->decrypt-param key->decrypt-param)))
(define *rounds* 8)
;; note: the keys for each round start at "1" (to be consistent with the
;; description of Wikipedia (as of 22/07/2009)
(define (key keys round i)
(vector-ref keys (+fx (*fx 6 round) (-fx i 1))))
(define (key-expansion::vector key::bstring)
(define (extract-subkey-from-key str at)
(let* ((i (*fx 2 at))
(c1 (string-ref str i))
(c2 (string-ref str (+fx i 1)))
(i1 (char->integer c1))
(i2 (char->integer c2)))
(+fx (*fx i1 256) i2)))
(define (extract-subkey subkeys i)
get the subkeys from the previous row , where rows are 16byte blocks .
Each row is shifted by 25bits .
(let* ((previous-row (*fx 8 (-fx (/fx i 8) 1)))
(off1 (modulofx (+fx i 1) 8))
(off2 (modulofx (+fx i 2) 8))
(subk1 (vector-ref subkeys (+fx previous-row off1)))
(subk2 (vector-ref subkeys (+fx previous-row off2))))
(bit-and #xFFFF
(+fx (bit-lsh subk1 9)
(bit-rsh subk2 7)))))
(when (not (=fx (string-length key) 16))
(error 'idea
"Key must be 128 bits long"
(*fx 8 (string-length key))))
each round uses 6 subkeys .
last half - round uses 4 subkeys .
(let* ((nb-sub-keys (+fx (*fx 6 *rounds*) 4))
(res (make-vector nb-sub-keys)))
first 8 keys are verbatim copies .
(let loop ((i 0))
(when (<fx i 8)
(vector-set! res i (extract-subkey-from-key key i))
(loop (+fx i 1))))
(let loop ((j 8))
(when (<fx j nb-sub-keys)
(vector-set! res j (extract-subkey res j))
(loop (+fx j 1))))
res))
(define (inverse-keys::vector keys::vector)
(define (add-inv k)
(bit-and #xFFFF (-fx #x10000 k)))
(define (mult-inv k)
(cond
((<fx k 2)
k)
(else
(let loop ((x k) (y #x10001)
(u1 1) (u2 0)
(v1 0) (v2 1))
(cond
((and (zerofx? y) (>=fx u1 0))
u1)
((zerofx? y)
(bit-and #xFFFF (+fx #x10001 u1)))
(else
(let ((q (quotientfx x y))
(r (remainderfx x y)))
(loop y r u2 (-fx u1 (*fx q u2)) v2 (-fx v1 (*fx q v2))))))))))
round 0 starts at 4 for setting .
(define (inv-key-set! i offset keys k)
(vector-set! keys (+fx (*fx 6 i) (+fx offset 3)) k))
(let ((res (make-vector (vector-length keys))))
(vector-set! res 0 (mult-inv (key keys 8 1)))
(vector-set! res 1 (add-inv (key keys 8 2)))
(vector-set! res 2 (add-inv (key keys 8 3)))
(vector-set! res 3 (mult-inv (key keys 8 4)))
(let loop ((i 0)
(j 7))
(when (<fx i (-fx *rounds* 1)) ;; last 6 subkeys are different.
(inv-key-set! i 1 res (key keys j 5))
(inv-key-set! i 2 res (key keys j 6))
(inv-key-set! i 3 res (mult-inv (key keys j 1)))
(inv-key-set! i 4 res (add-inv (key keys j 3)))
(inv-key-set! i 5 res (add-inv (key keys j 2)))
(inv-key-set! i 6 res (mult-inv (key keys j 4)))
(loop (+fx i 1) (-fx j 1))))
(inv-key-set! 7 1 res (key keys 0 5))
(inv-key-set! 7 2 res (key keys 0 6))
(inv-key-set! 7 3 res (mult-inv (key keys 0 1)))
(inv-key-set! 7 4 res (add-inv (key keys 0 2)))
(inv-key-set! 7 5 res (add-inv (key keys 0 3)))
(inv-key-set! 7 6 res (mult-inv (key keys 0 4)))
res))
;; there are definitely better faster ways to do this, but this is the easiest
;; way (if we want to stay portable).
this way we only require longs to have at least 32 bits .
(define (idea* a b)
(cond
((=fx a 0) (bit-and #xFFFF (-fx #x10001 b)))
((=fx b 0) (bit-and #xFFFF (-fx #x10001 a)))
(else
(let* ((ae (fixnum->elong a))
(be (fixnum->elong b))
(ae*be (*elong ae be))
(rl (elong->fixnum (bit-andelong ae*be #exFFFF)))
(rh (elong->fixnum (bit-andelong #exFFFF
(bit-rshelong ae*be 16))))
(diff (-fx rl rh)))
(if (<fx diff 0)
(bit-and #xFFFF (+fx #x10001 diff))
(bit-and #xFFFF diff))))))
in theory ' idea * ' is equivalent to ' * ' .
(define (idea2* a b)
(cond
((=fx a 0) (idea2* #x10000 b))
((=fx b 0) (idea2* a #x10000))
(else
(let* ((aa (fixnum->llong a))
(bb (fixnum->llong b))
(t (remainderllong (* aa bb)
#lx10001))
(f (llong->fixnum t)))
(bit-and #xFFFF f)))))
(define (idea+ a b)
(bit-and #xFFFF (+fx a b)))
(define (idea^ a b)
(bit-xor a b))
(define (do-half-round a b c d round keys)
(values (idea* a (key keys round 1))
(idea+ c (key keys round 2))
(idea+ b (key keys round 3))
(idea* d (key keys round 4))))
one round .
I have copied the image from wikipedia to the source - tree ( idea-round.png ) .
;; red: idea*
;; green: idea+
blue :
(define (do-round a0 b0 c0 d0 round keys)
;; numbers represent the depth in picture.
;; the middle columns are x and y.
(let* ((a1 (idea* a0 (key keys round 1)))
(b1 (idea+ b0 (key keys round 2)))
(c1 (idea+ c0 (key keys round 3)))
(d1 (idea* d0 (key keys round 4)))
next line ( the blue cross )
(x2 (idea^ a1 c1))
;; next line
(y3 (idea^ b1 d1))
;;
(x4 (idea* x2 (key keys round 5)))
(y4 (idea+ x4 y3))
;;
(y5 (idea* y4 (key keys round 6)))
(x5 (idea+ x4 y5))
;;
(a6 (idea^ a1 y5))
(c6 (idea^ c1 y5))
;;
(b7 (idea^ b1 x5))
(d7 (idea^ d1 x5)))
(values a6 c6 b7 d7)))
(define (encrypt-block from::bstring from-pos::long to::bstring to-pos::long
keys::vector)
(define (read-uint16-at str i)
(+fx (*fx (char->integer (string-ref str i)) 256)
(char->integer (string-ref str (+fx i 1)))))
(define (write-uint16-at str i val)
(string-set! str i (integer->char-ur (/fx val 256)))
(string-set! str (+fx i 1) (integer->char-ur (bit-and #xFF val))))
(let loop ((i 0)
(a (read-uint16-at from (+fx from-pos 0)))
(b (read-uint16-at from (+fx from-pos 2)))
(c (read-uint16-at from (+fx from-pos 4)))
(d (read-uint16-at from (+fx from-pos 6))))
(cond
((=fx i *rounds*)
(receive (a b c d)
(do-half-round a b c d i keys)
(write-uint16-at to (+fx to-pos 0) a)
(write-uint16-at to (+fx to-pos 2) b)
(write-uint16-at to (+fx to-pos 4) c)
(write-uint16-at to (+fx to-pos 6) d)))
(else
(receive (a_ b_ c_ d_)
(do-round a b c d i keys)
(loop (+fx i 1)
a_ b_ c_ d_))))))
| null | https://raw.githubusercontent.com/donaldsonjw/bigloo/a4d06e409d0004e159ce92b9908719510a18aed5/api/crypto/src/Llib/idea.scm | scheme | note: the keys for each round start at "1" (to be consistent with the
description of Wikipedia (as of 22/07/2009)
last 6 subkeys are different.
there are definitely better faster ways to do this, but this is the easiest
way (if we want to stay portable).
red: idea*
green: idea+
numbers represent the depth in picture.
the middle columns are x and y.
next line
| (module __crypto-idea
(import __crypto-block-ciphers
__crypto-util))
(define *block-size* 8)
(define (key->decrypt-param key)
(inverse-keys (key-expansion key)))
(register-cipher! 'idea
(instantiate::Block-Cipher
(name "IDEA (International Data Encryption Algorithm)")
(block-size *block-size*)
(preferred-key-length 16)
(encrypt! encrypt-block)
(decrypt! encrypt-block)
(key->encrypt-param key-expansion)
(key->decrypt-param key->decrypt-param)))
(define *rounds* 8)
(define (key keys round i)
(vector-ref keys (+fx (*fx 6 round) (-fx i 1))))
(define (key-expansion::vector key::bstring)
(define (extract-subkey-from-key str at)
(let* ((i (*fx 2 at))
(c1 (string-ref str i))
(c2 (string-ref str (+fx i 1)))
(i1 (char->integer c1))
(i2 (char->integer c2)))
(+fx (*fx i1 256) i2)))
(define (extract-subkey subkeys i)
get the subkeys from the previous row , where rows are 16byte blocks .
Each row is shifted by 25bits .
(let* ((previous-row (*fx 8 (-fx (/fx i 8) 1)))
(off1 (modulofx (+fx i 1) 8))
(off2 (modulofx (+fx i 2) 8))
(subk1 (vector-ref subkeys (+fx previous-row off1)))
(subk2 (vector-ref subkeys (+fx previous-row off2))))
(bit-and #xFFFF
(+fx (bit-lsh subk1 9)
(bit-rsh subk2 7)))))
(when (not (=fx (string-length key) 16))
(error 'idea
"Key must be 128 bits long"
(*fx 8 (string-length key))))
each round uses 6 subkeys .
last half - round uses 4 subkeys .
(let* ((nb-sub-keys (+fx (*fx 6 *rounds*) 4))
(res (make-vector nb-sub-keys)))
first 8 keys are verbatim copies .
(let loop ((i 0))
(when (<fx i 8)
(vector-set! res i (extract-subkey-from-key key i))
(loop (+fx i 1))))
(let loop ((j 8))
(when (<fx j nb-sub-keys)
(vector-set! res j (extract-subkey res j))
(loop (+fx j 1))))
res))
(define (inverse-keys::vector keys::vector)
(define (add-inv k)
(bit-and #xFFFF (-fx #x10000 k)))
(define (mult-inv k)
(cond
((<fx k 2)
k)
(else
(let loop ((x k) (y #x10001)
(u1 1) (u2 0)
(v1 0) (v2 1))
(cond
((and (zerofx? y) (>=fx u1 0))
u1)
((zerofx? y)
(bit-and #xFFFF (+fx #x10001 u1)))
(else
(let ((q (quotientfx x y))
(r (remainderfx x y)))
(loop y r u2 (-fx u1 (*fx q u2)) v2 (-fx v1 (*fx q v2))))))))))
round 0 starts at 4 for setting .
(define (inv-key-set! i offset keys k)
(vector-set! keys (+fx (*fx 6 i) (+fx offset 3)) k))
(let ((res (make-vector (vector-length keys))))
(vector-set! res 0 (mult-inv (key keys 8 1)))
(vector-set! res 1 (add-inv (key keys 8 2)))
(vector-set! res 2 (add-inv (key keys 8 3)))
(vector-set! res 3 (mult-inv (key keys 8 4)))
(let loop ((i 0)
(j 7))
(inv-key-set! i 1 res (key keys j 5))
(inv-key-set! i 2 res (key keys j 6))
(inv-key-set! i 3 res (mult-inv (key keys j 1)))
(inv-key-set! i 4 res (add-inv (key keys j 3)))
(inv-key-set! i 5 res (add-inv (key keys j 2)))
(inv-key-set! i 6 res (mult-inv (key keys j 4)))
(loop (+fx i 1) (-fx j 1))))
(inv-key-set! 7 1 res (key keys 0 5))
(inv-key-set! 7 2 res (key keys 0 6))
(inv-key-set! 7 3 res (mult-inv (key keys 0 1)))
(inv-key-set! 7 4 res (add-inv (key keys 0 2)))
(inv-key-set! 7 5 res (add-inv (key keys 0 3)))
(inv-key-set! 7 6 res (mult-inv (key keys 0 4)))
res))
this way we only require longs to have at least 32 bits .
(define (idea* a b)
(cond
((=fx a 0) (bit-and #xFFFF (-fx #x10001 b)))
((=fx b 0) (bit-and #xFFFF (-fx #x10001 a)))
(else
(let* ((ae (fixnum->elong a))
(be (fixnum->elong b))
(ae*be (*elong ae be))
(rl (elong->fixnum (bit-andelong ae*be #exFFFF)))
(rh (elong->fixnum (bit-andelong #exFFFF
(bit-rshelong ae*be 16))))
(diff (-fx rl rh)))
(if (<fx diff 0)
(bit-and #xFFFF (+fx #x10001 diff))
(bit-and #xFFFF diff))))))
in theory ' idea * ' is equivalent to ' * ' .
(define (idea2* a b)
(cond
((=fx a 0) (idea2* #x10000 b))
((=fx b 0) (idea2* a #x10000))
(else
(let* ((aa (fixnum->llong a))
(bb (fixnum->llong b))
(t (remainderllong (* aa bb)
#lx10001))
(f (llong->fixnum t)))
(bit-and #xFFFF f)))))
(define (idea+ a b)
(bit-and #xFFFF (+fx a b)))
(define (idea^ a b)
(bit-xor a b))
(define (do-half-round a b c d round keys)
(values (idea* a (key keys round 1))
(idea+ c (key keys round 2))
(idea+ b (key keys round 3))
(idea* d (key keys round 4))))
one round .
I have copied the image from wikipedia to the source - tree ( idea-round.png ) .
blue :
(define (do-round a0 b0 c0 d0 round keys)
(let* ((a1 (idea* a0 (key keys round 1)))
(b1 (idea+ b0 (key keys round 2)))
(c1 (idea+ c0 (key keys round 3)))
(d1 (idea* d0 (key keys round 4)))
next line ( the blue cross )
(x2 (idea^ a1 c1))
(y3 (idea^ b1 d1))
(x4 (idea* x2 (key keys round 5)))
(y4 (idea+ x4 y3))
(y5 (idea* y4 (key keys round 6)))
(x5 (idea+ x4 y5))
(a6 (idea^ a1 y5))
(c6 (idea^ c1 y5))
(b7 (idea^ b1 x5))
(d7 (idea^ d1 x5)))
(values a6 c6 b7 d7)))
(define (encrypt-block from::bstring from-pos::long to::bstring to-pos::long
keys::vector)
(define (read-uint16-at str i)
(+fx (*fx (char->integer (string-ref str i)) 256)
(char->integer (string-ref str (+fx i 1)))))
(define (write-uint16-at str i val)
(string-set! str i (integer->char-ur (/fx val 256)))
(string-set! str (+fx i 1) (integer->char-ur (bit-and #xFF val))))
(let loop ((i 0)
(a (read-uint16-at from (+fx from-pos 0)))
(b (read-uint16-at from (+fx from-pos 2)))
(c (read-uint16-at from (+fx from-pos 4)))
(d (read-uint16-at from (+fx from-pos 6))))
(cond
((=fx i *rounds*)
(receive (a b c d)
(do-half-round a b c d i keys)
(write-uint16-at to (+fx to-pos 0) a)
(write-uint16-at to (+fx to-pos 2) b)
(write-uint16-at to (+fx to-pos 4) c)
(write-uint16-at to (+fx to-pos 6) d)))
(else
(receive (a_ b_ c_ d_)
(do-round a b c d i keys)
(loop (+fx i 1)
a_ b_ c_ d_))))))
|
5fa7604de14efd9769e3a16aa5c61d9289da280106349b6b534b8db2a76b44ee | kit-ty-kate/opam-grep | grep.mli | SPDX - License - Identifier : MIT
exception OpamGrepError of string
val search :
repos:string option ->
depends_on:string option ->
regexp:string ->
unit
| null | https://raw.githubusercontent.com/kit-ty-kate/opam-grep/5fb5fa565c7ed209b9136cac6bb2fbcf7104203b/src/grep.mli | ocaml | SPDX - License - Identifier : MIT
exception OpamGrepError of string
val search :
repos:string option ->
depends_on:string option ->
regexp:string ->
unit
| |
5b6ccb905f8372f311e4188c480ef847409cca57929ed87ac440c8c2dd44284f | GlideAngle/flare-timing | Error.hs | module Serve.Error
( errTaskPoints
, errAltPoints
, errTaskBounds
, errTaskStep
, errPilotTrackNotFound
, errPilotNotFound
, errTaskLengths
) where
import Text.Printf (printf)
import qualified Data.ByteString.Lazy.Char8 as LBS (pack)
import Servant (ServantErr, errBody, err400)
import Flight.Comp (PilotId(..), IxTask(..))
-- | 400 response for a pilot that has no tracklog recorded in a task.
errPilotTrackNotFound :: IxTask -> PilotId -> ServantErr
errPilotTrackNotFound (IxTask ix) (PilotId p) = err400 {errBody = msg}
    where
        msg =
            LBS.pack
            $ printf "For task %d, the tracklog for pilot %s was not found" ix p
-- | 400 response for a pilot id not present in the comp.
errPilotNotFound :: PilotId -> ServantErr
errPilotNotFound (PilotId p) = err400 {errBody = msg}
    where
        msg = LBS.pack (printf "Pilot %s not found" p)
-- | 400 response for a task index outside the tasks of the comp.
errTaskBounds :: Int -> ServantErr
errTaskBounds ii = err400 {errBody = msg}
    where
        msg = LBS.pack (printf "Out of bounds task %d" ii)
-- | 400 response when task length data is unavailable.
errTaskLengths :: ServantErr
errTaskLengths = err400 {errBody = msg}
    where
        msg = LBS.pack "I need the lengths of each task"
-- | 400 response when task points data is unavailable.
errTaskPoints :: ServantErr
errTaskPoints = err400 {errBody = msg}
    where
        msg = LBS.pack "I need the points of each task"
-- | 400 response when the expected (alternative) points are unavailable.
errAltPoints :: ServantErr
errAltPoints = err400 {errBody = msg}
    where
        msg = LBS.pack "I need the expected points of each task"
-- | 400 response when data from a processing step is unavailable for the
-- given task.
--
-- Uses the same printf style as the other error builders in this module
-- (previously built by manual '++' concatenation); the rendered message is
-- byte-for-byte identical.
errTaskStep :: String -> Int -> ServantErr
errTaskStep step ii =
    err400
        { errBody = LBS.pack
        $ printf "I need to have access to data from %s for task: #%d" step ii
        }
| null | https://raw.githubusercontent.com/GlideAngle/flare-timing/27bd34c1943496987382091441a1c2516c169263/lang-haskell/app-serve/src/Serve/Error.hs | haskell | module Serve.Error
( errTaskPoints
, errAltPoints
, errTaskBounds
, errTaskStep
, errPilotTrackNotFound
, errPilotNotFound
, errTaskLengths
) where
import Text.Printf (printf)
import qualified Data.ByteString.Lazy.Char8 as LBS (pack)
import Servant (ServantErr, errBody, err400)
import Flight.Comp (PilotId(..), IxTask(..))
errPilotTrackNotFound :: IxTask -> PilotId -> ServantErr
errPilotTrackNotFound (IxTask ix) (PilotId p) =
err400
{ errBody = LBS.pack
$ printf "For task %d, the tracklog for pilot %s was not found" ix p
}
errPilotNotFound :: PilotId -> ServantErr
errPilotNotFound (PilotId p) =
err400 {errBody = LBS.pack $ printf "Pilot %s not found" p}
errTaskBounds :: Int -> ServantErr
errTaskBounds ii =
err400 {errBody = LBS.pack $ printf "Out of bounds task %d" ii}
errTaskLengths :: ServantErr
errTaskLengths =
err400 {errBody = LBS.pack "I need the lengths of each task" }
errTaskPoints :: ServantErr
errTaskPoints =
err400 {errBody = LBS.pack "I need the points of each task" }
errAltPoints :: ServantErr
errAltPoints =
err400 {errBody = LBS.pack "I need the expected points of each task" }
errTaskStep :: String -> Int -> ServantErr
errTaskStep step ii =
err400
{ errBody = LBS.pack
$ "I need to have access to data from "
++ step
++ " for task: #"
++ show ii
}
| |
229657e2292ef01b03e43a0c84bf23658e64d242e82a6050cef10cd3265344d0 | ocaml/dune | sub_dirs.mli | open Import
(** Classification of a source directory. NOTE(review): constructor names
    suggest these mirror dune's [dirs]/[data_only_dirs]/[vendored_dirs]
    stanzas -- confirm against the implementation. *)
module Status : sig
  type t =
    | Data_only
    | Normal
    | Vendored

  (** Debug representation. *)
  val to_dyn : t -> Dyn.t

  (** A status, or the marker that the directory is ignored entirely. *)
  module Or_ignored : sig
    type nonrec t =
      | Ignored
      | Status of t
  end

  (** A value of type ['a] attached to each of the three statuses. *)
  module Map : sig
    type status := t

    type 'a t =
      { data_only : 'a
      ; vendored : 'a
      ; normal : 'a
      }

    (** Pointwise combination of two maps with [f]. *)
    val merge : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t

    (** Project out the field corresponding to the given status. *)
    val find : 'a t -> status -> 'a

    val to_dyn : ('a -> Dyn.t) -> 'a t -> Dyn.t
  end

  (** A set of statuses, represented as a boolean per status. *)
  module Set : sig
    type t = bool Map.t

    val all : t

    val normal_only : t
  end
end
(** Raw, possibly-absent per-status directory predicates as written in a
    dune file. *)
type subdir_stanzas

(** Fill in {!default} for whatever the stanzas leave unspecified --
    presumably; confirm against the implementation. *)
val or_default : subdir_stanzas -> Predicate_lang.Glob.t Status.Map.t

(** The predicates used when no stanza is present. *)
val default : Predicate_lang.Glob.t Status.Map.t

(** The result of evaluating the predicates against a concrete listing. *)
type status_map

(** Evaluate the per-status glob predicates over the given directory
    names. *)
val eval : Predicate_lang.Glob.t Status.Map.t -> dirs:string list -> status_map

(** Look up the status assigned to [dir] by a previous {!eval}. *)
val status : status_map -> dir:string -> Status.Or_ignored.t
(** A trie of per-directory dune-file contents, indexed by path
    components. *)
module Dir_map : sig
  type t

  (** What is recorded for a single directory: the remaining s-expressions
      of its dune file plus its sub-directory status stanzas. *)
  type per_dir =
    { sexps : Dune_lang.Ast.t list
    ; subdir_status : subdir_stanzas
    }

  val dyn_of_per_dir : per_dir -> Dyn.t

  val to_dyn : t -> Dyn.t

  val empty : t

  (** Step into the named child directory, if present. *)
  val descend : t -> string -> t option

  (** Names of the immediate child directories. *)
  val sub_dirs : t -> string list

  val merge : t -> t -> t

  (** The entry for the map's own (root) directory. *)
  val root : t -> per_dir
end
(** A polymorphic decoding hook: runs a decoder over a list of ASTs.
    Wrapped in a record so the [forall 'a] can be passed as a value. *)
type decoder =
  { decode : 'a. Dune_lang.Ast.t list -> 'a Dune_lang.Decoder.t -> 'a }

(** Build the {!Dir_map.t} for the dune file at [file] from its parsed
    ASTs, using [decoder] to interpret stanzas. Runs in [Memo] for
    incremental recomputation. *)
val decode :
  file:Path.Source.t -> decoder -> Dune_lang.Ast.t list -> Dir_map.t Memo.t
| null | https://raw.githubusercontent.com/ocaml/dune/a790dd36ae51ab13676e58cfff60b08a15508d93/src/dune_engine/sub_dirs.mli | ocaml | open Import
module Status : sig
type t =
| Data_only
| Normal
| Vendored
val to_dyn : t -> Dyn.t
module Or_ignored : sig
type nonrec t =
| Ignored
| Status of t
end
module Map : sig
type status := t
type 'a t =
{ data_only : 'a
; vendored : 'a
; normal : 'a
}
val merge : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t
val find : 'a t -> status -> 'a
val to_dyn : ('a -> Dyn.t) -> 'a t -> Dyn.t
end
module Set : sig
type t = bool Map.t
val all : t
val normal_only : t
end
end
type subdir_stanzas
val or_default : subdir_stanzas -> Predicate_lang.Glob.t Status.Map.t
val default : Predicate_lang.Glob.t Status.Map.t
type status_map
val eval : Predicate_lang.Glob.t Status.Map.t -> dirs:string list -> status_map
val status : status_map -> dir:string -> Status.Or_ignored.t
module Dir_map : sig
type t
type per_dir =
{ sexps : Dune_lang.Ast.t list
; subdir_status : subdir_stanzas
}
val dyn_of_per_dir : per_dir -> Dyn.t
val to_dyn : t -> Dyn.t
val empty : t
val descend : t -> string -> t option
val sub_dirs : t -> string list
val merge : t -> t -> t
val root : t -> per_dir
end
type decoder =
{ decode : 'a. Dune_lang.Ast.t list -> 'a Dune_lang.Decoder.t -> 'a }
val decode :
file:Path.Source.t -> decoder -> Dune_lang.Ast.t list -> Dir_map.t Memo.t
| |
7563b803cff070626b96cf8efe2d61ba56b661471070b37f686f4be946de4e89 | aws-beam/aws-erlang | aws_redshift.erl | %% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
See -beam/aws-codegen for more details .
@doc Amazon Redshift
%%
%% Overview
%%
This is an interface reference for Amazon Redshift .
%%
It contains documentation for one of the programming or command line
interfaces you can use to manage Amazon Redshift clusters . Note that
Amazon Redshift is asynchronous , which means that some interfaces may
%% require techniques, such as polling or asynchronous callback handlers, to
%% determine when a command has been applied. In this reference, the
%% parameter descriptions indicate whether a change is applied immediately,
%% on the next instance reboot, or during the next maintenance window. For a
summary of the Amazon Redshift cluster management interfaces , go to Using
the Amazon Redshift Management Interfaces .
%%
Amazon Redshift manages all the work of setting up , operating , and scaling
%% a data warehouse: provisioning capacity, monitoring and backing up the
cluster , and applying patches and upgrades to the Amazon Redshift engine .
%% You can focus on using your data to acquire new insights for your business
%% and customers.
%%
If you are a first - time user of Amazon Redshift , we recommend that you
begin by reading the Amazon Redshift Getting Started Guide .
%%
If you are a database developer , the Amazon Redshift Database Developer
%% Guide explains how to design, build, query, and maintain the databases
%% that make up your data warehouse.
-module(aws_redshift).
-export([accept_reserved_node_exchange/2,
accept_reserved_node_exchange/3,
add_partner/2,
add_partner/3,
associate_data_share_consumer/2,
associate_data_share_consumer/3,
authorize_cluster_security_group_ingress/2,
authorize_cluster_security_group_ingress/3,
authorize_data_share/2,
authorize_data_share/3,
authorize_endpoint_access/2,
authorize_endpoint_access/3,
authorize_snapshot_access/2,
authorize_snapshot_access/3,
batch_delete_cluster_snapshots/2,
batch_delete_cluster_snapshots/3,
batch_modify_cluster_snapshots/2,
batch_modify_cluster_snapshots/3,
cancel_resize/2,
cancel_resize/3,
copy_cluster_snapshot/2,
copy_cluster_snapshot/3,
create_authentication_profile/2,
create_authentication_profile/3,
create_cluster/2,
create_cluster/3,
create_cluster_parameter_group/2,
create_cluster_parameter_group/3,
create_cluster_security_group/2,
create_cluster_security_group/3,
create_cluster_snapshot/2,
create_cluster_snapshot/3,
create_cluster_subnet_group/2,
create_cluster_subnet_group/3,
create_endpoint_access/2,
create_endpoint_access/3,
create_event_subscription/2,
create_event_subscription/3,
create_hsm_client_certificate/2,
create_hsm_client_certificate/3,
create_hsm_configuration/2,
create_hsm_configuration/3,
create_scheduled_action/2,
create_scheduled_action/3,
create_snapshot_copy_grant/2,
create_snapshot_copy_grant/3,
create_snapshot_schedule/2,
create_snapshot_schedule/3,
create_tags/2,
create_tags/3,
create_usage_limit/2,
create_usage_limit/3,
deauthorize_data_share/2,
deauthorize_data_share/3,
delete_authentication_profile/2,
delete_authentication_profile/3,
delete_cluster/2,
delete_cluster/3,
delete_cluster_parameter_group/2,
delete_cluster_parameter_group/3,
delete_cluster_security_group/2,
delete_cluster_security_group/3,
delete_cluster_snapshot/2,
delete_cluster_snapshot/3,
delete_cluster_subnet_group/2,
delete_cluster_subnet_group/3,
delete_endpoint_access/2,
delete_endpoint_access/3,
delete_event_subscription/2,
delete_event_subscription/3,
delete_hsm_client_certificate/2,
delete_hsm_client_certificate/3,
delete_hsm_configuration/2,
delete_hsm_configuration/3,
delete_partner/2,
delete_partner/3,
delete_scheduled_action/2,
delete_scheduled_action/3,
delete_snapshot_copy_grant/2,
delete_snapshot_copy_grant/3,
delete_snapshot_schedule/2,
delete_snapshot_schedule/3,
delete_tags/2,
delete_tags/3,
delete_usage_limit/2,
delete_usage_limit/3,
describe_account_attributes/2,
describe_account_attributes/3,
describe_authentication_profiles/2,
describe_authentication_profiles/3,
describe_cluster_db_revisions/2,
describe_cluster_db_revisions/3,
describe_cluster_parameter_groups/2,
describe_cluster_parameter_groups/3,
describe_cluster_parameters/2,
describe_cluster_parameters/3,
describe_cluster_security_groups/2,
describe_cluster_security_groups/3,
describe_cluster_snapshots/2,
describe_cluster_snapshots/3,
describe_cluster_subnet_groups/2,
describe_cluster_subnet_groups/3,
describe_cluster_tracks/2,
describe_cluster_tracks/3,
describe_cluster_versions/2,
describe_cluster_versions/3,
describe_clusters/2,
describe_clusters/3,
describe_data_shares/2,
describe_data_shares/3,
describe_data_shares_for_consumer/2,
describe_data_shares_for_consumer/3,
describe_data_shares_for_producer/2,
describe_data_shares_for_producer/3,
describe_default_cluster_parameters/2,
describe_default_cluster_parameters/3,
describe_endpoint_access/2,
describe_endpoint_access/3,
describe_endpoint_authorization/2,
describe_endpoint_authorization/3,
describe_event_categories/2,
describe_event_categories/3,
describe_event_subscriptions/2,
describe_event_subscriptions/3,
describe_events/2,
describe_events/3,
describe_hsm_client_certificates/2,
describe_hsm_client_certificates/3,
describe_hsm_configurations/2,
describe_hsm_configurations/3,
describe_logging_status/2,
describe_logging_status/3,
describe_node_configuration_options/2,
describe_node_configuration_options/3,
describe_orderable_cluster_options/2,
describe_orderable_cluster_options/3,
describe_partners/2,
describe_partners/3,
describe_reserved_node_exchange_status/2,
describe_reserved_node_exchange_status/3,
describe_reserved_node_offerings/2,
describe_reserved_node_offerings/3,
describe_reserved_nodes/2,
describe_reserved_nodes/3,
describe_resize/2,
describe_resize/3,
describe_scheduled_actions/2,
describe_scheduled_actions/3,
describe_snapshot_copy_grants/2,
describe_snapshot_copy_grants/3,
describe_snapshot_schedules/2,
describe_snapshot_schedules/3,
describe_storage/2,
describe_storage/3,
describe_table_restore_status/2,
describe_table_restore_status/3,
describe_tags/2,
describe_tags/3,
describe_usage_limits/2,
describe_usage_limits/3,
disable_logging/2,
disable_logging/3,
disable_snapshot_copy/2,
disable_snapshot_copy/3,
disassociate_data_share_consumer/2,
disassociate_data_share_consumer/3,
enable_logging/2,
enable_logging/3,
enable_snapshot_copy/2,
enable_snapshot_copy/3,
get_cluster_credentials/2,
get_cluster_credentials/3,
get_cluster_credentials_with_iam/2,
get_cluster_credentials_with_iam/3,
get_reserved_node_exchange_configuration_options/2,
get_reserved_node_exchange_configuration_options/3,
get_reserved_node_exchange_offerings/2,
get_reserved_node_exchange_offerings/3,
modify_aqua_configuration/2,
modify_aqua_configuration/3,
modify_authentication_profile/2,
modify_authentication_profile/3,
modify_cluster/2,
modify_cluster/3,
modify_cluster_db_revision/2,
modify_cluster_db_revision/3,
modify_cluster_iam_roles/2,
modify_cluster_iam_roles/3,
modify_cluster_maintenance/2,
modify_cluster_maintenance/3,
modify_cluster_parameter_group/2,
modify_cluster_parameter_group/3,
modify_cluster_snapshot/2,
modify_cluster_snapshot/3,
modify_cluster_snapshot_schedule/2,
modify_cluster_snapshot_schedule/3,
modify_cluster_subnet_group/2,
modify_cluster_subnet_group/3,
modify_endpoint_access/2,
modify_endpoint_access/3,
modify_event_subscription/2,
modify_event_subscription/3,
modify_scheduled_action/2,
modify_scheduled_action/3,
modify_snapshot_copy_retention_period/2,
modify_snapshot_copy_retention_period/3,
modify_snapshot_schedule/2,
modify_snapshot_schedule/3,
modify_usage_limit/2,
modify_usage_limit/3,
pause_cluster/2,
pause_cluster/3,
purchase_reserved_node_offering/2,
purchase_reserved_node_offering/3,
reboot_cluster/2,
reboot_cluster/3,
reject_data_share/2,
reject_data_share/3,
reset_cluster_parameter_group/2,
reset_cluster_parameter_group/3,
resize_cluster/2,
resize_cluster/3,
restore_from_cluster_snapshot/2,
restore_from_cluster_snapshot/3,
restore_table_from_cluster_snapshot/2,
restore_table_from_cluster_snapshot/3,
resume_cluster/2,
resume_cluster/3,
revoke_cluster_security_group_ingress/2,
revoke_cluster_security_group_ingress/3,
revoke_endpoint_access/2,
revoke_endpoint_access/3,
revoke_snapshot_access/2,
revoke_snapshot_access/3,
rotate_encryption_key/2,
rotate_encryption_key/3,
update_partner_status/2,
update_partner_status/3]).
-include_lib("hackney/include/hackney_lib.hrl").
%%====================================================================
%% API
%%====================================================================
%% @doc Exchanges a DC1 Reserved Node for a DC2 Reserved Node with no changes
%% to the configuration (term, payment type, or number of nodes) and no
%% additional costs.
%%
%% `Client' and `Input' are maps; the arity-2 form defaults the extra
%% request options to `[]'.
accept_reserved_node_exchange(Client, Input)
  when is_map(Client), is_map(Input) ->
    accept_reserved_node_exchange(Client, Input, []).
%% Variant taking explicit per-request `Options', forwarded to request/4.
accept_reserved_node_exchange(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"AcceptReservedNodeExchange">>, Input, Options).
%% @doc Adds a partner integration to a cluster.
%%
%% This operation authorizes a partner to push status updates for the
%% specified database. To complete the integration, you also set up the
%% integration on the partner website.
%%
%% `Client' and `Input' are maps; the arity-2 form defaults the extra
%% request options to `[]'.
add_partner(Client, Input)
  when is_map(Client), is_map(Input) ->
    add_partner(Client, Input, []).
%% Variant taking explicit per-request `Options', forwarded to request/4.
add_partner(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"AddPartner">>, Input, Options).
%% @doc From a datashare consumer account, associates a datashare with the
account ( AssociateEntireAccount ) or the specified namespace ( ConsumerArn ) .
%%
%% If you make this association, the consumer can consume the datashare.
%%
%% `Client' and `Input' are maps; the arity-2 form defaults the extra
%% request options to `[]'.
associate_data_share_consumer(Client, Input)
  when is_map(Client), is_map(Input) ->
    associate_data_share_consumer(Client, Input, []).
%% Variant taking explicit per-request `Options', forwarded to request/4.
associate_data_share_consumer(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"AssociateDataShareConsumer">>, Input, Options).
@doc Adds an inbound ( ingress ) rule to an Amazon Redshift security group .
%%
%% Depending on whether the application accessing your cluster is running on
the Internet or an Amazon EC2 instance , you can authorize inbound access
to either a Classless Interdomain Routing ( CIDR)/Internet Protocol ( IP )
range or to an Amazon EC2 security group . You can add as many as 20
ingress rules to an Amazon Redshift security group .
%%
If you authorize access to an Amazon EC2 security group , specify
EC2SecurityGroupName and EC2SecurityGroupOwnerId . The Amazon EC2 security
group and Amazon Redshift cluster must be in the same Amazon Web Services
%% Region.
%%
If you authorize access to a CIDR / IP address range , specify CIDRIP . For an
%% overview of CIDR blocks, see the Wikipedia article on Classless
Inter - Domain Routing .
%%
%% You must also associate the security group with a cluster so that clients
%% running on these IP addresses or the EC2 instance are authorized to
%% connect to the cluster. For information about managing security groups, go
to Working with Security Groups in the Amazon Redshift Cluster Management
%% Guide.
authorize_cluster_security_group_ingress(Client, Input)
when is_map(Client), is_map(Input) ->
authorize_cluster_security_group_ingress(Client, Input, []).
authorize_cluster_security_group_ingress(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"AuthorizeClusterSecurityGroupIngress">>, Input, Options).
%% @doc From a data producer account, authorizes the sharing of a datashare
with one or more consumer accounts or managing entities .
%%
%% To authorize a datashare for a data consumer, the producer account must
%% have the correct access permissions.
authorize_data_share(Client, Input)
when is_map(Client), is_map(Input) ->
authorize_data_share(Client, Input, []).
authorize_data_share(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"AuthorizeDataShare">>, Input, Options).
%% @doc Grants access to a cluster.
authorize_endpoint_access(Client, Input)
when is_map(Client), is_map(Input) ->
authorize_endpoint_access(Client, Input, []).
authorize_endpoint_access(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"AuthorizeEndpointAccess">>, Input, Options).
@doc Authorizes the specified Amazon Web Services account to restore the
%% specified snapshot.
%%
For more information about working with snapshots , go to Amazon Redshift
Snapshots in the Amazon Redshift Cluster Management Guide .
authorize_snapshot_access(Client, Input)
when is_map(Client), is_map(Input) ->
authorize_snapshot_access(Client, Input, []).
authorize_snapshot_access(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"AuthorizeSnapshotAccess">>, Input, Options).
%% @doc Deletes a set of cluster snapshots.
batch_delete_cluster_snapshots(Client, Input)
when is_map(Client), is_map(Input) ->
batch_delete_cluster_snapshots(Client, Input, []).
batch_delete_cluster_snapshots(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"BatchDeleteClusterSnapshots">>, Input, Options).
%% @doc Modifies the settings for a set of cluster snapshots.
batch_modify_cluster_snapshots(Client, Input)
when is_map(Client), is_map(Input) ->
batch_modify_cluster_snapshots(Client, Input, []).
batch_modify_cluster_snapshots(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"BatchModifyClusterSnapshots">>, Input, Options).
%% @doc Cancels a resize operation for a cluster.
cancel_resize(Client, Input)
when is_map(Client), is_map(Input) ->
cancel_resize(Client, Input, []).
cancel_resize(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CancelResize">>, Input, Options).
%% @doc Copies the specified automated cluster snapshot to a new manual
%% cluster snapshot.
%%
%% The source must be an automated snapshot and it must be in the available
%% state.
%%
When you delete a cluster , Amazon Redshift deletes any automated snapshots
%% of the cluster. Also, when the retention period of the snapshot expires,
Amazon Redshift automatically deletes it . If you want to keep an automated
%% snapshot for a longer period, you can make a manual copy of the snapshot.
%% Manual snapshots are retained until you delete them.
%%
For more information about working with snapshots , go to Amazon Redshift
Snapshots in the Amazon Redshift Cluster Management Guide .
copy_cluster_snapshot(Client, Input)
when is_map(Client), is_map(Input) ->
copy_cluster_snapshot(Client, Input, []).
copy_cluster_snapshot(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CopyClusterSnapshot">>, Input, Options).
%% @doc Creates an authentication profile with the specified parameters.
create_authentication_profile(Client, Input)
when is_map(Client), is_map(Input) ->
create_authentication_profile(Client, Input, []).
create_authentication_profile(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateAuthenticationProfile">>, Input, Options).
%% @doc Creates a new cluster with the specified parameters.
%%
%% To create a cluster in Virtual Private Cloud (VPC), you must provide a
%% cluster subnet group name. The cluster subnet group identifies the subnets
of your VPC that Amazon Redshift uses when creating the cluster . For more
information about managing clusters , go to Amazon Redshift Clusters in the
%% Amazon Redshift Cluster Management Guide.
create_cluster(Client, Input)
when is_map(Client), is_map(Input) ->
create_cluster(Client, Input, []).
create_cluster(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateCluster">>, Input, Options).
@doc Creates an Amazon Redshift parameter group .
%%
%% Creating parameter groups is independent of creating clusters. You can
%% associate a cluster with a parameter group when you create the cluster.
%% You can also associate an existing cluster with a parameter group after
%% the cluster is created by using `ModifyCluster'.
%%
%% Parameters in the parameter group define specific behavior that applies to
%% the databases you create on the cluster. For more information about
parameters and parameter groups , go to Amazon Redshift Parameter Groups in
the Amazon Redshift Cluster Management Guide .
create_cluster_parameter_group(Client, Input)
when is_map(Client), is_map(Input) ->
create_cluster_parameter_group(Client, Input, []).
create_cluster_parameter_group(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateClusterParameterGroup">>, Input, Options).
@doc Creates a new Amazon Redshift security group .
%%
%% You use security groups to control access to non-VPC clusters.
%%
For information about managing security groups , go to Amazon Redshift
Cluster Security Groups in the Amazon Redshift Cluster Management Guide .
create_cluster_security_group(Client, Input)
when is_map(Client), is_map(Input) ->
create_cluster_security_group(Client, Input, []).
create_cluster_security_group(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateClusterSecurityGroup">>, Input, Options).
%% @doc Creates a manual snapshot of the specified cluster.
%%
%% The cluster must be in the `available' state.
%%
For more information about working with snapshots , go to Amazon Redshift
Snapshots in the Amazon Redshift Cluster Management Guide .
create_cluster_snapshot(Client, Input)
when is_map(Client), is_map(Input) ->
create_cluster_snapshot(Client, Input, []).
create_cluster_snapshot(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateClusterSnapshot">>, Input, Options).
@doc Creates a new Amazon Redshift subnet group .
%%
You must provide a list of one or more subnets in your existing Amazon
Virtual Private Cloud ( Amazon VPC ) when creating Amazon Redshift subnet
%% group.
%%
For information about subnet groups , go to Amazon Redshift Cluster Subnet
Groups in the Amazon Redshift Cluster Management Guide .
create_cluster_subnet_group(Client, Input)
when is_map(Client), is_map(Input) ->
create_cluster_subnet_group(Client, Input, []).
create_cluster_subnet_group(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateClusterSubnetGroup">>, Input, Options).
@doc Creates a Redshift - managed VPC endpoint .
create_endpoint_access(Client, Input)
when is_map(Client), is_map(Input) ->
create_endpoint_access(Client, Input, []).
create_endpoint_access(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateEndpointAccess">>, Input, Options).
@doc Creates an Amazon Redshift event notification subscription .
%%
This action requires an ARN ( Amazon Resource Name ) of an Amazon SNS topic
created by either the Amazon Redshift console , the Amazon SNS console , or
the Amazon SNS API . To obtain an ARN with Amazon SNS , you must create a
topic in Amazon SNS and subscribe to the topic . The ARN is displayed in
the SNS console .
%%
You can specify the source type , and lists of Amazon Redshift source IDs ,
%% event categories, and event severities. Notifications will be sent for all
%% events you want that match those criteria. For example, you can specify
source type = cluster , source ID = my - cluster-1 and mycluster2 , event
categories = Availability , Backup , and severity = ERROR . The subscription
%% will only send notifications for those ERROR events in the Availability
%% and Backup categories for the specified clusters.
%%
%% If you specify both the source type and source IDs, such as source type =
%% cluster and source identifier = my-cluster-1, notifications will be sent
%% for all the cluster events for my-cluster-1. If you specify a source type
%% but do not specify a source identifier, you will receive notice of the
events for the objects of that type in your Amazon Web Services account .
If you do not specify either the SourceType nor the SourceIdentifier , you
will be notified of events generated from all Amazon Redshift sources
belonging to your Amazon Web Services account . You must specify a source
%% type if you specify a source ID.
create_event_subscription(Client, Input)
when is_map(Client), is_map(Input) ->
create_event_subscription(Client, Input, []).
create_event_subscription(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateEventSubscription">>, Input, Options).
@doc Creates an HSM client certificate that an Amazon Redshift cluster
will use to connect to the client 's HSM in order to store and retrieve
%% the keys used to encrypt the cluster databases.
%%
The command returns a public key , which you must store in the HSM . In
addition to creating the HSM certificate , you must create an Amazon
Redshift HSM configuration that provides a cluster the information needed
to store and use encryption keys in the HSM . For more information , go to
Hardware Security Modules in the Amazon Redshift Cluster Management Guide .
create_hsm_client_certificate(Client, Input)
when is_map(Client), is_map(Input) ->
create_hsm_client_certificate(Client, Input, []).
create_hsm_client_certificate(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateHsmClientCertificate">>, Input, Options).
@doc Creates an HSM configuration that contains the information required
by an Amazon Redshift cluster to store and use database encryption keys in
a Hardware Security Module ( HSM ) .
%%
After creating the HSM configuration , you can specify it as a parameter
%% when creating a cluster. The cluster will then store its encryption keys
in the HSM .
%%
In addition to creating an HSM configuration , you must also create an HSM
client certificate . For more information , go to Hardware Security Modules
in the Amazon Redshift Cluster Management Guide .
create_hsm_configuration(Client, Input)
when is_map(Client), is_map(Input) ->
create_hsm_configuration(Client, Input, []).
create_hsm_configuration(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateHsmConfiguration">>, Input, Options).
%% @doc Creates a scheduled action.
%%
A scheduled action contains a schedule and an Amazon Redshift API action .
%% For example, you can create a schedule of when to run the
%% `ResizeCluster' API operation.
create_scheduled_action(Client, Input)
when is_map(Client), is_map(Input) ->
create_scheduled_action(Client, Input, []).
create_scheduled_action(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateScheduledAction">>, Input, Options).
@doc Creates a snapshot copy grant that permits Amazon Redshift to use an
encrypted symmetric key from Key Management Service ( KMS ) to encrypt
%% copied snapshots in a destination region.
%%
For more information about managing snapshot copy grants , go to Amazon
Redshift Database Encryption in the Amazon Redshift Cluster Management
%% Guide.
create_snapshot_copy_grant(Client, Input)
when is_map(Client), is_map(Input) ->
create_snapshot_copy_grant(Client, Input, []).
create_snapshot_copy_grant(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateSnapshotCopyGrant">>, Input, Options).
%% @doc Create a snapshot schedule that can be associated to a cluster and
%% which overrides the default system backup schedule.
create_snapshot_schedule(Client, Input)
when is_map(Client), is_map(Input) ->
create_snapshot_schedule(Client, Input, []).
create_snapshot_schedule(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateSnapshotSchedule">>, Input, Options).
%% @doc Adds tags to a cluster.
%%
A resource can have up to 50 tags . If you try to create more than 50 tags
%% for a resource, you will receive an error and the attempt will fail.
%%
%% If you specify a key that already exists for the resource, the value for
%% that key will be updated with the new value.
create_tags(Client, Input)
when is_map(Client), is_map(Input) ->
create_tags(Client, Input, []).
create_tags(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateTags">>, Input, Options).
@doc Creates a usage limit for a specified Amazon Redshift feature on a
%% cluster.
%%
%% The usage limit is identified by the returned usage limit identifier.
create_usage_limit(Client, Input)
when is_map(Client), is_map(Input) ->
create_usage_limit(Client, Input, []).
create_usage_limit(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateUsageLimit">>, Input, Options).
%% @doc From a datashare producer account, removes authorization from the
%% specified datashare.
deauthorize_data_share(Client, Input)
when is_map(Client), is_map(Input) ->
deauthorize_data_share(Client, Input, []).
deauthorize_data_share(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeauthorizeDataShare">>, Input, Options).
%% @doc Deletes an authentication profile.
delete_authentication_profile(Client, Input)
when is_map(Client), is_map(Input) ->
delete_authentication_profile(Client, Input, []).
delete_authentication_profile(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteAuthenticationProfile">>, Input, Options).
%% @doc Deletes a previously provisioned cluster without its final snapshot
%% being created.
%%
%% A successful response from the web service indicates that the request was
%% received correctly. Use `DescribeClusters' to monitor the status of
%% the deletion. The delete operation cannot be canceled or reverted once
submitted . For more information about managing clusters , go to Amazon
Redshift Clusters in the Amazon Redshift Cluster Management Guide .
%%
%% If you want to shut down the cluster and retain it for future use, set
%% SkipFinalClusterSnapshot to `false' and specify a name for
%% FinalClusterSnapshotIdentifier. You can later restore this snapshot to
%% resume using the cluster. If a final cluster snapshot is requested, the
%% status of the cluster will be "final-snapshot" while the
snapshot is being taken , then it 's & quot;deleting" ; once Amazon
%% Redshift begins deleting the cluster.
%%
For more information about managing clusters , go to Amazon Redshift
Clusters in the Amazon Redshift Cluster Management Guide .
delete_cluster(Client, Input)
when is_map(Client), is_map(Input) ->
delete_cluster(Client, Input, []).
delete_cluster(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteCluster">>, Input, Options).
%% @doc Deletes a specified Amazon Redshift parameter group.
%%
%% You cannot delete a parameter group if it is associated with a cluster.
delete_cluster_parameter_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_cluster_parameter_group(Client, Input, []).
%% Same as delete_cluster_parameter_group/2, with a list of per-request options.
delete_cluster_parameter_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteClusterParameterGroup">>, Input, Options).
%% @doc Deletes an Amazon Redshift security group.
%%
%% You cannot delete a security group that is associated with any clusters.
%% You cannot delete the default security group.
%%
%% For information about managing security groups, go to Amazon Redshift
%% Cluster Security Groups in the Amazon Redshift Cluster Management Guide.
delete_cluster_security_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_cluster_security_group(Client, Input, []).
%% Same as delete_cluster_security_group/2, with a list of per-request options.
delete_cluster_security_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteClusterSecurityGroup">>, Input, Options).
%% @doc Deletes the specified manual snapshot.
%%
%% The snapshot must be in the `available' state, with no other users
%% authorized to access the snapshot.
%%
%% Unlike automated snapshots, manual snapshots are retained even after you
%% delete your cluster. Amazon Redshift does not delete your manual
%% snapshots. You must delete manual snapshot explicitly to avoid getting
%% charged. If other accounts are authorized to access the snapshot, you must
%% revoke all of the authorizations before you can delete the snapshot.
delete_cluster_snapshot(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_cluster_snapshot(Client, Input, []).
%% Same as delete_cluster_snapshot/2, with a list of per-request options.
delete_cluster_snapshot(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteClusterSnapshot">>, Input, Options).
%% @doc Deletes the specified cluster subnet group.
delete_cluster_subnet_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_cluster_subnet_group(Client, Input, []).
%% Same as delete_cluster_subnet_group/2, with a list of per-request options.
delete_cluster_subnet_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteClusterSubnetGroup">>, Input, Options).
%% @doc Deletes a Redshift-managed VPC endpoint.
delete_endpoint_access(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_endpoint_access(Client, Input, []).
%% Same as delete_endpoint_access/2, with a list of per-request options.
delete_endpoint_access(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteEndpointAccess">>, Input, Options).
%% @doc Deletes an Amazon Redshift event notification subscription.
delete_event_subscription(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_event_subscription(Client, Input, []).
%% Same as delete_event_subscription/2, with a list of per-request options.
delete_event_subscription(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteEventSubscription">>, Input, Options).
%% @doc Deletes the specified HSM client certificate.
delete_hsm_client_certificate(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_hsm_client_certificate(Client, Input, []).
%% Same as delete_hsm_client_certificate/2, with a list of per-request options.
delete_hsm_client_certificate(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteHsmClientCertificate">>, Input, Options).
%% @doc Deletes the specified Amazon Redshift HSM configuration.
delete_hsm_configuration(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_hsm_configuration(Client, Input, []).
%% Same as delete_hsm_configuration/2, with a list of per-request options.
delete_hsm_configuration(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteHsmConfiguration">>, Input, Options).
%% @doc Deletes a partner integration from a cluster.
%%
%% Data can still flow to the cluster until the integration is deleted at the
%% partner's website.
delete_partner(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_partner(Client, Input, []).
%% Same as delete_partner/2, with a list of per-request options.
delete_partner(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeletePartner">>, Input, Options).
%% @doc Deletes a scheduled action.
delete_scheduled_action(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_scheduled_action(Client, Input, []).
%% Same as delete_scheduled_action/2, with a list of per-request options.
delete_scheduled_action(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteScheduledAction">>, Input, Options).
%% @doc Deletes the specified snapshot copy grant.
delete_snapshot_copy_grant(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_snapshot_copy_grant(Client, Input, []).
%% Same as delete_snapshot_copy_grant/2, with a list of per-request options.
delete_snapshot_copy_grant(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteSnapshotCopyGrant">>, Input, Options).
%% @doc Deletes a snapshot schedule.
delete_snapshot_schedule(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_snapshot_schedule(Client, Input, []).
%% Same as delete_snapshot_schedule/2, with a list of per-request options.
delete_snapshot_schedule(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteSnapshotSchedule">>, Input, Options).
%% @doc Deletes tags from a resource.
%%
%% You must provide the ARN of the resource from which you want to delete the
%% tag or tags.
delete_tags(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_tags(Client, Input, []).
%% Same as delete_tags/2, with a list of per-request options.
delete_tags(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteTags">>, Input, Options).
%% @doc Deletes a usage limit from a cluster.
delete_usage_limit(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_usage_limit(Client, Input, []).
%% Same as delete_usage_limit/2, with a list of per-request options.
delete_usage_limit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteUsageLimit">>, Input, Options).
%% @doc Returns a list of attributes attached to an account
describe_account_attributes(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_account_attributes(Client, Input, []).
%% Same as describe_account_attributes/2, with a list of per-request options.
describe_account_attributes(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeAccountAttributes">>, Input, Options).
%% @doc Describes an authentication profile.
describe_authentication_profiles(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_authentication_profiles(Client, Input, []).
%% Same as describe_authentication_profiles/2, with a list of per-request options.
describe_authentication_profiles(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeAuthenticationProfiles">>, Input, Options).
%% @doc Returns an array of `ClusterDbRevision' objects.
describe_cluster_db_revisions(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_db_revisions(Client, Input, []).
%% Same as describe_cluster_db_revisions/2, with a list of per-request options.
describe_cluster_db_revisions(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterDbRevisions">>, Input, Options).
%% @doc Returns a list of Amazon Redshift parameter groups, including
%% parameter groups you created and the default parameter group.
%%
%% For each parameter group, the response includes the parameter group name,
%% description, and parameter group family name. You can optionally specify a
%% name to retrieve the description of a specific parameter group.
%%
%% For more information about parameters and parameter groups, go to Amazon
%% Redshift Parameter Groups in the Amazon Redshift Cluster Management Guide.
%%
%% If you specify both tag keys and tag values in the same request, Amazon
%% Redshift returns all parameter groups that match any combination of the
%% specified keys and values. For example, if you have `owner' and
%% `environment' for tag keys, and `admin' and `test' for tag
%% values, all parameter groups that have any combination of those values are
%% returned.
%%
%% If both tag keys and values are omitted from the request, parameter groups
%% are returned regardless of whether they have tag keys or values associated
%% with them.
describe_cluster_parameter_groups(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_parameter_groups(Client, Input, []).
%% Same as describe_cluster_parameter_groups/2, with a list of per-request options.
describe_cluster_parameter_groups(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterParameterGroups">>, Input, Options).
%% @doc Returns a detailed list of parameters contained within the specified
%% Amazon Redshift parameter group.
%%
%% For each parameter the response includes information such as parameter
%% name, description, data type, value, whether the parameter value is
%% modifiable, and so on.
%%
%% You can specify source filter to retrieve parameters of only specific
%% type. For example, to retrieve parameters that were modified by a user
%% action such as from `ModifyClusterParameterGroup', you can specify
%% source equal to user.
%%
%% For more information about parameters and parameter groups, go to Amazon
%% Redshift Parameter Groups in the Amazon Redshift Cluster Management Guide.
describe_cluster_parameters(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_parameters(Client, Input, []).
%% Same as describe_cluster_parameters/2, with a list of per-request options.
describe_cluster_parameters(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterParameters">>, Input, Options).
%% @doc Returns information about Amazon Redshift security groups.
%%
%% If the name of a security group is specified, the response will contain
%% only information about only that security group.
%%
%% For information about managing security groups, go to Amazon Redshift
%% Cluster Security Groups in the Amazon Redshift Cluster Management Guide.
%%
%% If you specify both tag keys and tag values in the same request, Amazon
%% Redshift returns all security groups that match any combination of the
%% specified keys and values. For example, if you have `owner' and
%% `environment' for tag keys, and `admin' and `test' for tag
%% values, all security groups that have any combination of those values are
%% returned.
%%
%% If both tag keys and values are omitted from the request, security groups
%% are returned regardless of whether they have tag keys or values associated
%% with them.
describe_cluster_security_groups(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_security_groups(Client, Input, []).
%% Same as describe_cluster_security_groups/2, with a list of per-request options.
describe_cluster_security_groups(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterSecurityGroups">>, Input, Options).
%% @doc Returns one or more snapshot objects, which contain metadata about
%% your cluster snapshots.
%%
%% By default, this operation returns information about all snapshots of all
%% clusters that are owned by your Amazon Web Services account. No
%% information is returned for snapshots owned by inactive Amazon Web
%% Services accounts.
%%
%% If you specify both tag keys and tag values in the same request, Amazon
%% Redshift returns all snapshots that match any combination of the specified
%% keys and values. For example, if you have `owner' and
%% `environment' for tag keys, and `admin' and `test' for tag
%% values, all snapshots that have any combination of those values are
%% returned. Only snapshots that you own are returned in the response; shared
%% snapshots are not returned with the tag key and tag value request
%% parameters.
%%
%% If both tag keys and values are omitted from the request, snapshots are
%% returned regardless of whether they have tag keys or values associated
%% with them.
describe_cluster_snapshots(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_snapshots(Client, Input, []).
%% Same as describe_cluster_snapshots/2, with a list of per-request options.
describe_cluster_snapshots(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterSnapshots">>, Input, Options).
%% @doc Returns one or more cluster subnet group objects, which contain
%% metadata about your cluster subnet groups.
%%
%% By default, this operation returns information about all cluster subnet
%% groups that are defined in your Amazon Web Services account.
%%
%% If you specify both tag keys and tag values in the same request, Amazon
%% Redshift returns all subnet groups that match any combination of the
%% specified keys and values. For example, if you have `owner' and
%% `environment' for tag keys, and `admin' and `test' for tag
%% values, all subnet groups that have any combination of those values are
%% returned.
%%
%% If both tag keys and values are omitted from the request, subnet groups
%% are returned regardless of whether they have tag keys or values associated
%% with them.
describe_cluster_subnet_groups(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_subnet_groups(Client, Input, []).
%% Same as describe_cluster_subnet_groups/2, with a list of per-request options.
describe_cluster_subnet_groups(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterSubnetGroups">>, Input, Options).
%% @doc Returns a list of all the available maintenance tracks.
describe_cluster_tracks(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_tracks(Client, Input, []).
%% Same as describe_cluster_tracks/2, with a list of per-request options.
describe_cluster_tracks(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterTracks">>, Input, Options).
%% @doc Returns descriptions of the available Amazon Redshift cluster
%% versions.
%%
%% You can call this operation even before creating any clusters to learn
%% more about the Amazon Redshift versions. For more information about
%% managing clusters, go to Amazon Redshift Clusters in the Amazon Redshift
%% Cluster Management Guide.
describe_cluster_versions(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_versions(Client, Input, []).
%% Same as describe_cluster_versions/2, with a list of per-request options.
describe_cluster_versions(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterVersions">>, Input, Options).
%% @doc Returns properties of provisioned clusters including general cluster
%% properties, cluster database properties, maintenance and backup
%% properties, and security and access properties.
%%
%% This operation supports pagination. For more information about managing
%% clusters, go to Amazon Redshift Clusters in the Amazon Redshift Cluster
%% Management Guide.
%%
%% If you specify both tag keys and tag values in the same request, Amazon
%% Redshift returns all clusters that match any combination of the specified
%% keys and values. For example, if you have `owner' and
%% `environment' for tag keys, and `admin' and `test' for tag
%% values, all clusters that have any combination of those values are
%% returned.
%%
%% If both tag keys and values are omitted from the request, clusters are
%% returned regardless of whether they have tag keys or values associated
%% with them.
describe_clusters(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_clusters(Client, Input, []).
%% Same as describe_clusters/2, with a list of per-request options.
describe_clusters(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusters">>, Input, Options).
%% @doc Shows the status of any inbound or outbound datashares available in
%% the specified account.
describe_data_shares(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_data_shares(Client, Input, []).
%% Same as describe_data_shares/2, with a list of per-request options.
describe_data_shares(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDataShares">>, Input, Options).
%% @doc Returns a list of datashares where the account identifier being
%% called is a consumer account identifier.
describe_data_shares_for_consumer(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_data_shares_for_consumer(Client, Input, []).
%% Same as describe_data_shares_for_consumer/2, with a list of per-request options.
describe_data_shares_for_consumer(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDataSharesForConsumer">>, Input, Options).
%% @doc Returns a list of datashares when the account identifier being called
%% is a producer account identifier.
describe_data_shares_for_producer(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_data_shares_for_producer(Client, Input, []).
%% Same as describe_data_shares_for_producer/2, with a list of per-request options.
describe_data_shares_for_producer(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDataSharesForProducer">>, Input, Options).
%% @doc Returns a list of parameter settings for the specified parameter
%% group family.
%%
%% For more information about parameters and parameter groups, go to Amazon
%% Redshift Parameter Groups in the Amazon Redshift Cluster Management Guide.
describe_default_cluster_parameters(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_default_cluster_parameters(Client, Input, []).
%% Same as describe_default_cluster_parameters/2, with a list of per-request options.
describe_default_cluster_parameters(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDefaultClusterParameters">>, Input, Options).
%% @doc Describes a Redshift-managed VPC endpoint.
describe_endpoint_access(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_endpoint_access(Client, Input, []).
%% Same as describe_endpoint_access/2, with a list of per-request options.
describe_endpoint_access(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeEndpointAccess">>, Input, Options).
%% @doc Describes an endpoint authorization.
describe_endpoint_authorization(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_endpoint_authorization(Client, Input, []).
%% Same as describe_endpoint_authorization/2, with a list of per-request options.
describe_endpoint_authorization(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeEndpointAuthorization">>, Input, Options).
%% @doc Displays a list of event categories for all event source types, or
%% for a specified source type.
%%
%% For a list of the event categories and source types, go to Amazon Redshift
%% Event Notifications.
describe_event_categories(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_event_categories(Client, Input, []).
%% Same as describe_event_categories/2, with a list of per-request options.
describe_event_categories(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeEventCategories">>, Input, Options).
%% @doc Lists descriptions of all the Amazon Redshift event notification
%% subscriptions for a customer account.
%%
%% If you specify a subscription name, lists the description for that
%% subscription.
%%
%% If you specify both tag keys and tag values in the same request, Amazon
%% Redshift returns all event notification subscriptions that match any
%% combination of the specified keys and values. For example, if you have
%% `owner' and `environment' for tag keys, and `admin' and
%% `test' for tag values, all subscriptions that have any combination of
%% those values are returned.
%%
%% If both tag keys and values are omitted from the request, subscriptions
%% are returned regardless of whether they have tag keys or values associated
%% with them.
describe_event_subscriptions(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_event_subscriptions(Client, Input, []).
%% Same as describe_event_subscriptions/2, with a list of per-request options.
describe_event_subscriptions(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeEventSubscriptions">>, Input, Options).
%% @doc Returns events related to clusters, security groups, snapshots, and
%% parameter groups for the past 14 days.
%%
%% Events specific to a particular cluster, security group, snapshot or
%% parameter group can be obtained by providing the name as a parameter. By
%% default, the past hour of events are returned.
describe_events(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_events(Client, Input, []).
%% Same as describe_events/2, with a list of per-request options.
describe_events(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeEvents">>, Input, Options).
%% @doc Returns information about the specified HSM client certificate.
%%
%% If no certificate ID is specified, returns information about all the HSM
%% certificates owned by your Amazon Web Services account.
%%
%% If you specify both tag keys and tag values in the same request, Amazon
%% Redshift returns all HSM client certificates that match any combination of
%% the specified keys and values. For example, if you have `owner' and
%% `environment' for tag keys, and `admin' and `test' for tag
%% values, all HSM client certificates that have any combination of those
%% values are returned.
%%
%% If both tag keys and values are omitted from the request, HSM client
%% certificates are returned regardless of whether they have tag keys or
%% values associated with them.
describe_hsm_client_certificates(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_hsm_client_certificates(Client, Input, []).
%% Same as describe_hsm_client_certificates/2, with a list of per-request options.
describe_hsm_client_certificates(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeHsmClientCertificates">>, Input, Options).
%% @doc Returns information about the specified Amazon Redshift HSM
%% configuration.
%%
%% If no configuration ID is specified, returns information about all the HSM
%% configurations owned by your Amazon Web Services account.
%%
%% If you specify both tag keys and tag values in the same request, Amazon
%% Redshift returns all HSM connections that match any combination of the
%% specified keys and values. For example, if you have `owner' and
%% `environment' for tag keys, and `admin' and `test' for tag
%% values, all HSM connections that have any combination of those values are
%% returned.
%%
%% If both tag keys and values are omitted from the request, HSM connections
%% are returned regardless of whether they have tag keys or values associated
%% with them.
describe_hsm_configurations(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_hsm_configurations(Client, Input, []).
%% Same as describe_hsm_configurations/2, with a list of per-request options.
describe_hsm_configurations(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeHsmConfigurations">>, Input, Options).
%% @doc Describes whether information, such as queries and connection
%% attempts, is being logged for the specified Amazon Redshift cluster.
describe_logging_status(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_logging_status(Client, Input, []).
%% Same as describe_logging_status/2, with a list of per-request options.
describe_logging_status(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeLoggingStatus">>, Input, Options).
%% @doc Returns properties of possible node configurations such as node type,
%% number of nodes, and disk usage for the specified action type.
describe_node_configuration_options(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_node_configuration_options(Client, Input, []).
%% Same as describe_node_configuration_options/2, with a list of per-request options.
describe_node_configuration_options(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeNodeConfigurationOptions">>, Input, Options).
%% @doc Returns a list of orderable cluster options.
%%
%% Before you create a new cluster you can use this operation to find what
%% options are available, such as the EC2 Availability Zones (AZ) in the
%% specific Amazon Web Services Region that you can specify, and the node
%% types you can request. The node types differ by available storage, memory,
%% CPU and price. With the cost involved you might want to obtain a list of
%% cluster options in the specific region and specify values when creating a
%% cluster. For more information about managing clusters, go to Amazon
%% Redshift Clusters in the Amazon Redshift Cluster Management Guide.
describe_orderable_cluster_options(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_orderable_cluster_options(Client, Input, []).
%% Same as describe_orderable_cluster_options/2, with a list of per-request options.
describe_orderable_cluster_options(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeOrderableClusterOptions">>, Input, Options).
%% @doc Returns information about the partner integrations defined for a
%% cluster.
describe_partners(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_partners(Client, Input, []).
%% Same as describe_partners/2, with a list of per-request options.
describe_partners(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribePartners">>, Input, Options).
%% @doc Returns exchange status details and associated metadata for a
%% reserved-node exchange.
%%
%% Statuses include such values as in progress and requested.
describe_reserved_node_exchange_status(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_reserved_node_exchange_status(Client, Input, []).
%% Same as describe_reserved_node_exchange_status/2, with a list of per-request options.
describe_reserved_node_exchange_status(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeReservedNodeExchangeStatus">>, Input, Options).
%% @doc Returns a list of the available reserved node offerings by Amazon
%% Redshift with their descriptions including the node type, the fixed and
%% recurring costs of reserving the node and duration the node will be
%% reserved for you.
%%
%% These descriptions help you determine which reserve node offering you want
%% to purchase. You then use the unique offering ID in you call to
%% `PurchaseReservedNodeOffering' to reserve one or more nodes for your
%% Amazon Redshift cluster.
%%
%% For more information about reserved node offerings, go to Purchasing
%% Reserved Nodes in the Amazon Redshift Cluster Management Guide.
describe_reserved_node_offerings(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_reserved_node_offerings(Client, Input, []).
%% Same as describe_reserved_node_offerings/2, with a list of per-request options.
describe_reserved_node_offerings(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeReservedNodeOfferings">>, Input, Options).
%% @doc Returns the descriptions of the reserved nodes.
describe_reserved_nodes(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_reserved_nodes(Client, Input, []).
%% Same as describe_reserved_nodes/2, with a list of per-request options.
describe_reserved_nodes(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeReservedNodes">>, Input, Options).
%% @doc Returns information about the last resize operation for the specified
%% cluster.
%%
%% If no resize operation has ever been initiated for the specified cluster,
%% a `HTTP 404' error is returned. If a resize operation was initiated
%% and completed, the status of the resize remains as `SUCCEEDED' until
%% the next resize.
%%
%% A resize operation can be requested using `ModifyCluster' and
%% specifying a different number or type of nodes for the cluster.
describe_resize(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_resize(Client, Input, []).
%% Same as describe_resize/2, with a list of per-request options.
describe_resize(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeResize">>, Input, Options).
%% @doc Describes properties of scheduled actions.
describe_scheduled_actions(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_scheduled_actions(Client, Input, []).
%% Same as describe_scheduled_actions/2, with a list of per-request options.
describe_scheduled_actions(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeScheduledActions">>, Input, Options).
%% @doc Returns a list of snapshot copy grants owned by the Amazon Web
%% Services account in the destination region.
%%
%% For more information about managing snapshot copy grants, go to Amazon
%% Redshift Database Encryption in the Amazon Redshift Cluster Management
%% Guide.
describe_snapshot_copy_grants(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_snapshot_copy_grants(Client, Input, []).
%% Same as describe_snapshot_copy_grants/2, with a list of per-request options.
describe_snapshot_copy_grants(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeSnapshotCopyGrants">>, Input, Options).
%% @doc Returns a list of snapshot schedules.
describe_snapshot_schedules(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_snapshot_schedules(Client, Input, []).
%% Same as describe_snapshot_schedules/2, with a list of per-request options.
describe_snapshot_schedules(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeSnapshotSchedules">>, Input, Options).
%% @doc Returns account level backups storage size and provisional storage.
describe_storage(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_storage(Client, Input, []).
%% Same as describe_storage/2, with a list of per-request options.
describe_storage(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeStorage">>, Input, Options).
%% @doc Lists the status of one or more table restore requests made using the
%% `RestoreTableFromClusterSnapshot' API action.
%%
%% If you don't specify a value for the `TableRestoreRequestId'
%% parameter, then `DescribeTableRestoreStatus' returns the status of all
%% table restore requests ordered by the date and time of the request in
%% ascending order. Otherwise `DescribeTableRestoreStatus' returns the
%% status of the table specified by `TableRestoreRequestId'.
describe_table_restore_status(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_table_restore_status(Client, Input, []).
%% Same as describe_table_restore_status/2, with a list of per-request options.
describe_table_restore_status(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeTableRestoreStatus">>, Input, Options).
%% @doc Returns a list of tags.
%%
%% You can return tags from a specific resource by specifying an ARN, or you
%% can return all tags for a given type of resource, such as clusters,
%% snapshots, and so on.
%%
%% The following are limitations for `DescribeTags':
%%
%% <ul> <li> You cannot specify an ARN and a resource-type value together in
%% the same request.
%%
%% </li> <li> You cannot use the `MaxRecords' and `Marker' parameters
%% together with the ARN parameter.
%%
%% </li> <li> The `MaxRecords' parameter can be a range from 10 to 50
%% results to return in a request.
%%
%% </li> </ul> If you specify both tag keys and tag values in the same
%% request, Amazon Redshift returns all resources that match any combination
%% of the specified keys and values. For example, if you have `owner' and
%% `environment' for tag keys, and `admin' and `test' for tag
%% values, all resources that have any combination of those values are
%% returned.
%%
%% If both tag keys and values are omitted from the request, resources are
%% returned regardless of whether they have tag keys or values associated
%% with them.
describe_tags(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_tags(Client, Input, []).
%% Same as describe_tags/2, with a list of per-request options.
describe_tags(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeTags">>, Input, Options).
%% @doc Shows usage limits on a cluster.
%%
%% Results are filtered based on the combination of input usage limit
%% identifier, cluster identifier, and feature type parameters:
%%
%% <ul> <li> If usage limit identifier, cluster identifier, and feature type
%% are not provided, then all usage limit objects for the current account in
%% the current region are returned.
%%
%% </li> <li> If usage limit identifier is provided, then the corresponding
%% usage limit object is returned.
%%
%% </li> <li> If cluster identifier is provided, then all usage limit objects
%% for the specified cluster are returned.
%%
%% </li> <li> If cluster identifier and feature type are provided, then all
%% usage limit objects for the combination of cluster and feature are
%% returned.
%%
%% </li> </ul>
describe_usage_limits(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_usage_limits(Client, Input, []).
%% Same as describe_usage_limits/2, with a list of per-request options.
describe_usage_limits(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeUsageLimits">>, Input, Options).
%% @doc Stops logging information, such as queries and connection attempts,
%% for the specified Amazon Redshift cluster.
disable_logging(Client, Input)
  when is_map(Client), is_map(Input) ->
    disable_logging(Client, Input, []).
%% Same as disable_logging/2, with a list of per-request options.
disable_logging(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DisableLogging">>, Input, Options).
@doc Disables the automatic copying of snapshots from one region to
%% another region for a specified cluster.
%%
%% If your cluster and its snapshots are encrypted using an encrypted
symmetric key from Key Management Service , use
` DeleteSnapshotCopyGrant ' to delete the grant that grants Amazon
%% Redshift permission to the key in the destination region.
disable_snapshot_copy(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to disable_snapshot_copy/3 with no extra request options.
    disable_snapshot_copy(Client, Input, []).
disable_snapshot_copy(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"DisableSnapshotCopy">>, Input, Options).
%% @doc From a datashare consumer account, remove association for the
%% specified datashare.
disassociate_data_share_consumer(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to disassociate_data_share_consumer/3 with no extra request options.
    disassociate_data_share_consumer(Client, Input, []).
disassociate_data_share_consumer(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"DisassociateDataShareConsumer">>, Input, Options).
%% @doc Starts logging information, such as queries and connection attempts,
for the specified Amazon Redshift cluster .
enable_logging(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to enable_logging/3 with no extra request options.
    enable_logging(Client, Input, []).
enable_logging(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"EnableLogging">>, Input, Options).
@doc Enables the automatic copy of snapshots from one region to another
%% region for a specified cluster.
enable_snapshot_copy(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to enable_snapshot_copy/3 with no extra request options.
    enable_snapshot_copy(Client, Input, []).
enable_snapshot_copy(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"EnableSnapshotCopy">>, Input, Options).
%% @doc Returns a database user name and temporary password with temporary
authorization to log on to an Amazon Redshift database .
%%
The action returns the database user name prefixed with ` IAM : ' if
` AutoCreate ' is ` False ' or ` IAMA : ' if ` AutoCreate ' is
` True ' . You can optionally specify one or more database user groups
%% that the user will join at log on. By default, the temporary credentials
expire in 900 seconds . You can optionally specify a duration between 900
seconds ( 15 minutes ) and 3600 seconds ( 60 minutes ) . For more information ,
%% see Using IAM Authentication to Generate Database User Credentials in the
%% Amazon Redshift Cluster Management Guide.
%%
The Identity and Access Management ( IAM ) user or role that runs
GetClusterCredentials must have an IAM policy attached that allows access
%% to all necessary actions and resources. For more information about
permissions , see Resource Policies for GetClusterCredentials in the Amazon
Redshift Cluster Management Guide .
%%
If the ` DbGroups ' parameter is specified , the IAM policy must allow
the ` redshift : JoinGroup ' action with access to the listed
%% `dbgroups'.
%%
In addition , if the ` AutoCreate ' parameter is set to ` True ' , then
%% the policy must include the `redshift:CreateClusterUser' permission.
%%
If the ` DbName ' parameter is specified , the IAM policy must allow
%% access to the resource `dbname' for the specified database name.
get_cluster_credentials(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to get_cluster_credentials/3 with no extra request options.
    get_cluster_credentials(Client, Input, []).
get_cluster_credentials(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"GetClusterCredentials">>, Input, Options).
%% @doc Returns a database user name and temporary password with temporary
authorization to log in to an Amazon Redshift database .
%%
The database user is mapped 1:1 to the source Identity and Access
Management ( IAM ) identity . For more information about IAM identities , see
IAM Identities ( users , user groups , and roles ) in the Amazon Web Services
Identity and Access Management User Guide .
%%
The Identity and Access Management ( IAM ) identity that runs this operation
must have an IAM policy attached that allows access to all necessary
%% actions and resources. For more information about permissions, see Using
identity - based policies ( IAM policies ) in the Amazon Redshift Cluster
%% Management Guide.
get_cluster_credentials_with_iam(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to get_cluster_credentials_with_iam/3 with no extra request options.
    get_cluster_credentials_with_iam(Client, Input, []).
get_cluster_credentials_with_iam(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"GetClusterCredentialsWithIAM">>, Input, Options).
%% @doc Gets the configuration options for the reserved-node exchange.
%%
%% These options include information about the source reserved node and
%% target reserved node offering. Details include the node type, the price,
%% the node count, and the offering type.
get_reserved_node_exchange_configuration_options(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to the arity-3 variant with no extra request options.
    get_reserved_node_exchange_configuration_options(Client, Input, []).
get_reserved_node_exchange_configuration_options(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"GetReservedNodeExchangeConfigurationOptions">>, Input, Options).
%% @doc Returns an array of DC2 ReservedNodeOfferings that matches the
payment type , term , and usage price of the given reserved node .
get_reserved_node_exchange_offerings(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to get_reserved_node_exchange_offerings/3 with no extra request options.
    get_reserved_node_exchange_offerings(Client, Input, []).
get_reserved_node_exchange_offerings(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"GetReservedNodeExchangeOfferings">>, Input, Options).
%% @doc This operation is retired.
%%
Calling this operation does not change AQUA configuration . Amazon Redshift
%% automatically determines whether to use AQUA (Advanced Query Accelerator).
modify_aqua_configuration(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_aqua_configuration/3 with no extra request options.
    modify_aqua_configuration(Client, Input, []).
modify_aqua_configuration(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyAquaConfiguration">>, Input, Options).
%% @doc Modifies an authentication profile.
modify_authentication_profile(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_authentication_profile/3 with no extra request options.
    modify_authentication_profile(Client, Input, []).
modify_authentication_profile(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyAuthenticationProfile">>, Input, Options).
%% @doc Modifies the settings for a cluster.
%%
%% You can also change node type and the number of nodes to scale up or down
%% the cluster. When resizing a cluster, you must specify both the number of
%% nodes and the node type even if one of the parameters does not change.
%%
%% You can add another security or parameter group, or change the admin user
%% password. Resetting a cluster password or modifying the security groups
%% associated with a cluster do not need a reboot. However, modifying a
%% parameter group requires a reboot for parameters to take effect. For more
information about managing clusters , go to Amazon Redshift Clusters in the
%% Amazon Redshift Cluster Management Guide.
modify_cluster(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_cluster/3 with no extra request options.
    modify_cluster(Client, Input, []).
modify_cluster(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyCluster">>, Input, Options).
%% @doc Modifies the database revision of a cluster.
%%
%% The database revision is a unique revision of the database running in a
%% cluster.
modify_cluster_db_revision(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_cluster_db_revision/3 with no extra request options.
    modify_cluster_db_revision(Client, Input, []).
modify_cluster_db_revision(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyClusterDbRevision">>, Input, Options).
@doc Modifies the list of Identity and Access Management ( IAM ) roles that
can be used by the cluster to access other Amazon Web Services services .
%%
The maximum number of IAM roles that you can associate is subject to a
quota . For more information , go to Quotas and limits in the Amazon
Redshift Cluster Management Guide .
modify_cluster_iam_roles(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_cluster_iam_roles/3 with no extra request options.
    modify_cluster_iam_roles(Client, Input, []).
modify_cluster_iam_roles(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyClusterIamRoles">>, Input, Options).
%% @doc Modifies the maintenance settings of a cluster.
modify_cluster_maintenance(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_cluster_maintenance/3 with no extra request options.
    modify_cluster_maintenance(Client, Input, []).
modify_cluster_maintenance(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyClusterMaintenance">>, Input, Options).
%% @doc Modifies the parameters of a parameter group.
%%
%% For the parameters parameter, it can't contain ASCII characters.
%%
For more information about parameters and parameter groups , go to Amazon
Redshift Parameter Groups in the Amazon Redshift Cluster Management Guide .
modify_cluster_parameter_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_cluster_parameter_group/3 with no extra request options.
    modify_cluster_parameter_group(Client, Input, []).
modify_cluster_parameter_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyClusterParameterGroup">>, Input, Options).
%% @doc Modifies the settings for a snapshot.
%%
%% This example modifies the manual retention period setting for a cluster
%% snapshot.
modify_cluster_snapshot(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_cluster_snapshot/3 with no extra request options.
    modify_cluster_snapshot(Client, Input, []).
modify_cluster_snapshot(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyClusterSnapshot">>, Input, Options).
%% @doc Modifies a snapshot schedule for a cluster.
modify_cluster_snapshot_schedule(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_cluster_snapshot_schedule/3 with no extra request options.
    modify_cluster_snapshot_schedule(Client, Input, []).
modify_cluster_snapshot_schedule(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyClusterSnapshotSchedule">>, Input, Options).
@doc Modifies a cluster subnet group to include the specified list of VPC
%% subnets.
%%
%% The operation replaces the existing list of subnets with the new list of
%% subnets.
modify_cluster_subnet_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_cluster_subnet_group/3 with no extra request options.
    modify_cluster_subnet_group(Client, Input, []).
modify_cluster_subnet_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyClusterSubnetGroup">>, Input, Options).
@doc Modifies a Redshift - managed VPC endpoint .
modify_endpoint_access(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_endpoint_access/3 with no extra request options.
    modify_endpoint_access(Client, Input, []).
modify_endpoint_access(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyEndpointAccess">>, Input, Options).
@doc Modifies an existing Amazon Redshift event notification subscription .
modify_event_subscription(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_event_subscription/3 with no extra request options.
    modify_event_subscription(Client, Input, []).
modify_event_subscription(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyEventSubscription">>, Input, Options).
%% @doc Modifies a scheduled action.
modify_scheduled_action(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_scheduled_action/3 with no extra request options.
    modify_scheduled_action(Client, Input, []).
modify_scheduled_action(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyScheduledAction">>, Input, Options).
%% @doc Modifies the number of days to retain snapshots in the destination
Amazon Web Services Region after they are copied from the source Amazon
Web Services Region .
%%
%% By default, this operation only changes the retention period of copied
%% automated snapshots. The retention periods for both new and existing
%% copied automated snapshots are updated with the new retention period. You
%% can set the manual option to change only the retention periods of copied
%% manual snapshots. If you set this option, only newly copied manual
%% snapshots have the new retention period.
modify_snapshot_copy_retention_period(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to the arity-3 variant with no extra request options.
    modify_snapshot_copy_retention_period(Client, Input, []).
modify_snapshot_copy_retention_period(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifySnapshotCopyRetentionPeriod">>, Input, Options).
%% @doc Modifies a snapshot schedule.
%%
%% Any schedule associated with a cluster is modified asynchronously.
modify_snapshot_schedule(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_snapshot_schedule/3 with no extra request options.
    modify_snapshot_schedule(Client, Input, []).
modify_snapshot_schedule(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifySnapshotSchedule">>, Input, Options).
%% @doc Modifies a usage limit in a cluster.
%%
%% You can't modify the feature type or period of a usage limit.
modify_usage_limit(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to modify_usage_limit/3 with no extra request options.
    modify_usage_limit(Client, Input, []).
modify_usage_limit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ModifyUsageLimit">>, Input, Options).
%% @doc Pauses a cluster.
pause_cluster(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to pause_cluster/3 with no extra request options.
    pause_cluster(Client, Input, []).
pause_cluster(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"PauseCluster">>, Input, Options).
%% @doc Allows you to purchase reserved nodes.
%%
Amazon Redshift offers a predefined set of reserved node offerings . You
can purchase one or more of the offerings . You can call the
%% `DescribeReservedNodeOfferings' API to obtain the available reserved
%% node offerings. You can call this API by providing a specific reserved
%% node offering and the number of nodes you want to reserve.
%%
%% For more information about reserved node offerings, go to Purchasing
Reserved Nodes in the Amazon Redshift Cluster Management Guide .
purchase_reserved_node_offering(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to purchase_reserved_node_offering/3 with no extra request options.
    purchase_reserved_node_offering(Client, Input, []).
purchase_reserved_node_offering(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"PurchaseReservedNodeOffering">>, Input, Options).
%% @doc Reboots a cluster.
%%
%% This action is taken as soon as possible. It results in a momentary outage
%% to the cluster, during which the cluster status is set to `rebooting'.
%% A cluster event is created when the reboot is completed. Any pending
%% cluster modifications (see `ModifyCluster') are applied at this
reboot . For more information about managing clusters , go to Amazon
Redshift Clusters in the Amazon Redshift Cluster Management Guide .
reboot_cluster(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to reboot_cluster/3 with no extra request options.
    reboot_cluster(Client, Input, []).
reboot_cluster(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"RebootCluster">>, Input, Options).
%% @doc From a datashare consumer account, rejects the specified datashare.
reject_data_share(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to reject_data_share/3 with no extra request options.
    reject_data_share(Client, Input, []).
reject_data_share(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"RejectDataShare">>, Input, Options).
@doc Sets one or more parameters of the specified parameter group to their
%% default values and sets the source values of the parameters to
%% "engine-default".
%%
%% To reset the entire parameter group specify the ResetAllParameters
%% parameter. For parameter changes to take effect you must reboot any
%% associated clusters.
reset_cluster_parameter_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to reset_cluster_parameter_group/3 with no extra request options.
    reset_cluster_parameter_group(Client, Input, []).
reset_cluster_parameter_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ResetClusterParameterGroup">>, Input, Options).
%% @doc Changes the size of the cluster.
%%
%% You can change the cluster's type, or change the number or type of
%% nodes. The default behavior is to use the elastic resize method. With an
%% elastic resize, your cluster is available for read and write operations
%% more quickly than with the classic resize method.
%%
%% Elastic resize operations have the following restrictions:
%%
%% <ul> <li> You can only resize clusters of the following types:
%%
< ul > < li > dc1.large ( if your cluster is in a VPC )
%%
%% </li> <li> dc1.8xlarge (if your cluster is in a VPC)
%%
%% </li> <li> dc2.large
%%
%% </li> <li> dc2.8xlarge
%%
%% </li> <li> ds2.xlarge
%%
%% </li> <li> ds2.8xlarge
%%
%% </li> <li> ra3.xlplus
%%
%% </li> <li> ra3.4xlarge
%%
%% </li> <li> ra3.16xlarge
%%
%% </li> </ul> </li> <li> The type of nodes that you add must match the node
%% type for the cluster.
%%
%% </li> </ul>
resize_cluster(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to resize_cluster/3 with no extra request options.
    resize_cluster(Client, Input, []).
resize_cluster(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ResizeCluster">>, Input, Options).
%% @doc Creates a new cluster from a snapshot.
%%
By default , Amazon Redshift creates the resulting cluster with the same
%% configuration as the original cluster from which the snapshot was created,
%% except that the new cluster is created with the default cluster security
and parameter groups . After Amazon Redshift creates the cluster , you can
%% use the `ModifyCluster' API to associate a different security group
%% and different parameter group with the restored cluster. If you are using
%% a DS node type, you can also choose to change to another DS node type of
%% the same size during restore.
%%
If you restore a cluster into a VPC , you must provide a cluster subnet
%% group where you want the cluster restored.
%%
For more information about working with snapshots , go to Amazon Redshift
Snapshots in the Amazon Redshift Cluster Management Guide .
restore_from_cluster_snapshot(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to restore_from_cluster_snapshot/3 with no extra request options.
    restore_from_cluster_snapshot(Client, Input, []).
restore_from_cluster_snapshot(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"RestoreFromClusterSnapshot">>, Input, Options).
@doc Creates a new table from a table in an Amazon Redshift cluster
%% snapshot.
%%
You must create the new table within the Amazon Redshift cluster that the
%% snapshot was taken from.
%%
%% You cannot use `RestoreTableFromClusterSnapshot' to restore a table
with the same name as an existing table in an Amazon Redshift cluster .
%% That is, you cannot overwrite an existing table in a cluster with a
%% restored table. If you want to replace your original table with a new,
%% restored table, then rename or drop your original table before you call
%% `RestoreTableFromClusterSnapshot'. When you have renamed your original
%% table, then you can pass the original name of the table as the
%% `NewTableName' parameter value in the call to
%% `RestoreTableFromClusterSnapshot'. This way, you can replace the
%% original table with the table created from the snapshot.
%%
%% You can't use this operation to restore tables with interleaved sort
%% keys.
restore_table_from_cluster_snapshot(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to the arity-3 variant with no extra request options.
    restore_table_from_cluster_snapshot(Client, Input, []).
restore_table_from_cluster_snapshot(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"RestoreTableFromClusterSnapshot">>, Input, Options).
%% @doc Resumes a paused cluster.
resume_cluster(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to resume_cluster/3 with no extra request options.
    resume_cluster(Client, Input, []).
resume_cluster(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"ResumeCluster">>, Input, Options).
@doc Revokes an ingress rule in an Amazon Redshift security group for a
previously authorized IP range or Amazon EC2 security group .
%%
To add an ingress rule , see ` AuthorizeClusterSecurityGroupIngress ' .
For information about managing security groups , go to Amazon Redshift
Cluster Security Groups in the Amazon Redshift Cluster Management Guide .
revoke_cluster_security_group_ingress(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to the arity-3 variant with no extra request options.
    revoke_cluster_security_group_ingress(Client, Input, []).
revoke_cluster_security_group_ingress(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"RevokeClusterSecurityGroupIngress">>, Input, Options).
%% @doc Revokes access to a cluster.
revoke_endpoint_access(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to revoke_endpoint_access/3 with no extra request options.
    revoke_endpoint_access(Client, Input, []).
revoke_endpoint_access(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"RevokeEndpointAccess">>, Input, Options).
@doc Removes the ability of the specified Amazon Web Services account to
%% restore the specified snapshot.
%%
%% If the account is currently restoring the snapshot, the restore will run
%% to completion.
%%
For more information about working with snapshots , go to Amazon Redshift
Snapshots in the Amazon Redshift Cluster Management Guide .
revoke_snapshot_access(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to revoke_snapshot_access/3 with no extra request options.
    revoke_snapshot_access(Client, Input, []).
revoke_snapshot_access(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"RevokeSnapshotAccess">>, Input, Options).
%% @doc Rotates the encryption keys for a cluster.
rotate_encryption_key(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to rotate_encryption_key/3 with no extra request options.
    rotate_encryption_key(Client, Input, []).
rotate_encryption_key(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"RotateEncryptionKey">>, Input, Options).
%% @doc Updates the status of a partner integration.
update_partner_status(Client, Input)
  when is_map(Client), is_map(Input) ->
    %% Delegate to update_partner_status/3 with no extra request options.
    update_partner_status(Client, Input, []).
update_partner_status(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    %% `Options' is forwarded to the HTTP layer (see request/4).
    request(Client, <<"UpdatePartnerStatus">>, Input, Options).
%%====================================================================
Internal functions
%%====================================================================
%% Common entry point for every API operation above.
%% Returns the decoded response map on success, the decoded error document
%% plus HTTP metadata on a non-200 status, or a transport-level error.
-spec request(aws_client:aws_client(), binary(), map(), list()) ->
    {ok, Result, {integer(), list(), hackney:client()}} |
    {error, Error, {integer(), list(), hackney:client()}} |
    {error, term()} when
    Result :: map() | undefined,
    Error :: map().
request(Client, Action, Input, Options) ->
    %% Wrap the actual call in a thunk so aws_request:request/2 controls
    %% when/how it is executed (presumably for retry handling — see aws_request).
    RequestFun = fun() -> do_request(Client, Action, Input, Options) end,
    aws_request:request(RequestFun, Options).
%% Build, sign and transmit a single Query-protocol request to the
%% Redshift endpoint, then normalize the hackney result.
do_request(Client, Action, Input0, Options) ->
    ClientWithService = Client#{service => <<"redshift">>},
    Host = build_host(<<"redshift">>, ClientWithService),
    URL = build_url(Host, ClientWithService),
    Headers = [{<<"Host">>, Host},
               {<<"Content-Type">>, <<"application/x-www-form-urlencoded">>}],
    %% Query-protocol services identify the operation and API version as
    %% form fields alongside the caller-supplied input.
    Payload = aws_util:encode_query(Input0#{<<"Action">> => Action,
                                            <<"Version">> => <<"2012-12-01">>}),
    SignedHeaders = aws_request:sign_request(ClientWithService, <<"POST">>, URL, Headers, Payload),
    handle_response(hackney:request(post, URL, SignedHeaders, Payload, Options)).
%% Translate a hackney response into this module's {ok, ...}/{error, ...}
%% result shape, decoding XML bodies where present.
handle_response({ok, 200, ResponseHeaders, Client}) ->
    case hackney:body(Client) of
        {ok, <<>>} ->
            %% Successful call with an empty body: nothing to decode.
            {ok, undefined, {200, ResponseHeaders, Client}};
        {ok, Body} ->
            {ok, aws_util:decode_xml(Body), {200, ResponseHeaders, Client}}
    end;
handle_response({ok, StatusCode, ResponseHeaders, Client}) ->
    %% Any non-200 status carries an XML error document in the body.
    {ok, Body} = hackney:body(Client),
    {error, aws_util:decode_xml(Body), {StatusCode, ResponseHeaders, Client}};
handle_response({error, Reason}) ->
    %% Transport-level failure before any HTTP status was received.
    {error, Reason}.
%% Resolve the hostname to contact for this client configuration.
%% The `local' region short-circuits to a caller-supplied endpoint (or
%% "localhost" when none is given) — presumably for local test stacks.
%% Clause order matters: the endpoint-bearing `local' clause must win.
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := LocalEndpoint}) ->
    LocalEndpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
    <<"localhost">>;
build_host(Prefix, #{region := Region, endpoint := Suffix}) ->
    %% e.g. <<"redshift">>, <<"us-east-1">>, <<"amazonaws.com">> joined by dots.
    aws_util:binary_join([Prefix, Region, Suffix], <<".">>).
%% Compose the full request URL: scheme://host:port/
build_url(Host, Client) ->
    aws_util:binary_join(
      [aws_client:proto(Client), <<"://">>, Host, <<":">>, aws_client:port(Client), <<"/">>],
      <<"">>).
| null | https://raw.githubusercontent.com/aws-beam/aws-erlang/699287cee7dfc9dc8c08ced5f090dcc192c9cba8/src/aws_redshift.erl | erlang | WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
Overview
require techniques, such as polling or asynchronous callback handlers, to
determine when a command has been applied. In this reference, the
parameter descriptions indicate whether a change is applied immediately,
on the next instance reboot, or during the next maintenance window. For a
a data warehouse: provisioning capacity, monitoring and backing up the
You can focus on using your data to acquire new insights for your business
and customers.
Guide explains how to design, build, query, and maintain the databases
that make up your data warehouse.
====================================================================
API
====================================================================
@doc Exchanges a DC1 Reserved Node for a DC2 Reserved Node with no changes
to the configuration (term, payment type, or number of nodes) and no
additional costs.
@doc Adds a partner integration to a cluster.
This operation authorizes a partner to push status updates for the
specified database. To complete the integration, you also set up the
integration on the partner website.
@doc From a datashare consumer account, associates a datashare with the
If you make this association, the consumer can consume the datashare.
Depending on whether the application accessing your cluster is running on
Region.
overview of CIDR blocks, see the Wikipedia article on Classless
You must also associate the security group with a cluster so that clients
running on these IP addresses or the EC2 instance are authorized to
connect to the cluster. For information about managing security groups, go
Guide.
@doc From a data producer account, authorizes the sharing of a datashare
To authorize a datashare for a data consumer, the producer account must
have the correct access permissions.
@doc Grants access to a cluster.
specified snapshot.
@doc Deletes a set of cluster snapshots.
@doc Modifies the settings for a set of cluster snapshots.
@doc Cancels a resize operation for a cluster.
@doc Copies the specified automated cluster snapshot to a new manual
cluster snapshot.
The source must be an automated snapshot and it must be in the available
state.
of the cluster. Also, when the retention period of the snapshot expires,
snapshot for a longer period, you can make a manual copy of the snapshot.
Manual snapshots are retained until you delete them.
@doc Creates an authentication profile with the specified parameters.
@doc Creates a new cluster with the specified parameters.
To create a cluster in Virtual Private Cloud (VPC), you must provide a
cluster subnet group name. The cluster subnet group identifies the subnets
Amazon Redshift Cluster Management Guide.
Creating parameter groups is independent of creating clusters. You can
associate a cluster with a parameter group when you create the cluster.
You can also associate an existing cluster with a parameter group after
the cluster is created by using `ModifyCluster'.
Parameters in the parameter group define specific behavior that applies to
the databases you create on the cluster. For more information about
You use security groups to control access to non-VPC clusters.
@doc Creates a manual snapshot of the specified cluster.
The cluster must be in the `available' state.
group.
event categories, and event severities. Notifications will be sent for all
events you want that match those criteria. For example, you can specify
will only send notifications for those ERROR events in the Availability
and Backup categories for the specified clusters.
If you specify both the source type and source IDs, such as source type =
cluster and source identifier = my-cluster-1, notifications will be sent
for all the cluster events for my-cluster-1. If you specify a source type
but do not specify a source identifier, you will receive notice of the
type if you specify a source ID.
the keys used to encrypt the cluster databases.
when creating a cluster. The cluster will then store its encryption keys
@doc Creates a scheduled action.
For example, you can create a schedule of when to run the
`ResizeCluster' API operation.
copied snapshots in a destination region.
Guide.
@doc Create a snapshot schedule that can be associated to a cluster and
which overrides the default system backup schedule.
@doc Adds tags to a cluster.
for a resource, you will receive an error and the attempt will fail.
If you specify a key that already exists for the resource, the value for
that key will be updated with the new value.
cluster.
The usage limit is identified by the returned usage limit identifier.
@doc From a datashare producer account, removes authorization from the
specified datashare.
@doc Deletes an authentication profile.
@doc Deletes a previously provisioned cluster without its final snapshot
being created.
A successful response from the web service indicates that the request was
received correctly. Use `DescribeClusters' to monitor the status of
the deletion. The delete operation cannot be canceled or reverted once
If you want to shut down the cluster and retain it for future use, set
SkipFinalClusterSnapshot to `false' and specify a name for
FinalClusterSnapshotIdentifier. You can later restore this snapshot to
resume using the cluster. If a final cluster snapshot is requested, the
status of the cluster will be "final-snapshot" while the
Redshift begins deleting the cluster.
You cannot delete a parameter group if it is associated with a cluster.
You cannot delete a security group that is associated with any clusters.
You cannot delete the default security group.
@doc Deletes the specified manual snapshot.
The snapshot must be in the `available' state, with no other users
authorized to access the snapshot.
Unlike automated snapshots, manual snapshots are retained even after you
snapshots. You must delete manual snapshot explicitly to avoid getting
charged. If other accounts are authorized to access the snapshot, you must
revoke all of the authorizations before you can delete the snapshot.
@doc Deletes the specified cluster subnet group.
@doc Deletes a partner integration from a cluster.
Data can still flow to the cluster until the integration is deleted at the
partner's website.
@doc Deletes a scheduled action.
@doc Deletes the specified snapshot copy grant.
@doc Deletes a snapshot schedule.
@doc Deletes tags from a resource.
tag or tags.
@doc Deletes a usage limit from a cluster.
@doc Returns a list of attributes attached to an account
@doc Describes an authentication profile.
@doc Returns an array of `ClusterDbRevision' objects.
parameter groups you created and the default parameter group.
For each parameter group, the response includes the parameter group name,
description, and parameter group family name. You can optionally specify a
name to retrieve the description of a specific parameter group.
Redshift returns all parameter groups that match any combination of the
specified keys and values. For example, if you have `owner' and
`environment' for tag keys, and `admin' and `test' for tag
values, all parameter groups that have any combination of those values are
returned.
If both tag keys and values are omitted from the request, parameter groups
are returned regardless of whether they have tag keys or values associated
with them.
@doc Returns a detailed list of parameters contained within the specified
For each parameter the response includes information such as parameter
name, description, data type, value, whether the parameter value is
modifiable, and so on.
You can specify source filter to retrieve parameters of only specific
type. For example, to retrieve parameters that were modified by a user
action such as from `ModifyClusterParameterGroup', you can specify
source equal to user.
If the name of a security group is specified, the response will contain
only information about only that security group.
Redshift returns all security groups that match any combination of the
specified keys and values. For example, if you have `owner' and
`environment' for tag keys, and `admin' and `test' for tag
values, all security groups that have any combination of those values are
returned.
If both tag keys and values are omitted from the request, security groups
are returned regardless of whether they have tag keys or values associated
with them.
your cluster snapshots.
By default, this operation returns information about all snapshots of all
Services accounts.
Redshift returns all snapshots that match any combination of the specified
keys and values. For example, if you have `owner' and
`environment' for tag keys, and `admin' and `test' for tag
values, all snapshots that have any combination of those values are
returned. Only snapshots that you own are returned in the response; shared
snapshots are not returned with the tag key and tag value request
parameters.
If both tag keys and values are omitted from the request, snapshots are
returned regardless of whether they have tag keys or values associated
with them.
metadata about your cluster subnet groups.
By default, this operation returns information about all cluster subnet
Redshift returns all subnet groups that match any combination of the
specified keys and values. For example, if you have `owner' and
`environment' for tag keys, and `admin' and `test' for tag
values, all subnet groups that have any combination of those values are
returned.
If both tag keys and values are omitted from the request, subnet groups
are returned regardless of whether they have tag keys or values associated
with them.
@doc Returns a list of all the available maintenance tracks.
versions.
You can call this operation even before creating any clusters to learn
Cluster Management Guide.
@doc Returns properties of provisioned clusters including general cluster
properties, cluster database properties, maintenance and backup
properties, and security and access properties.
This operation supports pagination. For more information about managing
Management Guide.
Redshift returns all clusters that match any combination of the specified
keys and values. For example, if you have `owner' and
`environment' for tag keys, and `admin' and `test' for tag
values, all clusters that have any combination of those values are
returned.
If both tag keys and values are omitted from the request, clusters are
returned regardless of whether they have tag keys or values associated
with them.
@doc Shows the status of any inbound or outbound datashares available in
the specified account.
called is a consumer account identifier.
is a producer account identifier.
@doc Returns a list of parameter settings for the specified parameter
group family.
@doc Describes an endpoint authorization.
@doc Displays a list of event categories for all event source types, or
for a specified source type.
Event Notifications.
subscriptions for a customer account.
If you specify a subscription name, lists the description for that
subscription.
Redshift returns all event notification subscriptions that match any
combination of the specified keys and values. For example, if you have
`owner' and `environment' for tag keys, and `admin' and
`test' for tag values, all subscriptions that have any combination of
those values are returned.
If both tag keys and values are omitted from the request, subscriptions
are returned regardless of whether they have tag keys or values associated
with them.
@doc Returns events related to clusters, security groups, snapshots, and
Events specific to a particular cluster, security group, snapshot or
parameter group can be obtained by providing the name as a parameter. By
the specified keys and values. For example, if you have `owner' and
`environment' for tag keys, and `admin' and `test' for tag
values are returned.
certificates are returned regardless of whether they have tag keys or
values associated with them.
configuration.
specified keys and values. For example, if you have `owner' and
`environment' for tag keys, and `admin' and `test' for tag
returned.
are returned regardless of whether they have tag keys or values associated
with them.
@doc Describes whether information, such as queries and connection
@doc Returns properties of possible node configurations such as node type,
number of nodes, and disk usage for the specified action type.
@doc Returns a list of orderable cluster options.
Before you create a new cluster you can use this operation to find what
types you can request. The node types differ by available storage, memory,
CPU and price. With the cost involved you might want to obtain a list of
cluster options in the specific region and specify values when creating a
@doc Returns information about the partner integrations defined for a
cluster.
@doc Returns exchange status details and associated metadata for a
reserved-node exchange.
Statuses include such values as in progress and requested.
Redshift with their descriptions including the node type, the fixed and
recurring costs of reserving the node and duration the node will be
reserved for you.
These descriptions help you determine which reserve node offering you want
to purchase. You then use the unique offering ID in you call to
For more information about reserved node offerings, go to Purchasing
@doc Returns the descriptions of the reserved nodes.
@doc Returns information about the last resize operation for the specified
cluster.
If no resize operation has ever been initiated for the specified cluster,
a `HTTP 404' error is returned. If a resize operation was initiated
and completed, the status of the resize remains as `SUCCEEDED' until
the next resize.
A resize operation can be requested using `ModifyCluster' and
specifying a different number or type of nodes for the cluster.
@doc Describes properties of scheduled actions.
Services account in the destination region.
Guide.
@doc Returns a list of snapshot schedules.
@doc Returns account level backups storage size and provisional storage.
`RestoreTableFromClusterSnapshot' API action.
parameter, then `DescribeTableRestoreStatus' returns the status of all
table restore requests ordered by the date and time of the request in
ascending order. Otherwise `DescribeTableRestoreStatus' returns the
@doc Returns a list of tags.
can return all tags for a given type of resource, such as clusters,
snapshots, and so on.
the same request.
results to return in a request.
</li> </ul> If you specify both tag keys and tag values in the same
of the specified keys and values. For example, if you have `owner' and
`environment' for tag keys, and `admin' and `test' for tag
values, all resources that have any combination of those values are
returned.
If both tag keys and values are omitted from the request, resources are
returned regardless of whether they have tag keys or values associated
with them.
@doc Shows usage limits on a cluster.
Results are filtered based on the combination of input usage limit
identifier, cluster identifier, and feature type parameters:
<ul> <li> If usage limit identifier, cluster identifier, and feature type
are not provided, then all usage limit objects for the current account in
the current region are returned.
</li> <li> If usage limit identifier is provided, then the corresponding
usage limit object is returned.
</li> <li> If cluster identifier is provided, then all usage limit objects
for the specified cluster are returned.
</li> <li> If cluster identifier and feature type are provided, then all
usage limit objects for the combination of cluster and feature are
returned.
</li> </ul>
@doc Stops logging information, such as queries and connection attempts,
another region for a specified cluster.
If your cluster and its snapshots are encrypted using an encrypted
Redshift permission to the key in the destination region.
@doc From a datashare consumer account, remove association for the
specified datashare.
@doc Starts logging information, such as queries and connection attempts,
region for a specified cluster.
@doc Returns a database user name and temporary password with temporary
that the user will join at log on. By default, the temporary credentials
see Using IAM Authentication to Generate Database User Credentials in the
Amazon Redshift Cluster Management Guide.
to all necessary actions and resources. For more information about
`dbgroups'.
the policy must include the `redshift:CreateClusterUser' permission.
access to the resource `dbname' for the specified database name.
@doc Returns a database user name and temporary password with temporary
actions and resources. For more information about permissions, see Using
Management Guide.
@doc Gets the configuration options for the reserved-node exchange.
These options include information about the source reserved node and
target reserved node offering. Details include the node type, the price,
the node count, and the offering type.
@doc Returns an array of DC2 ReservedNodeOfferings that matches the
@doc This operation is retired.
automatically determines whether to use AQUA (Advanced Query Accelerator).
@doc Modifies an authentication profile.
@doc Modifies the settings for a cluster.
You can also change node type and the number of nodes to scale up or down
the cluster. When resizing a cluster, you must specify both the number of
nodes and the node type even if one of the parameters does not change.
You can add another security or parameter group, or change the admin user
password. Resetting a cluster password or modifying the security groups
associated with a cluster do not need a reboot. However, modifying a
parameter group requires a reboot for parameters to take effect. For more
Amazon Redshift Cluster Management Guide.
@doc Modifies the database revision of a cluster.
The database revision is a unique revision of the database running in a
cluster.
@doc Modifies the maintenance settings of a cluster.
@doc Modifies the parameters of a parameter group.
For the parameters parameter, it can't contain ASCII characters.
@doc Modifies the settings for a snapshot.
This example modifies the manual retention period setting for a cluster
snapshot.
@doc Modifies a snapshot schedule for a cluster.
subnets.
The operation replaces the existing list of subnets with the new list of
subnets.
@doc Modifies a scheduled action.
@doc Modifies the number of days to retain snapshots in the destination
By default, this operation only changes the retention period of copied
automated snapshots. The retention periods for both new and existing
copied automated snapshots are updated with the new retention period. You
can set the manual option to change only the retention periods of copied
manual snapshots. If you set this option, only newly copied manual
snapshots have the new retention period.
@doc Modifies a snapshot schedule.
Any schedule associated with a cluster is modified asynchronously.
@doc Modifies a usage limit in a cluster.
You can't modify the feature type or period of a usage limit.
@doc Pauses a cluster.
@doc Allows you to purchase reserved nodes.
`DescribeReservedNodeOfferings' API to obtain the available reserved
node offerings. You can call this API by providing a specific reserved
node offering and the number of nodes you want to reserve.
For more information about reserved node offerings, go to Purchasing
@doc Reboots a cluster.
This action is taken as soon as possible. It results in a momentary outage
to the cluster, during which the cluster status is set to `rebooting'.
A cluster event is created when the reboot is completed. Any pending
cluster modifications (see `ModifyCluster') are applied at this
@doc From a datashare consumer account, rejects the specified datashare.
default values and sets the source values of the parameters to
"engine-default".
To reset the entire parameter group specify the ResetAllParameters
parameter. For parameter changes to take effect you must reboot any
associated clusters.
@doc Changes the size of the cluster.
You can change the cluster's type, or change the number or type of
nodes. The default behavior is to use the elastic resize method. With an
elastic resize, your cluster is available for read and write operations
more quickly than with the classic resize method.
Elastic resize operations have the following restrictions:
<ul> <li> You can only resize clusters of the following types:
</li> <li> dc1.8xlarge (if your cluster is in a VPC)
</li> <li> dc2.large
</li> <li> dc2.8xlarge
</li> <li> ds2.xlarge
</li> <li> ds2.8xlarge
</li> <li> ra3.xlplus
</li> <li> ra3.4xlarge
</li> <li> ra3.16xlarge
</li> </ul> </li> <li> The type of nodes that you add must match the node
type for the cluster.
</li> </ul>
@doc Creates a new cluster from a snapshot.
configuration as the original cluster from which the snapshot was created,
except that the new cluster is created with the default cluster security
use the `ModifyCluster' API to associate a different security group
and different parameter group with the restored cluster. If you are using
a DS node type, you can also choose to change to another DS node type of
the same size during restore.
group where you want the cluster restored.
snapshot.
snapshot was taken from.
You cannot use `RestoreTableFromClusterSnapshot' to restore a table
That is, you cannot overwrite an existing table in a cluster with a
restored table. If you want to replace your original table with a new,
restored table, then rename or drop your original table before you call
`RestoreTableFromClusterSnapshot'. When you have renamed your original
table, then you can pass the original name of the table as the
`NewTableName' parameter value in the call to
`RestoreTableFromClusterSnapshot'. This way, you can replace the
original table with the table created from the snapshot.
You can't use this operation to restore tables with interleaved sort
keys.
@doc Resumes a paused cluster.
@doc Revokes access to a cluster.
restore the specified snapshot.
If the account is currently restoring the snapshot, the restore will run
to completion.
@doc Rotates the encryption keys for a cluster.
@doc Updates the status of a partner integration.
====================================================================
==================================================================== | See -beam/aws-codegen for more details .
@doc Amazon Redshift
This is an interface reference for Amazon Redshift .
It contains documentation for one of the programming or command line
interfaces you can use to manage Amazon Redshift clusters . Note that
Amazon Redshift is asynchronous , which means that some interfaces may
summary of the Amazon Redshift cluster management interfaces , go to Using
the Amazon Redshift Management Interfaces .
Amazon Redshift manages all the work of setting up , operating , and scaling
cluster , and applying patches and upgrades to the Amazon Redshift engine .
If you are a first - time user of Amazon Redshift , we recommend that you
begin by reading the Amazon Redshift Getting Started Guide .
If you are a database developer , the Amazon Redshift Database Developer
-module(aws_redshift).
-export([accept_reserved_node_exchange/2,
accept_reserved_node_exchange/3,
add_partner/2,
add_partner/3,
associate_data_share_consumer/2,
associate_data_share_consumer/3,
authorize_cluster_security_group_ingress/2,
authorize_cluster_security_group_ingress/3,
authorize_data_share/2,
authorize_data_share/3,
authorize_endpoint_access/2,
authorize_endpoint_access/3,
authorize_snapshot_access/2,
authorize_snapshot_access/3,
batch_delete_cluster_snapshots/2,
batch_delete_cluster_snapshots/3,
batch_modify_cluster_snapshots/2,
batch_modify_cluster_snapshots/3,
cancel_resize/2,
cancel_resize/3,
copy_cluster_snapshot/2,
copy_cluster_snapshot/3,
create_authentication_profile/2,
create_authentication_profile/3,
create_cluster/2,
create_cluster/3,
create_cluster_parameter_group/2,
create_cluster_parameter_group/3,
create_cluster_security_group/2,
create_cluster_security_group/3,
create_cluster_snapshot/2,
create_cluster_snapshot/3,
create_cluster_subnet_group/2,
create_cluster_subnet_group/3,
create_endpoint_access/2,
create_endpoint_access/3,
create_event_subscription/2,
create_event_subscription/3,
create_hsm_client_certificate/2,
create_hsm_client_certificate/3,
create_hsm_configuration/2,
create_hsm_configuration/3,
create_scheduled_action/2,
create_scheduled_action/3,
create_snapshot_copy_grant/2,
create_snapshot_copy_grant/3,
create_snapshot_schedule/2,
create_snapshot_schedule/3,
create_tags/2,
create_tags/3,
create_usage_limit/2,
create_usage_limit/3,
deauthorize_data_share/2,
deauthorize_data_share/3,
delete_authentication_profile/2,
delete_authentication_profile/3,
delete_cluster/2,
delete_cluster/3,
delete_cluster_parameter_group/2,
delete_cluster_parameter_group/3,
delete_cluster_security_group/2,
delete_cluster_security_group/3,
delete_cluster_snapshot/2,
delete_cluster_snapshot/3,
delete_cluster_subnet_group/2,
delete_cluster_subnet_group/3,
delete_endpoint_access/2,
delete_endpoint_access/3,
delete_event_subscription/2,
delete_event_subscription/3,
delete_hsm_client_certificate/2,
delete_hsm_client_certificate/3,
delete_hsm_configuration/2,
delete_hsm_configuration/3,
delete_partner/2,
delete_partner/3,
delete_scheduled_action/2,
delete_scheduled_action/3,
delete_snapshot_copy_grant/2,
delete_snapshot_copy_grant/3,
delete_snapshot_schedule/2,
delete_snapshot_schedule/3,
delete_tags/2,
delete_tags/3,
delete_usage_limit/2,
delete_usage_limit/3,
describe_account_attributes/2,
describe_account_attributes/3,
describe_authentication_profiles/2,
describe_authentication_profiles/3,
describe_cluster_db_revisions/2,
describe_cluster_db_revisions/3,
describe_cluster_parameter_groups/2,
describe_cluster_parameter_groups/3,
describe_cluster_parameters/2,
describe_cluster_parameters/3,
describe_cluster_security_groups/2,
describe_cluster_security_groups/3,
describe_cluster_snapshots/2,
describe_cluster_snapshots/3,
describe_cluster_subnet_groups/2,
describe_cluster_subnet_groups/3,
describe_cluster_tracks/2,
describe_cluster_tracks/3,
describe_cluster_versions/2,
describe_cluster_versions/3,
describe_clusters/2,
describe_clusters/3,
describe_data_shares/2,
describe_data_shares/3,
describe_data_shares_for_consumer/2,
describe_data_shares_for_consumer/3,
describe_data_shares_for_producer/2,
describe_data_shares_for_producer/3,
describe_default_cluster_parameters/2,
describe_default_cluster_parameters/3,
describe_endpoint_access/2,
describe_endpoint_access/3,
describe_endpoint_authorization/2,
describe_endpoint_authorization/3,
describe_event_categories/2,
describe_event_categories/3,
describe_event_subscriptions/2,
describe_event_subscriptions/3,
describe_events/2,
describe_events/3,
describe_hsm_client_certificates/2,
describe_hsm_client_certificates/3,
describe_hsm_configurations/2,
describe_hsm_configurations/3,
describe_logging_status/2,
describe_logging_status/3,
describe_node_configuration_options/2,
describe_node_configuration_options/3,
describe_orderable_cluster_options/2,
describe_orderable_cluster_options/3,
describe_partners/2,
describe_partners/3,
describe_reserved_node_exchange_status/2,
describe_reserved_node_exchange_status/3,
describe_reserved_node_offerings/2,
describe_reserved_node_offerings/3,
describe_reserved_nodes/2,
describe_reserved_nodes/3,
describe_resize/2,
describe_resize/3,
describe_scheduled_actions/2,
describe_scheduled_actions/3,
describe_snapshot_copy_grants/2,
describe_snapshot_copy_grants/3,
describe_snapshot_schedules/2,
describe_snapshot_schedules/3,
describe_storage/2,
describe_storage/3,
describe_table_restore_status/2,
describe_table_restore_status/3,
describe_tags/2,
describe_tags/3,
describe_usage_limits/2,
describe_usage_limits/3,
disable_logging/2,
disable_logging/3,
disable_snapshot_copy/2,
disable_snapshot_copy/3,
disassociate_data_share_consumer/2,
disassociate_data_share_consumer/3,
enable_logging/2,
enable_logging/3,
enable_snapshot_copy/2,
enable_snapshot_copy/3,
get_cluster_credentials/2,
get_cluster_credentials/3,
get_cluster_credentials_with_iam/2,
get_cluster_credentials_with_iam/3,
get_reserved_node_exchange_configuration_options/2,
get_reserved_node_exchange_configuration_options/3,
get_reserved_node_exchange_offerings/2,
get_reserved_node_exchange_offerings/3,
modify_aqua_configuration/2,
modify_aqua_configuration/3,
modify_authentication_profile/2,
modify_authentication_profile/3,
modify_cluster/2,
modify_cluster/3,
modify_cluster_db_revision/2,
modify_cluster_db_revision/3,
modify_cluster_iam_roles/2,
modify_cluster_iam_roles/3,
modify_cluster_maintenance/2,
modify_cluster_maintenance/3,
modify_cluster_parameter_group/2,
modify_cluster_parameter_group/3,
modify_cluster_snapshot/2,
modify_cluster_snapshot/3,
modify_cluster_snapshot_schedule/2,
modify_cluster_snapshot_schedule/3,
modify_cluster_subnet_group/2,
modify_cluster_subnet_group/3,
modify_endpoint_access/2,
modify_endpoint_access/3,
modify_event_subscription/2,
modify_event_subscription/3,
modify_scheduled_action/2,
modify_scheduled_action/3,
modify_snapshot_copy_retention_period/2,
modify_snapshot_copy_retention_period/3,
modify_snapshot_schedule/2,
modify_snapshot_schedule/3,
modify_usage_limit/2,
modify_usage_limit/3,
pause_cluster/2,
pause_cluster/3,
purchase_reserved_node_offering/2,
purchase_reserved_node_offering/3,
reboot_cluster/2,
reboot_cluster/3,
reject_data_share/2,
reject_data_share/3,
reset_cluster_parameter_group/2,
reset_cluster_parameter_group/3,
resize_cluster/2,
resize_cluster/3,
restore_from_cluster_snapshot/2,
restore_from_cluster_snapshot/3,
restore_table_from_cluster_snapshot/2,
restore_table_from_cluster_snapshot/3,
resume_cluster/2,
resume_cluster/3,
revoke_cluster_security_group_ingress/2,
revoke_cluster_security_group_ingress/3,
revoke_endpoint_access/2,
revoke_endpoint_access/3,
revoke_snapshot_access/2,
revoke_snapshot_access/3,
rotate_encryption_key/2,
rotate_encryption_key/3,
update_partner_status/2,
update_partner_status/3]).
-include_lib("hackney/include/hackney_lib.hrl").
%% @doc Invokes the `AcceptReservedNodeExchange' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
accept_reserved_node_exchange(Client, Payload) when is_map(Client), is_map(Payload) ->
    accept_reserved_node_exchange(Client, Payload, []).
accept_reserved_node_exchange(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"AcceptReservedNodeExchange">>, Payload, Opts).
%% @doc Invokes the `AddPartner' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
add_partner(Client, Payload) when is_map(Client), is_map(Payload) ->
    add_partner(Client, Payload, []).
add_partner(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"AddPartner">>, Payload, Opts).
account ( AssociateEntireAccount ) or the specified namespace ( ConsumerArn ) .
%% @doc Invokes the `AssociateDataShareConsumer' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
associate_data_share_consumer(Client, Payload) when is_map(Client), is_map(Payload) ->
    associate_data_share_consumer(Client, Payload, []).
associate_data_share_consumer(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"AssociateDataShareConsumer">>, Payload, Opts).
@doc Adds an inbound ( ingress ) rule to an Amazon Redshift security group .
the Internet or an Amazon EC2 instance , you can authorize inbound access
to either a Classless Interdomain Routing ( CIDR)/Internet Protocol ( IP )
range or to an Amazon EC2 security group . You can add as many as 20
ingress rules to an Amazon Redshift security group .
If you authorize access to an Amazon EC2 security group , specify
EC2SecurityGroupName and EC2SecurityGroupOwnerId . The Amazon EC2 security
group and Amazon Redshift cluster must be in the same Amazon Web Services
If you authorize access to a CIDR / IP address range , specify CIDRIP . For an
Inter - Domain Routing .
to Working with Security Groups in the Amazon Redshift Cluster Management
%% @doc Invokes the `AuthorizeClusterSecurityGroupIngress' API action via
%% `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
authorize_cluster_security_group_ingress(Client, Payload) when is_map(Client), is_map(Payload) ->
    authorize_cluster_security_group_ingress(Client, Payload, []).
authorize_cluster_security_group_ingress(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"AuthorizeClusterSecurityGroupIngress">>, Payload, Opts).
with one or more consumer accounts or managing entities .
%% @doc Invokes the `AuthorizeDataShare' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
authorize_data_share(Client, Payload) when is_map(Client), is_map(Payload) ->
    authorize_data_share(Client, Payload, []).
authorize_data_share(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"AuthorizeDataShare">>, Payload, Opts).
%% @doc Invokes the `AuthorizeEndpointAccess' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
authorize_endpoint_access(Client, Payload) when is_map(Client), is_map(Payload) ->
    authorize_endpoint_access(Client, Payload, []).
authorize_endpoint_access(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"AuthorizeEndpointAccess">>, Payload, Opts).
@doc Authorizes the specified Amazon Web Services account to restore the
For more information about working with snapshots , go to Amazon Redshift
Snapshots in the Amazon Redshift Cluster Management Guide .
%% @doc Invokes the `AuthorizeSnapshotAccess' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
authorize_snapshot_access(Client, Payload) when is_map(Client), is_map(Payload) ->
    authorize_snapshot_access(Client, Payload, []).
authorize_snapshot_access(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"AuthorizeSnapshotAccess">>, Payload, Opts).
%% @doc Invokes the `BatchDeleteClusterSnapshots' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
batch_delete_cluster_snapshots(Client, Payload) when is_map(Client), is_map(Payload) ->
    batch_delete_cluster_snapshots(Client, Payload, []).
batch_delete_cluster_snapshots(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"BatchDeleteClusterSnapshots">>, Payload, Opts).
%% @doc Invokes the `BatchModifyClusterSnapshots' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
batch_modify_cluster_snapshots(Client, Payload) when is_map(Client), is_map(Payload) ->
    batch_modify_cluster_snapshots(Client, Payload, []).
batch_modify_cluster_snapshots(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"BatchModifyClusterSnapshots">>, Payload, Opts).
%% @doc Invokes the `CancelResize' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
cancel_resize(Client, Payload) when is_map(Client), is_map(Payload) ->
    cancel_resize(Client, Payload, []).
cancel_resize(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"CancelResize">>, Payload, Opts).
When you delete a cluster , Amazon Redshift deletes any automated snapshots
Amazon Redshift automatically deletes it . If you want to keep an automated
For more information about working with snapshots , go to Amazon Redshift
Snapshots in the Amazon Redshift Cluster Management Guide .
%% @doc Invokes the `CopyClusterSnapshot' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
copy_cluster_snapshot(Client, Payload) when is_map(Client), is_map(Payload) ->
    copy_cluster_snapshot(Client, Payload, []).
copy_cluster_snapshot(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"CopyClusterSnapshot">>, Payload, Opts).
%% @doc Invokes the `CreateAuthenticationProfile' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
create_authentication_profile(Client, Payload) when is_map(Client), is_map(Payload) ->
    create_authentication_profile(Client, Payload, []).
create_authentication_profile(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"CreateAuthenticationProfile">>, Payload, Opts).
of your VPC that Amazon Redshift uses when creating the cluster . For more
information about managing clusters , go to Amazon Redshift Clusters in the
%% @doc Invokes the `CreateCluster' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
create_cluster(Client, Payload) when is_map(Client), is_map(Payload) ->
    create_cluster(Client, Payload, []).
create_cluster(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"CreateCluster">>, Payload, Opts).
@doc Creates an Amazon Redshift parameter group .
parameters and parameter groups , go to Amazon Redshift Parameter Groups in
the Amazon Redshift Cluster Management Guide .
%% @doc Invokes the `CreateClusterParameterGroup' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
create_cluster_parameter_group(Client, Payload) when is_map(Client), is_map(Payload) ->
    create_cluster_parameter_group(Client, Payload, []).
create_cluster_parameter_group(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"CreateClusterParameterGroup">>, Payload, Opts).
@doc Creates a new Amazon Redshift security group .
For information about managing security groups , go to Amazon Redshift
Cluster Security Groups in the Amazon Redshift Cluster Management Guide .
%% @doc Invokes the `CreateClusterSecurityGroup' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
create_cluster_security_group(Client, Payload) when is_map(Client), is_map(Payload) ->
    create_cluster_security_group(Client, Payload, []).
create_cluster_security_group(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"CreateClusterSecurityGroup">>, Payload, Opts).
For more information about working with snapshots , go to Amazon Redshift
Snapshots in the Amazon Redshift Cluster Management Guide .
%% @doc Invokes the `CreateClusterSnapshot' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
create_cluster_snapshot(Client, Payload) when is_map(Client), is_map(Payload) ->
    create_cluster_snapshot(Client, Payload, []).
create_cluster_snapshot(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"CreateClusterSnapshot">>, Payload, Opts).
@doc Creates a new Amazon Redshift subnet group .
You must provide a list of one or more subnets in your existing Amazon
Virtual Private Cloud ( Amazon VPC ) when creating Amazon Redshift subnet
For information about subnet groups , go to Amazon Redshift Cluster Subnet
Groups in the Amazon Redshift Cluster Management Guide .
%% @doc Invokes the `CreateClusterSubnetGroup' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
create_cluster_subnet_group(Client, Payload) when is_map(Client), is_map(Payload) ->
    create_cluster_subnet_group(Client, Payload, []).
create_cluster_subnet_group(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"CreateClusterSubnetGroup">>, Payload, Opts).
@doc Creates a Redshift - managed VPC endpoint .
%% @doc Invokes the `CreateEndpointAccess' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
create_endpoint_access(Client, Payload) when is_map(Client), is_map(Payload) ->
    create_endpoint_access(Client, Payload, []).
create_endpoint_access(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"CreateEndpointAccess">>, Payload, Opts).
@doc Creates an Amazon Redshift event notification subscription .
This action requires an ARN ( Amazon Resource Name ) of an Amazon SNS topic
created by either the Amazon Redshift console , the Amazon SNS console , or
the Amazon SNS API . To obtain an ARN with Amazon SNS , you must create a
topic in Amazon SNS and subscribe to the topic . The ARN is displayed in
the SNS console .
You can specify the source type , and lists of Amazon Redshift source IDs ,
source type = cluster , source ID = my - cluster-1 and mycluster2 , event
categories = Availability , Backup , and severity = ERROR . The subscription
events for the objects of that type in your Amazon Web Services account .
If you do not specify either the SourceType nor the SourceIdentifier , you
will be notified of events generated from all Amazon Redshift sources
belonging to your Amazon Web Services account . You must specify a source
%% @doc Invokes the `CreateEventSubscription' API action via `request/4'.
%%
%% The two-argument form delegates to the three-argument form with an
%% empty option list.
create_event_subscription(Client, Payload) when is_map(Client), is_map(Payload) ->
    create_event_subscription(Client, Payload, []).
create_event_subscription(Client, Payload, Opts) when is_map(Client), is_map(Payload), is_list(Opts) ->
    request(Client, <<"CreateEventSubscription">>, Payload, Opts).
@doc Creates an HSM client certificate that an Amazon Redshift cluster
will use to connect to the client 's HSM in order to store and retrieve
The command returns a public key , which you must store in the HSM . In
addition to creating the HSM certificate , you must create an Amazon
Redshift HSM configuration that provides a cluster the information needed
to store and use encryption keys in the HSM . For more information , go to
Hardware Security Modules in the Amazon Redshift Cluster Management Guide .
%% @doc Creates an HSM client certificate for a cluster to connect to
%% its HSM. The /2 clause delegates to the /3 clause with no options.
create_hsm_client_certificate(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_hsm_client_certificate(Client, Input, []).
%% Issues the CreateHsmClientCertificate API request with caller Options.
create_hsm_client_certificate(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateHsmClientCertificate">>, Input, Options).
@doc Creates an HSM configuration that contains the information required
by an Amazon Redshift cluster to store and use database encryption keys in
a Hardware Security Module ( HSM ) .
After creating the HSM configuration , you can specify it as a parameter
in the HSM .
In addition to creating an HSM configuration , you must also create an HSM
client certificate . For more information , go to Hardware Security Modules
in the Amazon Redshift Cluster Management Guide .
%% @doc Creates an HSM configuration used by a cluster to store and use
%% database encryption keys in an HSM. The /2 clause adds no options.
create_hsm_configuration(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_hsm_configuration(Client, Input, []).
%% Issues the CreateHsmConfiguration API request with caller Options.
create_hsm_configuration(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateHsmConfiguration">>, Input, Options).
A scheduled action contains a schedule and an Amazon Redshift API action .
%% @doc Creates a scheduled action (a schedule plus a Redshift API
%% action). The /2 clause delegates to the /3 clause with no options.
create_scheduled_action(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_scheduled_action(Client, Input, []).
%% Issues the CreateScheduledAction API request with caller Options.
create_scheduled_action(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateScheduledAction">>, Input, Options).
@doc Creates a snapshot copy grant that permits Amazon Redshift to use an
encrypted symmetric key from Key Management Service ( KMS ) to encrypt
For more information about managing snapshot copy grants , go to Amazon
Redshift Database Encryption in the Amazon Redshift Cluster Management
%% @doc Creates a snapshot copy grant permitting Redshift to use a KMS
%% key for snapshot encryption. The /2 clause adds no options.
create_snapshot_copy_grant(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_snapshot_copy_grant(Client, Input, []).
%% Issues the CreateSnapshotCopyGrant API request with caller Options.
create_snapshot_copy_grant(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateSnapshotCopyGrant">>, Input, Options).
%% @doc Creates a snapshot schedule. The /2 clause delegates to the
%% /3 clause with an empty request-option list.
create_snapshot_schedule(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_snapshot_schedule(Client, Input, []).
%% Issues the CreateSnapshotSchedule API request with caller Options.
create_snapshot_schedule(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateSnapshotSchedule">>, Input, Options).
A resource can have up to 50 tags . If you try to create more than 50 tags
%% @doc Adds tags to a resource (up to 50 per resource). The /2 clause
%% delegates to the /3 clause with an empty request-option list.
create_tags(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_tags(Client, Input, []).
%% Issues the CreateTags API request with caller Options.
create_tags(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateTags">>, Input, Options).
@doc Creates a usage limit for a specified Amazon Redshift feature on a
%% @doc Creates a usage limit for a Redshift feature on a cluster.
%% The /2 clause delegates to the /3 clause with no options.
create_usage_limit(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_usage_limit(Client, Input, []).
%% Issues the CreateUsageLimit API request with caller Options.
create_usage_limit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateUsageLimit">>, Input, Options).
%% @doc Calls the Redshift DeauthorizeDataShare API action. The /2
%% clause delegates to the /3 clause with an empty option list.
deauthorize_data_share(Client, Input)
  when is_map(Client), is_map(Input) ->
    deauthorize_data_share(Client, Input, []).
%% Issues the DeauthorizeDataShare API request with caller Options.
deauthorize_data_share(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeauthorizeDataShare">>, Input, Options).
%% @doc Deletes an authentication profile. The /2 clause delegates to
%% the /3 clause with an empty request-option list.
delete_authentication_profile(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_authentication_profile(Client, Input, []).
%% Issues the DeleteAuthenticationProfile API request with caller Options.
delete_authentication_profile(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteAuthenticationProfile">>, Input, Options).
submitted . For more information about managing clusters , go to Amazon
Redshift Clusters in the Amazon Redshift Cluster Management Guide .
snapshot is being taken , then it 's "deleting" once Amazon
For more information about managing clusters , go to Amazon Redshift
Clusters in the Amazon Redshift Cluster Management Guide .
%% @doc Deletes a previously provisioned cluster. The /2 clause
%% delegates to the /3 clause with an empty request-option list.
delete_cluster(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_cluster(Client, Input, []).
%% Issues the DeleteCluster API request with caller Options.
delete_cluster(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteCluster">>, Input, Options).
@doc Deletes a specified Amazon Redshift parameter group .
%% @doc Deletes a specified Amazon Redshift parameter group. The /2
%% clause delegates to the /3 clause with no options.
delete_cluster_parameter_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_cluster_parameter_group(Client, Input, []).
%% Issues the DeleteClusterParameterGroup API request with caller Options.
delete_cluster_parameter_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteClusterParameterGroup">>, Input, Options).
@doc Deletes an Amazon Redshift security group .
For information about managing security groups , go to Amazon Redshift
Cluster Security Groups in the Amazon Redshift Cluster Management Guide .
%% @doc Deletes an Amazon Redshift security group. The /2 clause
%% delegates to the /3 clause with an empty option list.
delete_cluster_security_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_cluster_security_group(Client, Input, []).
%% Issues the DeleteClusterSecurityGroup API request with caller Options.
delete_cluster_security_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteClusterSecurityGroup">>, Input, Options).
delete your cluster . Amazon Redshift does not delete your manual
%% @doc Deletes a manual cluster snapshot. The /2 clause delegates to
%% the /3 clause with an empty request-option list.
delete_cluster_snapshot(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_cluster_snapshot(Client, Input, []).
%% Issues the DeleteClusterSnapshot API request with caller Options.
delete_cluster_snapshot(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteClusterSnapshot">>, Input, Options).
%% @doc Deletes a cluster subnet group. The /2 clause delegates to
%% the /3 clause with an empty request-option list.
delete_cluster_subnet_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_cluster_subnet_group(Client, Input, []).
%% Issues the DeleteClusterSubnetGroup API request with caller Options.
delete_cluster_subnet_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteClusterSubnetGroup">>, Input, Options).
@doc Deletes a Redshift - managed VPC endpoint .
%% @doc Deletes a Redshift-managed VPC endpoint. The /2 clause
%% delegates to the /3 clause with an empty option list.
delete_endpoint_access(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_endpoint_access(Client, Input, []).
%% Issues the DeleteEndpointAccess API request with caller Options.
delete_endpoint_access(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteEndpointAccess">>, Input, Options).
@doc Deletes an Amazon Redshift event notification subscription .
%% @doc Deletes an Amazon Redshift event notification subscription.
%% The /2 clause delegates to the /3 clause with no options.
delete_event_subscription(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_event_subscription(Client, Input, []).
%% Issues the DeleteEventSubscription API request with caller Options.
delete_event_subscription(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteEventSubscription">>, Input, Options).
@doc Deletes the specified HSM client certificate .
%% @doc Deletes the specified HSM client certificate. The /2 clause
%% delegates to the /3 clause with an empty option list.
delete_hsm_client_certificate(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_hsm_client_certificate(Client, Input, []).
%% Issues the DeleteHsmClientCertificate API request with caller Options.
delete_hsm_client_certificate(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteHsmClientCertificate">>, Input, Options).
@doc Deletes the specified Amazon Redshift HSM configuration .
%% @doc Deletes the specified Amazon Redshift HSM configuration.
%% The /2 clause delegates to the /3 clause with no options.
delete_hsm_configuration(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_hsm_configuration(Client, Input, []).
%% Issues the DeleteHsmConfiguration API request with caller Options.
delete_hsm_configuration(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteHsmConfiguration">>, Input, Options).
%% @doc Calls the Redshift DeletePartner API action. The /2 clause
%% delegates to the /3 clause with an empty option list.
delete_partner(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_partner(Client, Input, []).
%% Issues the DeletePartner API request with caller Options.
delete_partner(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeletePartner">>, Input, Options).
%% @doc Deletes a scheduled action. The /2 clause delegates to the
%% /3 clause with an empty request-option list.
delete_scheduled_action(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_scheduled_action(Client, Input, []).
%% Issues the DeleteScheduledAction API request with caller Options.
delete_scheduled_action(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteScheduledAction">>, Input, Options).
%% @doc Deletes a snapshot copy grant. The /2 clause delegates to the
%% /3 clause with an empty request-option list.
delete_snapshot_copy_grant(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_snapshot_copy_grant(Client, Input, []).
%% Issues the DeleteSnapshotCopyGrant API request with caller Options.
delete_snapshot_copy_grant(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteSnapshotCopyGrant">>, Input, Options).
%% @doc Deletes a snapshot schedule. The /2 clause delegates to the
%% /3 clause with an empty request-option list.
delete_snapshot_schedule(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_snapshot_schedule(Client, Input, []).
%% Issues the DeleteSnapshotSchedule API request with caller Options.
delete_snapshot_schedule(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteSnapshotSchedule">>, Input, Options).
You must provide the ARN of the resource from which you want to delete the
%% @doc Deletes tags from a resource identified by its ARN. The /2
%% clause delegates to the /3 clause with no options.
delete_tags(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_tags(Client, Input, []).
%% Issues the DeleteTags API request with caller Options.
delete_tags(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteTags">>, Input, Options).
%% @doc Deletes a usage limit. The /2 clause delegates to the /3
%% clause with an empty request-option list.
delete_usage_limit(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_usage_limit(Client, Input, []).
%% Issues the DeleteUsageLimit API request with caller Options.
delete_usage_limit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteUsageLimit">>, Input, Options).
%% @doc Calls the Redshift DescribeAccountAttributes API action.
%% The /2 clause delegates to the /3 clause with no options.
describe_account_attributes(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_account_attributes(Client, Input, []).
%% Issues the DescribeAccountAttributes API request with caller Options.
describe_account_attributes(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeAccountAttributes">>, Input, Options).
%% @doc Calls the Redshift DescribeAuthenticationProfiles API action.
%% The /2 clause delegates to the /3 clause with no options.
describe_authentication_profiles(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_authentication_profiles(Client, Input, []).
%% Issues the DescribeAuthenticationProfiles API request with caller Options.
describe_authentication_profiles(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeAuthenticationProfiles">>, Input, Options).
%% @doc Calls the Redshift DescribeClusterDbRevisions API action.
%% The /2 clause delegates to the /3 clause with no options.
describe_cluster_db_revisions(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_db_revisions(Client, Input, []).
%% Issues the DescribeClusterDbRevisions API request with caller Options.
describe_cluster_db_revisions(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterDbRevisions">>, Input, Options).
@doc Returns a list of Amazon Redshift parameter groups , including
For more information about parameters and parameter groups , go to Amazon
Redshift Parameter Groups in the Amazon Redshift Cluster Management Guide .
If you specify both tag keys and tag values in the same request , Amazon
%% @doc Returns a list of Amazon Redshift parameter groups. The /2
%% clause delegates to the /3 clause with no options.
describe_cluster_parameter_groups(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_parameter_groups(Client, Input, []).
%% Issues the DescribeClusterParameterGroups API request with caller Options.
describe_cluster_parameter_groups(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterParameterGroups">>, Input, Options).
Amazon Redshift parameter group .
For more information about parameters and parameter groups , go to Amazon
Redshift Parameter Groups in the Amazon Redshift Cluster Management Guide .
%% @doc Returns the parameters of a Redshift parameter group. The /2
%% clause delegates to the /3 clause with no options.
describe_cluster_parameters(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_parameters(Client, Input, []).
%% Issues the DescribeClusterParameters API request with caller Options.
describe_cluster_parameters(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterParameters">>, Input, Options).
@doc Returns information about Amazon Redshift security groups .
For information about managing security groups , go to Amazon Redshift
Cluster Security Groups in the Amazon Redshift Cluster Management Guide .
If you specify both tag keys and tag values in the same request , Amazon
%% @doc Returns information about Amazon Redshift security groups.
%% The /2 clause delegates to the /3 clause with no options.
describe_cluster_security_groups(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_security_groups(Client, Input, []).
%% Issues the DescribeClusterSecurityGroups API request with caller Options.
describe_cluster_security_groups(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterSecurityGroups">>, Input, Options).
@doc Returns one or more snapshot objects , which contain metadata about
clusters that are owned by your Amazon Web Services account . No
information is returned for snapshots owned by inactive Amazon Web
If you specify both tag keys and tag values in the same request , Amazon
%% @doc Returns snapshot metadata for clusters owned by the account.
%% The /2 clause delegates to the /3 clause with no options.
describe_cluster_snapshots(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_snapshots(Client, Input, []).
%% Issues the DescribeClusterSnapshots API request with caller Options.
describe_cluster_snapshots(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterSnapshots">>, Input, Options).
@doc Returns one or more cluster subnet group objects , which contain
groups that are defined in your Amazon Web Services account .
If you specify both tag keys and tag values in the same request , Amazon
%% @doc Returns cluster subnet group objects defined in the account.
%% The /2 clause delegates to the /3 clause with no options.
describe_cluster_subnet_groups(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_subnet_groups(Client, Input, []).
%% Issues the DescribeClusterSubnetGroups API request with caller Options.
describe_cluster_subnet_groups(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterSubnetGroups">>, Input, Options).
%% @doc Calls the Redshift DescribeClusterTracks API action. The /2
%% clause delegates to the /3 clause with no options.
describe_cluster_tracks(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_tracks(Client, Input, []).
%% Issues the DescribeClusterTracks API request with caller Options.
describe_cluster_tracks(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterTracks">>, Input, Options).
@doc Returns descriptions of the available Amazon Redshift cluster
more about the Amazon Redshift versions . For more information about
managing clusters , go to Amazon Redshift Clusters in the Amazon Redshift
%% @doc Returns descriptions of the available cluster versions.
%% The /2 clause delegates to the /3 clause with no options.
describe_cluster_versions(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_cluster_versions(Client, Input, []).
%% Issues the DescribeClusterVersions API request with caller Options.
describe_cluster_versions(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusterVersions">>, Input, Options).
clusters , go to Amazon Redshift Clusters in the Amazon Redshift Cluster
If you specify both tag keys and tag values in the same request , Amazon
%% @doc Returns properties of provisioned clusters. The /2 clause
%% delegates to the /3 clause with an empty option list.
describe_clusters(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_clusters(Client, Input, []).
%% Issues the DescribeClusters API request with caller Options.
describe_clusters(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeClusters">>, Input, Options).
%% @doc Calls the Redshift DescribeDataShares API action. The /2
%% clause delegates to the /3 clause with no options.
describe_data_shares(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_data_shares(Client, Input, []).
%% Issues the DescribeDataShares API request with caller Options.
describe_data_shares(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDataShares">>, Input, Options).
@doc Returns a list of datashares where the account identifier being
%% @doc Lists datashares visible to the calling consumer account.
%% The /2 clause delegates to the /3 clause with no options.
describe_data_shares_for_consumer(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_data_shares_for_consumer(Client, Input, []).
%% Issues the DescribeDataSharesForConsumer API request with caller Options.
describe_data_shares_for_consumer(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDataSharesForConsumer">>, Input, Options).
@doc Returns a list of datashares when the account identifier being called
%% @doc Lists datashares owned by the calling producer account.
%% The /2 clause delegates to the /3 clause with no options.
describe_data_shares_for_producer(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_data_shares_for_producer(Client, Input, []).
%% Issues the DescribeDataSharesForProducer API request with caller Options.
describe_data_shares_for_producer(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDataSharesForProducer">>, Input, Options).
For more information about parameters and parameter groups , go to Amazon
Redshift Parameter Groups in the Amazon Redshift Cluster Management Guide .
%% @doc Returns the default cluster parameter information. The /2
%% clause delegates to the /3 clause with no options.
describe_default_cluster_parameters(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_default_cluster_parameters(Client, Input, []).
%% Issues the DescribeDefaultClusterParameters API request with caller Options.
describe_default_cluster_parameters(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDefaultClusterParameters">>, Input, Options).
@doc Describes a Redshift - managed VPC endpoint .
%% @doc Describes a Redshift-managed VPC endpoint. The /2 clause
%% delegates to the /3 clause with an empty option list.
describe_endpoint_access(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_endpoint_access(Client, Input, []).
%% Issues the DescribeEndpointAccess API request with caller Options.
describe_endpoint_access(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeEndpointAccess">>, Input, Options).
%% @doc Calls the Redshift DescribeEndpointAuthorization API action.
%% The /2 clause delegates to the /3 clause with no options.
describe_endpoint_authorization(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_endpoint_authorization(Client, Input, []).
%% Issues the DescribeEndpointAuthorization API request with caller Options.
describe_endpoint_authorization(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeEndpointAuthorization">>, Input, Options).
For a list of the event categories and source types , go to Amazon Redshift
%% @doc Lists event categories and source types. The /2 clause
%% delegates to the /3 clause with an empty option list.
describe_event_categories(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_event_categories(Client, Input, []).
%% Issues the DescribeEventCategories API request with caller Options.
describe_event_categories(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeEventCategories">>, Input, Options).
@doc Lists descriptions of all the Amazon Redshift event notification
If you specify both tag keys and tag values in the same request , Amazon
%% @doc Lists Amazon Redshift event notification subscriptions.
%% The /2 clause delegates to the /3 clause with no options.
describe_event_subscriptions(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_event_subscriptions(Client, Input, []).
%% Issues the DescribeEventSubscriptions API request with caller Options.
describe_event_subscriptions(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeEventSubscriptions">>, Input, Options).
parameter groups for the past 14 days .
By default , the past hour of events are returned .
%% @doc Returns events for clusters and related resources (past 14
%% days). The /2 clause delegates to the /3 clause with no options.
describe_events(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_events(Client, Input, []).
%% Issues the DescribeEvents API request with caller Options.
describe_events(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeEvents">>, Input, Options).
@doc Returns information about the specified HSM client certificate .
If no certificate ID is specified , returns information about all the HSM
certificates owned by your Amazon Web Services account .
If you specify both tag keys and tag values in the same request , Amazon
Redshift returns all HSM client certificates that match any combination of
values , all HSM client certificates that have any combination of those
If both tag keys and values are omitted from the request , HSM client
%% @doc Returns information about HSM client certificates. The /2
%% clause delegates to the /3 clause with no options.
describe_hsm_client_certificates(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_hsm_client_certificates(Client, Input, []).
%% Issues the DescribeHsmClientCertificates API request with caller Options.
describe_hsm_client_certificates(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeHsmClientCertificates">>, Input, Options).
@doc Returns information about the specified Amazon Redshift HSM
If no configuration ID is specified , returns information about all the HSM
configurations owned by your Amazon Web Services account .
If you specify both tag keys and tag values in the same request , Amazon
Redshift returns all HSM connections that match any combination of the
values , all HSM connections that have any combination of those values are
If both tag keys and values are omitted from the request , HSM connections
%% @doc Returns information about Redshift HSM configurations. The /2
%% clause delegates to the /3 clause with no options.
describe_hsm_configurations(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_hsm_configurations(Client, Input, []).
%% Issues the DescribeHsmConfigurations API request with caller Options.
describe_hsm_configurations(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeHsmConfigurations">>, Input, Options).
attempts , is being logged for the specified Amazon Redshift cluster .
%% @doc Describes whether logging is enabled for a cluster. The /2
%% clause delegates to the /3 clause with no options.
describe_logging_status(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_logging_status(Client, Input, []).
%% Issues the DescribeLoggingStatus API request with caller Options.
describe_logging_status(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeLoggingStatus">>, Input, Options).
%% @doc Calls the Redshift DescribeNodeConfigurationOptions API action.
%% The /2 clause delegates to the /3 clause with no options.
describe_node_configuration_options(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_node_configuration_options(Client, Input, []).
%% Issues the DescribeNodeConfigurationOptions API request with caller Options.
describe_node_configuration_options(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeNodeConfigurationOptions">>, Input, Options).
options are available , such as the EC2 Availability Zones ( AZ ) in the
specific Amazon Web Services Region that you can specify , and the node
cluster . For more information about managing clusters , go to Amazon
Redshift Clusters in the Amazon Redshift Cluster Management Guide .
%% @doc Returns orderable cluster options (AZs, node types). The /2
%% clause delegates to the /3 clause with no options.
describe_orderable_cluster_options(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_orderable_cluster_options(Client, Input, []).
%% Issues the DescribeOrderableClusterOptions API request with caller Options.
describe_orderable_cluster_options(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeOrderableClusterOptions">>, Input, Options).
%% @doc Calls the Redshift DescribePartners API action. The /2 clause
%% delegates to the /3 clause with an empty option list.
describe_partners(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_partners(Client, Input, []).
%% Issues the DescribePartners API request with caller Options.
describe_partners(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribePartners">>, Input, Options).
%% @doc Calls the Redshift DescribeReservedNodeExchangeStatus API
%% action. The /2 clause delegates to the /3 clause with no options.
describe_reserved_node_exchange_status(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_reserved_node_exchange_status(Client, Input, []).
%% Issues the DescribeReservedNodeExchangeStatus API request with caller Options.
describe_reserved_node_exchange_status(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeReservedNodeExchangeStatus">>, Input, Options).
@doc Returns a list of the available reserved node offerings by Amazon
` PurchaseReservedNodeOffering ' to reserve one or more nodes for your
Amazon Redshift cluster .
Reserved Nodes in the Amazon Redshift Cluster Management Guide .
%% @doc Lists available reserved node offerings. The /2 clause
%% delegates to the /3 clause with an empty option list.
describe_reserved_node_offerings(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_reserved_node_offerings(Client, Input, []).
%% Issues the DescribeReservedNodeOfferings API request with caller Options.
describe_reserved_node_offerings(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeReservedNodeOfferings">>, Input, Options).
%% @doc Calls the Redshift DescribeReservedNodes API action. The /2
%% clause delegates to the /3 clause with no options.
describe_reserved_nodes(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_reserved_nodes(Client, Input, []).
%% Issues the DescribeReservedNodes API request with caller Options.
describe_reserved_nodes(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeReservedNodes">>, Input, Options).
%% @doc Calls the Redshift DescribeResize API action. The /2 clause
%% delegates to the /3 clause with an empty option list.
describe_resize(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_resize(Client, Input, []).
%% Issues the DescribeResize API request with caller Options.
describe_resize(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeResize">>, Input, Options).
%% @doc Calls the Redshift DescribeScheduledActions API action.
%% The /2 clause delegates to the /3 clause with no options.
describe_scheduled_actions(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_scheduled_actions(Client, Input, []).
%% Issues the DescribeScheduledActions API request with caller Options.
describe_scheduled_actions(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeScheduledActions">>, Input, Options).
@doc Returns a list of snapshot copy grants owned by the Amazon Web
For more information about managing snapshot copy grants , go to Amazon
Redshift Database Encryption in the Amazon Redshift Cluster Management
%% @doc Lists snapshot copy grants owned by the account. The /2
%% clause delegates to the /3 clause with no options.
describe_snapshot_copy_grants(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_snapshot_copy_grants(Client, Input, []).
%% Issues the DescribeSnapshotCopyGrants API request with caller Options.
describe_snapshot_copy_grants(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeSnapshotCopyGrants">>, Input, Options).
%% @doc Calls the Redshift DescribeSnapshotSchedules API action.
%% The /2 clause delegates to the /3 clause with no options.
describe_snapshot_schedules(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_snapshot_schedules(Client, Input, []).
%% Issues the DescribeSnapshotSchedules API request with caller Options.
describe_snapshot_schedules(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeSnapshotSchedules">>, Input, Options).
%% @doc Calls the Redshift DescribeStorage API action. The /2 clause
%% delegates to the /3 clause with an empty option list.
describe_storage(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_storage(Client, Input, []).
%% Issues the DescribeStorage API request with caller Options.
describe_storage(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeStorage">>, Input, Options).
@doc Lists the status of one or more table restore requests made using the
If you do n't specify a value for the ` TableRestoreRequestId '
status of the table specified by ` TableRestoreRequestId ' .
%% @doc Lists the status of table restore requests. The /2 clause
%% delegates to the /3 clause with an empty option list.
describe_table_restore_status(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_table_restore_status(Client, Input, []).
%% Issues the DescribeTableRestoreStatus API request with caller Options.
describe_table_restore_status(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeTableRestoreStatus">>, Input, Options).
You can return tags from a specific resource by specifying an ARN , or you
The following are limitations for ` DescribeTags ' :
< ul > < li > You can not specify an ARN and a resource - type value together in
< /li > < li > You can not use the ` MaxRecords ' and ` Marker ' parameters
together with the ARN parameter .
< /li > < li > The ` MaxRecords ' parameter can be a range from 10 to 50
request , Amazon Redshift returns all resources that match any combination
%% @doc Returns tags, optionally filtered by ARN or resource type.
%% The /2 clause delegates to the /3 clause with no options.
describe_tags(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_tags(Client, Input, []).
%% Issues the DescribeTags API request with caller Options.
describe_tags(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeTags">>, Input, Options).
%% @doc Calls the Redshift DescribeUsageLimits API action. The /2
%% clause delegates to the /3 clause with no options.
describe_usage_limits(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_usage_limits(Client, Input, []).
%% Issues the DescribeUsageLimits API request with caller Options.
describe_usage_limits(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeUsageLimits">>, Input, Options).
for the specified Amazon Redshift cluster .
%% @doc Stops logging for the specified cluster. The /2 clause
%% delegates to the /3 clause with an empty option list.
disable_logging(Client, Input)
  when is_map(Client), is_map(Input) ->
    disable_logging(Client, Input, []).
%% Issues the DisableLogging API request with caller Options.
disable_logging(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DisableLogging">>, Input, Options).
@doc Disables the automatic copying of snapshots from one region to
symmetric key from Key Management Service , use
` DeleteSnapshotCopyGrant ' to delete the grant that grants Amazon
%% @doc Disables cross-region automatic snapshot copying for a
%% cluster. The /2 clause delegates to the /3 clause with no options.
disable_snapshot_copy(Client, Input)
  when is_map(Client), is_map(Input) ->
    disable_snapshot_copy(Client, Input, []).
%% Issues the DisableSnapshotCopy API request with caller Options.
disable_snapshot_copy(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DisableSnapshotCopy">>, Input, Options).
%% @doc Calls the Redshift DisassociateDataShareConsumer API action.
%% The /2 clause delegates to the /3 clause with no options.
disassociate_data_share_consumer(Client, Input)
  when is_map(Client), is_map(Input) ->
    disassociate_data_share_consumer(Client, Input, []).
%% Issues the DisassociateDataShareConsumer API request with caller Options.
disassociate_data_share_consumer(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DisassociateDataShareConsumer">>, Input, Options).
for the specified Amazon Redshift cluster .
%% @doc Starts logging for the specified cluster. The /2 clause
%% delegates to the /3 clause with an empty option list.
enable_logging(Client, Input)
  when is_map(Client), is_map(Input) ->
    enable_logging(Client, Input, []).
%% Issues the EnableLogging API request with caller Options.
enable_logging(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"EnableLogging">>, Input, Options).
@doc Enables the automatic copy of snapshots from one region to another
%% @doc Enables cross-region automatic snapshot copying for a
%% cluster. The /2 clause delegates to the /3 clause with no options.
enable_snapshot_copy(Client, Input)
  when is_map(Client), is_map(Input) ->
    enable_snapshot_copy(Client, Input, []).
%% Issues the EnableSnapshotCopy API request with caller Options.
enable_snapshot_copy(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"EnableSnapshotCopy">>, Input, Options).
authorization to log on to an Amazon Redshift database .
The action returns the database user name prefixed with ` IAM : ' if
` AutoCreate ' is ` False ' or ` IAMA : ' if ` AutoCreate ' is
` True ' . You can optionally specify one or more database user groups
expire in 900 seconds . You can optionally specify a duration between 900
seconds ( 15 minutes ) and 3600 seconds ( 60 minutes ) . For more information ,
The Identity and Access Management ( IAM ) user or role that runs
GetClusterCredentials must have an IAM policy attached that allows access
permissions , see Resource Policies for GetClusterCredentials in the Amazon
Redshift Cluster Management Guide .
If the ` DbGroups ' parameter is specified , the IAM policy must allow
the ` redshift : JoinGroup ' action with access to the listed
In addition , if the ` AutoCreate ' parameter is set to ` True ' , then
If the ` DbName ' parameter is specified , the IAM policy must allow
%% @doc Returns temporary database credentials for logging on to a
%% Redshift database. The /2 clause delegates to /3 with no options.
get_cluster_credentials(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_cluster_credentials(Client, Input, []).
%% Issues the GetClusterCredentials API request with caller Options.
get_cluster_credentials(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetClusterCredentials">>, Input, Options).
authorization to log in to an Amazon Redshift database .
The database user is mapped 1:1 to the source Identity and Access
Management ( IAM ) identity . For more information about IAM identities , see
IAM Identities ( users , user groups , and roles ) in the Amazon Web Services
Identity and Access Management User Guide .
The Identity and Access Management ( IAM ) identity that runs this operation
must have an IAM policy attached that allows access to all necessary
identity - based policies ( IAM policies ) in the Amazon Redshift Cluster
%% @doc Returns database credentials mapped 1:1 to the caller's IAM
%% identity. The /2 clause delegates to the /3 clause with no options.
get_cluster_credentials_with_iam(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_cluster_credentials_with_iam(Client, Input, []).
%% Issues the GetClusterCredentialsWithIAM API request with caller Options.
get_cluster_credentials_with_iam(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetClusterCredentialsWithIAM">>, Input, Options).
%% @doc Calls the Redshift GetReservedNodeExchangeConfigurationOptions
%% API action. The /2 clause delegates to the /3 clause with no options.
get_reserved_node_exchange_configuration_options(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_reserved_node_exchange_configuration_options(Client, Input, []).
%% Issues the GetReservedNodeExchangeConfigurationOptions API request with caller Options.
get_reserved_node_exchange_configuration_options(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetReservedNodeExchangeConfigurationOptions">>, Input, Options).
payment type , term , and usage price of the given reserved node .
%% @doc Returns reserved node exchange offerings (payment type, term,
%% usage price). The /2 clause delegates to /3 with no options.
get_reserved_node_exchange_offerings(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_reserved_node_exchange_offerings(Client, Input, []).
%% Issues the GetReservedNodeExchangeOfferings API request with caller Options.
get_reserved_node_exchange_offerings(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetReservedNodeExchangeOfferings">>, Input, Options).
Calling this operation does not change AQUA configuration . Amazon Redshift
%% @doc Calls the Redshift ModifyAquaConfiguration API action.
%% The /2 clause delegates to the /3 clause with no options.
modify_aqua_configuration(Client, Input)
  when is_map(Client), is_map(Input) ->
    modify_aqua_configuration(Client, Input, []).
%% Issues the ModifyAquaConfiguration API request with caller Options.
modify_aqua_configuration(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ModifyAquaConfiguration">>, Input, Options).
%% @doc Modifies an authentication profile. The /2 clause delegates
%% to the /3 clause with an empty option list.
modify_authentication_profile(Client, Input)
  when is_map(Client), is_map(Input) ->
    modify_authentication_profile(Client, Input, []).
%% Issues the ModifyAuthenticationProfile API request with caller Options.
modify_authentication_profile(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ModifyAuthenticationProfile">>, Input, Options).
information about managing clusters , go to Amazon Redshift Clusters in the
%% @doc Modifies the settings for a cluster. The /2 clause delegates
%% to the /3 clause with an empty request-option list.
modify_cluster(Client, Input)
  when is_map(Client), is_map(Input) ->
    modify_cluster(Client, Input, []).
%% Issues the ModifyCluster API request with caller Options.
modify_cluster(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ModifyCluster">>, Input, Options).
%% @doc Calls the Redshift ModifyClusterDbRevision API action.
%% The /2 clause delegates to the /3 clause with no options.
modify_cluster_db_revision(Client, Input)
  when is_map(Client), is_map(Input) ->
    modify_cluster_db_revision(Client, Input, []).
%% Issues the ModifyClusterDbRevision API request with caller Options.
modify_cluster_db_revision(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ModifyClusterDbRevision">>, Input, Options).
@doc Modifies the list of Identity and Access Management ( IAM ) roles that
can be used by the cluster to access other Amazon Web Services services .
The maximum number of IAM roles that you can associate is subject to a
quota . For more information , go to Quotas and limits in the Amazon
Redshift Cluster Management Guide .
modify_cluster_iam_roles(Client, Input)
when is_map(Client), is_map(Input) ->
modify_cluster_iam_roles(Client, Input, []).
modify_cluster_iam_roles(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifyClusterIamRoles">>, Input, Options).
modify_cluster_maintenance(Client, Input)
when is_map(Client), is_map(Input) ->
modify_cluster_maintenance(Client, Input, []).
modify_cluster_maintenance(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifyClusterMaintenance">>, Input, Options).
For more information about parameters and parameter groups , go to Amazon
Redshift Parameter Groups in the Amazon Redshift Cluster Management Guide .
modify_cluster_parameter_group(Client, Input)
when is_map(Client), is_map(Input) ->
modify_cluster_parameter_group(Client, Input, []).
modify_cluster_parameter_group(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifyClusterParameterGroup">>, Input, Options).
modify_cluster_snapshot(Client, Input)
when is_map(Client), is_map(Input) ->
modify_cluster_snapshot(Client, Input, []).
modify_cluster_snapshot(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifyClusterSnapshot">>, Input, Options).
modify_cluster_snapshot_schedule(Client, Input)
when is_map(Client), is_map(Input) ->
modify_cluster_snapshot_schedule(Client, Input, []).
modify_cluster_snapshot_schedule(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifyClusterSnapshotSchedule">>, Input, Options).
@doc Modifies a cluster subnet group to include the specified list of VPC
modify_cluster_subnet_group(Client, Input)
when is_map(Client), is_map(Input) ->
modify_cluster_subnet_group(Client, Input, []).
modify_cluster_subnet_group(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifyClusterSubnetGroup">>, Input, Options).
@doc Modifies a Redshift - managed VPC endpoint .
modify_endpoint_access(Client, Input)
when is_map(Client), is_map(Input) ->
modify_endpoint_access(Client, Input, []).
modify_endpoint_access(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifyEndpointAccess">>, Input, Options).
@doc Modifies an existing Amazon Redshift event notification subscription .
modify_event_subscription(Client, Input)
when is_map(Client), is_map(Input) ->
modify_event_subscription(Client, Input, []).
modify_event_subscription(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifyEventSubscription">>, Input, Options).
modify_scheduled_action(Client, Input)
when is_map(Client), is_map(Input) ->
modify_scheduled_action(Client, Input, []).
modify_scheduled_action(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifyScheduledAction">>, Input, Options).
Amazon Web Services Region after they are copied from the source Amazon
Web Services Region .
modify_snapshot_copy_retention_period(Client, Input)
when is_map(Client), is_map(Input) ->
modify_snapshot_copy_retention_period(Client, Input, []).
modify_snapshot_copy_retention_period(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifySnapshotCopyRetentionPeriod">>, Input, Options).
modify_snapshot_schedule(Client, Input)
when is_map(Client), is_map(Input) ->
modify_snapshot_schedule(Client, Input, []).
modify_snapshot_schedule(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifySnapshotSchedule">>, Input, Options).
modify_usage_limit(Client, Input)
when is_map(Client), is_map(Input) ->
modify_usage_limit(Client, Input, []).
modify_usage_limit(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ModifyUsageLimit">>, Input, Options).
pause_cluster(Client, Input)
when is_map(Client), is_map(Input) ->
pause_cluster(Client, Input, []).
pause_cluster(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"PauseCluster">>, Input, Options).
Amazon Redshift offers a predefined set of reserved node offerings . You
can purchase one or more of the offerings . You can call the
Reserved Nodes in the Amazon Redshift Cluster Management Guide .
purchase_reserved_node_offering(Client, Input)
when is_map(Client), is_map(Input) ->
purchase_reserved_node_offering(Client, Input, []).
purchase_reserved_node_offering(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"PurchaseReservedNodeOffering">>, Input, Options).
reboot . For more information about managing clusters , go to Amazon
Redshift Clusters in the Amazon Redshift Cluster Management Guide .
reboot_cluster(Client, Input)
when is_map(Client), is_map(Input) ->
reboot_cluster(Client, Input, []).
reboot_cluster(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"RebootCluster">>, Input, Options).
reject_data_share(Client, Input)
when is_map(Client), is_map(Input) ->
reject_data_share(Client, Input, []).
reject_data_share(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"RejectDataShare">>, Input, Options).
@doc Sets one or more parameters of the specified parameter group to their
reset_cluster_parameter_group(Client, Input)
when is_map(Client), is_map(Input) ->
reset_cluster_parameter_group(Client, Input, []).
reset_cluster_parameter_group(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ResetClusterParameterGroup">>, Input, Options).
< ul > < li > dc1.large ( if your cluster is in a VPC )
resize_cluster(Client, Input)
when is_map(Client), is_map(Input) ->
resize_cluster(Client, Input, []).
resize_cluster(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ResizeCluster">>, Input, Options).
By default , Amazon Redshift creates the resulting cluster with the same
and parameter groups . After Amazon Redshift creates the cluster , you can
If you restore a cluster into a VPC , you must provide a cluster subnet
For more information about working with snapshots , go to Amazon Redshift
Snapshots in the Amazon Redshift Cluster Management Guide .
restore_from_cluster_snapshot(Client, Input)
when is_map(Client), is_map(Input) ->
restore_from_cluster_snapshot(Client, Input, []).
restore_from_cluster_snapshot(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"RestoreFromClusterSnapshot">>, Input, Options).
@doc Creates a new table from a table in an Amazon Redshift cluster
You must create the new table within the Amazon Redshift cluster that the
with the same name as an existing table in an Amazon Redshift cluster .
restore_table_from_cluster_snapshot(Client, Input)
when is_map(Client), is_map(Input) ->
restore_table_from_cluster_snapshot(Client, Input, []).
restore_table_from_cluster_snapshot(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"RestoreTableFromClusterSnapshot">>, Input, Options).
resume_cluster(Client, Input)
when is_map(Client), is_map(Input) ->
resume_cluster(Client, Input, []).
resume_cluster(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ResumeCluster">>, Input, Options).
@doc Revokes an ingress rule in an Amazon Redshift security group for a
previously authorized IP range or Amazon EC2 security group .
To add an ingress rule , see ` AuthorizeClusterSecurityGroupIngress ' .
For information about managing security groups , go to Amazon Redshift
Cluster Security Groups in the Amazon Redshift Cluster Management Guide .
revoke_cluster_security_group_ingress(Client, Input)
when is_map(Client), is_map(Input) ->
revoke_cluster_security_group_ingress(Client, Input, []).
revoke_cluster_security_group_ingress(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"RevokeClusterSecurityGroupIngress">>, Input, Options).
revoke_endpoint_access(Client, Input)
when is_map(Client), is_map(Input) ->
revoke_endpoint_access(Client, Input, []).
revoke_endpoint_access(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"RevokeEndpointAccess">>, Input, Options).
@doc Removes the ability of the specified Amazon Web Services account to
For more information about working with snapshots , go to Amazon Redshift
Snapshots in the Amazon Redshift Cluster Management Guide .
revoke_snapshot_access(Client, Input)
when is_map(Client), is_map(Input) ->
revoke_snapshot_access(Client, Input, []).
revoke_snapshot_access(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"RevokeSnapshotAccess">>, Input, Options).
rotate_encryption_key(Client, Input)
when is_map(Client), is_map(Input) ->
rotate_encryption_key(Client, Input, []).
rotate_encryption_key(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"RotateEncryptionKey">>, Input, Options).
update_partner_status(Client, Input)
when is_map(Client), is_map(Input) ->
update_partner_status(Client, Input, []).
update_partner_status(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdatePartnerStatus">>, Input, Options).
Internal functions
-spec request(aws_client:aws_client(), binary(), map(), list()) ->
{ok, Result, {integer(), list(), hackney:client()}} |
{error, Error, {integer(), list(), hackney:client()}} |
{error, term()} when
Result :: map() | undefined,
Error :: map().
request(Client, Action, Input, Options) ->
RequestFun = fun() -> do_request(Client, Action, Input, Options) end,
aws_request:request(RequestFun, Options).
do_request(Client, Action, Input0, Options) ->
Client1 = Client#{service => <<"redshift">>},
Host = build_host(<<"redshift">>, Client1),
URL = build_url(Host, Client1),
Headers = [
{<<"Host">>, Host},
{<<"Content-Type">>, <<"application/x-www-form-urlencoded">>}
],
Input = Input0#{ <<"Action">> => Action
, <<"Version">> => <<"2012-12-01">>
},
Payload = aws_util:encode_query(Input),
SignedHeaders = aws_request:sign_request(Client1, <<"POST">>, URL, Headers, Payload),
Response = hackney:request(post, URL, SignedHeaders, Payload, Options),
handle_response(Response).
handle_response({ok, 200, ResponseHeaders, Client}) ->
case hackney:body(Client) of
{ok, <<>>} ->
{ok, undefined, {200, ResponseHeaders, Client}};
{ok, Body} ->
Result = aws_util:decode_xml(Body),
{ok, Result, {200, ResponseHeaders, Client}}
end;
handle_response({ok, StatusCode, ResponseHeaders, Client}) ->
{ok, Body} = hackney:body(Client),
Error = aws_util:decode_xml(Body),
{error, Error, {StatusCode, ResponseHeaders, Client}};
handle_response({error, Reason}) ->
{error, Reason}.
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
<<"localhost">>;
build_host(EndpointPrefix, #{region := Region, endpoint := Endpoint}) ->
aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>).
build_url(Host, Client) ->
Proto = aws_client:proto(Client),
Port = aws_client:port(Client),
aws_util:binary_join([Proto, <<"://">>, Host, <<":">>, Port, <<"/">>], <<"">>).
|
b8df20073f9949f3897cb34a94091a53d508a2fc22e8073685551088ed578182 | jpmonettas/hansel | dev.cljs | (ns dev
(:require [hansel.instrument.runtime]))
(defn print-form-init [data]
(println "[form-init] data:" data))
(defn print-fn-call [data]
(println "[fn-call] data:" data))
(defn print-fn-return [{:keys [return] :as data}]
(println "[fn-return] data:" data)
return) ;; must return return!
(defn print-expr-exec [{:keys [result] :as data}]
(println "[expr-exec] data:" data)
result) ;; must return result!
(defn print-bind [data]
(println "[bind] data:" data))
| null | https://raw.githubusercontent.com/jpmonettas/hansel/344bf5021f932a6260424958ce2ebb538ce4b323/dev-src/dev.cljs | clojure | must return return!
must return result! | (ns dev
(:require [hansel.instrument.runtime]))
(defn print-form-init [data]
(println "[form-init] data:" data))
(defn print-fn-call [data]
(println "[fn-call] data:" data))
(defn print-fn-return [{:keys [return] :as data}]
(println "[fn-return] data:" data)
(defn print-expr-exec [{:keys [result] :as data}]
(println "[expr-exec] data:" data)
(defn print-bind [data]
(println "[bind] data:" data))
|
7127ca9a2b53cc78286d8448dcbc8d3f0486117368d83e2c79752c49cc1c27bd | shaunlebron/gh4st | texts.cljs | (ns gh4st.texts)
(def victory-text
[:p.victory-text
"You got him!" [:br]
"Press " [:kbd.green "SHIFT"] " + " [:kbd.green "→"] " for next level." [:br]
"Or press Z or Y to undo/redo for study."])
(def defeat-text
[:p.defeat-text
"Bah! He got the fruit!" [:br]
[:span.focus "Press " [:kbd "Z"] " or " [:kbd "Y"] " to undo/redo."] [:br]
"Or press R to restart from the beginning."])
(def allow-defeat-text
[:p.defeat-text
"Yup. It was inevitable." [:br]
"Press " [:kbd.green "SHIFT"] " + " [:kbd.green "→"] " for next level." [:br]
"Or press Z or Y to undo/redo for study."])
(def text0
{:title "Meet Pac-Man..."
:desc [:p
"He wants fruit, but must run from the face of a ghost." [:br]
[:span.focus "Press " [:kbd.blinky "1"] " to move your ghost."] [:br]
"(Pac-Man will wait for his turn to move.)"]
})
(def text1
{:title "No turning back"
:desc [:p
"Even though Pac-Man wants his fruit," [:br]
" he can't turn back and is forced to take the long way." [:br]
"Press " [:kbd.blinky "1"] " to cycle your turn to see Pac-Man move."]})
(def text2
{:title "Follow your nose"
:desc [:p
"Pac-Man follows his nose, but he can smell fruit through walls." [:br]
"He'll take a wrong turn if it smells better than the others." [:br]
"Press" [:kbd.blinky "1"] " to cycle. " [:kbd "Z"] " to undo."]
}
)
(def text3
{:title "Decisions, decisions"
:desc [:p
"When he can't decide which turn smells better," [:br]
"he favors UP, LEFT, DOWN, then RIGHT (counter-clockwise)." [:br]
"Press " [:kbd.blinky "1"] " to see him favor the LEFT turn."
]}
)
(def text4
{:title "Release the ghost!"
:desc [:p "Meet " [:span.blinky "Blinky"] ". He behaves exactly like Pac-Man," [:br]
"except he prefers fruit of the Pac-Man variety :)" [:br]
"Press " [:kbd.blinky "1"] " to cycle Blinky until he catches Pac-Man."]
})
(def text5
{:title "Blinky needs help"
:desc [:p
"Blinky has no hope of catching Pac-Man here." [:br]
"His behavior simply won't allow him to." [:br]
"Press " [:kbd.blinky "1"] " to cycle to the inevitable defeat."]
}
)
(def text6
{:title "Pinky can help"
:desc [:p
"Meet " [:span.pinky "Pinky"] ", your second ghost!" [:br]
"You can now choose which ghost to move each turn." [:br]
"Press" [:kbd.pinky "2"] " to move Pinky. " [:kbd.blinky "1"] " for Blinky."]
}
)
(def text7
{:title "Pinky is different"
:desc [:p
"Pinky tries to get ahead of Pac-Man" [:br]
"by aiming two tiles ahead of where Pac-Man is looking." [:br]
"This doesn't always work out, as you can see here..." [:br]
"Press" [:kbd.pinky "2"] " to move Pinky. "]
}
)
(def text8
{:title "Work together"
:desc [:p
"They have to work together here." [:br]
"Press" [:kbd.pinky "2"] " to move Pinky. " [:kbd.blinky "1"] " for Blinky."]
}
)
(def text9
{:title "Oh, Clyde"
:desc [:p
"Meet your third ghost, " [:span.clyde "Clyde!"] [:br]
" He chases Pac-Man like Blinky, except he gets scared" [:br]
"when he's too close and tries to run away." [:br]
"Press" [:kbd.clyde "3"] " to move Clyde."]
}
)
(def text10
{:title "Forced Bravery"
:desc [:p
"Despite Clyde's fear, he can still spook Pac-Man since" [:br]
"Clyde can only change his mind at intersections." [:br]
"Press" [:kbd.clyde "3"] " to move Clyde."]
}
)
(def text11
{:title "Blinky and Clyde"
:desc [:p
"Enable the Paths/Target visuals on the right" [:br]
"to help Clyde charge at the right moment." [:br]
"Press " [:kbd.blinky "1"] " to move Blinky. " [:kbd.clyde "3"] " for Clyde."
]
}
)
(def text12
{:title "Pinky and Clyde"
:desc [:p
"You may be noticing some foolproof strategies at this point." [:br]
"Press " [:kbd.pinky "2"] " to move Pinky. " [:kbd.clyde "3"] " for Clyde."
]
}
)
(def text13
{:title "Inky, the wingman"
:desc [:p
"Meet "[:span.inky "Inky"] ", your last ghost. He tries to flank Pac-Man" [:br]
"from the side opposite to Blinky. See \"Targets\" button." [:br]
"Press " [:kbd.inky "4"] " to move Inky." [:kbd.blinky "1"] " for Blinky."
]
}
)
(def text14
{:title "All together now"
:desc [:p
"Alright, we can do this!" [:br]
"Use "
[:kbd.blinky "1"] " "
[:kbd.pinky "2"] " "
[:kbd.clyde "3"] " "
[:kbd.inky "4"] " to coordinate a swift death."
]
}
)
(def text-end
{:title "Well Done!"
:desc [:div
[:p
"Now that you've lived as these meticulous monsters," [:br]
"you should have no problem outsmarting them in their original games." [:br]
"Thanks for playing this 'post-compo' concept and Happy Ludum Dare 33!" [:br]]
[:div.links
[:a {:href ""} "@shaunlebron"]
" | " [:a {:href ""} "github"]
" | " [:a {:href "-dare-33/?action=preview&uid=31638"} "Ludum Entry"]
]]
}
)
(def texts
[
text0
text1
text2
text3
text4
text5
text6
text7
text8
text9
text10
text11
text12
text13
text14
text-end
;; {:title "Decisions, decisions"}
{ : title " Release the Ghost ! " }
;; {:title "Blinky needs help"}
;; {:title "Pinky can help"}
;; {:title "Pinky is different"}
;; {:title "Work together"}
{ : title " Oh " }
{ : title " Well done " }
{ : title " Blinky and Clyde " }
]
)
| null | https://raw.githubusercontent.com/shaunlebron/gh4st/e329f42f0d19ddeebcf85ca0e1efa03d9af22a56/src/gh4st/texts.cljs | clojure | {:title "Decisions, decisions"}
{:title "Blinky needs help"}
{:title "Pinky can help"}
{:title "Pinky is different"}
{:title "Work together"} | (ns gh4st.texts)
(def victory-text
[:p.victory-text
"You got him!" [:br]
"Press " [:kbd.green "SHIFT"] " + " [:kbd.green "→"] " for next level." [:br]
"Or press Z or Y to undo/redo for study."])
(def defeat-text
[:p.defeat-text
"Bah! He got the fruit!" [:br]
[:span.focus "Press " [:kbd "Z"] " or " [:kbd "Y"] " to undo/redo."] [:br]
"Or press R to restart from the beginning."])
(def allow-defeat-text
[:p.defeat-text
"Yup. It was inevitable." [:br]
"Press " [:kbd.green "SHIFT"] " + " [:kbd.green "→"] " for next level." [:br]
"Or press Z or Y to undo/redo for study."])
(def text0
{:title "Meet Pac-Man..."
:desc [:p
"He wants fruit, but must run from the face of a ghost." [:br]
[:span.focus "Press " [:kbd.blinky "1"] " to move your ghost."] [:br]
"(Pac-Man will wait for his turn to move.)"]
})
(def text1
{:title "No turning back"
:desc [:p
"Even though Pac-Man wants his fruit," [:br]
" he can't turn back and is forced to take the long way." [:br]
"Press " [:kbd.blinky "1"] " to cycle your turn to see Pac-Man move."]})
(def text2
{:title "Follow your nose"
:desc [:p
"Pac-Man follows his nose, but he can smell fruit through walls." [:br]
"He'll take a wrong turn if it smells better than the others." [:br]
"Press" [:kbd.blinky "1"] " to cycle. " [:kbd "Z"] " to undo."]
}
)
(def text3
{:title "Decisions, decisions"
:desc [:p
"When he can't decide which turn smells better," [:br]
"he favors UP, LEFT, DOWN, then RIGHT (counter-clockwise)." [:br]
"Press " [:kbd.blinky "1"] " to see him favor the LEFT turn."
]}
)
(def text4
{:title "Release the ghost!"
:desc [:p "Meet " [:span.blinky "Blinky"] ". He behaves exactly like Pac-Man," [:br]
"except he prefers fruit of the Pac-Man variety :)" [:br]
"Press " [:kbd.blinky "1"] " to cycle Blinky until he catches Pac-Man."]
})
(def text5
{:title "Blinky needs help"
:desc [:p
"Blinky has no hope of catching Pac-Man here." [:br]
"His behavior simply won't allow him to." [:br]
"Press " [:kbd.blinky "1"] " to cycle to the inevitable defeat."]
}
)
(def text6
{:title "Pinky can help"
:desc [:p
"Meet " [:span.pinky "Pinky"] ", your second ghost!" [:br]
"You can now choose which ghost to move each turn." [:br]
"Press" [:kbd.pinky "2"] " to move Pinky. " [:kbd.blinky "1"] " for Blinky."]
}
)
(def text7
{:title "Pinky is different"
:desc [:p
"Pinky tries to get ahead of Pac-Man" [:br]
"by aiming two tiles ahead of where Pac-Man is looking." [:br]
"This doesn't always work out, as you can see here..." [:br]
"Press" [:kbd.pinky "2"] " to move Pinky. "]
}
)
(def text8
{:title "Work together"
:desc [:p
"They have to work together here." [:br]
"Press" [:kbd.pinky "2"] " to move Pinky. " [:kbd.blinky "1"] " for Blinky."]
}
)
(def text9
{:title "Oh, Clyde"
:desc [:p
"Meet your third ghost, " [:span.clyde "Clyde!"] [:br]
" He chases Pac-Man like Blinky, except he gets scared" [:br]
"when he's too close and tries to run away." [:br]
"Press" [:kbd.clyde "3"] " to move Clyde."]
}
)
(def text10
{:title "Forced Bravery"
:desc [:p
"Despite Clyde's fear, he can still spook Pac-Man since" [:br]
"Clyde can only change his mind at intersections." [:br]
"Press" [:kbd.clyde "3"] " to move Clyde."]
}
)
(def text11
{:title "Blinky and Clyde"
:desc [:p
"Enable the Paths/Target visuals on the right" [:br]
"to help Clyde charge at the right moment." [:br]
"Press " [:kbd.blinky "1"] " to move Blinky. " [:kbd.clyde "3"] " for Clyde."
]
}
)
(def text12
{:title "Pinky and Clyde"
:desc [:p
"You may be noticing some foolproof strategies at this point." [:br]
"Press " [:kbd.pinky "2"] " to move Pinky. " [:kbd.clyde "3"] " for Clyde."
]
}
)
(def text13
{:title "Inky, the wingman"
:desc [:p
"Meet "[:span.inky "Inky"] ", your last ghost. He tries to flank Pac-Man" [:br]
"from the side opposite to Blinky. See \"Targets\" button." [:br]
"Press " [:kbd.inky "4"] " to move Inky." [:kbd.blinky "1"] " for Blinky."
]
}
)
(def text14
{:title "All together now"
:desc [:p
"Alright, we can do this!" [:br]
"Use "
[:kbd.blinky "1"] " "
[:kbd.pinky "2"] " "
[:kbd.clyde "3"] " "
[:kbd.inky "4"] " to coordinate a swift death."
]
}
)
(def text-end
{:title "Well Done!"
:desc [:div
[:p
"Now that you've lived as these meticulous monsters," [:br]
"you should have no problem outsmarting them in their original games." [:br]
"Thanks for playing this 'post-compo' concept and Happy Ludum Dare 33!" [:br]]
[:div.links
[:a {:href ""} "@shaunlebron"]
" | " [:a {:href ""} "github"]
" | " [:a {:href "-dare-33/?action=preview&uid=31638"} "Ludum Entry"]
]]
}
)
(def texts
[
text0
text1
text2
text3
text4
text5
text6
text7
text8
text9
text10
text11
text12
text13
text14
text-end
{ : title " Release the Ghost ! " }
{ : title " Oh " }
{ : title " Well done " }
{ : title " Blinky and Clyde " }
]
)
|
a31f6123d85e4ad2f0ddab9fd852043ad95367cb236622ae3761a09411e392bb | triffon/fp-2019-20 | 02--my-exp.rkt | #lang racket
(require rackunit rackunit/text-ui)
(require "01--sum.rkt")
(define (fact n)
(if (= n 0)
1
(* n (fact (- n 1)))))
(define (1+ n) (+ 1 n))
# # # Задача 2
Напишете функция ` ( my - exp m x ) ` ,
; която изчислява `m`-тата частична сума на функцията `e^x` в точката `x`.
(define (my-exp m x)
(if (< m 0)
0
(+ (/ (expt x m) (fact m))
(my-exp (- m 1) x))))
(define (my-exp* m x)
(sum 0
m
(lambda (i) (/ (expt x i) (fact i)))
1+))
(run-tests (test-suite "my-exp tests"
(check-= (my-exp 25 1) (exp 1) 0.0001)
(check-= (my-exp 25 2.) (exp 2) 0.0001)
(check-= (my-exp 25 6.) (exp 6) 0.0001))
'verbose)
| null | https://raw.githubusercontent.com/triffon/fp-2019-20/7efb13ff4de3ea13baa2c5c59eb57341fac15641/exercises/computer-science-2/03--higher-order-functions--accumulate/solutions/02--my-exp.rkt | racket | която изчислява `m`-тата частична сума на функцията `e^x` в точката `x`. | #lang racket
(require rackunit rackunit/text-ui)
(require "01--sum.rkt")
(define (fact n)
(if (= n 0)
1
(* n (fact (- n 1)))))
(define (1+ n) (+ 1 n))
# # # Задача 2
Напишете функция ` ( my - exp m x ) ` ,
(define (my-exp m x)
(if (< m 0)
0
(+ (/ (expt x m) (fact m))
(my-exp (- m 1) x))))
(define (my-exp* m x)
(sum 0
m
(lambda (i) (/ (expt x i) (fact i)))
1+))
(run-tests (test-suite "my-exp tests"
(check-= (my-exp 25 1) (exp 1) 0.0001)
(check-= (my-exp 25 2.) (exp 2) 0.0001)
(check-= (my-exp 25 6.) (exp 6) 0.0001))
'verbose)
|
89c9e507e0e921d18ff1ab9f406896af5212c118febe8e34861abfe644ece176 | erlang/otp | timer.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1996 - 2022 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(timer).
-export([apply_after/4,
send_after/3, send_after/2,
exit_after/3, exit_after/2, kill_after/2, kill_after/1,
apply_interval/4, apply_repeatedly/4,
send_interval/3, send_interval/2,
cancel/1, sleep/1, tc/1, tc/2, tc/3, tc/4, now_diff/2,
seconds/1, minutes/1, hours/1, hms/3]).
-export([start_link/0, start/0,
handle_call/3, handle_info/2,
init/1,
code_change/3, handle_cast/2, terminate/2]).
%% Types which can be used by other modules
-export_type([tref/0]).
value for a receive 's after clause .
-define(MAX_RECEIVE_AFTER, 16#ffffffff).
Validations
-define(valid_time(T), is_integer(T), T >= 0).
-define(valid_mfa(M, F, A), is_atom(M), is_atom(F), is_list(A)).
%%
Time is in milliseconds .
%%
-opaque tref() :: {type(), reference()}.
-type type() :: 'once' | 'interval' | 'instant' | 'send_local'.
-type time() :: non_neg_integer().
%%
Interface functions
%%
-spec apply_after(Time, Module, Function, Arguments) ->
{'ok', TRef} | {'error', Reason}
when Time :: time(),
Module :: module(),
Function :: atom(),
Arguments :: [term()],
TRef :: tref(),
Reason :: term().
apply_after(0, M, F, A)
when ?valid_mfa(M, F, A) ->
_ = do_apply({M, F, A}, false),
{ok, {instant, make_ref()}};
apply_after(Time, M, F, A)
when ?valid_time(Time),
?valid_mfa(M, F, A) ->
req(apply_once, {system_time(), Time, {M, F, A}});
apply_after(_Time, _M, _F, _A) ->
{error, badarg}.
-spec send_after(Time, Destination, Message) -> {'ok', TRef} | {'error', Reason}
when Time :: time(),
Destination :: pid() | (RegName :: atom()) | {RegName :: atom(), Node :: node()},
Message :: term(),
TRef :: tref(),
Reason :: term().
send_after(0, PidOrRegName, Message)
when is_pid(PidOrRegName);
is_atom(PidOrRegName) ->
PidOrRegName ! Message,
{ok, {instant, make_ref()}};
send_after(0, {RegName, Node} = Dest, Message)
when is_atom(RegName),
is_atom(Node) ->
Dest ! Message,
{ok, {instant, make_ref()}};
send_after(Time, Pid, Message)
when ?valid_time(Time),
is_pid(Pid),
node(Pid) =:= node() ->
TRef = erlang:send_after(Time, Pid, Message),
{ok, {send_local, TRef}};
send_after(Time, Pid, Message)
when is_pid(Pid) ->
apply_after(Time, ?MODULE, send, [Pid, Message]);
send_after(Time, RegName, Message)
when is_atom(RegName) ->
apply_after(Time, ?MODULE, send, [RegName, Message]);
send_after(Time, {RegName, Node} = Dest, Message)
when is_atom(RegName),
is_atom(Node) ->
apply_after(Time, ?MODULE, send, [Dest, Message]);
send_after(_Time, _PidOrRegName, _Message) ->
{error, badarg}.
-spec send_after(Time, Message) -> {'ok', TRef} | {'error', Reason}
when Time :: time(),
Message :: term(),
TRef :: tref(),
Reason :: term().
send_after(Time, Message) ->
send_after(Time, self(), Message).
-spec exit_after(Time, Target, Reason1) -> {'ok', TRef} | {'error', Reason2}
when Time :: time(),
Target :: pid() | (RegName :: atom()),
TRef :: tref(),
Reason1 :: term(),
Reason2 :: term().
exit_after(Time, Pid, Reason) ->
apply_after(Time, erlang, exit, [Pid, Reason]).
-spec exit_after(Time, Reason1) -> {'ok', TRef} | {'error', Reason2}
when Time :: time(),
TRef :: tref(),
Reason1 :: term(),
Reason2 :: term().
exit_after(Time, Reason) ->
exit_after(Time, self(), Reason).
-spec kill_after(Time, Target) -> {'ok', TRef} | {'error', Reason2}
when Time :: time(),
Target :: pid() | (RegName :: atom()),
TRef :: tref(),
Reason2 :: term().
kill_after(Time, Pid) ->
exit_after(Time, Pid, kill).
-spec kill_after(Time) -> {'ok', TRef} | {'error', Reason2}
when Time :: time(),
TRef :: tref(),
Reason2 :: term().
kill_after(Time) ->
exit_after(Time, self(), kill).
-spec apply_interval(Time, Module, Function, Arguments) ->
{'ok', TRef} | {'error', Reason}
when Time :: time(),
Module :: module(),
Function :: atom(),
Arguments :: [term()],
TRef :: tref(),
Reason :: term().
apply_interval(Time, M, F, A)
when ?valid_time(Time),
?valid_mfa(M, F, A) ->
req(apply_interval, {system_time(), Time, self(), {M, F, A}});
apply_interval(_Time, _M, _F, _A) ->
{error, badarg}.
-spec apply_repeatedly(Time, Module, Function, Arguments) ->
{'ok', TRef} | {'error', Reason}
when Time :: time(),
Module :: module(),
Function :: atom(),
Arguments :: [term()],
TRef :: tref(),
Reason :: term().
apply_repeatedly(Time, M, F, A)
when ?valid_time(Time),
?valid_mfa(M, F, A) ->
req(apply_repeatedly, {system_time(), Time, self(), {M, F, A}});
apply_repeatedly(_Time, _M, _F, _A) ->
{error, badarg}.
-spec send_interval(Time, Destination, Message) -> {'ok', TRef} | {'error', Reason}
when Time :: time(),
Destination :: pid() | (RegName :: atom()) | {RegName :: atom(), Node :: node()},
Message :: term(),
TRef :: tref(),
Reason :: term().
send_interval(Time, Pid, Message)
when ?valid_time(Time),
is_pid(Pid) ->
req(apply_interval, {system_time(), Time, Pid, {?MODULE, send, [Pid, Message]}});
send_interval(Time, RegName, Message)
when ?valid_time(Time),
is_atom(RegName) ->
req(apply_interval, {system_time(), Time, RegName, {?MODULE, send, [RegName, Message]}});
send_interval(Time, Dest = {RegName, Node}, Message)
when ?valid_time(Time),
is_atom(RegName),
is_atom(Node) ->
req(apply_interval, {system_time(), Time, Dest, {?MODULE, send, [Dest, Message]}});
send_interval(_Time, _Pid, _Message) ->
{error, badarg}.
-spec send_interval(Time, Message) -> {'ok', TRef} | {'error', Reason}
when Time :: time(),
Message :: term(),
TRef :: tref(),
Reason :: term().
send_interval(Time, Message) ->
send_interval(Time, self(), Message).
-spec cancel(TRef) -> {'ok', 'cancel'} | {'error', Reason}
when TRef :: tref(),
Reason :: term().
cancel({instant, Ref})
when is_reference(Ref) ->
{ok, cancel};
cancel({send_local, Ref})
when is_reference(Ref) ->
_ = erlang:cancel_timer(Ref),
{ok, cancel};
cancel({once, Ref} = TRef)
when is_reference(Ref) ->
req(cancel, TRef);
cancel({interval, Ref} = TRef)
when is_reference(Ref) ->
req(cancel, TRef);
cancel(_TRef) ->
{error, badarg}.
-spec sleep(Time) -> 'ok'
when Time :: timeout().
sleep(T)
when is_integer(T),
T > ?MAX_RECEIVE_AFTER ->
receive
after ?MAX_RECEIVE_AFTER ->
sleep(T - ?MAX_RECEIVE_AFTER)
end;
sleep(T) ->
receive
after T -> ok
end.
%%
%% Measure the execution time (in microseconds) for Fun().
%%
-spec tc(Fun) -> {Time, Value}
when Fun :: function(),
Time :: integer(),
Value :: term().
tc(F) ->
tc(F, microsecond).
%%
Measure the execution time ( in microseconds ) for Fun(Args )
%% or the execution time (in TimeUnit) for Fun().
%%
-spec tc(Fun, Arguments) -> {Time, Value}
when Fun :: function(),
Arguments :: [term()],
Time :: integer(),
Value :: term();
(Fun, TimeUnit) -> {Time, Value}
when Fun :: function(),
TimeUnit :: erlang:time_unit(),
Time :: integer(),
Value :: term().
tc(F, A) when is_list(A) ->
tc(F, A, microsecond);
tc(F, TimeUnit) ->
T1 = erlang:monotonic_time(),
Val = F(),
T2 = erlang:monotonic_time(),
Time = erlang:convert_time_unit(T2 - T1, native, TimeUnit),
{Time, Val}.
%%
Measure the execution time ( in microseconds ) for an MFA
or the execution time ( in TimeUnit ) for Fun(Args ) .
%%
-spec tc(Module, Function, Arguments) -> {Time, Value}
when Module :: module(),
Function :: atom(),
Arguments :: [term()],
Time :: integer(),
Value :: term();
(Fun, Arguments, TimeUnit) -> {Time, Value}
when Fun :: function(),
Arguments :: [term()],
TimeUnit :: erlang:time_unit(),
Time :: integer(),
Value :: term().
tc(M, F, A) when is_list(A) ->
tc(M, F, A, microsecond);
tc(F, A, TimeUnit) ->
T1 = erlang:monotonic_time(),
Val = apply(F, A),
T2 = erlang:monotonic_time(),
Time = erlang:convert_time_unit(T2 - T1, native, TimeUnit),
{Time, Val}.
%%
Measure the execution time ( in TimeUnit ) for an MFA .
%%
-spec tc(Module, Function, Arguments, TimeUnit) -> {Time, Value}
when Module :: module(),
Function :: atom(),
Arguments :: [term()],
TimeUnit :: erlang:time_unit(),
Time :: integer(),
Value :: term().
tc(M, F, A, TimeUnit) ->
T1 = erlang:monotonic_time(),
Val = apply(M, F, A),
T2 = erlang:monotonic_time(),
Time = erlang:convert_time_unit(T2 - T1, native, TimeUnit),
{Time, Val}.
%%
Calculate the time difference ( in microseconds ) of two
%% erlang:now() timestamps, T2-T1.
%%
-spec now_diff(T2, T1) -> Tdiff
when T1 :: erlang:timestamp(),
T2 :: erlang:timestamp(),
Tdiff :: integer().
now_diff({A2, B2, C2}, {A1, B1, C1}) ->
((A2-A1)*1000000 + B2-B1)*1000000 + C2-C1.
%%
%% Convert seconds, minutes etc. to milliseconds.
%%
-spec seconds(Seconds) -> MilliSeconds
when Seconds :: non_neg_integer(),
MilliSeconds :: non_neg_integer().
seconds(Seconds) ->
1000*Seconds.
-spec minutes(Minutes) -> MilliSeconds
when Minutes :: non_neg_integer(),
MilliSeconds :: non_neg_integer().
minutes(Minutes) ->
1000*60*Minutes.
-spec hours(Hours) -> MilliSeconds
when Hours :: non_neg_integer(),
MilliSeconds :: non_neg_integer().
hours(Hours) ->
1000*60*60*Hours.
-spec hms(Hours, Minutes, Seconds) -> MilliSeconds
when Hours :: non_neg_integer(),
Minutes :: non_neg_integer(),
Seconds :: non_neg_integer(),
MilliSeconds :: non_neg_integer().
hms(H, M, S) ->
hours(H) + minutes(M) + seconds(S).
%%
%% Start/init functions
%%
-spec start() -> 'ok'.
start() ->
{ok, _Pid} = do_start(),
ok.
do_start() ->
case
supervisor:start_child(
kernel_sup,
#{
id => timer_server,
start => {?MODULE, start_link, []},
restart => permanent,
shutdown => 1000,
type => worker,
modules => [?MODULE]
}
)
of
{ok, Pid} ->
{ok, Pid};
{ok, Pid, _} ->
{ok, Pid};
{error, {already_started, Pid}} ->
{ok, Pid};
{error, already_present} ->
case supervisor:restart_child(kernel_sup, timer_server) of
{ok, Pid} ->
{ok, Pid};
{error, {already_started, Pid}} ->
{ok, Pid}
end;
Error ->
Error
end.
-spec start_link() -> {'ok', pid()} | {'error', term()}.
start_link() ->
gen_server:start_link({local, timer_server}, ?MODULE, [], []).
-spec init([]) -> {'ok', ets:tid()}.
init([]) ->
process_flag(trap_exit, true),
Tab = ets:new(?MODULE, []),
{ok, Tab}.
%% server calls
%% Try sending a call. If it fails with reason noproc,
%% try starting the timer server and try once again.
req(Req, Arg) ->
try
maybe_req(Req, Arg)
catch
exit:{noproc, _} ->
{ok, _Pid} = do_start(),
maybe_req(Req, Arg)
end.
maybe_req(Req, Arg) ->
gen_server:call(timer_server, {Req, Arg}, infinity).
%% Call handling.
-spec handle_call(term(), term(), Tab) ->
{'reply', term(), Tab} | {'noreply', Tab} when
Tab :: ets:tid().
Start a one - shot timer .
handle_call({apply_once, {Started, Time, MFA}}, _From, Tab) ->
Timeout = Started + Time,
Reply = try
erlang:start_timer(Timeout, self(), {apply_once, MFA},
[{abs, true}])
of
SRef ->
ets:insert(Tab, {SRef}),
{ok, {once, SRef}}
catch
error:badarg ->
{error, badarg}
end,
{reply, Reply, Tab};
%% Start an interval timer.
handle_call({apply_interval, {Started, Time, Pid, MFA}}, _From, Tab) ->
{TRef, TPid, Tag} = start_interval_loop(Started, Time, Pid, MFA, false),
ets:insert(Tab, {TRef, TPid, Tag}),
{reply, {ok, {interval, TRef}}, Tab};
handle_call({apply_repeatedly, {Started, Time, Pid, MFA}}, _From, Tab) ->
{TRef, TPid, Tag} = start_interval_loop(Started, Time, Pid, MFA, true),
ets:insert(Tab, {TRef, TPid, Tag}),
{reply, {ok, {interval, TRef}}, Tab};
Cancel a one - shot timer .
handle_call({cancel, {once, TRef}}, _From, Tab) ->
_ = remove_timer(TRef, Tab),
{reply, {ok, cancel}, Tab};
%% Cancel an interval timer.
handle_call({cancel, {interval, TRef}}, _From, Tab) ->
_ = case remove_timer(TRef, Tab) of
true ->
demonitor(TRef, [flush]);
false ->
ok
end,
{reply, {ok, cancel}, Tab};
%% Unexpected.
handle_call(_Req, _From, Tab) ->
{noreply, Tab}.
%% Info handling.
-spec handle_info(term(), Tab) -> {'noreply', Tab}
when Tab :: ets:tid().
One - shot timer timeout .
handle_info({timeout, TRef, {apply_once, MFA}}, Tab) ->
_ = case ets:take(Tab, TRef) of
[{TRef}] ->
do_apply(MFA, false);
[] ->
ok
end,
{noreply, Tab};
%% An interval timer loop process died.
handle_info({'DOWN', TRef, process, _Pid, _Reason}, Tab) ->
_ = remove_timer(TRef, Tab),
{noreply, Tab};
%% Unexpected.
handle_info(_Req, Tab) ->
{noreply, Tab}.
%% Cast handling.
-spec handle_cast(term(), Tab) -> {'noreply', Tab}
when Tab :: ets:tid().
%% Unexpected.
handle_cast(_Req, Tab) ->
{noreply, Tab}.
-spec terminate(term(), _Tab) -> 'ok'.
terminate(_Reason, undefined) ->
ok;
terminate(Reason, Tab) ->
_ = ets:foldl(fun
({TRef}, Acc) ->
_ = cancel_timer(TRef),
Acc;
({_TRef, TPid, Tag}, Acc) ->
TPid ! {cancel, Tag},
Acc
end,
undefined,
Tab),
true = ets:delete(Tab),
terminate(Reason, undefined).
-spec code_change(term(), State, term()) -> {'ok', State}.
code_change(_OldVsn, Tab, _Extra) ->
%% According to the man for gen server no timer can be set here.
{ok, Tab}.
start_interval_loop(Started, Time, TargetPid, MFA, WaitComplete) ->
Tag = make_ref(),
TimeServerPid = self(),
{TPid, TRef} = spawn_monitor(fun() ->
TimeServerRef = monitor(process, TimeServerPid),
TargetRef = monitor(process, TargetPid),
TimerRef = schedule_interval_timer(Started, Time,
MFA),
_ = interval_loop(TimeServerRef, TargetRef, Tag,
WaitComplete, TimerRef)
end),
{TRef, TPid, Tag}.
%% Interval timer loop.
interval_loop(TimerServerMon, TargetMon, Tag, WaitComplete, TimerRef0) ->
receive
{cancel, Tag} ->
ok = cancel_timer(TimerRef0);
{'DOWN', TimerServerMon, process, _, _} ->
ok = cancel_timer(TimerRef0);
{'DOWN', TargetMon, process, _, _} ->
ok = cancel_timer(TimerRef0);
{timeout, TimerRef0, {apply_interval, CurTimeout, Time, MFA}} ->
case do_apply(MFA, WaitComplete) of
{ok, {spawn, ActionMon}} ->
receive
{cancel, Tag} ->
ok;
{'DOWN', TimerServerMon, process, _, _} ->
ok;
{'DOWN', TargetMon, process, _, _} ->
ok;
{'DOWN', ActionMon, process, _, _} ->
TimerRef1 = schedule_interval_timer(CurTimeout, Time, MFA),
interval_loop(TimerServerMon, TargetMon, Tag, WaitComplete, TimerRef1)
end;
_ ->
TimerRef1 = schedule_interval_timer(CurTimeout, Time, MFA),
interval_loop(TimerServerMon, TargetMon, Tag, WaitComplete, TimerRef1)
end
end.
schedule_interval_timer(CurTimeout, Time, MFA) ->
NextTimeout = CurTimeout + Time,
case NextTimeout =< system_time() of
true ->
TimerRef = make_ref(),
self() ! {timeout, TimerRef, {apply_interval, NextTimeout, Time, MFA}},
TimerRef;
false ->
erlang:start_timer(NextTimeout, self(), {apply_interval, NextTimeout, Time, MFA}, [{abs, true}])
end.
%% Remove a timer.
remove_timer(TRef, Tab) ->
case ets:take(Tab, TRef) of
One - shot timer .
ok = cancel_timer(TRef),
true;
[{TRef, TPid, Tag}] -> % Interval timer.
TPid ! {cancel, Tag},
true;
[] -> % TimerReference does not exist, do nothing
false
end.
%% Cancel a timer.
cancel_timer(TRef) ->
erlang:cancel_timer(TRef, [{async, true}, {info, false}]).
%% Help functions
If send op . send directly ( faster than spawn )
do_apply({?MODULE, send, A}, _) ->
try send(A)
of _ -> {ok, send}
catch _:_ -> error
end;
%% If exit op. resolve registered name
do_apply({erlang, exit, [Name, Reason]}, _) ->
try exit(get_pid(Name), Reason)
of _ -> {ok, exit}
catch _:_ -> error
end;
do_apply({M,F,A}, false) ->
try spawn(M, F, A)
of _ -> {ok, spawn}
catch _:_ -> error
end;
do_apply({M, F, A}, true) ->
try spawn_monitor(M, F, A)
of {_, Ref} -> {ok, {spawn, Ref}}
catch _:_ -> error
end.
%% Get current time in milliseconds,
%% ceil'ed to the next millisecond.
system_time() ->
(erlang:monotonic_time(microsecond) + 999) div 1000.
send([Pid, Msg]) ->
Pid ! Msg.
%% Resolve a registered name.
get_pid(Name) when is_pid(Name) ->
Name;
get_pid(undefined) ->
undefined;
get_pid(Name) when is_atom(Name) ->
get_pid(whereis(Name));
get_pid(_) ->
undefined.
| null | https://raw.githubusercontent.com/erlang/otp/fcb6abc96710b6bde88a6320ad44a2fd33c23d47/lib/stdlib/src/timer.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
Types which can be used by other modules
Measure the execution time (in microseconds) for Fun().
or the execution time (in TimeUnit) for Fun().
erlang:now() timestamps, T2-T1.
Convert seconds, minutes etc. to milliseconds.
Start/init functions
server calls
Try sending a call. If it fails with reason noproc,
try starting the timer server and try once again.
Call handling.
Start an interval timer.
Cancel an interval timer.
Unexpected.
Info handling.
An interval timer loop process died.
Unexpected.
Cast handling.
Unexpected.
According to the man for gen server no timer can be set here.
Interval timer loop.
Remove a timer.
Interval timer.
TimerReference does not exist, do nothing
Cancel a timer.
Help functions
If exit op. resolve registered name
Get current time in milliseconds,
ceil'ed to the next millisecond.
Resolve a registered name. | Copyright Ericsson AB 1996 - 2022 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(timer).
-export([apply_after/4,
send_after/3, send_after/2,
exit_after/3, exit_after/2, kill_after/2, kill_after/1,
apply_interval/4, apply_repeatedly/4,
send_interval/3, send_interval/2,
cancel/1, sleep/1, tc/1, tc/2, tc/3, tc/4, now_diff/2,
seconds/1, minutes/1, hours/1, hms/3]).
-export([start_link/0, start/0,
handle_call/3, handle_info/2,
init/1,
code_change/3, handle_cast/2, terminate/2]).
-export_type([tref/0]).
value for a receive 's after clause .
-define(MAX_RECEIVE_AFTER, 16#ffffffff).
Validations
-define(valid_time(T), is_integer(T), T >= 0).
-define(valid_mfa(M, F, A), is_atom(M), is_atom(F), is_list(A)).
Time is in milliseconds .
-opaque tref() :: {type(), reference()}.
-type type() :: 'once' | 'interval' | 'instant' | 'send_local'.
-type time() :: non_neg_integer().
Interface functions
-spec apply_after(Time, Module, Function, Arguments) ->
{'ok', TRef} | {'error', Reason}
when Time :: time(),
Module :: module(),
Function :: atom(),
Arguments :: [term()],
TRef :: tref(),
Reason :: term().
apply_after(0, M, F, A)
when ?valid_mfa(M, F, A) ->
_ = do_apply({M, F, A}, false),
{ok, {instant, make_ref()}};
apply_after(Time, M, F, A)
when ?valid_time(Time),
?valid_mfa(M, F, A) ->
req(apply_once, {system_time(), Time, {M, F, A}});
apply_after(_Time, _M, _F, _A) ->
{error, badarg}.
-spec send_after(Time, Destination, Message) -> {'ok', TRef} | {'error', Reason}
when Time :: time(),
Destination :: pid() | (RegName :: atom()) | {RegName :: atom(), Node :: node()},
Message :: term(),
TRef :: tref(),
Reason :: term().
send_after(0, PidOrRegName, Message)
when is_pid(PidOrRegName);
is_atom(PidOrRegName) ->
PidOrRegName ! Message,
{ok, {instant, make_ref()}};
send_after(0, {RegName, Node} = Dest, Message)
when is_atom(RegName),
is_atom(Node) ->
Dest ! Message,
{ok, {instant, make_ref()}};
send_after(Time, Pid, Message)
when ?valid_time(Time),
is_pid(Pid),
node(Pid) =:= node() ->
TRef = erlang:send_after(Time, Pid, Message),
{ok, {send_local, TRef}};
send_after(Time, Pid, Message)
when is_pid(Pid) ->
apply_after(Time, ?MODULE, send, [Pid, Message]);
send_after(Time, RegName, Message)
when is_atom(RegName) ->
apply_after(Time, ?MODULE, send, [RegName, Message]);
send_after(Time, {RegName, Node} = Dest, Message)
when is_atom(RegName),
is_atom(Node) ->
apply_after(Time, ?MODULE, send, [Dest, Message]);
send_after(_Time, _PidOrRegName, _Message) ->
{error, badarg}.
-spec send_after(Time, Message) -> {'ok', TRef} | {'error', Reason}
when Time :: time(),
Message :: term(),
TRef :: tref(),
Reason :: term().
send_after(Time, Message) ->
send_after(Time, self(), Message).
-spec exit_after(Time, Target, Reason1) -> {'ok', TRef} | {'error', Reason2}
when Time :: time(),
Target :: pid() | (RegName :: atom()),
TRef :: tref(),
Reason1 :: term(),
Reason2 :: term().
exit_after(Time, Pid, Reason) ->
apply_after(Time, erlang, exit, [Pid, Reason]).
-spec exit_after(Time, Reason1) -> {'ok', TRef} | {'error', Reason2}
when Time :: time(),
TRef :: tref(),
Reason1 :: term(),
Reason2 :: term().
exit_after(Time, Reason) ->
exit_after(Time, self(), Reason).
-spec kill_after(Time, Target) -> {'ok', TRef} | {'error', Reason2}
when Time :: time(),
Target :: pid() | (RegName :: atom()),
TRef :: tref(),
Reason2 :: term().
kill_after(Time, Pid) ->
exit_after(Time, Pid, kill).
-spec kill_after(Time) -> {'ok', TRef} | {'error', Reason2}
when Time :: time(),
TRef :: tref(),
Reason2 :: term().
kill_after(Time) ->
exit_after(Time, self(), kill).
-spec apply_interval(Time, Module, Function, Arguments) ->
{'ok', TRef} | {'error', Reason}
when Time :: time(),
Module :: module(),
Function :: atom(),
Arguments :: [term()],
TRef :: tref(),
Reason :: term().
apply_interval(Time, M, F, A)
when ?valid_time(Time),
?valid_mfa(M, F, A) ->
req(apply_interval, {system_time(), Time, self(), {M, F, A}});
apply_interval(_Time, _M, _F, _A) ->
{error, badarg}.
-spec apply_repeatedly(Time, Module, Function, Arguments) ->
{'ok', TRef} | {'error', Reason}
when Time :: time(),
Module :: module(),
Function :: atom(),
Arguments :: [term()],
TRef :: tref(),
Reason :: term().
apply_repeatedly(Time, M, F, A)
when ?valid_time(Time),
?valid_mfa(M, F, A) ->
req(apply_repeatedly, {system_time(), Time, self(), {M, F, A}});
apply_repeatedly(_Time, _M, _F, _A) ->
{error, badarg}.
-spec send_interval(Time, Destination, Message) -> {'ok', TRef} | {'error', Reason}
when Time :: time(),
Destination :: pid() | (RegName :: atom()) | {RegName :: atom(), Node :: node()},
Message :: term(),
TRef :: tref(),
Reason :: term().
send_interval(Time, Pid, Message)
when ?valid_time(Time),
is_pid(Pid) ->
req(apply_interval, {system_time(), Time, Pid, {?MODULE, send, [Pid, Message]}});
send_interval(Time, RegName, Message)
when ?valid_time(Time),
is_atom(RegName) ->
req(apply_interval, {system_time(), Time, RegName, {?MODULE, send, [RegName, Message]}});
send_interval(Time, Dest = {RegName, Node}, Message)
when ?valid_time(Time),
is_atom(RegName),
is_atom(Node) ->
req(apply_interval, {system_time(), Time, Dest, {?MODULE, send, [Dest, Message]}});
send_interval(_Time, _Pid, _Message) ->
{error, badarg}.
-spec send_interval(Time, Message) -> {'ok', TRef} | {'error', Reason}
when Time :: time(),
Message :: term(),
TRef :: tref(),
Reason :: term().
send_interval(Time, Message) ->
send_interval(Time, self(), Message).
-spec cancel(TRef) -> {'ok', 'cancel'} | {'error', Reason}
when TRef :: tref(),
Reason :: term().
cancel({instant, Ref})
when is_reference(Ref) ->
{ok, cancel};
cancel({send_local, Ref})
when is_reference(Ref) ->
_ = erlang:cancel_timer(Ref),
{ok, cancel};
cancel({once, Ref} = TRef)
when is_reference(Ref) ->
req(cancel, TRef);
cancel({interval, Ref} = TRef)
when is_reference(Ref) ->
req(cancel, TRef);
cancel(_TRef) ->
{error, badarg}.
-spec sleep(Time) -> 'ok'
when Time :: timeout().
sleep(T)
when is_integer(T),
T > ?MAX_RECEIVE_AFTER ->
receive
after ?MAX_RECEIVE_AFTER ->
sleep(T - ?MAX_RECEIVE_AFTER)
end;
sleep(T) ->
receive
after T -> ok
end.
-spec tc(Fun) -> {Time, Value}
when Fun :: function(),
Time :: integer(),
Value :: term().
tc(F) ->
tc(F, microsecond).
Measure the execution time ( in microseconds ) for Fun(Args )
-spec tc(Fun, Arguments) -> {Time, Value}
when Fun :: function(),
Arguments :: [term()],
Time :: integer(),
Value :: term();
(Fun, TimeUnit) -> {Time, Value}
when Fun :: function(),
TimeUnit :: erlang:time_unit(),
Time :: integer(),
Value :: term().
tc(F, A) when is_list(A) ->
tc(F, A, microsecond);
tc(F, TimeUnit) ->
T1 = erlang:monotonic_time(),
Val = F(),
T2 = erlang:monotonic_time(),
Time = erlang:convert_time_unit(T2 - T1, native, TimeUnit),
{Time, Val}.
Measure the execution time ( in microseconds ) for an MFA
or the execution time ( in TimeUnit ) for Fun(Args ) .
-spec tc(Module, Function, Arguments) -> {Time, Value}
when Module :: module(),
Function :: atom(),
Arguments :: [term()],
Time :: integer(),
Value :: term();
(Fun, Arguments, TimeUnit) -> {Time, Value}
when Fun :: function(),
Arguments :: [term()],
TimeUnit :: erlang:time_unit(),
Time :: integer(),
Value :: term().
tc(M, F, A) when is_list(A) ->
tc(M, F, A, microsecond);
tc(F, A, TimeUnit) ->
T1 = erlang:monotonic_time(),
Val = apply(F, A),
T2 = erlang:monotonic_time(),
Time = erlang:convert_time_unit(T2 - T1, native, TimeUnit),
{Time, Val}.
Measure the execution time ( in TimeUnit ) for an MFA .
-spec tc(Module, Function, Arguments, TimeUnit) -> {Time, Value}
when Module :: module(),
Function :: atom(),
Arguments :: [term()],
TimeUnit :: erlang:time_unit(),
Time :: integer(),
Value :: term().
tc(M, F, A, TimeUnit) ->
T1 = erlang:monotonic_time(),
Val = apply(M, F, A),
T2 = erlang:monotonic_time(),
Time = erlang:convert_time_unit(T2 - T1, native, TimeUnit),
{Time, Val}.
Calculate the time difference ( in microseconds ) of two
-spec now_diff(T2, T1) -> Tdiff
when T1 :: erlang:timestamp(),
T2 :: erlang:timestamp(),
Tdiff :: integer().
now_diff({A2, B2, C2}, {A1, B1, C1}) ->
((A2-A1)*1000000 + B2-B1)*1000000 + C2-C1.
-spec seconds(Seconds) -> MilliSeconds
when Seconds :: non_neg_integer(),
MilliSeconds :: non_neg_integer().
seconds(Seconds) ->
1000*Seconds.
-spec minutes(Minutes) -> MilliSeconds
when Minutes :: non_neg_integer(),
MilliSeconds :: non_neg_integer().
minutes(Minutes) ->
1000*60*Minutes.
-spec hours(Hours) -> MilliSeconds
when Hours :: non_neg_integer(),
MilliSeconds :: non_neg_integer().
hours(Hours) ->
1000*60*60*Hours.
-spec hms(Hours, Minutes, Seconds) -> MilliSeconds
when Hours :: non_neg_integer(),
Minutes :: non_neg_integer(),
Seconds :: non_neg_integer(),
MilliSeconds :: non_neg_integer().
hms(H, M, S) ->
hours(H) + minutes(M) + seconds(S).
-spec start() -> 'ok'.
start() ->
{ok, _Pid} = do_start(),
ok.
do_start() ->
case
supervisor:start_child(
kernel_sup,
#{
id => timer_server,
start => {?MODULE, start_link, []},
restart => permanent,
shutdown => 1000,
type => worker,
modules => [?MODULE]
}
)
of
{ok, Pid} ->
{ok, Pid};
{ok, Pid, _} ->
{ok, Pid};
{error, {already_started, Pid}} ->
{ok, Pid};
{error, already_present} ->
case supervisor:restart_child(kernel_sup, timer_server) of
{ok, Pid} ->
{ok, Pid};
{error, {already_started, Pid}} ->
{ok, Pid}
end;
Error ->
Error
end.
-spec start_link() -> {'ok', pid()} | {'error', term()}.
start_link() ->
gen_server:start_link({local, timer_server}, ?MODULE, [], []).
-spec init([]) -> {'ok', ets:tid()}.
init([]) ->
process_flag(trap_exit, true),
Tab = ets:new(?MODULE, []),
{ok, Tab}.
req(Req, Arg) ->
try
maybe_req(Req, Arg)
catch
exit:{noproc, _} ->
{ok, _Pid} = do_start(),
maybe_req(Req, Arg)
end.
maybe_req(Req, Arg) ->
gen_server:call(timer_server, {Req, Arg}, infinity).
-spec handle_call(term(), term(), Tab) ->
{'reply', term(), Tab} | {'noreply', Tab} when
Tab :: ets:tid().
Start a one - shot timer .
handle_call({apply_once, {Started, Time, MFA}}, _From, Tab) ->
Timeout = Started + Time,
Reply = try
erlang:start_timer(Timeout, self(), {apply_once, MFA},
[{abs, true}])
of
SRef ->
ets:insert(Tab, {SRef}),
{ok, {once, SRef}}
catch
error:badarg ->
{error, badarg}
end,
{reply, Reply, Tab};
handle_call({apply_interval, {Started, Time, Pid, MFA}}, _From, Tab) ->
{TRef, TPid, Tag} = start_interval_loop(Started, Time, Pid, MFA, false),
ets:insert(Tab, {TRef, TPid, Tag}),
{reply, {ok, {interval, TRef}}, Tab};
handle_call({apply_repeatedly, {Started, Time, Pid, MFA}}, _From, Tab) ->
{TRef, TPid, Tag} = start_interval_loop(Started, Time, Pid, MFA, true),
ets:insert(Tab, {TRef, TPid, Tag}),
{reply, {ok, {interval, TRef}}, Tab};
Cancel a one - shot timer .
handle_call({cancel, {once, TRef}}, _From, Tab) ->
_ = remove_timer(TRef, Tab),
{reply, {ok, cancel}, Tab};
handle_call({cancel, {interval, TRef}}, _From, Tab) ->
_ = case remove_timer(TRef, Tab) of
true ->
demonitor(TRef, [flush]);
false ->
ok
end,
{reply, {ok, cancel}, Tab};
handle_call(_Req, _From, Tab) ->
{noreply, Tab}.
-spec handle_info(term(), Tab) -> {'noreply', Tab}
when Tab :: ets:tid().
One - shot timer timeout .
handle_info({timeout, TRef, {apply_once, MFA}}, Tab) ->
_ = case ets:take(Tab, TRef) of
[{TRef}] ->
do_apply(MFA, false);
[] ->
ok
end,
{noreply, Tab};
handle_info({'DOWN', TRef, process, _Pid, _Reason}, Tab) ->
_ = remove_timer(TRef, Tab),
{noreply, Tab};
handle_info(_Req, Tab) ->
{noreply, Tab}.
-spec handle_cast(term(), Tab) -> {'noreply', Tab}
when Tab :: ets:tid().
handle_cast(_Req, Tab) ->
{noreply, Tab}.
-spec terminate(term(), _Tab) -> 'ok'.
terminate(_Reason, undefined) ->
ok;
terminate(Reason, Tab) ->
_ = ets:foldl(fun
({TRef}, Acc) ->
_ = cancel_timer(TRef),
Acc;
({_TRef, TPid, Tag}, Acc) ->
TPid ! {cancel, Tag},
Acc
end,
undefined,
Tab),
true = ets:delete(Tab),
terminate(Reason, undefined).
-spec code_change(term(), State, term()) -> {'ok', State}.
code_change(_OldVsn, Tab, _Extra) ->
{ok, Tab}.
start_interval_loop(Started, Time, TargetPid, MFA, WaitComplete) ->
Tag = make_ref(),
TimeServerPid = self(),
{TPid, TRef} = spawn_monitor(fun() ->
TimeServerRef = monitor(process, TimeServerPid),
TargetRef = monitor(process, TargetPid),
TimerRef = schedule_interval_timer(Started, Time,
MFA),
_ = interval_loop(TimeServerRef, TargetRef, Tag,
WaitComplete, TimerRef)
end),
{TRef, TPid, Tag}.
interval_loop(TimerServerMon, TargetMon, Tag, WaitComplete, TimerRef0) ->
receive
{cancel, Tag} ->
ok = cancel_timer(TimerRef0);
{'DOWN', TimerServerMon, process, _, _} ->
ok = cancel_timer(TimerRef0);
{'DOWN', TargetMon, process, _, _} ->
ok = cancel_timer(TimerRef0);
{timeout, TimerRef0, {apply_interval, CurTimeout, Time, MFA}} ->
case do_apply(MFA, WaitComplete) of
{ok, {spawn, ActionMon}} ->
receive
{cancel, Tag} ->
ok;
{'DOWN', TimerServerMon, process, _, _} ->
ok;
{'DOWN', TargetMon, process, _, _} ->
ok;
{'DOWN', ActionMon, process, _, _} ->
TimerRef1 = schedule_interval_timer(CurTimeout, Time, MFA),
interval_loop(TimerServerMon, TargetMon, Tag, WaitComplete, TimerRef1)
end;
_ ->
TimerRef1 = schedule_interval_timer(CurTimeout, Time, MFA),
interval_loop(TimerServerMon, TargetMon, Tag, WaitComplete, TimerRef1)
end
end.
schedule_interval_timer(CurTimeout, Time, MFA) ->
NextTimeout = CurTimeout + Time,
case NextTimeout =< system_time() of
true ->
TimerRef = make_ref(),
self() ! {timeout, TimerRef, {apply_interval, NextTimeout, Time, MFA}},
TimerRef;
false ->
erlang:start_timer(NextTimeout, self(), {apply_interval, NextTimeout, Time, MFA}, [{abs, true}])
end.
remove_timer(TRef, Tab) ->
case ets:take(Tab, TRef) of
One - shot timer .
ok = cancel_timer(TRef),
true;
TPid ! {cancel, Tag},
true;
false
end.
cancel_timer(TRef) ->
erlang:cancel_timer(TRef, [{async, true}, {info, false}]).
If send op . send directly ( faster than spawn )
do_apply({?MODULE, send, A}, _) ->
try send(A)
of _ -> {ok, send}
catch _:_ -> error
end;
do_apply({erlang, exit, [Name, Reason]}, _) ->
try exit(get_pid(Name), Reason)
of _ -> {ok, exit}
catch _:_ -> error
end;
do_apply({M,F,A}, false) ->
try spawn(M, F, A)
of _ -> {ok, spawn}
catch _:_ -> error
end;
do_apply({M, F, A}, true) ->
try spawn_monitor(M, F, A)
of {_, Ref} -> {ok, {spawn, Ref}}
catch _:_ -> error
end.
system_time() ->
(erlang:monotonic_time(microsecond) + 999) div 1000.
send([Pid, Msg]) ->
Pid ! Msg.
get_pid(Name) when is_pid(Name) ->
Name;
get_pid(undefined) ->
undefined;
get_pid(Name) when is_atom(Name) ->
get_pid(whereis(Name));
get_pid(_) ->
undefined.
|
0e0a236a52afe6167ae891c48429bcb9093c6cfc5834badbdbaeb4a50ca46a06 | zshipko/yurt | yurt.mli | (** [Route]s are used to build URLs with types variables *)
module Route : sig
type route =
[ `String of string
| `Int of string
| `Float of string
| `Path of string
| `Match of string * string
| `Route of route list ]
exception Invalid_route_type
(** [Invalid_route_type] is raised when a value of the wrong type is requested *)
type params = (string, route) Hashtbl.t
(** Param map *)
val to_string : route -> string
(** Convert a route to string *)
val to_regexp : route -> Str.regexp
(** Convert a route to regular expressions *)
val of_string : string -> route
(** Create a [Route] from the given string *)
val params : route -> string -> params
(** Get parameters from a route *)
val string : params -> string -> string
(** Get a string parameter *)
val int : params -> string -> int
(** Get an int parameter *)
val float : params -> string -> float
(** Get a float parameter *)
val to_json : params -> Ezjsonm.t
(** Convert parameters to JSON *)
end
(** The [Body] module contains methods needed for creating, reading and modifying request data *)
module Body : sig
type t = Cohttp_lwt.Body.t
type transfer_encoding = Cohttp.Transfer.encoding
val to_string : t -> string Lwt.t
(** Convert body to string *)
val to_stream : t -> string Lwt_stream.t
(** Convert body to stream *)
val to_json : t -> Ezjsonm.t Lwt.t
(** Convert body to JSON *)
val of_string : string -> t
(** Create body from string *)
val of_stream : string Lwt_stream.t -> t
(** Create body from stream *)
val of_json : Ezjsonm.t -> t
(** Create body from JSON *)
val map : (string -> string) -> t -> t
(** Modify body *)
val length : t -> (int64 * t) Lwt.t
(** Get body length *)
val is_empty : t -> bool Lwt.t
(** Returns true when body has no content *)
val drain : t -> unit Lwt.t
(** Ignore body content *)
val transfer_encoding : t -> transfer_encoding
end
module Request = Cohttp_lwt_unix.Request
module Response = Cohttp.Response
module Header = Cohttp.Header
type response = (Response.t * Body.t) Lwt.t
(** Response type *)
and endpoint = Request.t -> Route.params -> Body.t -> response
(** HTTP handler *)
(** [Query] contains methods for reading query string parameters *)
module Query : sig
type t = (string, string list) Hashtbl.t
val get : Request.t -> t
* the request 's query string
val to_json : Request.t -> Ezjsonm.t
(** Convert query string to JSON *)
val string : Request.t -> string -> string option
(** Get string query string parameter *)
val int : Request.t -> string -> int option
(** Get int query string parameter *)
val float : Request.t -> string -> float option
(** Get float query string parameter *)
(* Get json query string parameter *)
val json : Request.t -> string -> Ezjsonm.value option
end
(** [Server] contains the methods needed to build a [Yurt] server *)
module Server : sig
include Cohttp_lwt.S.Server with module IO = Cohttp_lwt_unix.IO
val resolve_file : docroot:string -> uri:Uri.t -> string
type server = {
host : string;
port : int;
mutable routes : (string * Route.route * endpoint) list;
mutable tls_config : Tls.Config.server option;
mutable logger : Lwt_log.logger;
}
val server :
?tls_config:Tls.Config.server ->
?logger:Lwt_log.logger ->
string ->
int ->
server
(** Create a new server *)
val server_from_config : string -> server
(** Create a new server from an existing configuration file *)
val log_debug : server -> string -> string -> unit
val log_info : server -> string -> string -> unit
val log_notice : server -> string -> string -> unit
val log_error : server -> string -> string -> unit
val log_fatal : server -> string -> string -> unit
val configure_tls : server -> string -> string -> server
(** Configure TLS after the server has been created *)
val stream :
?flush:bool ->
?headers:Header.t ->
?status:int ->
string Lwt_stream.t ->
(Response.t * Body.t) Lwt.t
(** Respond with a stream *)
val json :
?flush:bool ->
?headers:Header.t ->
?status:int ->
Ezjsonm.t ->
(Response.t * Body.t) Lwt.t
(** Respond with JSON data *)
val html :
?flush:bool ->
?headers:Header.t ->
?status:int ->
Yurt_html.t ->
(Response.t * Body.t) Lwt.t
(** Respond with HTML data *)
val string :
?flush:bool ->
?headers:Header.t ->
?status:int ->
string ->
(Response.t * Body.t) Lwt.t
(** Respond with string data *)
val redirect : ?headers:Header.t -> string -> (Response.t * Body.t) Lwt.t
(** Redirect client *)
val file : ?headers:Header.t -> string -> (Response.t * Body.t) Lwt.t
(** Respond with datas from file *)
val register : server -> (string * Route.route * endpoint) list -> server
(** Register a list of routes with the server *)
val register_route : server -> string -> Route.route -> endpoint -> server
(** Register a single route with the server *)
val register_route_string : server -> string -> string -> endpoint -> server
(** Register a single route, formatted as a string, with the server *)
val options : string -> endpoint -> server -> server
(** Register OPTIONS endpoint *)
val get : string -> endpoint -> server -> server
(** Register GET endpoint *)
val post : string -> endpoint -> server -> server
(** Register POST endpoint *)
val put : string -> endpoint -> server -> server
(** Register PUT endpoint *)
val update : string -> endpoint -> server -> server
(** Register UPDATE endpoint *)
val delete : string -> endpoint -> server -> server
(** Register delete endpoint *)
val static_file : string -> string -> server -> server
* endpoint that returns a single static file for all requests
val static_files : string -> string -> server -> server
* endpoint that will serve files from a firectory
val daemonize : ?directory:string -> ?syslog:bool -> server -> unit
(** Daemonize the server *)
exception Cannot_start_server
val start : server -> unit Lwt.t
val run : server -> unit
val route : server -> (server -> server) -> server
val ( >| ) : server -> (server -> server) -> server
val ( >|| ) : server -> (server -> unit) -> server
end
(** [Client] contains functions for sending HTTP requests *)
module Client : sig
val get :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
string ->
(Response.t * string) Lwt.t
(** Send a GET request *)
val post :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
?body:Body.t ->
string ->
(Response.t * string) Lwt.t
(** Send a POST request *)
val post_form :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
params:(string * string list) list ->
string ->
(Response.t * string) Lwt.t
(** Send a POST request with form encoded data *)
val request :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
?body:Body.t ->
Cohttp.Code.meth ->
string ->
(Response.t * string) Lwt.t
(** Send another type of request other than POST or GET *)
val get_json :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
string ->
(Response.t * Ezjsonm.t) Lwt.t
(** Send a get request and return JSON response *)
val post_json :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
?body:Body.t ->
string ->
(Response.t * Ezjsonm.t) Lwt.t
(** Send a post request and return JSON response *)
val post_form_json :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
?params:(string * string list) list ->
string ->
(Response.t * Ezjsonm.t) Lwt.t
(** Send a POST request with from encoded data and return JSON response *)
end
module Form : sig
exception Invalid_multipart_form
val urlencoded : Body.t -> (string, string list) Hashtbl.t Lwt.t
val urlencoded_list : Body.t -> (string * string list) list Lwt.t
val urlencoded_json : Body.t -> Ezjsonm.t Lwt.t
type multipart = {
mutable data : char Lwt_stream.t;
mutable name : string;
attr : (string, string list) Hashtbl.t;
}
val get_attr : multipart -> string -> string list
val is_multipart : Request.t -> bool
val multipart : Request.t -> Body.t -> multipart list Lwt.t
type form =
| Multipart of multipart list
| Urlencoded of (string, string list) Hashtbl.t
val parse_form : Request.t -> Body.t -> form Lwt.t
end
module Util : sig
val unwrap_option : 'a option -> 'a
val unwrap_option_default : 'a option -> 'a -> 'a
val uuid4 : unit -> string
val is_safe_path : ?prefix:string -> string -> bool
end
| null | https://raw.githubusercontent.com/zshipko/yurt/b0baac0bc55740a361ae94181d5c0bb313e96f88/src/yurt.mli | ocaml | * [Route]s are used to build URLs with types variables
* [Invalid_route_type] is raised when a value of the wrong type is requested
* Param map
* Convert a route to string
* Convert a route to regular expressions
* Create a [Route] from the given string
* Get parameters from a route
* Get a string parameter
* Get an int parameter
* Get a float parameter
* Convert parameters to JSON
* The [Body] module contains methods needed for creating, reading and modifying request data
* Convert body to string
* Convert body to stream
* Convert body to JSON
* Create body from string
* Create body from stream
* Create body from JSON
* Modify body
* Get body length
* Returns true when body has no content
* Ignore body content
* Response type
* HTTP handler
* [Query] contains methods for reading query string parameters
* Convert query string to JSON
* Get string query string parameter
* Get int query string parameter
* Get float query string parameter
Get json query string parameter
* [Server] contains the methods needed to build a [Yurt] server
* Create a new server
* Create a new server from an existing configuration file
* Configure TLS after the server has been created
* Respond with a stream
* Respond with JSON data
* Respond with HTML data
* Respond with string data
* Redirect client
* Respond with datas from file
* Register a list of routes with the server
* Register a single route with the server
* Register a single route, formatted as a string, with the server
* Register OPTIONS endpoint
* Register GET endpoint
* Register POST endpoint
* Register PUT endpoint
* Register UPDATE endpoint
* Register delete endpoint
* Daemonize the server
* [Client] contains functions for sending HTTP requests
* Send a GET request
* Send a POST request
* Send a POST request with form encoded data
* Send another type of request other than POST or GET
* Send a get request and return JSON response
* Send a post request and return JSON response
* Send a POST request with from encoded data and return JSON response | module Route : sig
type route =
[ `String of string
| `Int of string
| `Float of string
| `Path of string
| `Match of string * string
| `Route of route list ]
exception Invalid_route_type
type params = (string, route) Hashtbl.t
val to_string : route -> string
val to_regexp : route -> Str.regexp
val of_string : string -> route
val params : route -> string -> params
val string : params -> string -> string
val int : params -> string -> int
val float : params -> string -> float
val to_json : params -> Ezjsonm.t
end
module Body : sig
type t = Cohttp_lwt.Body.t
type transfer_encoding = Cohttp.Transfer.encoding
val to_string : t -> string Lwt.t
val to_stream : t -> string Lwt_stream.t
val to_json : t -> Ezjsonm.t Lwt.t
val of_string : string -> t
val of_stream : string Lwt_stream.t -> t
val of_json : Ezjsonm.t -> t
val map : (string -> string) -> t -> t
val length : t -> (int64 * t) Lwt.t
val is_empty : t -> bool Lwt.t
val drain : t -> unit Lwt.t
val transfer_encoding : t -> transfer_encoding
end
module Request = Cohttp_lwt_unix.Request
module Response = Cohttp.Response
module Header = Cohttp.Header
type response = (Response.t * Body.t) Lwt.t
and endpoint = Request.t -> Route.params -> Body.t -> response
module Query : sig
type t = (string, string list) Hashtbl.t
val get : Request.t -> t
* the request 's query string
val to_json : Request.t -> Ezjsonm.t
val string : Request.t -> string -> string option
val int : Request.t -> string -> int option
val float : Request.t -> string -> float option
val json : Request.t -> string -> Ezjsonm.value option
end
module Server : sig
include Cohttp_lwt.S.Server with module IO = Cohttp_lwt_unix.IO
val resolve_file : docroot:string -> uri:Uri.t -> string
type server = {
host : string;
port : int;
mutable routes : (string * Route.route * endpoint) list;
mutable tls_config : Tls.Config.server option;
mutable logger : Lwt_log.logger;
}
val server :
?tls_config:Tls.Config.server ->
?logger:Lwt_log.logger ->
string ->
int ->
server
val server_from_config : string -> server
val log_debug : server -> string -> string -> unit
val log_info : server -> string -> string -> unit
val log_notice : server -> string -> string -> unit
val log_error : server -> string -> string -> unit
val log_fatal : server -> string -> string -> unit
val configure_tls : server -> string -> string -> server
val stream :
?flush:bool ->
?headers:Header.t ->
?status:int ->
string Lwt_stream.t ->
(Response.t * Body.t) Lwt.t
val json :
?flush:bool ->
?headers:Header.t ->
?status:int ->
Ezjsonm.t ->
(Response.t * Body.t) Lwt.t
val html :
?flush:bool ->
?headers:Header.t ->
?status:int ->
Yurt_html.t ->
(Response.t * Body.t) Lwt.t
val string :
?flush:bool ->
?headers:Header.t ->
?status:int ->
string ->
(Response.t * Body.t) Lwt.t
val redirect : ?headers:Header.t -> string -> (Response.t * Body.t) Lwt.t
val file : ?headers:Header.t -> string -> (Response.t * Body.t) Lwt.t
val register : server -> (string * Route.route * endpoint) list -> server
val register_route : server -> string -> Route.route -> endpoint -> server
val register_route_string : server -> string -> string -> endpoint -> server
val options : string -> endpoint -> server -> server
val get : string -> endpoint -> server -> server
val post : string -> endpoint -> server -> server
val put : string -> endpoint -> server -> server
val update : string -> endpoint -> server -> server
val delete : string -> endpoint -> server -> server
val static_file : string -> string -> server -> server
* endpoint that returns a single static file for all requests
val static_files : string -> string -> server -> server
* endpoint that will serve files from a firectory
val daemonize : ?directory:string -> ?syslog:bool -> server -> unit
exception Cannot_start_server
val start : server -> unit Lwt.t
val run : server -> unit
val route : server -> (server -> server) -> server
val ( >| ) : server -> (server -> server) -> server
val ( >|| ) : server -> (server -> unit) -> server
end
module Client : sig
val get :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
string ->
(Response.t * string) Lwt.t
val post :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
?body:Body.t ->
string ->
(Response.t * string) Lwt.t
val post_form :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
params:(string * string list) list ->
string ->
(Response.t * string) Lwt.t
val request :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
?body:Body.t ->
Cohttp.Code.meth ->
string ->
(Response.t * string) Lwt.t
val get_json :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
string ->
(Response.t * Ezjsonm.t) Lwt.t
val post_json :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
?body:Body.t ->
string ->
(Response.t * Ezjsonm.t) Lwt.t
val post_form_json :
?resolvers:Conduit.resolvers ->
?headers:Header.t ->
?params:(string * string list) list ->
string ->
(Response.t * Ezjsonm.t) Lwt.t
end
module Form : sig
exception Invalid_multipart_form
val urlencoded : Body.t -> (string, string list) Hashtbl.t Lwt.t
val urlencoded_list : Body.t -> (string * string list) list Lwt.t
val urlencoded_json : Body.t -> Ezjsonm.t Lwt.t
type multipart = {
mutable data : char Lwt_stream.t;
mutable name : string;
attr : (string, string list) Hashtbl.t;
}
val get_attr : multipart -> string -> string list
val is_multipart : Request.t -> bool
val multipart : Request.t -> Body.t -> multipart list Lwt.t
type form =
| Multipart of multipart list
| Urlencoded of (string, string list) Hashtbl.t
val parse_form : Request.t -> Body.t -> form Lwt.t
end
module Util : sig
val unwrap_option : 'a option -> 'a
val unwrap_option_default : 'a option -> 'a -> 'a
val uuid4 : unit -> string
val is_safe_path : ?prefix:string -> string -> bool
end
|
c59683c04e44358cf55e070730e1b5013b1ce5cb0950c64577d753458af3dddc | qitab/qmynd | qtest.lisp | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; ;;;
Free Software published under an MIT - like license . See LICENSE ; ; ;
;;; ;;;
Copyright ( c ) 2012 Google , Inc. All rights reserved . ; ; ;
;;; ;;;
Original author : ; ; ;
;;; ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package :qmynd-test)
Ultra light - weight test framework
(defmacro define-test (test-name () &body body)
`(defun ,test-name ()
(handler-case
(progn ,@body)
(error (e)
(warn "An error was signalled executing ~S:~% ~A"
',test-name e)))))
(defmacro define-test-suite (suite-name () &body body)
(if (listp (car body))
;; QRes-style body
`(defun ,suite-name ()
,@(loop for test in (car body)
collect (list test)))
;; The more sensible style
`(defun ,suite-name ()
,@(loop for test in body
collect (list test)))))
(defvar *all-registered-tests* ())
(defmacro register-test (test-name)
`(pushnew ,test-name *all-registered-tests*))
(defmacro run-test (test-name)
`(progn
(format t "~&Running test ~A" ',test-name)
(funcall ',test-name)))
(defun run-all-tests ()
(dolist (test *all-registered-tests*)
(format t "~&Running test ~A" test)
(funcall test)))
(defmacro assert-equal (actual expected &key (test ''equal))
`(assert-equal-helper ',actual ,actual ',expected ,expected ,test))
(defun assert-equal-helper (actual-form actual expected-form expected test)
(unless (funcall test actual expected)
(warn "These two expressions yield values that are not ~S:~% ~S => ~S~%~S => ~S"
test actual-form actual expected-form expected)))
(defmacro assert-true (form)
`(unless ,form
(warn "The value ~S does not evaluate to 'true'"
',form)))
(defmacro assert-false (form)
`(when ,form
(warn "The value ~S does not evaluate to 'false'"
',form)))
| null | https://raw.githubusercontent.com/qitab/qmynd/7e56daf73f0ed5f49a931c01af75fb874bcf3445/tests/qtest.lisp | lisp |
;;;
; ;
;;;
; ;
;;;
; ;
;;;
QRes-style body
The more sensible style |
(in-package :qmynd-test)
Ultra light - weight test framework
(defmacro define-test (test-name () &body body)
`(defun ,test-name ()
(handler-case
(progn ,@body)
(error (e)
(warn "An error was signalled executing ~S:~% ~A"
',test-name e)))))
(defmacro define-test-suite (suite-name () &body body)
(if (listp (car body))
`(defun ,suite-name ()
,@(loop for test in (car body)
collect (list test)))
`(defun ,suite-name ()
,@(loop for test in body
collect (list test)))))
(defvar *all-registered-tests* ())
(defmacro register-test (test-name)
`(pushnew ,test-name *all-registered-tests*))
(defmacro run-test (test-name)
`(progn
(format t "~&Running test ~A" ',test-name)
(funcall ',test-name)))
(defun run-all-tests ()
(dolist (test *all-registered-tests*)
(format t "~&Running test ~A" test)
(funcall test)))
(defmacro assert-equal (actual expected &key (test ''equal))
`(assert-equal-helper ',actual ,actual ',expected ,expected ,test))
(defun assert-equal-helper (actual-form actual expected-form expected test)
(unless (funcall test actual expected)
(warn "These two expressions yield values that are not ~S:~% ~S => ~S~%~S => ~S"
test actual-form actual expected-form expected)))
(defmacro assert-true (form)
`(unless ,form
(warn "The value ~S does not evaluate to 'true'"
',form)))
(defmacro assert-false (form)
`(when ,form
(warn "The value ~S does not evaluate to 'false'"
',form)))
|
296605298a289cb082eda6c39d085bcb3efdcb77c11b7248514a41ebba8cb480 | cornell-pl/forest | PWS.hs | # LANGUAGE TypeSynonymInstances , TemplateHaskell , QuasiQuotes , MultiParamTypeClasses , FlexibleInstances , DeriveDataTypeable , ScopedTypeVariables #
module Examples.PWS where
import Language.Pads.Padsc
import Language.Forest.Forestc hiding (sources)
import System.IO.Unsafe (unsafePerformIO)
import Language.Pads.GenPretty
import Language.Forest.Graph
[pads|
{- Configuration file for learning demo web site; contains paths to various web site components. -}
data Config_f = Config_f {
header :: [Line StringLn] length 13,
"$host_name =", host_name :: Config_entry_t, -- Name of machine hosting web site
"$static_path =", static_path :: Config_entry_t, -- URL prefix for static content
"$cgi_path =", cgi_path :: Config_entry_t, -- URL prefix for cgi content
"$script_path =", script_path :: Config_entry_t, -- Path to directory of scripts in live web site
"$tmp_root =", tmp_root :: Config_entry_t, -- Path to directory for demo user data
"$pads_home =", pads_home :: Config_entry_t, -- Path to directory containing pads system
"$learn_home =", learn_home :: Config_entry_t, -- Path to directory containing learning system
Path to directory containing SML executable
"$install_src =", install_src :: Config_entry_t, -- Path to directory containing learning demo website source
"$static_dst =", static_dst :: Config_entry_t, -- Path to directory for static content in live web site
"$cgi_dst =", cgi_dst :: Config_entry_t, -- Path to directory for cgi content in live web site site
trailer :: [Line StringLn]
}
type Config_entry_t = Line (" \"", StringC '\"', "\";")
newtype Header_t = Header_t ([Line StringLn] length 13)
{- Fle listing data sources for web site -}
newtype SourceNames_f = SourceNames_f [Line StringLn]
{- Information related to a single user's use of the web site -}
newtype UserEntries_f = UserEntries_f ([Line UserEntry_t] terminator EOR)
{- Each visitor gets assigned a userId that is passed as a ? parameter in URL.
Security considerations preclude using user-modifiable values as part of file paths.
Thus, we map each userId to a corresponding dirId.
The dirId names the directory containing the associated user's data.
A userEntry_t contains a single such mapping.
A file with type userEntries_t describes a collection of such mappings.
-}
data UserEntry_t = UserEntry_t {
"id.", usrId :: Int,
",id.", dirId :: (Int, '.', Int) where <| usrId == fst dirId |>
}
{- Log of requests. Used to prevent denial of service attacks. -}
newtype LogFile_f = LogFile_f [Line LogEntry_t]
{- Request entry. -}
data LogEntry_t = LogEntry_t {
userId :: Int, ',', -- user making request
ip :: IP_t, ',', -- IP address of requestor
script :: StringC ' ', ' ', -- script to be executed
userDir:: StringC ' ', ' ', -- directory to put results, corresponds to user
version of PADS used
version of SML used
msg :: Maybe StringLn -- optional message
}
type IP_t = (Int, '.', Int, '.', Int, '.', Int)
|]
[forest|
{- Files with various permission settings. -}
type BinaryRO = BinaryFile where <| get_modes this_att == "-rw-r--r--" |>
type BinaryRX = BinaryFile where <| get_modes this_att == "-rwxr-xr-x" |>
type TextRX = TextFile where <| get_modes this_att == "-rwxr-xr-x" |>
type TextRO = TextFile where <| get_modes this_att == "-rw-r--r--" |>
{- Optional binary file with read/execute permission. -}
type OptBinaryRX = Maybe BinaryRX
{- Files with PADS descriptions -}
type Config = File Config_f where <| get_modes this_att == "-rw-r--r--" |>
type SourceNames = File SourceNames_f where <| isReadOnly this_att |>
type UserEntries = File UserEntries_f where <| isReadOnly this_att |>
type LogFile = File LogFile_f where <| isReadOnly this_att |>
{- Directory of image files -}
type Imgs_d = Directory {
logo is "pads_small.jpg" :: BinaryRO,
favicon is "favicon.ico" :: BinaryRO
}
{- Directory of static content -}
type Static_d = Directory {
style_sheet is "pads.css" :: TextRO,
intro_redir is "learning-demo.html" :: TextRO,
title_frame is "atitle.html" :: TextRO,
logo_frame is "top-left.html" :: TextRO,
top_frame is "banner.html" :: TextRO,
empty_frame is "nothing.html" :: TextRO,
images is "images" :: Imgs_d where <| get_modes images_md == "drwxr-xr-x" |>
}
{- Directory of dynamic content -}
type Cgi_d = Directory {
config' is "PLConfig.pm" :: TextRO,
perl_utils is "PLUtilities.pm" :: TextRO,
intro is "learning-demo.cgi" :: TextRX,
intro_nav is "navbar-orig.cgi" :: TextRX,
select_data is "pads.cgi" :: TextRX,
result_nav is "navbar.cgi" :: TextRX,
format_chosen is "data-results.cgi" :: TextRX,
gen_desc is "build-description.cgi" :: TextRX,
get_user_data is "build-roll-your-own.cgi" :: TextRX,
gen_desc_usr is "genData.cgi" :: TextRX,
build_lib is "build-library.cgi" :: TextRX,
build_accum is "build-accum.cgi" :: TextRX,
build_xml is "build-xml.cgi" :: TextRX,
build_fmt is "build-fmt.cgi" :: TextRX
}
{- Directory of shell scripts invoked by CGI to run learning system -}
type Scripts_d = Directory {
Shell script for running PADS comiler on stock format
Shell script for running PADS compiler on user format
Shell script to generate and run accumulator
Shell script to generate and run XML converter
Shell script to generate and run formating program
Shell script to build PADS library
}
{- Directory containing administrative files used by demo web site -}
type Info_d = Directory {
sources is "sampleFiles" :: SourceNames, -- List of source data files whose formats can be learned
users is "userFile" :: UserEntries, -- Mapping from userIDs to associated directory names
logFile is "logFile" :: LogFile -- Log of server actions.
}
{- Collection of files named by sources containing actual data. -}
type DataSource_d(sources :: [String]) = [ s :: TextFile | s <- sources ]
{- Type of a symbolic link with pointing to source-}
type SymLink_f (path :: FilePath) = SymLink where <| this == path |>
{- Directory of optional links to source data files -}
type Data_d ((root,sources) :: (FilePath, [String])) = Directory {
datareps is [s :: Maybe TextFile | s <- sources],
datalinks is [s :: Maybe (SymLink_f <| root++"/"++ s |>) | s <- sources]
}
{- Directory that stores the generated machine-dependent output for data source named source -}
type MachineDep_d (source :: String) = Directory {
Generated C source for PADS description
Generated C header for PADS description
Compiled library for PADS description
PADS description in xml syntax
Xschema of XML syntax for source description
pads_acc is <| source ++ "-accum"|> :: OptBinaryRX, -- Optional generated accumulator program
pads_fmt is <| source ++ "-fmt" |> :: OptBinaryRX, -- Optional generated formatting program
pads_xml is <| source ++ "-xml" |> :: OptBinaryRX -- Optional generated XML conversion program
}
{- Directory that stores the generated output for data source named "source". -}
type Example_d (source :: String) = Directory {
PADS / C description of data source
PADS / ML description of data source
vanilla is "vanilla.p" :: TextRO, -- input tokenization
makefile is "GNUmakefile" :: TextFile, -- Makefile
machine is <| envVar "AST_ARCH"|> :: Maybe (MachineDep_d source), -- Platform dependent files
accum_c is <| source ++ "-accum.c" |> :: Maybe TextRO, -- Template for accumulator program
accum_out is <| source ++ "-accum.out"|> :: Maybe TextRO, -- ASCII Accumulator output
accum_xml_out is <| source ++ "-accum_xml.out"|> :: Maybe TextRO, -- XML Accumulator output
xml_c is <| source ++ "-xml.c"|> :: Maybe TextRO, -- Template for XML converter
xml_out is <| source ++ "-xml.out"|> :: Maybe TextRO, -- XML representation of source
Xschema for XML representation of source
fmt_c is <| source ++ "-fmt.c" |> :: Maybe TextRO, -- Template for formatting program
fmt_out is <| source ++ "-fmt.out" |> :: Maybe TextRO -- Formatted representation of source
}
Directory that stores all information for one user .
type User_d(arg@ (r, sources) :: (FilePath, [String])) = Directory {
dataSets is "data" :: Maybe (Data_d arg),
runExamples is [ s :: Maybe (Example_d s) | s <- sources]
}
{- Collection of directories containing temporary information for all users. -}
type Users_d((r,info) :: (FilePath, Info_d)) =
[userDir :: User_d <|(r, getSources info) |> | userDir <- <| userNames info |> ]
Top - level of PADS website .
type Website_d(config::FilePath) = Directory {
c is config :: Config, -- Configuration file with locations of other components
Static web site content
Dynamic web site content
Shell scripts invoked by cgi to run learning system
admin_info is <| gstatic_dst c |> :: Info_d, -- Administrative information about website
data_dir is <| (glearn_home c)++"/examples/data" |>
:: DataSource_d <|(getSources admin_info)|>, -- Stock data files for website
usr_data is <| gtmp_root c |> :: Users_d <|(get_fullpath data_dir_md, admin_info)|> -- User-specific information
}
|]
{- HASKELL HELPER FUNCTIONS -}
isReadOnly md = get_modes md == "-rw-r--r--"
{- Function userName gets the list of user directorn names from an info structure. -}
userNames info = getUserEntries (users info)
getUserEntries (UserEntries (UserEntries_f users)) = map userEntryToFileName users
userEntryToFileName userEntry = pairToFileName (dirId userEntry)
pairToFileName (n1, n2) = "id."++(show n1)++"."++(show n2)
{- Helper functiosn to convert a Config entry to a FileName -}
ghost_name (Config c) = host_name c
gstatic_path (Config c) = static_path c
gcgi_path (Config c) = cgi_path c
gscript_path (Config c) = script_path c
glearn_home (Config c) = learn_home c
gtmp_root (Config c) = tmp_root c
gstatic_dst (Config c) = static_dst c
gcgi_dst (Config c) = cgi_dst c
{- Loading functions -}
config_location = "/Users/kfisher/Sites/cgi-bin/PLConfig.PM"
doLoadWebsite = website_d_load config_location "/Users/kfisher/Sites"
{- print graph of website -}
doGraph md = mdToPDF md "Examples/website.pdf"
users_dir = "/Users/kfisher/Sites/cgi-bin/gen"
(users'_rep, users'_md) :: (Users_d, Users_d_md) = unsafePerformIO $ load1 ("/Users/kfisher/pads/infer/examples/data", info_rep) users_dir
user_dir = "/Users/kfisher/Sites/cgi-bin/gen/id.1192115633.7"
(userE_rep, userE_md) :: (User_d, User_d_md) = unsafePerformIO $ load1 ("/Users/kfisher/pads/infer/examples/data", ["ai.3000"]) user_dir
graphUserIO = mdToPDF userE_md "Examples/users.pdf"
example_dir = "/Users/kfisher/Sites/cgi-bin/gen/id.1192115633.7/ai.3000"
(example_rep, example_md) :: (Example_d, Example_d_md) = unsafePerformIO $ load1 "ai.3000" example_dir
machine_dir = "/Users/kfisher/Sites/cgi-bin/gen/id.1192115633.7/ai.3000/darwin.i386"
(machinedep_rep, machinedep_md) :: (MachineDep_d, MachineDep_d_md) = unsafePerformIO $ load1 "ai.3000" machine_dir
root_data_dir = "/Users/kfisher/pads/infer/examples/data"
data_dir_path = "/Users/kfisher/Sites/cgi-bin/gen/id.1192115633.7/data"
(data_d_rep, data_d_md) :: (Data_d, Data_d_md) = unsafePerformIO $ load1 (root_data_dir, datasources) data_dir_path
link_path = "Examples/data/Simple/mylink"
(link_rep,link_md) :: (SymLink_f, SymLink_f_md) = unsafePerformIO $ load1 "quantum" link_path
info_dir = "/Users/kfisher/Sites"
(info_rep, info_md) :: (Info_d, Info_d_md) = unsafePerformIO $ load info_dir
dataSource_dir = "/Users/kfisher/pads/infer/examples/data"
(datasource_rep, datasource_md) :: (DataSource_d, DataSource_d_md) = unsafePerformIO $ load1 datasources dataSource_dir
( ( PstringSE s ) ) = s
getStrings s = s
getSources' (SourceNames (SourceNames_f pstrlns)) = map getStrings pstrlns
getSources :: Info_d -> [String]
getSources info = getSources' (sources info)
datasources :: [String]
datasources = getSources info_rep
scripts_dir = "/Users/kfisher/Sites/cgi-bin"
(scripts_rep, scripts'_md) :: (Scripts_d, Scripts_d_md) = unsafePerformIO $ load scripts_dir
cgi_dir = "/Users/kfisher/Sites/cgi-bin"
(cgi_rep, cgi_md) :: (Cgi_d, Cgi_d_md) = unsafePerformIO $ load cgi_dir
static_dir = "/Users/kfisher/Sites"
(static_rep, static_md) :: (Static_d, Static_d_md) = unsafePerformIO $ load static_dir
image_dir = "/Users/kfisher/Sites/images"
(img_rep, img_md) :: (Imgs_d, Imgs_d_md) = unsafePerformIO $ load image_dir
config_file = "/Users/kfisher/Sites/cgi-bin/PLConfig.pm"
(config_rep, config_md) :: (Config_f, Config_f_md) = unsafePerformIO $ parseFile config_file
(head_rep, head_md) :: (Header_t, Header_t_md) = unsafePerformIO $ parseFile config_file
sampleFiles = "/Users/kfisher/Sites/sampleFiles"
(sample_rep, sample_md) :: (SourceNames_f, SourceNames_f_md) = unsafePerformIO $ parseFile sampleFiles
userEntries = "/Users/kfisher/Sites/userFile"
(user_rep, user_md) :: (UserEntries_f, UserEntries_f_md) = unsafePerformIO $ parseFile userEntries
logFiles = "/Users/kfisher/Sites/logFile"
(logFiles_rep, logFiles_md) :: (LogFile_f, LogFile_f_md) = unsafePerformIO $ parseFile logFiles
| null | https://raw.githubusercontent.com/cornell-pl/forest/3772c1f44cdee0b705e927a14b54d84e60e224e6/src/Examples/PWS.hs | haskell | Configuration file for learning demo web site; contains paths to various web site components.
Name of machine hosting web site
URL prefix for static content
URL prefix for cgi content
Path to directory of scripts in live web site
Path to directory for demo user data
Path to directory containing pads system
Path to directory containing learning system
Path to directory containing learning demo website source
Path to directory for static content in live web site
Path to directory for cgi content in live web site site
Fle listing data sources for web site
Information related to a single user's use of the web site
Each visitor gets assigned a userId that is passed as a ? parameter in URL.
Security considerations preclude using user-modifiable values as part of file paths.
Thus, we map each userId to a corresponding dirId.
The dirId names the directory containing the associated user's data.
A userEntry_t contains a single such mapping.
A file with type userEntries_t describes a collection of such mappings.
Log of requests. Used to prevent denial of service attacks.
Request entry.
user making request
IP address of requestor
script to be executed
directory to put results, corresponds to user
optional message
Files with various permission settings.
Optional binary file with read/execute permission.
Files with PADS descriptions
Directory of image files
Directory of static content
Directory of dynamic content
Directory of shell scripts invoked by CGI to run learning system
Directory containing administrative files used by demo web site
List of source data files whose formats can be learned
Mapping from userIDs to associated directory names
Log of server actions.
Collection of files named by sources containing actual data.
Type of a symbolic link with pointing to source
Directory of optional links to source data files
Directory that stores the generated machine-dependent output for data source named source
Optional generated accumulator program
Optional generated formatting program
Optional generated XML conversion program
Directory that stores the generated output for data source named "source".
input tokenization
Makefile
Platform dependent files
Template for accumulator program
ASCII Accumulator output
XML Accumulator output
Template for XML converter
XML representation of source
Template for formatting program
Formatted representation of source
Collection of directories containing temporary information for all users.
Configuration file with locations of other components
Administrative information about website
Stock data files for website
User-specific information
HASKELL HELPER FUNCTIONS
Function userName gets the list of user directorn names from an info structure.
Helper functiosn to convert a Config entry to a FileName
Loading functions
print graph of website | # LANGUAGE TypeSynonymInstances , TemplateHaskell , QuasiQuotes , MultiParamTypeClasses , FlexibleInstances , DeriveDataTypeable , ScopedTypeVariables #
module Examples.PWS where
import Language.Pads.Padsc
import Language.Forest.Forestc hiding (sources)
import System.IO.Unsafe (unsafePerformIO)
import Language.Pads.GenPretty
import Language.Forest.Graph
[pads|
data Config_f = Config_f {
header :: [Line StringLn] length 13,
Path to directory containing SML executable
trailer :: [Line StringLn]
}
type Config_entry_t = Line (" \"", StringC '\"', "\";")
newtype Header_t = Header_t ([Line StringLn] length 13)
newtype SourceNames_f = SourceNames_f [Line StringLn]
newtype UserEntries_f = UserEntries_f ([Line UserEntry_t] terminator EOR)
data UserEntry_t = UserEntry_t {
"id.", usrId :: Int,
",id.", dirId :: (Int, '.', Int) where <| usrId == fst dirId |>
}
newtype LogFile_f = LogFile_f [Line LogEntry_t]
data LogEntry_t = LogEntry_t {
version of PADS used
version of SML used
}
type IP_t = (Int, '.', Int, '.', Int, '.', Int)
|]
[forest|
type BinaryRO = BinaryFile where <| get_modes this_att == "-rw-r--r--" |>
type BinaryRX = BinaryFile where <| get_modes this_att == "-rwxr-xr-x" |>
type TextRX = TextFile where <| get_modes this_att == "-rwxr-xr-x" |>
type TextRO = TextFile where <| get_modes this_att == "-rw-r--r--" |>
type OptBinaryRX = Maybe BinaryRX
type Config = File Config_f where <| get_modes this_att == "-rw-r--r--" |>
type SourceNames = File SourceNames_f where <| isReadOnly this_att |>
type UserEntries = File UserEntries_f where <| isReadOnly this_att |>
type LogFile = File LogFile_f where <| isReadOnly this_att |>
type Imgs_d = Directory {
logo is "pads_small.jpg" :: BinaryRO,
favicon is "favicon.ico" :: BinaryRO
}
type Static_d = Directory {
style_sheet is "pads.css" :: TextRO,
intro_redir is "learning-demo.html" :: TextRO,
title_frame is "atitle.html" :: TextRO,
logo_frame is "top-left.html" :: TextRO,
top_frame is "banner.html" :: TextRO,
empty_frame is "nothing.html" :: TextRO,
images is "images" :: Imgs_d where <| get_modes images_md == "drwxr-xr-x" |>
}
type Cgi_d = Directory {
config' is "PLConfig.pm" :: TextRO,
perl_utils is "PLUtilities.pm" :: TextRO,
intro is "learning-demo.cgi" :: TextRX,
intro_nav is "navbar-orig.cgi" :: TextRX,
select_data is "pads.cgi" :: TextRX,
result_nav is "navbar.cgi" :: TextRX,
format_chosen is "data-results.cgi" :: TextRX,
gen_desc is "build-description.cgi" :: TextRX,
get_user_data is "build-roll-your-own.cgi" :: TextRX,
gen_desc_usr is "genData.cgi" :: TextRX,
build_lib is "build-library.cgi" :: TextRX,
build_accum is "build-accum.cgi" :: TextRX,
build_xml is "build-xml.cgi" :: TextRX,
build_fmt is "build-fmt.cgi" :: TextRX
}
type Scripts_d = Directory {
Shell script for running PADS comiler on stock format
Shell script for running PADS compiler on user format
Shell script to generate and run accumulator
Shell script to generate and run XML converter
Shell script to generate and run formating program
Shell script to build PADS library
}
type Info_d = Directory {
}
type DataSource_d(sources :: [String]) = [ s :: TextFile | s <- sources ]
type SymLink_f (path :: FilePath) = SymLink where <| this == path |>
type Data_d ((root,sources) :: (FilePath, [String])) = Directory {
datareps is [s :: Maybe TextFile | s <- sources],
datalinks is [s :: Maybe (SymLink_f <| root++"/"++ s |>) | s <- sources]
}
type MachineDep_d (source :: String) = Directory {
Generated C source for PADS description
Generated C header for PADS description
Compiled library for PADS description
PADS description in xml syntax
Xschema of XML syntax for source description
}
type Example_d (source :: String) = Directory {
PADS / C description of data source
PADS / ML description of data source
Xschema for XML representation of source
}
Directory that stores all information for one user .
type User_d(arg@ (r, sources) :: (FilePath, [String])) = Directory {
dataSets is "data" :: Maybe (Data_d arg),
runExamples is [ s :: Maybe (Example_d s) | s <- sources]
}
type Users_d((r,info) :: (FilePath, Info_d)) =
[userDir :: User_d <|(r, getSources info) |> | userDir <- <| userNames info |> ]
Top - level of PADS website .
type Website_d(config::FilePath) = Directory {
Static web site content
Dynamic web site content
Shell scripts invoked by cgi to run learning system
data_dir is <| (glearn_home c)++"/examples/data" |>
}
|]
isReadOnly md = get_modes md == "-rw-r--r--"
userNames info = getUserEntries (users info)
getUserEntries (UserEntries (UserEntries_f users)) = map userEntryToFileName users
userEntryToFileName userEntry = pairToFileName (dirId userEntry)
pairToFileName (n1, n2) = "id."++(show n1)++"."++(show n2)
ghost_name (Config c) = host_name c
gstatic_path (Config c) = static_path c
gcgi_path (Config c) = cgi_path c
gscript_path (Config c) = script_path c
glearn_home (Config c) = learn_home c
gtmp_root (Config c) = tmp_root c
gstatic_dst (Config c) = static_dst c
gcgi_dst (Config c) = cgi_dst c
config_location = "/Users/kfisher/Sites/cgi-bin/PLConfig.PM"
doLoadWebsite = website_d_load config_location "/Users/kfisher/Sites"
doGraph md = mdToPDF md "Examples/website.pdf"
users_dir = "/Users/kfisher/Sites/cgi-bin/gen"
(users'_rep, users'_md) :: (Users_d, Users_d_md) = unsafePerformIO $ load1 ("/Users/kfisher/pads/infer/examples/data", info_rep) users_dir
user_dir = "/Users/kfisher/Sites/cgi-bin/gen/id.1192115633.7"
(userE_rep, userE_md) :: (User_d, User_d_md) = unsafePerformIO $ load1 ("/Users/kfisher/pads/infer/examples/data", ["ai.3000"]) user_dir
graphUserIO = mdToPDF userE_md "Examples/users.pdf"
example_dir = "/Users/kfisher/Sites/cgi-bin/gen/id.1192115633.7/ai.3000"
(example_rep, example_md) :: (Example_d, Example_d_md) = unsafePerformIO $ load1 "ai.3000" example_dir
machine_dir = "/Users/kfisher/Sites/cgi-bin/gen/id.1192115633.7/ai.3000/darwin.i386"
(machinedep_rep, machinedep_md) :: (MachineDep_d, MachineDep_d_md) = unsafePerformIO $ load1 "ai.3000" machine_dir
root_data_dir = "/Users/kfisher/pads/infer/examples/data"
data_dir_path = "/Users/kfisher/Sites/cgi-bin/gen/id.1192115633.7/data"
(data_d_rep, data_d_md) :: (Data_d, Data_d_md) = unsafePerformIO $ load1 (root_data_dir, datasources) data_dir_path
link_path = "Examples/data/Simple/mylink"
(link_rep,link_md) :: (SymLink_f, SymLink_f_md) = unsafePerformIO $ load1 "quantum" link_path
info_dir = "/Users/kfisher/Sites"
(info_rep, info_md) :: (Info_d, Info_d_md) = unsafePerformIO $ load info_dir
dataSource_dir = "/Users/kfisher/pads/infer/examples/data"
(datasource_rep, datasource_md) :: (DataSource_d, DataSource_d_md) = unsafePerformIO $ load1 datasources dataSource_dir
( ( PstringSE s ) ) = s
getStrings s = s
getSources' (SourceNames (SourceNames_f pstrlns)) = map getStrings pstrlns
getSources :: Info_d -> [String]
getSources info = getSources' (sources info)
datasources :: [String]
datasources = getSources info_rep
scripts_dir = "/Users/kfisher/Sites/cgi-bin"
(scripts_rep, scripts'_md) :: (Scripts_d, Scripts_d_md) = unsafePerformIO $ load scripts_dir
cgi_dir = "/Users/kfisher/Sites/cgi-bin"
(cgi_rep, cgi_md) :: (Cgi_d, Cgi_d_md) = unsafePerformIO $ load cgi_dir
static_dir = "/Users/kfisher/Sites"
(static_rep, static_md) :: (Static_d, Static_d_md) = unsafePerformIO $ load static_dir
image_dir = "/Users/kfisher/Sites/images"
(img_rep, img_md) :: (Imgs_d, Imgs_d_md) = unsafePerformIO $ load image_dir
config_file = "/Users/kfisher/Sites/cgi-bin/PLConfig.pm"
(config_rep, config_md) :: (Config_f, Config_f_md) = unsafePerformIO $ parseFile config_file
(head_rep, head_md) :: (Header_t, Header_t_md) = unsafePerformIO $ parseFile config_file
sampleFiles = "/Users/kfisher/Sites/sampleFiles"
(sample_rep, sample_md) :: (SourceNames_f, SourceNames_f_md) = unsafePerformIO $ parseFile sampleFiles
userEntries = "/Users/kfisher/Sites/userFile"
(user_rep, user_md) :: (UserEntries_f, UserEntries_f_md) = unsafePerformIO $ parseFile userEntries
logFiles = "/Users/kfisher/Sites/logFile"
(logFiles_rep, logFiles_md) :: (LogFile_f, LogFile_f_md) = unsafePerformIO $ parseFile logFiles
|
2ef71172e311247930d8a59b9fae94014225121bbf57b010621acab9eb08d1a7 | jimcrayne/jhc | read066.hs |
{-# OPTIONS_NO_SUCH_PRAGMA --no-such-flag #-}
-- We should parse the above as an unrecognised pragma, not as an OPTIONS
pragma containing " _ NO_SUCH_PRAGMA -wibble " . Trac # 2847 .
module Test where
| null | https://raw.githubusercontent.com/jimcrayne/jhc/1ff035af3d697f9175f8761c8d08edbffde03b4e/regress/tests/0_parse/2_pass/ghc/read066.hs | haskell | # OPTIONS_NO_SUCH_PRAGMA --no-such-flag #
We should parse the above as an unrecognised pragma, not as an OPTIONS |
pragma containing " _ NO_SUCH_PRAGMA -wibble " . Trac # 2847 .
module Test where
|
26924410659c90b124cd66b32788f74ee744650dd21aab6353b37189b3662eb9 | haskell-opengl/OpenGLRaw | ShaderBufferLoad.hs | # LANGUAGE PatternSynonyms #
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.NV.ShaderBufferLoad
Copyright : ( c ) 2019
-- License : BSD3
--
Maintainer : < >
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.NV.ShaderBufferLoad (
-- * Extension Support
glGetNVShaderBufferLoad,
gl_NV_shader_buffer_load,
-- * Enums
pattern GL_BUFFER_GPU_ADDRESS_NV,
pattern GL_GPU_ADDRESS_NV,
pattern GL_MAX_SHADER_BUFFER_ADDRESS_NV,
-- * Functions
glGetBufferParameterui64vNV,
glGetIntegerui64vNV,
glGetNamedBufferParameterui64vNV,
glGetUniformui64vNV,
glIsBufferResidentNV,
glIsNamedBufferResidentNV,
glMakeBufferNonResidentNV,
glMakeBufferResidentNV,
glMakeNamedBufferNonResidentNV,
glMakeNamedBufferResidentNV,
glProgramUniformui64NV,
glProgramUniformui64vNV,
glUniformui64NV,
glUniformui64vNV
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| null | https://raw.githubusercontent.com/haskell-opengl/OpenGLRaw/57e50c9d28dfa62d6a87ae9b561af28f64ce32a0/src/Graphics/GL/NV/ShaderBufferLoad.hs | haskell | ------------------------------------------------------------------------------
|
Module : Graphics.GL.NV.ShaderBufferLoad
License : BSD3
Stability : stable
Portability : portable
------------------------------------------------------------------------------
* Extension Support
* Enums
* Functions | # LANGUAGE PatternSynonyms #
Copyright : ( c ) 2019
Maintainer : < >
module Graphics.GL.NV.ShaderBufferLoad (
glGetNVShaderBufferLoad,
gl_NV_shader_buffer_load,
pattern GL_BUFFER_GPU_ADDRESS_NV,
pattern GL_GPU_ADDRESS_NV,
pattern GL_MAX_SHADER_BUFFER_ADDRESS_NV,
glGetBufferParameterui64vNV,
glGetIntegerui64vNV,
glGetNamedBufferParameterui64vNV,
glGetUniformui64vNV,
glIsBufferResidentNV,
glIsNamedBufferResidentNV,
glMakeBufferNonResidentNV,
glMakeBufferResidentNV,
glMakeNamedBufferNonResidentNV,
glMakeNamedBufferResidentNV,
glProgramUniformui64NV,
glProgramUniformui64vNV,
glUniformui64NV,
glUniformui64vNV
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
|
13dd08e9c70cd50694932f6b599abc5a773e14287a8b70d1f39425a483777ca3 | Smoltbob/Caml-Est-Belle | if_int.ml |
let a = 0 in
let b = 1 in
let c = 2 in
if a < 1 then
print_int 2
else ()
| null | https://raw.githubusercontent.com/Smoltbob/Caml-Est-Belle/3d6f53d4e8e01bbae57a0a402b7c0f02f4ed767c/tests/gen-code/valid/if_int.ml | ocaml |
let a = 0 in
let b = 1 in
let c = 2 in
if a < 1 then
print_int 2
else ()
| |
0905be58ed25d0fa2f19ac51390e3aaa7e35fb8474c574b995c6d4c7f2a69d19 | Dasudian/DSDIN | dsdtx_utils.erl | %%%=============================================================================
2018 , Dasudian Technologies
%%% @doc
Common utility functions for DSD transactions
%%% @end
%%%=============================================================================
-module(dsdtx_utils).
%% API
-export([check_account/3,
check_account/4,
check_nonce/2,
check_ttl/2]).
%%%===================================================================
%%% API
%%%===================================================================
%% Checks that an account (PubKey) exist at this height, has enough funds,
and that the is ok .
-spec check_account(Account :: dsdc_keys:pubkey(),
Trees :: dsdc_trees:trees(),
Nonce :: non_neg_integer(),
Amount :: non_neg_integer()) -> ok | {error, term()}.
check_account(AccountPubKey, Trees, Nonce, Amount) ->
case get_account(AccountPubKey, Trees) of
{value, Account} ->
BalanceOk = check_balance(Account, Amount),
NonceOk = check_nonce(Account, Nonce),
checks_ok([BalanceOk, NonceOk]);
none ->
{error, account_not_found}
end.
-spec check_account(Account :: dsdc_keys:pubkey(),
Trees :: dsdc_trees:trees(),
Amount :: non_neg_integer()) -> ok | {error, term()}.
check_account(AccountPubKey, Trees, Amount) ->
case get_account(AccountPubKey, Trees) of
{value, Account} ->
BalanceOk = check_balance(Account, Amount),
checks_ok([BalanceOk]);
none ->
{error, account_not_found}
end.
-spec check_ttl(non_neg_integer(), non_neg_integer()) ->
ok | {error, ttl_expired}.
check_ttl(TTL, Height) ->
case TTL >= Height of
true -> ok;
false -> {error, ttl_expired}
end.
%%%===================================================================
Internal functions
%%%===================================================================
-spec get_account(dsdc_keys:pubkey(), dsdc_trees:trees()) ->
none | {value, dsdc_accounts:account()}.
get_account(AccountPubKey, Trees) ->
AccountsTrees = dsdc_trees:accounts(Trees),
dsdc_accounts_trees:lookup(AccountPubKey, AccountsTrees).
-spec check_balance(dsdc_accounts:account(), non_neg_integer()) ->
ok | {error, insufficient_funds}.
check_balance(Account, Amount) ->
case dsdc_accounts:balance(Account) >= Amount of
true ->
ok;
false ->
{error, insufficient_funds}
end.
-spec check_nonce(dsdc_accounts:account(), non_neg_integer()) ->
ok | {error, account_nonce_too_high | account_nonce_too_low}.
check_nonce(Account, Nonce) ->
AccountNonce = dsdc_accounts:nonce(Account),
if
Nonce =:= (AccountNonce + 1) -> ok;
Nonce =< AccountNonce -> {error, account_nonce_too_high};
Nonce > AccountNonce -> {error, account_nonce_too_low}
end.
-spec checks_ok(list(ok | {error, term()})) -> ok | {error, term()}.
checks_ok([]) -> ok;
checks_ok([ok | Xs]) -> checks_ok(Xs);
checks_ok([Err | _]) -> Err.
| null | https://raw.githubusercontent.com/Dasudian/DSDIN/b27a437d8deecae68613604fffcbb9804a6f1729/apps/dsdtx/src/dsdtx_utils.erl | erlang | =============================================================================
@doc
@end
=============================================================================
API
===================================================================
API
===================================================================
Checks that an account (PubKey) exist at this height, has enough funds,
===================================================================
=================================================================== | 2018 , Dasudian Technologies
Common utility functions for DSD transactions
-module(dsdtx_utils).
-export([check_account/3,
check_account/4,
check_nonce/2,
check_ttl/2]).
and that the is ok .
-spec check_account(Account :: dsdc_keys:pubkey(),
Trees :: dsdc_trees:trees(),
Nonce :: non_neg_integer(),
Amount :: non_neg_integer()) -> ok | {error, term()}.
check_account(AccountPubKey, Trees, Nonce, Amount) ->
case get_account(AccountPubKey, Trees) of
{value, Account} ->
BalanceOk = check_balance(Account, Amount),
NonceOk = check_nonce(Account, Nonce),
checks_ok([BalanceOk, NonceOk]);
none ->
{error, account_not_found}
end.
-spec check_account(Account :: dsdc_keys:pubkey(),
Trees :: dsdc_trees:trees(),
Amount :: non_neg_integer()) -> ok | {error, term()}.
check_account(AccountPubKey, Trees, Amount) ->
case get_account(AccountPubKey, Trees) of
{value, Account} ->
BalanceOk = check_balance(Account, Amount),
checks_ok([BalanceOk]);
none ->
{error, account_not_found}
end.
-spec check_ttl(non_neg_integer(), non_neg_integer()) ->
ok | {error, ttl_expired}.
check_ttl(TTL, Height) ->
case TTL >= Height of
true -> ok;
false -> {error, ttl_expired}
end.
Internal functions
-spec get_account(dsdc_keys:pubkey(), dsdc_trees:trees()) ->
none | {value, dsdc_accounts:account()}.
get_account(AccountPubKey, Trees) ->
AccountsTrees = dsdc_trees:accounts(Trees),
dsdc_accounts_trees:lookup(AccountPubKey, AccountsTrees).
-spec check_balance(dsdc_accounts:account(), non_neg_integer()) ->
ok | {error, insufficient_funds}.
check_balance(Account, Amount) ->
case dsdc_accounts:balance(Account) >= Amount of
true ->
ok;
false ->
{error, insufficient_funds}
end.
-spec check_nonce(dsdc_accounts:account(), non_neg_integer()) ->
ok | {error, account_nonce_too_high | account_nonce_too_low}.
check_nonce(Account, Nonce) ->
AccountNonce = dsdc_accounts:nonce(Account),
if
Nonce =:= (AccountNonce + 1) -> ok;
Nonce =< AccountNonce -> {error, account_nonce_too_high};
Nonce > AccountNonce -> {error, account_nonce_too_low}
end.
-spec checks_ok(list(ok | {error, term()})) -> ok | {error, term()}.
checks_ok([]) -> ok;
checks_ok([ok | Xs]) -> checks_ok(Xs);
checks_ok([Err | _]) -> Err.
|
d558aa26450607a0b6959ae008e06ce635845d06e51856bd23afc748105f3990 | unisonweb/unison | SyncEphemeral.hs | module Unison.Codebase.SqliteCodebase.SyncEphemeral where
import U.Codebase.HashTags (CausalHash)
import U.Codebase.Sqlite.DbId (SchemaVersion)
import qualified U.Codebase.Sqlite.Sync22 as Sync22
import Unison.Hash (Hash)
import Unison.Prelude
data Dependencies = Dependencies
{ definitions :: Set Hash,
branches :: Set Hash
}
data Error
= Sync22Error Sync22.Error
| SrcWrongSchema SchemaVersion
| DestWrongSchema SchemaVersion
| DisappearingBranch CausalHash
deriving stock (Show)
deriving anyclass (Exception)
| null | https://raw.githubusercontent.com/unisonweb/unison/3bfb412a4d7c22654712105b9c3d16193830901f/parser-typechecker/src/Unison/Codebase/SqliteCodebase/SyncEphemeral.hs | haskell | module Unison.Codebase.SqliteCodebase.SyncEphemeral where
import U.Codebase.HashTags (CausalHash)
import U.Codebase.Sqlite.DbId (SchemaVersion)
import qualified U.Codebase.Sqlite.Sync22 as Sync22
import Unison.Hash (Hash)
import Unison.Prelude
data Dependencies = Dependencies
{ definitions :: Set Hash,
branches :: Set Hash
}
data Error
= Sync22Error Sync22.Error
| SrcWrongSchema SchemaVersion
| DestWrongSchema SchemaVersion
| DisappearingBranch CausalHash
deriving stock (Show)
deriving anyclass (Exception)
| |
d910450ffc70c1b7aab9d0fb1339a5706d1bc62700f11b74e7ff45ede538ee58 | lehins/massiv | Unsafe.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE RecordWildCards #
-- |
-- Module : Data.Massiv.Array.Stencil.Unsafe
Copyright : ( c ) 2018 - 2022
-- License : BSD3
Maintainer : < >
-- Stability : experimental
-- Portability : non-portable
module Data.Massiv.Array.Stencil.Unsafe (
-- * Stencil
makeUnsafeStencil,
makeUnsafeConvolutionStencil,
makeUnsafeCorrelationStencil,
unsafeTransformStencil,
) where
import Data.Massiv.Array.Stencil.Internal
import Data.Massiv.Core.Common
import GHC.Exts (inline)
-- | Similar to `Data.Massiv.Array.Stencil.makeStencil`, but there are no guarantees that the
-- stencil will not read out of bounds memory. This stencil is also a bit more powerful in sense it
-- gets an extra peice of information, namely the exact index for the element it is constructing.
--
-- @since 0.3.0
makeUnsafeStencil
:: Index ix
=> Sz ix
-- ^ Size of the stencil
-> ix
-- ^ Center of the stencil
-> (ix -> (ix -> e) -> a)
-- ^ Stencil function.
-> Stencil ix e a
makeUnsafeStencil !sSz !sCenter relStencil = Stencil sSz sCenter stencil
where
stencil unsafeGetVal _getVal !ix =
inline (relStencil ix (unsafeGetVal . liftIndex2 (+) ix))
# INLINE stencil #
# INLINE makeUnsafeStencil #
-- | Same as `Data.Massiv.Array.Stencil.makeConvolutionStencil`, but will result in
-- reading memory out of bounds and potential segfaults if supplied arguments are not valid.
--
@since 0.6.0
makeUnsafeConvolutionStencil
:: (Index ix, Num e)
=> Sz ix
-> ix
-> ((ix -> e -> e -> e) -> e -> e)
-> Stencil ix e e
makeUnsafeConvolutionStencil !sz !sCenter relStencil =
Stencil sz sInvertCenter stencil
where
!sInvertCenter = liftIndex2 (-) (liftIndex (subtract 1) (unSz sz)) sCenter
stencil uget _ !ix =
(inline relStencil $ \ !ixD !kVal !acc -> uget (liftIndex2 (-) ix ixD) * kVal + acc) 0
# INLINE stencil #
# INLINE makeUnsafeConvolutionStencil #
-- | Same as `Data.Massiv.Array.Stencil.makeCorrelationStencil`, but will result in
-- reading memory out of bounds and potential segfaults if supplied arguments are not
-- valid.
--
@since 0.6.0
makeUnsafeCorrelationStencil
:: (Index ix, Num e)
=> Sz ix
-> ix
-> ((ix -> e -> e -> e) -> e -> e)
-> Stencil ix e e
makeUnsafeCorrelationStencil !sSz !sCenter relStencil = Stencil sSz sCenter stencil
where
stencil _ getVal !ix =
(inline relStencil $ \ !ixD !kVal !acc -> getVal (liftIndex2 (+) ix ixD) * kVal + acc) 0
# INLINE stencil #
# INLINE makeUnsafeCorrelationStencil #
-- | Perform an arbitrary transformation of a stencil. This stencil modifier can be used for
-- example to turn a vector stencil into a matrix stencil implement, or transpose a matrix
-- stencil. It is really easy to get this wrong, so be extremely careful.
--
-- ====__Examples__
--
Convert a 1D stencil into a row or column 2D stencil :
--
> > > import Data . Massiv . Array
> > > import Data . Massiv . Array . Unsafe
> > > let = compute $ iterateN 3 succ 0 : : Array P Ix2 Int
-- >>> arr
Array P Seq ( Sz ( 3 : . 3 ) )
[ [ 1 , 2 , 3 ]
, [ 4 , 5 , 6 ]
, [ 7 , 8 , 9 ]
-- ]
> > > let ( \(Sz n ) - > Sz ( 1 : . n ) ) ( 0 : . ) $ \ f uget getVal ( i : . j ) - > f ( uget . ( i : . ) ) ( getVal . ( i : . ) ) j
> > > applyStencil noPadding ( ( sumStencil ( Sz1 3 ) ) ) arr
Array DW Seq ( Sz ( 3 : . 1 ) )
[ [ 6 ]
, [ 15 ]
, [ 24 ]
-- ]
> > > let columnStencil = unsafeTransformStencil ( \(Sz n ) - > Sz ( n : . 1 ) ) (: . 0 ) $ \ f uget getVal ( i : . j ) - > f ( uget . (: . j ) ) ( getVal . (: . j ) ) i
> > > applyStencil noPadding ( columnStencil ( sumStencil ( Sz1 3 ) ) ) arr
Array DW Seq ( Sz ( 1 : . 3 ) )
[ [ 12 , 15 , 18 ]
-- ]
--
-- @since 0.5.4
unsafeTransformStencil
:: (Sz ix' -> Sz ix)
-- ^ Forward modifier for the size
-> (ix' -> ix)
-- ^ Forward index modifier
-> ( ((ix' -> e) -> (ix' -> e) -> ix' -> a)
-> (ix -> e)
-> (ix -> e)
-> ix
-> a
)
-- ^ Inverse stencil function modifier
-> Stencil ix' e a
-- ^ Original stencil.
-> Stencil ix e a
unsafeTransformStencil transformSize transformIndex transformFunc Stencil{..} =
Stencil
{ stencilSize = transformSize stencilSize
, stencilCenter = transformIndex stencilCenter
, stencilFunc = transformFunc stencilFunc
}
# INLINE unsafeTransformStencil #
Invalid stencil transformer function .
TODO : figure out if there is a safe way to do stencil index trnasformation .
transformStencil : :
( Default e , Index ix )
= > ( Sz ix ' - > Sz ix )
-- ^ Forward modifier for the size
- > ( ix ' - > ix )
-- ^ Forward index modifier
- > ( ix - > ix ' )
-- ^ Inverse index modifier
- > Stencil ix ' e a
-- ^ Original stencil .
- > Stencil ix e a
transformStencil transformSize transformIndex transformIndex ' stencil =
validateStencil def $ ! unsafeTransformStencil transformSize transformIndex transformIndex ' stencil
{ - # INLINE transformStencil #
Invalid stencil transformer function.
TODO: figure out if there is a safe way to do stencil index trnasformation.
transformStencil ::
(Default e, Index ix)
=> (Sz ix' -> Sz ix)
-- ^ Forward modifier for the size
-> (ix' -> ix)
-- ^ Forward index modifier
-> (ix -> ix')
-- ^ Inverse index modifier
-> Stencil ix' e a
-- ^ Original stencil.
-> Stencil ix e a
transformStencil transformSize transformIndex transformIndex' stencil =
validateStencil def $! unsafeTransformStencil transformSize transformIndex transformIndex' stencil
{-# INLINE transformStencil #-}
-}
| null | https://raw.githubusercontent.com/lehins/massiv/67a920d4403f210d0bfdad1acc4bec208d80a588/massiv/src/Data/Massiv/Array/Stencil/Unsafe.hs | haskell | # LANGUAGE BangPatterns #
|
Module : Data.Massiv.Array.Stencil.Unsafe
License : BSD3
Stability : experimental
Portability : non-portable
* Stencil
| Similar to `Data.Massiv.Array.Stencil.makeStencil`, but there are no guarantees that the
stencil will not read out of bounds memory. This stencil is also a bit more powerful in sense it
gets an extra peice of information, namely the exact index for the element it is constructing.
@since 0.3.0
^ Size of the stencil
^ Center of the stencil
^ Stencil function.
| Same as `Data.Massiv.Array.Stencil.makeConvolutionStencil`, but will result in
reading memory out of bounds and potential segfaults if supplied arguments are not valid.
| Same as `Data.Massiv.Array.Stencil.makeCorrelationStencil`, but will result in
reading memory out of bounds and potential segfaults if supplied arguments are not
valid.
| Perform an arbitrary transformation of a stencil. This stencil modifier can be used for
example to turn a vector stencil into a matrix stencil implement, or transpose a matrix
stencil. It is really easy to get this wrong, so be extremely careful.
====__Examples__
>>> arr
]
]
]
@since 0.5.4
^ Forward modifier for the size
^ Forward index modifier
^ Inverse stencil function modifier
^ Original stencil.
^ Forward modifier for the size
^ Forward index modifier
^ Inverse index modifier
^ Original stencil .
^ Forward modifier for the size
^ Forward index modifier
^ Inverse index modifier
^ Original stencil.
# INLINE transformStencil # | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE RecordWildCards #
Copyright : ( c ) 2018 - 2022
Maintainer : < >
module Data.Massiv.Array.Stencil.Unsafe (
makeUnsafeStencil,
makeUnsafeConvolutionStencil,
makeUnsafeCorrelationStencil,
unsafeTransformStencil,
) where
import Data.Massiv.Array.Stencil.Internal
import Data.Massiv.Core.Common
import GHC.Exts (inline)
makeUnsafeStencil
:: Index ix
=> Sz ix
-> ix
-> (ix -> (ix -> e) -> a)
-> Stencil ix e a
makeUnsafeStencil !sSz !sCenter relStencil = Stencil sSz sCenter stencil
where
stencil unsafeGetVal _getVal !ix =
inline (relStencil ix (unsafeGetVal . liftIndex2 (+) ix))
# INLINE stencil #
# INLINE makeUnsafeStencil #
@since 0.6.0
makeUnsafeConvolutionStencil
:: (Index ix, Num e)
=> Sz ix
-> ix
-> ((ix -> e -> e -> e) -> e -> e)
-> Stencil ix e e
makeUnsafeConvolutionStencil !sz !sCenter relStencil =
Stencil sz sInvertCenter stencil
where
!sInvertCenter = liftIndex2 (-) (liftIndex (subtract 1) (unSz sz)) sCenter
stencil uget _ !ix =
(inline relStencil $ \ !ixD !kVal !acc -> uget (liftIndex2 (-) ix ixD) * kVal + acc) 0
# INLINE stencil #
# INLINE makeUnsafeConvolutionStencil #
@since 0.6.0
makeUnsafeCorrelationStencil
:: (Index ix, Num e)
=> Sz ix
-> ix
-> ((ix -> e -> e -> e) -> e -> e)
-> Stencil ix e e
makeUnsafeCorrelationStencil !sSz !sCenter relStencil = Stencil sSz sCenter stencil
where
stencil _ getVal !ix =
(inline relStencil $ \ !ixD !kVal !acc -> getVal (liftIndex2 (+) ix ixD) * kVal + acc) 0
# INLINE stencil #
# INLINE makeUnsafeCorrelationStencil #
Convert a 1D stencil into a row or column 2D stencil :
> > > import Data . Massiv . Array
> > > import Data . Massiv . Array . Unsafe
> > > let = compute $ iterateN 3 succ 0 : : Array P Ix2 Int
Array P Seq ( Sz ( 3 : . 3 ) )
[ [ 1 , 2 , 3 ]
, [ 4 , 5 , 6 ]
, [ 7 , 8 , 9 ]
> > > let ( \(Sz n ) - > Sz ( 1 : . n ) ) ( 0 : . ) $ \ f uget getVal ( i : . j ) - > f ( uget . ( i : . ) ) ( getVal . ( i : . ) ) j
> > > applyStencil noPadding ( ( sumStencil ( Sz1 3 ) ) ) arr
Array DW Seq ( Sz ( 3 : . 1 ) )
[ [ 6 ]
, [ 15 ]
, [ 24 ]
> > > let columnStencil = unsafeTransformStencil ( \(Sz n ) - > Sz ( n : . 1 ) ) (: . 0 ) $ \ f uget getVal ( i : . j ) - > f ( uget . (: . j ) ) ( getVal . (: . j ) ) i
> > > applyStencil noPadding ( columnStencil ( sumStencil ( Sz1 3 ) ) ) arr
Array DW Seq ( Sz ( 1 : . 3 ) )
[ [ 12 , 15 , 18 ]
unsafeTransformStencil
:: (Sz ix' -> Sz ix)
-> (ix' -> ix)
-> ( ((ix' -> e) -> (ix' -> e) -> ix' -> a)
-> (ix -> e)
-> (ix -> e)
-> ix
-> a
)
-> Stencil ix' e a
-> Stencil ix e a
unsafeTransformStencil transformSize transformIndex transformFunc Stencil{..} =
Stencil
{ stencilSize = transformSize stencilSize
, stencilCenter = transformIndex stencilCenter
, stencilFunc = transformFunc stencilFunc
}
# INLINE unsafeTransformStencil #
Invalid stencil transformer function .
TODO : figure out if there is a safe way to do stencil index trnasformation .
transformStencil : :
( Default e , Index ix )
= > ( Sz ix ' - > Sz ix )
- > ( ix ' - > ix )
- > ( ix - > ix ' )
- > Stencil ix ' e a
- > Stencil ix e a
transformStencil transformSize transformIndex transformIndex ' stencil =
validateStencil def $ ! unsafeTransformStencil transformSize transformIndex transformIndex ' stencil
{ - # INLINE transformStencil #
Invalid stencil transformer function.
TODO: figure out if there is a safe way to do stencil index trnasformation.
transformStencil ::
(Default e, Index ix)
=> (Sz ix' -> Sz ix)
-> (ix' -> ix)
-> (ix -> ix')
-> Stencil ix' e a
-> Stencil ix e a
transformStencil transformSize transformIndex transformIndex' stencil =
validateStencil def $! unsafeTransformStencil transformSize transformIndex transformIndex' stencil
-}
|
a32af1e3b8865d9713c149a1ad2cb2ef77bb4a931563d07cd5cbb83fe67723a4 | BinaryAnalysisPlatform/bap | bap_primus_lisp.mli | open Core_kernel[@@warning "-D"]
open Bap.Std
open Format
open Bap_primus_types
open Bap_core_theory
type program
type context
type message
module Load : sig
type error
val program : ?paths:string list -> Project.t -> string list -> (program,error) result
val pp_program : formatter -> program -> unit
val pp_error : formatter -> error -> unit
end
module Context : sig
type t = context
val create : (string * string list) list -> context
val of_program : program -> t
val pp : Format.formatter -> t -> unit
end
module Doc : sig
module type Element = sig
type t
val pp : formatter -> t -> unit
end
module Category : Element
module Name = KB.Name
module Descr : sig
include Element
val has_source : t -> bool
val pp_location : formatter -> t -> unit
val pp_source : formatter -> t -> unit
end
type index = (Category.t * (Name.t * Descr.t) list) list
module Make(Machine : Machine) : sig
val generate_index : index Machine.t
end
end
module Message : sig
type t = message
val pp : Format.formatter -> message -> unit
end
module Type : sig
type t
type env
type signature = Theory.Target.t -> Bap_primus_lisp_type.signature
type error
type parameters = [
| `All of t
| `Gen of t list * t
| `Tuple of t list
]
module Spec : sig
val any : t
val var : string -> t
val sym : t
val int : t
val bool : t
val byte : t
val word : int -> t
val a : t
val b : t
val c : t
val d : t
val tuple : t list -> [`Tuple of t list]
val all : t -> [`All of t]
val one : t -> [`Tuple of t list]
val unit : [`Tuple of t list]
val (//) : [`Tuple of t list] -> [`All of t] -> parameters
val (@->) : [< parameters] -> t -> signature
end
val error : error observation
val errors : env -> error list
val check : Var.t seq -> program -> error list
val pp_error : Format.formatter -> error -> unit
end
module Closure : sig
module type S = functor(Machine : Machine) -> sig
val run : value list -> value Machine.t
end
type t = (module S)
module Make(Machine : Machine) : sig
val name : string Machine.t
end
end
module type Closure = Closure.S
type closure = (module Closure)
module Primitive : sig
type 'a t
val create : ?docs:string -> ?package:string -> string -> (value list -> 'a) -> 'a t
end
val message : message observation
module type Primitives = functor (Machine : Machine) -> sig
val defs : unit -> value Machine.t Primitive.t list
end
type primitives = (module Primitives)
type exn += Runtime_error of string
val primitive : (string * value list) observation
module Make (Machine : Machine) : sig
val failf : ('a, unit, string, unit -> 'b Machine.t) format4 -> 'a
val link_program : program -> unit Machine.t
val program : program Machine.t
val typecheck : unit Machine.t
val types : Type.env Machine.t
val define : ?types:Type.signature -> ?docs:string ->
?package:string -> string -> closure -> unit Machine.t
val signal :
?params:[< Type.parameters] ->
?doc:string ->
'a observation ->
('a -> value list Machine.t) -> unit Machine.t
val eval_fun : string -> value list -> value Machine.t
val eval_method : string -> value list -> unit Machine.t
val optimize : unit -> unit Machine.t
val refine : Bap_primus_lisp_context.t -> unit Machine.t
(* deprecated *)
val link_primitives : primitives -> unit Machine.t
end
module Semantics : sig
type value = unit Theory.Value.t
type KB.conflict += Unresolved_definition of string
type KB.conflict += Illtyped_program of Type.error list
type KB.conflict += Failed_primitive of KB.Name.t * string
val program : (Theory.Source.cls, program) KB.slot
val context : (Theory.Unit.cls, context) KB.slot
val definition : (Theory.program, Theory.Label.t option) KB.slot
val name : (Theory.program, KB.Name.t option) KB.slot
val args : (Theory.program, unit Theory.Value.t list option) KB.slot
val symbol : (Theory.Value.cls, String.t option) KB.slot
val static : (Theory.Value.cls, Bitvec.t option) KB.slot
val enable : ?stdout:Format.formatter -> unit -> unit
val failp : ('a, Format.formatter, unit, 'b KB.t) format4 -> 'a
val declare :
?types:Type.signature ->
?docs:string ->
?package:string ->
?body:(Theory.Target.t -> (Theory.Label.t -> Theory.Value.Top.t list -> unit Theory.eff) KB.t) ->
string -> unit
module Value : sig
type t = unit Theory.Value.t
val static : Bitvec.t -> t
val symbol : string -> t
val custom : (Theory.Value.cls, 'a) KB.slot -> 'a -> t
val nil : t
end
module Effect : sig
type t = unit Theory.Effect.t
val pure : Value.t -> t
val return : Value.t -> t KB.t
end
val signal :
?params:[< Type.parameters] ->
?docs:string ->
(Theory.program,'p) KB.slot ->
(Theory.Label.t -> 'p -> Value.t list KB.t) ->
unit
val documentation : Theory.Unit.t -> Doc.index KB.t
end
module Unit : sig
val create : ?name:string -> Theory.Target.t -> Theory.Unit.t KB.t
val is_lisp : Theory.Unit.t -> bool KB.t
val language : Theory.language
end
module Attribute : sig
type 'a t
type set
module Parse : sig
type tree
type error = ..
type error += Expect_atom | Expect_list
val atom : tree -> string option
val list : tree -> tree list option
val tree :
atom:(string -> 'a) ->
list:(tree list -> 'a) ->
tree -> 'a
val fail : error -> tree list -> _
end
val declare :
?desc:string ->
?package:string ->
domain:'a KB.domain ->
parse:(package:string -> Parse.tree list -> 'a) ->
string -> 'a t
module Set : sig
include KB.Value.S with type t := set
val get : 'a t -> set -> 'a
val slot : (Theory.program, set) KB.slot
end
end
val init : ?log:formatter -> ?paths:string list -> string list -> unit
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap/ce62b09cf059cc79fc92ac3022bd2830659fb236/lib/bap_primus/bap_primus_lisp.mli | ocaml | deprecated | open Core_kernel[@@warning "-D"]
open Bap.Std
open Format
open Bap_primus_types
open Bap_core_theory
type program
type context
type message
module Load : sig
type error
val program : ?paths:string list -> Project.t -> string list -> (program,error) result
val pp_program : formatter -> program -> unit
val pp_error : formatter -> error -> unit
end
module Context : sig
type t = context
val create : (string * string list) list -> context
val of_program : program -> t
val pp : Format.formatter -> t -> unit
end
module Doc : sig
module type Element = sig
type t
val pp : formatter -> t -> unit
end
module Category : Element
module Name = KB.Name
module Descr : sig
include Element
val has_source : t -> bool
val pp_location : formatter -> t -> unit
val pp_source : formatter -> t -> unit
end
type index = (Category.t * (Name.t * Descr.t) list) list
module Make(Machine : Machine) : sig
val generate_index : index Machine.t
end
end
module Message : sig
type t = message
val pp : Format.formatter -> message -> unit
end
module Type : sig
type t
type env
type signature = Theory.Target.t -> Bap_primus_lisp_type.signature
type error
type parameters = [
| `All of t
| `Gen of t list * t
| `Tuple of t list
]
module Spec : sig
val any : t
val var : string -> t
val sym : t
val int : t
val bool : t
val byte : t
val word : int -> t
val a : t
val b : t
val c : t
val d : t
val tuple : t list -> [`Tuple of t list]
val all : t -> [`All of t]
val one : t -> [`Tuple of t list]
val unit : [`Tuple of t list]
val (//) : [`Tuple of t list] -> [`All of t] -> parameters
val (@->) : [< parameters] -> t -> signature
end
val error : error observation
val errors : env -> error list
val check : Var.t seq -> program -> error list
val pp_error : Format.formatter -> error -> unit
end
module Closure : sig
module type S = functor(Machine : Machine) -> sig
val run : value list -> value Machine.t
end
type t = (module S)
module Make(Machine : Machine) : sig
val name : string Machine.t
end
end
module type Closure = Closure.S
type closure = (module Closure)
module Primitive : sig
type 'a t
val create : ?docs:string -> ?package:string -> string -> (value list -> 'a) -> 'a t
end
val message : message observation
module type Primitives = functor (Machine : Machine) -> sig
val defs : unit -> value Machine.t Primitive.t list
end
type primitives = (module Primitives)
type exn += Runtime_error of string
val primitive : (string * value list) observation
module Make (Machine : Machine) : sig
val failf : ('a, unit, string, unit -> 'b Machine.t) format4 -> 'a
val link_program : program -> unit Machine.t
val program : program Machine.t
val typecheck : unit Machine.t
val types : Type.env Machine.t
val define : ?types:Type.signature -> ?docs:string ->
?package:string -> string -> closure -> unit Machine.t
val signal :
?params:[< Type.parameters] ->
?doc:string ->
'a observation ->
('a -> value list Machine.t) -> unit Machine.t
val eval_fun : string -> value list -> value Machine.t
val eval_method : string -> value list -> unit Machine.t
val optimize : unit -> unit Machine.t
val refine : Bap_primus_lisp_context.t -> unit Machine.t
val link_primitives : primitives -> unit Machine.t
end
module Semantics : sig
type value = unit Theory.Value.t
type KB.conflict += Unresolved_definition of string
type KB.conflict += Illtyped_program of Type.error list
type KB.conflict += Failed_primitive of KB.Name.t * string
val program : (Theory.Source.cls, program) KB.slot
val context : (Theory.Unit.cls, context) KB.slot
val definition : (Theory.program, Theory.Label.t option) KB.slot
val name : (Theory.program, KB.Name.t option) KB.slot
val args : (Theory.program, unit Theory.Value.t list option) KB.slot
val symbol : (Theory.Value.cls, String.t option) KB.slot
val static : (Theory.Value.cls, Bitvec.t option) KB.slot
val enable : ?stdout:Format.formatter -> unit -> unit
val failp : ('a, Format.formatter, unit, 'b KB.t) format4 -> 'a
val declare :
?types:Type.signature ->
?docs:string ->
?package:string ->
?body:(Theory.Target.t -> (Theory.Label.t -> Theory.Value.Top.t list -> unit Theory.eff) KB.t) ->
string -> unit
module Value : sig
type t = unit Theory.Value.t
val static : Bitvec.t -> t
val symbol : string -> t
val custom : (Theory.Value.cls, 'a) KB.slot -> 'a -> t
val nil : t
end
module Effect : sig
type t = unit Theory.Effect.t
val pure : Value.t -> t
val return : Value.t -> t KB.t
end
val signal :
?params:[< Type.parameters] ->
?docs:string ->
(Theory.program,'p) KB.slot ->
(Theory.Label.t -> 'p -> Value.t list KB.t) ->
unit
val documentation : Theory.Unit.t -> Doc.index KB.t
end
module Unit : sig
val create : ?name:string -> Theory.Target.t -> Theory.Unit.t KB.t
val is_lisp : Theory.Unit.t -> bool KB.t
val language : Theory.language
end
module Attribute : sig
type 'a t
type set
module Parse : sig
type tree
type error = ..
type error += Expect_atom | Expect_list
val atom : tree -> string option
val list : tree -> tree list option
val tree :
atom:(string -> 'a) ->
list:(tree list -> 'a) ->
tree -> 'a
val fail : error -> tree list -> _
end
val declare :
?desc:string ->
?package:string ->
domain:'a KB.domain ->
parse:(package:string -> Parse.tree list -> 'a) ->
string -> 'a t
module Set : sig
include KB.Value.S with type t := set
val get : 'a t -> set -> 'a
val slot : (Theory.program, set) KB.slot
end
end
val init : ?log:formatter -> ?paths:string list -> string list -> unit
|
4fbd07a06cd7d6d1048d2797f032b62d07f90b60a95f1c8c014fe21dba4caf2e | 223kazuki/clj-graphql-server | streamer.clj | (ns graphql-server.handler.streamer
(:require [integrant.core :as ig]
[clojure.core.async :refer [pub sub chan go-loop go >! <!
timeout close! >!! <!! unsub]]
[graphql-server.boundary.db :as db]))
(defmethod ig/init-key ::stream-torikumis [_ {:keys [db channel]}]
(fn [{request :request :as ctx} {:keys [num]} source-stream]
(println "Start subscription.")
(let [{:keys [id]} (get-in request [:auth-info :client :user])
torikumis (db/find-torikumis db id num)]
(source-stream torikumis)
(let [{:keys [publication]} channel
subscription (chan)]
(sub publication :torikumi/updated subscription)
(go-loop []
(when-let [{:keys [data]} (<! subscription)]
(let [torikumis (db/find-torikumis db id num)]
(println "Subscription received data" data)
(source-stream torikumis)
(recur))))
#(do
(println "Stop subscription.")
(unsub publication :torikumi/updated subscription)
(close! subscription))))))
(comment
(>!! (:channel (:graphql-server/channel integrant.repl.state/system))
{:msg-type :torikumi/updated :data {:msg "Updated!"}})
)
| null | https://raw.githubusercontent.com/223kazuki/clj-graphql-server/0994d2bb98cd483dcb1c02a8175766e4fd35c861/src/graphql_server/handler/streamer.clj | clojure | (ns graphql-server.handler.streamer
(:require [integrant.core :as ig]
[clojure.core.async :refer [pub sub chan go-loop go >! <!
timeout close! >!! <!! unsub]]
[graphql-server.boundary.db :as db]))
(defmethod ig/init-key ::stream-torikumis [_ {:keys [db channel]}]
(fn [{request :request :as ctx} {:keys [num]} source-stream]
(println "Start subscription.")
(let [{:keys [id]} (get-in request [:auth-info :client :user])
torikumis (db/find-torikumis db id num)]
(source-stream torikumis)
(let [{:keys [publication]} channel
subscription (chan)]
(sub publication :torikumi/updated subscription)
(go-loop []
(when-let [{:keys [data]} (<! subscription)]
(let [torikumis (db/find-torikumis db id num)]
(println "Subscription received data" data)
(source-stream torikumis)
(recur))))
#(do
(println "Stop subscription.")
(unsub publication :torikumi/updated subscription)
(close! subscription))))))
(comment
(>!! (:channel (:graphql-server/channel integrant.repl.state/system))
{:msg-type :torikumi/updated :data {:msg "Updated!"}})
)
| |
f394d2d3caf28459dedc9a3e0bf71a2620f22d577126a81c05564810c6d9046e | bmeurer/ocamljit2 | typedecl.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
and , projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ Id$
(**** Typing of type definitions ****)
open Misc
open Asttypes
open Parsetree
open Primitive
open Types
open Typedtree
open Typetexp
type error =
Repeated_parameter
| Duplicate_constructor of string
| Too_many_constructors
| Duplicate_label of string
| Recursive_abbrev of string
| Definition_mismatch of type_expr * Includecore.type_mismatch list
| Constraint_failed of type_expr * type_expr
| Unconsistent_constraint of (type_expr * type_expr) list
| Type_clash of (type_expr * type_expr) list
| Parameters_differ of Path.t * type_expr * type_expr
| Null_arity_external
| Missing_native_external
| Unbound_type_var of type_expr * type_declaration
| Unbound_exception of Longident.t
| Not_an_exception of Longident.t
| Bad_variance of int * (bool * bool) * (bool * bool)
| Unavailable_type_constructor of Path.t
| Bad_fixed_type of string
| Unbound_type_var_exc of type_expr * type_expr
exception Error of Location.t * error
(* Enter all declared types in the environment as abstract types *)
let enter_type env (name, sdecl) id =
let decl =
{ type_params =
List.map (fun _ -> Btype.newgenvar ()) sdecl.ptype_params;
type_arity = List.length sdecl.ptype_params;
type_kind = Type_abstract;
type_private = sdecl.ptype_private;
type_manifest =
begin match sdecl.ptype_manifest with None -> None
| Some _ -> Some(Ctype.newvar ()) end;
type_variance = List.map (fun _ -> true, true, true) sdecl.ptype_params;
}
in
Env.add_type id decl env
let update_type temp_env env id loc =
let path = Path.Pident id in
let decl = Env.find_type path temp_env in
match decl.type_manifest with None -> ()
| Some ty ->
let params = List.map (fun _ -> Ctype.newvar ()) decl.type_params in
try Ctype.unify env (Ctype.newconstr path params) ty
with Ctype.Unify trace ->
raise (Error(loc, Type_clash trace))
(* Determine if a type is (an abbreviation for) the type "float" *)
We use the Ctype.expand_head_opt version of expand_head to get access
to the manifest type of private abbreviations .
to the manifest type of private abbreviations. *)
let is_float env ty =
match Ctype.repr (Ctype.expand_head_opt env ty) with
{desc = Tconstr(p, _, _)} -> Path.same p Predef.path_float
| _ -> false
Determine if a type definition defines a fixed type . ( PW )
let is_fixed_type sd =
(match sd.ptype_manifest with
| Some { ptyp_desc =
(Ptyp_variant _|Ptyp_object _|Ptyp_class _|Ptyp_alias
({ptyp_desc = Ptyp_variant _|Ptyp_object _|Ptyp_class _},_)) } -> true
| _ -> false) &&
sd.ptype_kind = Ptype_abstract &&
sd.ptype_private = Private
(* Set the row variable in a fixed type *)
let set_fixed_row env loc p decl =
let tm =
match decl.type_manifest with
None -> assert false
| Some t -> Ctype.expand_head env t
in
let rv =
match tm.desc with
Tvariant row ->
let row = Btype.row_repr row in
tm.desc <- Tvariant {row with row_fixed = true};
if Btype.static_row row then Btype.newgenty Tnil
else row.row_more
| Tobject (ty, _) ->
snd (Ctype.flatten_fields ty)
| _ ->
raise (Error (loc, Bad_fixed_type "is not an object or variant"))
in
if rv.desc <> Tvar then
raise (Error (loc, Bad_fixed_type "has no row variable"));
rv.desc <- Tconstr (p, decl.type_params, ref Mnil)
Translate one type declaration
module StringSet =
Set.Make(struct
type t = string
let compare = compare
end)
let transl_declaration env (name, sdecl) id =
(* Bind type parameters *)
reset_type_variables();
Ctype.begin_def ();
let params =
try List.map (enter_type_variable true sdecl.ptype_loc) sdecl.ptype_params
with Already_bound ->
raise(Error(sdecl.ptype_loc, Repeated_parameter))
in
let cstrs = List.map
(fun (sty, sty', loc) ->
transl_simple_type env false sty,
transl_simple_type env false sty', loc)
sdecl.ptype_cstrs
in
let decl =
{ type_params = params;
type_arity = List.length params;
type_kind =
begin match sdecl.ptype_kind with
Ptype_abstract -> Type_abstract
| Ptype_variant cstrs ->
let all_constrs = ref StringSet.empty in
List.iter
(fun (name, args, loc) ->
if StringSet.mem name !all_constrs then
raise(Error(sdecl.ptype_loc, Duplicate_constructor name));
all_constrs := StringSet.add name !all_constrs)
cstrs;
if List.length (List.filter (fun (_, args, _) -> args <> []) cstrs)
> (Config.max_tag + 1) then
raise(Error(sdecl.ptype_loc, Too_many_constructors));
Type_variant
(List.map
(fun (name, args, loc) ->
(name, List.map (transl_simple_type env true) args))
cstrs)
| Ptype_record lbls ->
let all_labels = ref StringSet.empty in
List.iter
(fun (name, mut, arg, loc) ->
if StringSet.mem name !all_labels then
raise(Error(sdecl.ptype_loc, Duplicate_label name));
all_labels := StringSet.add name !all_labels)
lbls;
let lbls' =
List.map
(fun (name, mut, arg, loc) ->
let ty = transl_simple_type env true arg in
name, mut, match ty.desc with Tpoly(t,[]) -> t | _ -> ty)
lbls in
let rep =
if List.for_all (fun (name, mut, arg) -> is_float env arg) lbls'
then Record_float
else Record_regular in
Type_record(lbls', rep)
end;
type_private = sdecl.ptype_private;
type_manifest =
begin match sdecl.ptype_manifest with
None -> None
| Some sty ->
let no_row = not (is_fixed_type sdecl) in
Some (transl_simple_type env no_row sty)
end;
type_variance = List.map (fun _ -> true, true, true) params;
} in
(* Check constraints *)
List.iter
(fun (ty, ty', loc) ->
try Ctype.unify env ty ty' with Ctype.Unify tr ->
raise(Error(loc, Unconsistent_constraint tr)))
cstrs;
Ctype.end_def ();
(* Add abstract row *)
if is_fixed_type sdecl then begin
let (p, _) =
try Env.lookup_type (Longident.Lident(Ident.name id ^ "#row")) env
with Not_found -> assert false in
set_fixed_row env sdecl.ptype_loc p decl
end;
(* Check for cyclic abbreviations *)
begin match decl.type_manifest with None -> ()
| Some ty ->
if Ctype.cyclic_abbrev env id ty then
raise(Error(sdecl.ptype_loc, Recursive_abbrev name));
end;
(id, decl)
a type declaration
let generalize_decl decl =
List.iter Ctype.generalize decl.type_params;
begin match decl.type_kind with
Type_abstract ->
()
| Type_variant v ->
List.iter (fun (_, tyl) -> List.iter Ctype.generalize tyl) v
| Type_record(r, rep) ->
List.iter (fun (_, _, ty) -> Ctype.generalize ty) r
end;
begin match decl.type_manifest with
| None -> ()
| Some ty -> Ctype.generalize ty
end
(* Check that all constraints are enforced *)
module TypeSet =
Set.Make
(struct
type t = type_expr
let compare t1 t2 = t1.id - t2.id
end)
let rec check_constraints_rec env loc visited ty =
let ty = Ctype.repr ty in
if TypeSet.mem ty !visited then () else begin
visited := TypeSet.add ty !visited;
match ty.desc with
| Tconstr (path, args, _) ->
let args' = List.map (fun _ -> Ctype.newvar ()) args in
let ty' = Ctype.newconstr path args' in
begin try Ctype.enforce_constraints env ty'
with Ctype.Unify _ -> assert false
| Not_found -> raise (Error(loc, Unavailable_type_constructor path))
end;
if not (Ctype.matches env ty ty') then
raise (Error(loc, Constraint_failed (ty, ty')));
List.iter (check_constraints_rec env loc visited) args
| Tpoly (ty, tl) ->
let _, ty = Ctype.instance_poly false tl ty in
check_constraints_rec env loc visited ty
| _ ->
Btype.iter_type_expr (check_constraints_rec env loc visited) ty
end
let check_constraints env (_, sdecl) (_, decl) =
let visited = ref TypeSet.empty in
begin match decl.type_kind with
| Type_abstract -> ()
| Type_variant l ->
let rec find_pl = function
Ptype_variant pl -> pl
| Ptype_record _ | Ptype_abstract -> assert false
in
let pl = find_pl sdecl.ptype_kind in
List.iter
(fun (name, tyl) ->
let styl =
try let (_,sty,_) = List.find (fun (n,_,_) -> n = name) pl in sty
with Not_found -> assert false in
List.iter2
(fun sty ty ->
check_constraints_rec env sty.ptyp_loc visited ty)
styl tyl)
l
| Type_record (l, _) ->
let rec find_pl = function
Ptype_record pl -> pl
| Ptype_variant _ | Ptype_abstract -> assert false
in
let pl = find_pl sdecl.ptype_kind in
let rec get_loc name = function
[] -> assert false
| (name', _, sty, _) :: tl ->
if name = name' then sty.ptyp_loc else get_loc name tl
in
List.iter
(fun (name, _, ty) ->
check_constraints_rec env (get_loc name pl) visited ty)
l
end;
begin match decl.type_manifest with
| None -> ()
| Some ty ->
let sty =
match sdecl.ptype_manifest with Some sty -> sty | _ -> assert false
in
check_constraints_rec env sty.ptyp_loc visited ty
end
(*
If both a variant/record definition and a type equation are given,
need to check that the equation refers to a type of the same kind
with the same constructors and labels.
*)
let check_abbrev env (_, sdecl) (id, decl) =
match decl with
{type_kind = (Type_variant _ | Type_record _); type_manifest = Some ty} ->
begin match (Ctype.repr ty).desc with
Tconstr(path, args, _) ->
begin try
let decl' = Env.find_type path env in
let err =
if List.length args <> List.length decl.type_params
then [Includecore.Arity]
else if not (Ctype.equal env false args decl.type_params)
then [Includecore.Constraint]
else
Includecore.type_declarations env id
decl'
(Subst.type_declaration
(Subst.add_type id path Subst.identity) decl)
in
if err <> [] then
raise(Error(sdecl.ptype_loc, Definition_mismatch (ty, err)))
with Not_found ->
raise(Error(sdecl.ptype_loc, Unavailable_type_constructor path))
end
| _ -> raise(Error(sdecl.ptype_loc, Definition_mismatch (ty, [])))
end
| _ -> ()
Check for ill - defined
let check_recursion env loc path decl to_check =
to_check is true for potentially mutually recursive paths .
( path , ) is the type declaration to be checked .
(path, decl) is the type declaration to be checked. *)
let visited = ref [] in
let rec check_regular cpath args prev_exp ty =
let ty = Ctype.repr ty in
if not (List.memq ty !visited) then begin
visited := ty :: !visited;
match ty.desc with
| Tconstr(path', args', _) ->
if Path.same path path' then begin
if not (Ctype.equal env false args args') then
raise (Error(loc,
Parameters_differ(cpath, ty, Ctype.newconstr path args)))
end
(* Attempt to expand a type abbreviation if:
1- [to_check path'] holds
(otherwise the expansion cannot involve [path]);
2- we haven't expanded this type constructor before
(otherwise we could loop if [path'] is itself
a non-regular abbreviation). *)
else if to_check path' && not (List.mem path' prev_exp) then begin
try
(* Attempt expansion *)
let (params0, body0) = Env.find_type_expansion path' env in
let (params, body) =
Ctype.instance_parameterized_type params0 body0 in
begin
try List.iter2 (Ctype.unify env) params args'
with Ctype.Unify _ ->
raise (Error(loc, Constraint_failed
(ty, Ctype.newconstr path' params0)));
end;
check_regular path' args (path' :: prev_exp) body
with Not_found -> ()
end;
List.iter (check_regular cpath args prev_exp) args'
| Tpoly (ty, tl) ->
let (_, ty) = Ctype.instance_poly false tl ty in
check_regular cpath args prev_exp ty
| _ ->
Btype.iter_type_expr (check_regular cpath args prev_exp) ty
end in
match decl.type_manifest with
| None -> ()
| Some body ->
(* Check that recursion is well-founded *)
begin try
Ctype.correct_abbrev env path decl.type_params body
with Ctype.Recursive_abbrev ->
raise(Error(loc, Recursive_abbrev (Path.name path)))
| Ctype.Unify trace -> raise(Error(loc, Type_clash trace))
end;
(* Check that recursion is regular *)
if decl.type_params = [] then () else
let (args, body) =
Ctype.instance_parameterized_type decl.type_params body in
check_regular path args [] body
let check_abbrev_recursion env id_loc_list (id, decl) =
check_recursion env (List.assoc id id_loc_list) (Path.Pident id) decl
(function Path.Pident id -> List.mem_assoc id id_loc_list | _ -> false)
(* Compute variance *)
let compute_variance env tvl nega posi cntr ty =
let pvisited = ref TypeSet.empty
and nvisited = ref TypeSet.empty
and cvisited = ref TypeSet.empty in
let rec compute_variance_rec posi nega cntr ty =
let ty = Ctype.repr ty in
if (not posi || TypeSet.mem ty !pvisited)
&& (not nega || TypeSet.mem ty !nvisited)
&& (not cntr || TypeSet.mem ty !cvisited) then
()
else begin
if posi then pvisited := TypeSet.add ty !pvisited;
if nega then nvisited := TypeSet.add ty !nvisited;
if cntr then cvisited := TypeSet.add ty !cvisited;
let compute_same = compute_variance_rec posi nega cntr in
match ty.desc with
Tarrow (_, ty1, ty2, _) ->
compute_variance_rec nega posi true ty1;
compute_same ty2
| Ttuple tl ->
List.iter compute_same tl
| Tconstr (path, tl, _) ->
if tl = [] then () else begin
try
let decl = Env.find_type path env in
List.iter2
(fun ty (co,cn,ct) ->
compute_variance_rec
(posi && co || nega && cn)
(posi && cn || nega && co)
(cntr || ct)
ty)
tl decl.type_variance
with Not_found ->
List.iter (compute_variance_rec true true true) tl
end
| Tobject (ty, _) ->
compute_same ty
| Tfield (_, _, ty1, ty2) ->
compute_same ty1;
compute_same ty2
| Tsubst ty ->
compute_same ty
| Tvariant row ->
let row = Btype.row_repr row in
List.iter
(fun (_,f) ->
match Btype.row_field_repr f with
Rpresent (Some ty) ->
compute_same ty
| Reither (_, tyl, _, _) ->
List.iter compute_same tyl
| _ -> ())
row.row_fields;
compute_same row.row_more
| Tpoly (ty, _) ->
compute_same ty
| Tvar | Tnil | Tlink _ | Tunivar -> ()
| Tpackage (_, _, tyl) ->
List.iter (compute_variance_rec true true true) tyl
end
in
compute_variance_rec nega posi cntr ty;
List.iter
(fun (ty, covar, convar, ctvar) ->
if TypeSet.mem ty !pvisited then covar := true;
if TypeSet.mem ty !nvisited then convar := true;
if TypeSet.mem ty !cvisited then ctvar := true)
tvl
let make_variance ty = (ty, ref false, ref false, ref false)
let whole_type decl =
match decl.type_kind with
Type_variant tll ->
Btype.newgenty
(Ttuple (List.map (fun (_, tl) -> Btype.newgenty (Ttuple tl)) tll))
| Type_record (ftl, _) ->
Btype.newgenty
(Ttuple (List.map (fun (_, _, ty) -> ty) ftl))
| Type_abstract ->
match decl.type_manifest with
Some ty -> ty
| _ -> Btype.newgenty (Ttuple [])
let compute_variance_decl env check decl (required, loc) =
if decl.type_kind = Type_abstract && decl.type_manifest = None then
List.map (fun (c, n) -> if c || n then (c, n, n) else (true, true, true))
required
else
let params = List.map Btype.repr decl.type_params in
let tvl0 = List.map make_variance params in
let fvl = if check then Ctype.free_variables (whole_type decl) else [] in
let fvl = List.filter (fun v -> not (List.memq v params)) fvl in
let tvl1 = List.map make_variance fvl in
let tvl2 = List.map make_variance fvl in
let tvl = tvl0 @ tvl1 in
begin match decl.type_kind with
Type_abstract ->
begin match decl.type_manifest with
None -> assert false
| Some ty -> compute_variance env tvl true false false ty
end
| Type_variant tll ->
List.iter
(fun (_,tl) ->
List.iter (compute_variance env tvl true false false) tl)
tll
| Type_record (ftl, _) ->
List.iter
(fun (_, mut, ty) ->
let cn = (mut = Mutable) in
compute_variance env tvl true cn cn ty)
ftl
end;
let required =
List.map (fun (c,n as r) -> if c || n then r else (true,true))
required
in
List.iter2
(fun (ty, co, cn, ct) (c, n) ->
if ty.desc <> Tvar then begin
co := c; cn := n; ct := n;
compute_variance env tvl2 c n n ty
end)
tvl0 required;
List.iter2
(fun (ty, c1, n1, t1) (_, c2, n2, t2) ->
if !c1 && not !c2 || !n1 && not !n2
|| ! t1 & & not ! t2 & & decl.type_kind = Type_abstract
then raise (Error(loc,
if not (!c2 || !n2) then Unbound_type_var (ty, decl)
else Bad_variance (0, (!c1,!n1), (!c2,!n2)))))
tvl1 tvl2;
let pos = ref 0 in
List.map2
(fun (_, co, cn, ct) (c, n) ->
incr pos;
if !co && not c || !cn && not n
then raise (Error(loc, Bad_variance (!pos, (!co,!cn), (c,n))));
if decl.type_private = Private then (c,n,n) else
let ct = if decl.type_kind = Type_abstract then ct else cn in
(!co, !cn, !ct))
tvl0 required
let is_sharp id =
let s = Ident.name id in
String.length s > 0 && s.[0] = '#'
let rec compute_variance_fixpoint env decls required variances =
let new_decls =
List.map2
(fun (id, decl) variance -> id, {decl with type_variance = variance})
decls variances
in
let new_env =
List.fold_right (fun (id, decl) env -> Env.add_type id decl env)
new_decls env
in
let new_variances =
List.map2
(fun (id, decl) -> compute_variance_decl new_env false decl)
new_decls required
in
let new_variances =
List.map2
(List.map2 (fun (c1,n1,t1) (c2,n2,t2) -> c1||c2, n1||n2, t1||t2))
new_variances variances in
if new_variances <> variances then
compute_variance_fixpoint env decls required new_variances
else begin
List.iter2
(fun (id, decl) req -> if not (is_sharp id) then
ignore (compute_variance_decl new_env true decl req))
new_decls required;
new_decls, new_env
end
let init_variance (id, decl) =
List.map (fun _ -> (false, false, false)) decl.type_params
(* for typeclass.ml *)
let compute_variance_decls env cldecls =
let decls, required =
List.fold_right
(fun (obj_id, obj_abbr, cl_abbr, clty, cltydef, required) (decls, req) ->
(obj_id, obj_abbr) :: decls, required :: req)
cldecls ([],[])
in
let variances = List.map init_variance decls in
let (decls, _) = compute_variance_fixpoint env decls required variances in
List.map2
(fun (_,decl) (_, _, cl_abbr, clty, cltydef, _) ->
let variance = List.map (fun (c,n,t) -> (c,n)) decl.type_variance in
(decl, {cl_abbr with type_variance = decl.type_variance},
{clty with cty_variance = variance},
{cltydef with clty_variance = variance}))
decls cldecls
(* Check multiple declarations of labels/constructors *)
let check_duplicates name_sdecl_list =
let labels = Hashtbl.create 7 and constrs = Hashtbl.create 7 in
List.iter
(fun (name, sdecl) -> match sdecl.ptype_kind with
Ptype_variant cl ->
List.iter
(fun (cname, _, loc) ->
try
let name' = Hashtbl.find constrs cname in
Location.prerr_warning loc
(Warnings.Duplicate_definitions
("constructor", cname, name', name))
with Not_found -> Hashtbl.add constrs cname name)
cl
| Ptype_record fl ->
List.iter
(fun (cname, _, _, loc) ->
try
let name' = Hashtbl.find labels cname in
Location.prerr_warning loc
(Warnings.Duplicate_definitions ("label", cname, name', name))
with Not_found -> Hashtbl.add labels cname name)
fl
| Ptype_abstract -> ())
name_sdecl_list
Force recursion to go through i d for private types
let name_recursion sdecl id decl =
match decl with
| { type_kind = Type_abstract;
type_manifest = Some ty;
type_private = Private; } when is_fixed_type sdecl ->
let ty = Ctype.repr ty in
let ty' = Btype.newty2 ty.level ty.desc in
if Ctype.deep_occur ty ty' then
let td = Tconstr(Path.Pident id, decl.type_params, ref Mnil) in
Btype.link_type ty (Btype.newty2 ty.level td);
{decl with type_manifest = Some ty'}
else decl
| _ -> decl
(* Translate a set of mutually recursive type declarations *)
let transl_type_decl env name_sdecl_list =
(* Add dummy types for fixed rows *)
let fixed_types =
List.filter (fun (_, sd) -> is_fixed_type sd) name_sdecl_list
in
let name_sdecl_list =
List.map
(fun (name,sdecl) ->
name^"#row",
{sdecl with ptype_kind = Ptype_abstract; ptype_manifest = None})
fixed_types
@ name_sdecl_list
in
(* Create identifiers. *)
let id_list =
List.map (fun (name, _) -> Ident.create name) name_sdecl_list
in
Since we 've introduced fresh idents , make sure the definition
level is at least the binding time of these events . Otherwise ,
passing one of the recursively - defined type as argument
to an abbreviation may fail .
Since we've introduced fresh idents, make sure the definition
level is at least the binding time of these events. Otherwise,
passing one of the recursively-defined type constrs as argument
to an abbreviation may fail.
*)
Ctype.init_def(Ident.current_time());
Ctype.begin_def();
(* Enter types. *)
let temp_env = List.fold_left2 enter_type env name_sdecl_list id_list in
(* Translate each declaration. *)
let decls =
List.map2 (transl_declaration temp_env) name_sdecl_list id_list in
(* Check for duplicates *)
check_duplicates name_sdecl_list;
(* Build the final env. *)
let newenv =
List.fold_right
(fun (id, decl) env -> Env.add_type id decl env)
decls env
in
(* Update stubs *)
List.iter2
(fun id (_, sdecl) -> update_type temp_env newenv id sdecl.ptype_loc)
id_list name_sdecl_list;
Generalize type declarations .
Ctype.end_def();
List.iter (fun (_, decl) -> generalize_decl decl) decls;
Check for ill - formed
let id_loc_list =
List.map2 (fun id (_,sdecl) -> (id, sdecl.ptype_loc))
id_list name_sdecl_list
in
List.iter (check_abbrev_recursion newenv id_loc_list) decls;
(* Check that all type variable are closed *)
List.iter2
(fun (_, sdecl) (id, decl) ->
match Ctype.closed_type_decl decl with
Some ty -> raise(Error(sdecl.ptype_loc, Unbound_type_var(ty,decl)))
| None -> ())
name_sdecl_list decls;
(* Check re-exportation *)
List.iter2 (check_abbrev newenv) name_sdecl_list decls;
(* Check that constraints are enforced *)
List.iter2 (check_constraints newenv) name_sdecl_list decls;
(* Name recursion *)
let decls =
List.map2 (fun (_, sdecl) (id, decl) -> id, name_recursion sdecl id decl)
name_sdecl_list decls
in
(* Add variances to the environment *)
let required =
List.map (fun (_, sdecl) -> sdecl.ptype_variance, sdecl.ptype_loc)
name_sdecl_list
in
let final_decls, final_env =
compute_variance_fixpoint env decls required (List.map init_variance decls)
in
(* Done *)
(final_decls, final_env)
(* Translate an exception declaration *)
let transl_closed_type env sty =
let ty = transl_simple_type env true sty in
match Ctype.free_variables ty with
| [] -> ty
| tv :: _ -> raise (Error (sty.ptyp_loc, Unbound_type_var_exc (tv, ty)))
let transl_exception env excdecl =
reset_type_variables();
Ctype.begin_def();
let types = List.map (transl_closed_type env) excdecl in
Ctype.end_def();
List.iter Ctype.generalize types;
types
(* Translate an exception rebinding *)
let transl_exn_rebind env loc lid =
let cdescr =
try
Env.lookup_constructor lid env
with Not_found ->
raise(Error(loc, Unbound_exception lid)) in
match cdescr.cstr_tag with
Cstr_exception path -> (path, cdescr.cstr_args)
| _ -> raise(Error(loc, Not_an_exception lid))
(* Translate a value declaration *)
let transl_value_decl env valdecl =
let ty = Typetexp.transl_type_scheme env valdecl.pval_type in
match valdecl.pval_prim with
[] ->
{ val_type = ty; val_kind = Val_reg }
| decl ->
let arity = Ctype.arity ty in
if arity = 0 then
raise(Error(valdecl.pval_type.ptyp_loc, Null_arity_external));
let prim = Primitive.parse_declaration arity decl in
if !Clflags.native_code
&& prim.prim_arity > 5
&& prim.prim_native_name = ""
then raise(Error(valdecl.pval_type.ptyp_loc, Missing_native_external));
{ val_type = ty; val_kind = Val_prim prim }
Translate a " with " constraint -- much simplified version of
transl_type_decl .
transl_type_decl. *)
let transl_with_constraint env id row_path orig_decl sdecl =
reset_type_variables();
Ctype.begin_def();
let params =
try
List.map (enter_type_variable true sdecl.ptype_loc) sdecl.ptype_params
with Already_bound ->
raise(Error(sdecl.ptype_loc, Repeated_parameter)) in
let orig_decl = Ctype.instance_declaration orig_decl in
let arity_ok = List.length params = orig_decl.type_arity in
if arity_ok then
List.iter2 (Ctype.unify_var env) params orig_decl.type_params;
List.iter
(function (ty, ty', loc) ->
try
Ctype.unify env (transl_simple_type env false ty)
(transl_simple_type env false ty')
with Ctype.Unify tr ->
raise(Error(loc, Unconsistent_constraint tr)))
sdecl.ptype_cstrs;
let no_row = not (is_fixed_type sdecl) in
let decl =
{ type_params = params;
type_arity = List.length params;
type_kind = if arity_ok then orig_decl.type_kind else Type_abstract;
type_private = sdecl.ptype_private;
type_manifest =
begin match sdecl.ptype_manifest with
None -> None
| Some sty ->
Some(transl_simple_type env no_row sty)
end;
type_variance = [];
}
in
begin match row_path with None -> ()
| Some p -> set_fixed_row env sdecl.ptype_loc p decl
end;
begin match Ctype.closed_type_decl decl with None -> ()
| Some ty -> raise(Error(sdecl.ptype_loc, Unbound_type_var(ty,decl)))
end;
let decl = name_recursion sdecl id decl in
let decl =
{decl with type_variance =
compute_variance_decl env false decl
(sdecl.ptype_variance, sdecl.ptype_loc)} in
Ctype.end_def();
generalize_decl decl;
decl
(* Approximate a type declaration: just make all types abstract *)
let abstract_type_decl arity =
let rec make_params n =
if n <= 0 then [] else Ctype.newvar() :: make_params (n-1) in
Ctype.begin_def();
let decl =
{ type_params = make_params arity;
type_arity = arity;
type_kind = Type_abstract;
type_private = Public;
type_manifest = None;
type_variance = replicate_list (true, true, true) arity } in
Ctype.end_def();
generalize_decl decl;
decl
let approx_type_decl env name_sdecl_list =
List.map
(fun (name, sdecl) ->
(Ident.create name,
abstract_type_decl (List.length sdecl.ptype_params)))
name_sdecl_list
(* Variant of check_abbrev_recursion to check the well-formedness
conditions on type abbreviations defined within recursive modules. *)
let check_recmod_typedecl env loc recmod_ids path decl =
recmod_ids is the list of recursively - defined module idents .
( path , ) is the type declaration to be checked .
(path, decl) is the type declaration to be checked. *)
check_recursion env loc path decl
(fun path -> List.exists (fun id -> Path.isfree id path) recmod_ids)
(**** Error report ****)
open Format
let explain_unbound ppf tv tl typ kwd lab =
try
let ti = List.find (fun ti -> Ctype.deep_occur tv (typ ti)) tl in
let ty0 = (* Hack to force aliasing when needed *)
Btype.newgenty (Tobject(tv, ref None)) in
Printtyp.reset_and_mark_loops_list [typ ti; ty0];
fprintf ppf
".@.@[<hov2>In %s@ %s%a@;<1 -2>the variable %a is unbound@]"
kwd (lab ti) Printtyp.type_expr (typ ti) Printtyp.type_expr tv
with Not_found -> ()
let explain_unbound_single ppf tv ty =
let trivial ty =
explain_unbound ppf tv [ty] (fun t -> t) "type" (fun _ -> "") in
match (Ctype.repr ty).desc with
Tobject(fi,_) ->
let (tl, rv) = Ctype.flatten_fields fi in
if rv == tv then trivial ty else
explain_unbound ppf tv tl (fun (_,_,t) -> t)
"method" (fun (lab,_,_) -> lab ^ ": ")
| Tvariant row ->
let row = Btype.row_repr row in
if row.row_more == tv then trivial ty else
explain_unbound ppf tv row.row_fields
(fun (l,f) -> match Btype.row_field_repr f with
Rpresent (Some t) -> t
| Reither (_,[t],_,_) -> t
| Reither (_,tl,_,_) -> Btype.newgenty (Ttuple tl)
| _ -> Btype.newgenty (Ttuple[]))
"case" (fun (lab,_) -> "`" ^ lab ^ " of ")
| _ -> trivial ty
let report_error ppf = function
| Repeated_parameter ->
fprintf ppf "A type parameter occurs several times"
| Duplicate_constructor s ->
fprintf ppf "Two constructors are named %s" s
| Too_many_constructors ->
fprintf ppf
"@[Too many non-constant constructors@ -- maximum is %i %s@]"
(Config.max_tag + 1) "non-constant constructors"
| Duplicate_label s ->
fprintf ppf "Two labels are named %s" s
| Recursive_abbrev s ->
fprintf ppf "The type abbreviation %s is cyclic" s
| Definition_mismatch (ty, errs) ->
Printtyp.reset_and_mark_loops ty;
fprintf ppf "@[<v>@[<hov>%s@ %s@;<1 2>%a@]%a@]"
"This variant or record definition" "does not match that of type"
Printtyp.type_expr ty
(Includecore.report_type_mismatch "the original" "this" "definition")
errs
| Constraint_failed (ty, ty') ->
fprintf ppf "Constraints are not satisfied in this type.@.";
Printtyp.reset_and_mark_loops ty;
Printtyp.mark_loops ty';
fprintf ppf "@[<hv>Type@ %a@ should be an instance of@ %a@]"
Printtyp.type_expr ty Printtyp.type_expr ty'
  (* Remaining cases of the error-report printer: each arm formats one
     [error] constructor on [ppf].  "%a" consumes a printer/value pair
     (e.g. [Printtyp.type_expr] followed by the type to print). *)
  | Parameters_differ (path, ty, ty') ->
      (* Mark loops in both types first so cyclic parts print as aliases. *)
      Printtyp.reset_and_mark_loops ty;
      Printtyp.mark_loops ty';
      fprintf ppf
        "@[<hv>In the definition of %s, type@ %a@ should be@ %a@]"
        (Path.name path) Printtyp.type_expr ty Printtyp.type_expr ty'
  | Unconsistent_constraint trace ->
      (* Delegate to the generic unification-error printer, supplying the
         two lead-in phrases that bracket the conflicting types. *)
      fprintf ppf "The type constraints are not consistent.@.";
      Printtyp.report_unification_error ppf trace
        (fun ppf -> fprintf ppf "Type")
        (fun ppf -> fprintf ppf "is not compatible with type")
  | Type_clash trace ->
      Printtyp.report_unification_error ppf trace
        (function ppf ->
           fprintf ppf "This type constructor expands to type")
        (function ppf ->
           fprintf ppf "but is used here with type")
  | Null_arity_external ->
      fprintf ppf "External identifiers must be functions"
  | Missing_native_external ->
      (* The backslash-newline continuations below are all one string
         literal; OCaml strips the leading blanks after each "\". *)
      fprintf ppf "@[<hv>An external function with more than 5 arguments \
                   requires a second stub function@ \
                   for native-code compilation@]"
  | Unbound_type_var (ty, decl) ->
      fprintf ppf "A type variable is unbound in this type declaration";
      (* Normalize the variable, then point at the specific constructor,
         record field or manifest type in which it occurs. *)
      let ty = Ctype.repr ty in
      begin match decl.type_kind, decl.type_manifest with
        Type_variant tl, _ ->
          explain_unbound ppf ty tl (fun (_,tl) -> Btype.newgenty (Ttuple tl))
            "case" (fun (lab,_) -> lab ^ " of ")
      | Type_record (tl, _), _ ->
          explain_unbound ppf ty tl (fun (_,_,t) -> t)
            "field" (fun (lab,_,_) -> lab ^ ": ")
      | Type_abstract, Some ty' ->
          explain_unbound_single ppf ty ty'
      | _ -> ()
      end
  | Unbound_type_var_exc (tv, ty) ->
      fprintf ppf "A type variable is unbound in this exception declaration";
      explain_unbound_single ppf (Ctype.repr tv) ty
  | Unbound_exception lid ->
      fprintf ppf "Unbound exception constructor@ %a" Printtyp.longident lid
  | Not_an_exception lid ->
      fprintf ppf "The constructor@ %a@ is not an exception"
        Printtyp.longident lid
  | Bad_variance (n, v1, v2) ->
      (* [v1]/[v2] are (covariant, contravariant) flag pairs: [v1] is the
         variance actually inferred, [v2] the one expected (see the final
         format call below). *)
      let variance = function
          (true, true) -> "invariant"
        | (true, false) -> "covariant"
        | (false,true) -> "contravariant"
        | (false,false) -> "unrestricted"
      in
      (* English ordinal suffix for [n]; [teen] forces "th" for 11-13
         (and 111-113, etc.). *)
      let suffix n =
        let teen = (n mod 100)/10 = 1 in
        match n mod 10 with
        | 1 when not teen -> "st"
        | 2 when not teen -> "nd"
        | 3 when not teen -> "rd"
        | _ -> "th"
      in
      (* n < 1: the offending variable is not one of the declared type
         parameters, so a generic message is printed; otherwise the n-th
         parameter is named with its expected vs. actual variance. *)
      if n < 1 then
        fprintf ppf "%s@ %s@ %s"
          "In this definition, a type variable"
          "has a variance that is not reflected"
          "by its occurrence in type parameters."
      else
        fprintf ppf "%s@ %s@ %s %d%s %s %s,@ %s %s"
          "In this definition, expected parameter"
          "variances are not satisfied."
          "The" n (suffix n)
          "type parameter was expected to be" (variance v2)
          "but it is" (variance v1)
  | Unavailable_type_constructor p ->
      fprintf ppf "The definition of type %a@ is unavailable" Printtyp.path p
  | Bad_fixed_type r ->
      fprintf ppf "This fixed type %s" r
| null | https://raw.githubusercontent.com/bmeurer/ocamljit2/ef06db5c688c1160acc1de1f63c29473bcd0055c/typing/typedecl.ml | ocaml | *********************************************************************
Objective Caml
*********************************************************************
*** Typing of type definitions ***
Enter all declared types in the environment as abstract types
Determine if a type is (an abbreviation for) the type "float"
Set the row variable in a fixed type
Bind type parameters
Check constraints
Add abstract row
Check for cyclic abbreviations
Check that all constraints are enforced
If both a variant/record definition and a type equation are given,
need to check that the equation refers to a type of the same kind
with the same constructors and labels.
Attempt to expand a type abbreviation if:
1- [to_check path'] holds
(otherwise the expansion cannot involve [path]);
2- we haven't expanded this type constructor before
(otherwise we could loop if [path'] is itself
a non-regular abbreviation).
Attempt expansion
Check that recursion is well-founded
Check that recursion is regular
Compute variance
for typeclass.ml
Check multiple declarations of labels/constructors
Translate a set of mutually recursive type declarations
Add dummy types for fixed rows
Create identifiers.
Enter types.
Translate each declaration.
Check for duplicates
Build the final env.
Update stubs
Check that all type variable are closed
Check re-exportation
Check that constraints are enforced
Name recursion
Add variances to the environment
Done
Translate an exception declaration
Translate an exception rebinding
Translate a value declaration
Approximate a type declaration: just make all types abstract
Variant of check_abbrev_recursion to check the well-formedness
conditions on type abbreviations defined within recursive modules.
*** Error report ***
Hack to force aliasing when needed | and , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ Id$
open Misc
open Asttypes
open Parsetree
open Primitive
open Types
open Typedtree
open Typetexp
type error =
Repeated_parameter
| Duplicate_constructor of string
| Too_many_constructors
| Duplicate_label of string
| Recursive_abbrev of string
| Definition_mismatch of type_expr * Includecore.type_mismatch list
| Constraint_failed of type_expr * type_expr
| Unconsistent_constraint of (type_expr * type_expr) list
| Type_clash of (type_expr * type_expr) list
| Parameters_differ of Path.t * type_expr * type_expr
| Null_arity_external
| Missing_native_external
| Unbound_type_var of type_expr * type_declaration
| Unbound_exception of Longident.t
| Not_an_exception of Longident.t
| Bad_variance of int * (bool * bool) * (bool * bool)
| Unavailable_type_constructor of Path.t
| Bad_fixed_type of string
| Unbound_type_var_exc of type_expr * type_expr
exception Error of Location.t * error
let enter_type env (name, sdecl) id =
let decl =
{ type_params =
List.map (fun _ -> Btype.newgenvar ()) sdecl.ptype_params;
type_arity = List.length sdecl.ptype_params;
type_kind = Type_abstract;
type_private = sdecl.ptype_private;
type_manifest =
begin match sdecl.ptype_manifest with None -> None
| Some _ -> Some(Ctype.newvar ()) end;
type_variance = List.map (fun _ -> true, true, true) sdecl.ptype_params;
}
in
Env.add_type id decl env
let update_type temp_env env id loc =
let path = Path.Pident id in
let decl = Env.find_type path temp_env in
match decl.type_manifest with None -> ()
| Some ty ->
let params = List.map (fun _ -> Ctype.newvar ()) decl.type_params in
try Ctype.unify env (Ctype.newconstr path params) ty
with Ctype.Unify trace ->
raise (Error(loc, Type_clash trace))
We use the Ctype.expand_head_opt version of expand_head to get access
to the manifest type of private abbreviations .
to the manifest type of private abbreviations. *)
let is_float env ty =
match Ctype.repr (Ctype.expand_head_opt env ty) with
{desc = Tconstr(p, _, _)} -> Path.same p Predef.path_float
| _ -> false
Determine if a type definition defines a fixed type . ( PW )
let is_fixed_type sd =
(match sd.ptype_manifest with
| Some { ptyp_desc =
(Ptyp_variant _|Ptyp_object _|Ptyp_class _|Ptyp_alias
({ptyp_desc = Ptyp_variant _|Ptyp_object _|Ptyp_class _},_)) } -> true
| _ -> false) &&
sd.ptype_kind = Ptype_abstract &&
sd.ptype_private = Private
let set_fixed_row env loc p decl =
let tm =
match decl.type_manifest with
None -> assert false
| Some t -> Ctype.expand_head env t
in
let rv =
match tm.desc with
Tvariant row ->
let row = Btype.row_repr row in
tm.desc <- Tvariant {row with row_fixed = true};
if Btype.static_row row then Btype.newgenty Tnil
else row.row_more
| Tobject (ty, _) ->
snd (Ctype.flatten_fields ty)
| _ ->
raise (Error (loc, Bad_fixed_type "is not an object or variant"))
in
if rv.desc <> Tvar then
raise (Error (loc, Bad_fixed_type "has no row variable"));
rv.desc <- Tconstr (p, decl.type_params, ref Mnil)
Translate one type declaration
module StringSet =
Set.Make(struct
type t = string
let compare = compare
end)
let transl_declaration env (name, sdecl) id =
reset_type_variables();
Ctype.begin_def ();
let params =
try List.map (enter_type_variable true sdecl.ptype_loc) sdecl.ptype_params
with Already_bound ->
raise(Error(sdecl.ptype_loc, Repeated_parameter))
in
let cstrs = List.map
(fun (sty, sty', loc) ->
transl_simple_type env false sty,
transl_simple_type env false sty', loc)
sdecl.ptype_cstrs
in
let decl =
{ type_params = params;
type_arity = List.length params;
type_kind =
begin match sdecl.ptype_kind with
Ptype_abstract -> Type_abstract
| Ptype_variant cstrs ->
let all_constrs = ref StringSet.empty in
List.iter
(fun (name, args, loc) ->
if StringSet.mem name !all_constrs then
raise(Error(sdecl.ptype_loc, Duplicate_constructor name));
all_constrs := StringSet.add name !all_constrs)
cstrs;
if List.length (List.filter (fun (_, args, _) -> args <> []) cstrs)
> (Config.max_tag + 1) then
raise(Error(sdecl.ptype_loc, Too_many_constructors));
Type_variant
(List.map
(fun (name, args, loc) ->
(name, List.map (transl_simple_type env true) args))
cstrs)
| Ptype_record lbls ->
let all_labels = ref StringSet.empty in
List.iter
(fun (name, mut, arg, loc) ->
if StringSet.mem name !all_labels then
raise(Error(sdecl.ptype_loc, Duplicate_label name));
all_labels := StringSet.add name !all_labels)
lbls;
let lbls' =
List.map
(fun (name, mut, arg, loc) ->
let ty = transl_simple_type env true arg in
name, mut, match ty.desc with Tpoly(t,[]) -> t | _ -> ty)
lbls in
let rep =
if List.for_all (fun (name, mut, arg) -> is_float env arg) lbls'
then Record_float
else Record_regular in
Type_record(lbls', rep)
end;
type_private = sdecl.ptype_private;
type_manifest =
begin match sdecl.ptype_manifest with
None -> None
| Some sty ->
let no_row = not (is_fixed_type sdecl) in
Some (transl_simple_type env no_row sty)
end;
type_variance = List.map (fun _ -> true, true, true) params;
} in
List.iter
(fun (ty, ty', loc) ->
try Ctype.unify env ty ty' with Ctype.Unify tr ->
raise(Error(loc, Unconsistent_constraint tr)))
cstrs;
Ctype.end_def ();
if is_fixed_type sdecl then begin
let (p, _) =
try Env.lookup_type (Longident.Lident(Ident.name id ^ "#row")) env
with Not_found -> assert false in
set_fixed_row env sdecl.ptype_loc p decl
end;
begin match decl.type_manifest with None -> ()
| Some ty ->
if Ctype.cyclic_abbrev env id ty then
raise(Error(sdecl.ptype_loc, Recursive_abbrev name));
end;
(id, decl)
a type declaration
let generalize_decl decl =
List.iter Ctype.generalize decl.type_params;
begin match decl.type_kind with
Type_abstract ->
()
| Type_variant v ->
List.iter (fun (_, tyl) -> List.iter Ctype.generalize tyl) v
| Type_record(r, rep) ->
List.iter (fun (_, _, ty) -> Ctype.generalize ty) r
end;
begin match decl.type_manifest with
| None -> ()
| Some ty -> Ctype.generalize ty
end
module TypeSet =
Set.Make
(struct
type t = type_expr
let compare t1 t2 = t1.id - t2.id
end)
let rec check_constraints_rec env loc visited ty =
let ty = Ctype.repr ty in
if TypeSet.mem ty !visited then () else begin
visited := TypeSet.add ty !visited;
match ty.desc with
| Tconstr (path, args, _) ->
let args' = List.map (fun _ -> Ctype.newvar ()) args in
let ty' = Ctype.newconstr path args' in
begin try Ctype.enforce_constraints env ty'
with Ctype.Unify _ -> assert false
| Not_found -> raise (Error(loc, Unavailable_type_constructor path))
end;
if not (Ctype.matches env ty ty') then
raise (Error(loc, Constraint_failed (ty, ty')));
List.iter (check_constraints_rec env loc visited) args
| Tpoly (ty, tl) ->
let _, ty = Ctype.instance_poly false tl ty in
check_constraints_rec env loc visited ty
| _ ->
Btype.iter_type_expr (check_constraints_rec env loc visited) ty
end
let check_constraints env (_, sdecl) (_, decl) =
let visited = ref TypeSet.empty in
begin match decl.type_kind with
| Type_abstract -> ()
| Type_variant l ->
let rec find_pl = function
Ptype_variant pl -> pl
| Ptype_record _ | Ptype_abstract -> assert false
in
let pl = find_pl sdecl.ptype_kind in
List.iter
(fun (name, tyl) ->
let styl =
try let (_,sty,_) = List.find (fun (n,_,_) -> n = name) pl in sty
with Not_found -> assert false in
List.iter2
(fun sty ty ->
check_constraints_rec env sty.ptyp_loc visited ty)
styl tyl)
l
| Type_record (l, _) ->
let rec find_pl = function
Ptype_record pl -> pl
| Ptype_variant _ | Ptype_abstract -> assert false
in
let pl = find_pl sdecl.ptype_kind in
let rec get_loc name = function
[] -> assert false
| (name', _, sty, _) :: tl ->
if name = name' then sty.ptyp_loc else get_loc name tl
in
List.iter
(fun (name, _, ty) ->
check_constraints_rec env (get_loc name pl) visited ty)
l
end;
begin match decl.type_manifest with
| None -> ()
| Some ty ->
let sty =
match sdecl.ptype_manifest with Some sty -> sty | _ -> assert false
in
check_constraints_rec env sty.ptyp_loc visited ty
end
let check_abbrev env (_, sdecl) (id, decl) =
match decl with
{type_kind = (Type_variant _ | Type_record _); type_manifest = Some ty} ->
begin match (Ctype.repr ty).desc with
Tconstr(path, args, _) ->
begin try
let decl' = Env.find_type path env in
let err =
if List.length args <> List.length decl.type_params
then [Includecore.Arity]
else if not (Ctype.equal env false args decl.type_params)
then [Includecore.Constraint]
else
Includecore.type_declarations env id
decl'
(Subst.type_declaration
(Subst.add_type id path Subst.identity) decl)
in
if err <> [] then
raise(Error(sdecl.ptype_loc, Definition_mismatch (ty, err)))
with Not_found ->
raise(Error(sdecl.ptype_loc, Unavailable_type_constructor path))
end
| _ -> raise(Error(sdecl.ptype_loc, Definition_mismatch (ty, [])))
end
| _ -> ()
Check for ill - defined
let check_recursion env loc path decl to_check =
to_check is true for potentially mutually recursive paths .
( path , ) is the type declaration to be checked .
(path, decl) is the type declaration to be checked. *)
let visited = ref [] in
let rec check_regular cpath args prev_exp ty =
let ty = Ctype.repr ty in
if not (List.memq ty !visited) then begin
visited := ty :: !visited;
match ty.desc with
| Tconstr(path', args', _) ->
if Path.same path path' then begin
if not (Ctype.equal env false args args') then
raise (Error(loc,
Parameters_differ(cpath, ty, Ctype.newconstr path args)))
end
else if to_check path' && not (List.mem path' prev_exp) then begin
try
let (params0, body0) = Env.find_type_expansion path' env in
let (params, body) =
Ctype.instance_parameterized_type params0 body0 in
begin
try List.iter2 (Ctype.unify env) params args'
with Ctype.Unify _ ->
raise (Error(loc, Constraint_failed
(ty, Ctype.newconstr path' params0)));
end;
check_regular path' args (path' :: prev_exp) body
with Not_found -> ()
end;
List.iter (check_regular cpath args prev_exp) args'
| Tpoly (ty, tl) ->
let (_, ty) = Ctype.instance_poly false tl ty in
check_regular cpath args prev_exp ty
| _ ->
Btype.iter_type_expr (check_regular cpath args prev_exp) ty
end in
match decl.type_manifest with
| None -> ()
| Some body ->
begin try
Ctype.correct_abbrev env path decl.type_params body
with Ctype.Recursive_abbrev ->
raise(Error(loc, Recursive_abbrev (Path.name path)))
| Ctype.Unify trace -> raise(Error(loc, Type_clash trace))
end;
if decl.type_params = [] then () else
let (args, body) =
Ctype.instance_parameterized_type decl.type_params body in
check_regular path args [] body
let check_abbrev_recursion env id_loc_list (id, decl) =
check_recursion env (List.assoc id id_loc_list) (Path.Pident id) decl
(function Path.Pident id -> List.mem_assoc id id_loc_list | _ -> false)
let compute_variance env tvl nega posi cntr ty =
let pvisited = ref TypeSet.empty
and nvisited = ref TypeSet.empty
and cvisited = ref TypeSet.empty in
let rec compute_variance_rec posi nega cntr ty =
let ty = Ctype.repr ty in
if (not posi || TypeSet.mem ty !pvisited)
&& (not nega || TypeSet.mem ty !nvisited)
&& (not cntr || TypeSet.mem ty !cvisited) then
()
else begin
if posi then pvisited := TypeSet.add ty !pvisited;
if nega then nvisited := TypeSet.add ty !nvisited;
if cntr then cvisited := TypeSet.add ty !cvisited;
let compute_same = compute_variance_rec posi nega cntr in
match ty.desc with
Tarrow (_, ty1, ty2, _) ->
compute_variance_rec nega posi true ty1;
compute_same ty2
| Ttuple tl ->
List.iter compute_same tl
| Tconstr (path, tl, _) ->
if tl = [] then () else begin
try
let decl = Env.find_type path env in
List.iter2
(fun ty (co,cn,ct) ->
compute_variance_rec
(posi && co || nega && cn)
(posi && cn || nega && co)
(cntr || ct)
ty)
tl decl.type_variance
with Not_found ->
List.iter (compute_variance_rec true true true) tl
end
| Tobject (ty, _) ->
compute_same ty
| Tfield (_, _, ty1, ty2) ->
compute_same ty1;
compute_same ty2
| Tsubst ty ->
compute_same ty
| Tvariant row ->
let row = Btype.row_repr row in
List.iter
(fun (_,f) ->
match Btype.row_field_repr f with
Rpresent (Some ty) ->
compute_same ty
| Reither (_, tyl, _, _) ->
List.iter compute_same tyl
| _ -> ())
row.row_fields;
compute_same row.row_more
| Tpoly (ty, _) ->
compute_same ty
| Tvar | Tnil | Tlink _ | Tunivar -> ()
| Tpackage (_, _, tyl) ->
List.iter (compute_variance_rec true true true) tyl
end
in
compute_variance_rec nega posi cntr ty;
List.iter
(fun (ty, covar, convar, ctvar) ->
if TypeSet.mem ty !pvisited then covar := true;
if TypeSet.mem ty !nvisited then convar := true;
if TypeSet.mem ty !cvisited then ctvar := true)
tvl
let make_variance ty = (ty, ref false, ref false, ref false)
let whole_type decl =
match decl.type_kind with
Type_variant tll ->
Btype.newgenty
(Ttuple (List.map (fun (_, tl) -> Btype.newgenty (Ttuple tl)) tll))
| Type_record (ftl, _) ->
Btype.newgenty
(Ttuple (List.map (fun (_, _, ty) -> ty) ftl))
| Type_abstract ->
match decl.type_manifest with
Some ty -> ty
| _ -> Btype.newgenty (Ttuple [])
let compute_variance_decl env check decl (required, loc) =
if decl.type_kind = Type_abstract && decl.type_manifest = None then
List.map (fun (c, n) -> if c || n then (c, n, n) else (true, true, true))
required
else
let params = List.map Btype.repr decl.type_params in
let tvl0 = List.map make_variance params in
let fvl = if check then Ctype.free_variables (whole_type decl) else [] in
let fvl = List.filter (fun v -> not (List.memq v params)) fvl in
let tvl1 = List.map make_variance fvl in
let tvl2 = List.map make_variance fvl in
let tvl = tvl0 @ tvl1 in
begin match decl.type_kind with
Type_abstract ->
begin match decl.type_manifest with
None -> assert false
| Some ty -> compute_variance env tvl true false false ty
end
| Type_variant tll ->
List.iter
(fun (_,tl) ->
List.iter (compute_variance env tvl true false false) tl)
tll
| Type_record (ftl, _) ->
List.iter
(fun (_, mut, ty) ->
let cn = (mut = Mutable) in
compute_variance env tvl true cn cn ty)
ftl
end;
let required =
List.map (fun (c,n as r) -> if c || n then r else (true,true))
required
in
List.iter2
(fun (ty, co, cn, ct) (c, n) ->
if ty.desc <> Tvar then begin
co := c; cn := n; ct := n;
compute_variance env tvl2 c n n ty
end)
tvl0 required;
List.iter2
(fun (ty, c1, n1, t1) (_, c2, n2, t2) ->
if !c1 && not !c2 || !n1 && not !n2
|| ! t1 & & not ! t2 & & decl.type_kind = Type_abstract
then raise (Error(loc,
if not (!c2 || !n2) then Unbound_type_var (ty, decl)
else Bad_variance (0, (!c1,!n1), (!c2,!n2)))))
tvl1 tvl2;
let pos = ref 0 in
List.map2
(fun (_, co, cn, ct) (c, n) ->
incr pos;
if !co && not c || !cn && not n
then raise (Error(loc, Bad_variance (!pos, (!co,!cn), (c,n))));
if decl.type_private = Private then (c,n,n) else
let ct = if decl.type_kind = Type_abstract then ct else cn in
(!co, !cn, !ct))
tvl0 required
let is_sharp id =
let s = Ident.name id in
String.length s > 0 && s.[0] = '#'
let rec compute_variance_fixpoint env decls required variances =
let new_decls =
List.map2
(fun (id, decl) variance -> id, {decl with type_variance = variance})
decls variances
in
let new_env =
List.fold_right (fun (id, decl) env -> Env.add_type id decl env)
new_decls env
in
let new_variances =
List.map2
(fun (id, decl) -> compute_variance_decl new_env false decl)
new_decls required
in
let new_variances =
List.map2
(List.map2 (fun (c1,n1,t1) (c2,n2,t2) -> c1||c2, n1||n2, t1||t2))
new_variances variances in
if new_variances <> variances then
compute_variance_fixpoint env decls required new_variances
else begin
List.iter2
(fun (id, decl) req -> if not (is_sharp id) then
ignore (compute_variance_decl new_env true decl req))
new_decls required;
new_decls, new_env
end
let init_variance (id, decl) =
List.map (fun _ -> (false, false, false)) decl.type_params
let compute_variance_decls env cldecls =
let decls, required =
List.fold_right
(fun (obj_id, obj_abbr, cl_abbr, clty, cltydef, required) (decls, req) ->
(obj_id, obj_abbr) :: decls, required :: req)
cldecls ([],[])
in
let variances = List.map init_variance decls in
let (decls, _) = compute_variance_fixpoint env decls required variances in
List.map2
(fun (_,decl) (_, _, cl_abbr, clty, cltydef, _) ->
let variance = List.map (fun (c,n,t) -> (c,n)) decl.type_variance in
(decl, {cl_abbr with type_variance = decl.type_variance},
{clty with cty_variance = variance},
{cltydef with clty_variance = variance}))
decls cldecls
let check_duplicates name_sdecl_list =
let labels = Hashtbl.create 7 and constrs = Hashtbl.create 7 in
List.iter
(fun (name, sdecl) -> match sdecl.ptype_kind with
Ptype_variant cl ->
List.iter
(fun (cname, _, loc) ->
try
let name' = Hashtbl.find constrs cname in
Location.prerr_warning loc
(Warnings.Duplicate_definitions
("constructor", cname, name', name))
with Not_found -> Hashtbl.add constrs cname name)
cl
| Ptype_record fl ->
List.iter
(fun (cname, _, _, loc) ->
try
let name' = Hashtbl.find labels cname in
Location.prerr_warning loc
(Warnings.Duplicate_definitions ("label", cname, name', name))
with Not_found -> Hashtbl.add labels cname name)
fl
| Ptype_abstract -> ())
name_sdecl_list
Force recursion to go through i d for private types
let name_recursion sdecl id decl =
match decl with
| { type_kind = Type_abstract;
type_manifest = Some ty;
type_private = Private; } when is_fixed_type sdecl ->
let ty = Ctype.repr ty in
let ty' = Btype.newty2 ty.level ty.desc in
if Ctype.deep_occur ty ty' then
let td = Tconstr(Path.Pident id, decl.type_params, ref Mnil) in
Btype.link_type ty (Btype.newty2 ty.level td);
{decl with type_manifest = Some ty'}
else decl
| _ -> decl
let transl_type_decl env name_sdecl_list =
let fixed_types =
List.filter (fun (_, sd) -> is_fixed_type sd) name_sdecl_list
in
let name_sdecl_list =
List.map
(fun (name,sdecl) ->
name^"#row",
{sdecl with ptype_kind = Ptype_abstract; ptype_manifest = None})
fixed_types
@ name_sdecl_list
in
let id_list =
List.map (fun (name, _) -> Ident.create name) name_sdecl_list
in
Since we 've introduced fresh idents , make sure the definition
level is at least the binding time of these events . Otherwise ,
passing one of the recursively - defined type as argument
to an abbreviation may fail .
Since we've introduced fresh idents, make sure the definition
level is at least the binding time of these events. Otherwise,
passing one of the recursively-defined type constrs as argument
to an abbreviation may fail.
*)
Ctype.init_def(Ident.current_time());
Ctype.begin_def();
let temp_env = List.fold_left2 enter_type env name_sdecl_list id_list in
let decls =
List.map2 (transl_declaration temp_env) name_sdecl_list id_list in
check_duplicates name_sdecl_list;
let newenv =
List.fold_right
(fun (id, decl) env -> Env.add_type id decl env)
decls env
in
List.iter2
(fun id (_, sdecl) -> update_type temp_env newenv id sdecl.ptype_loc)
id_list name_sdecl_list;
Generalize type declarations .
Ctype.end_def();
List.iter (fun (_, decl) -> generalize_decl decl) decls;
Check for ill - formed
let id_loc_list =
List.map2 (fun id (_,sdecl) -> (id, sdecl.ptype_loc))
id_list name_sdecl_list
in
List.iter (check_abbrev_recursion newenv id_loc_list) decls;
List.iter2
(fun (_, sdecl) (id, decl) ->
match Ctype.closed_type_decl decl with
Some ty -> raise(Error(sdecl.ptype_loc, Unbound_type_var(ty,decl)))
| None -> ())
name_sdecl_list decls;
List.iter2 (check_abbrev newenv) name_sdecl_list decls;
List.iter2 (check_constraints newenv) name_sdecl_list decls;
let decls =
List.map2 (fun (_, sdecl) (id, decl) -> id, name_recursion sdecl id decl)
name_sdecl_list decls
in
let required =
List.map (fun (_, sdecl) -> sdecl.ptype_variance, sdecl.ptype_loc)
name_sdecl_list
in
let final_decls, final_env =
compute_variance_fixpoint env decls required (List.map init_variance decls)
in
(final_decls, final_env)
let transl_closed_type env sty =
let ty = transl_simple_type env true sty in
match Ctype.free_variables ty with
| [] -> ty
| tv :: _ -> raise (Error (sty.ptyp_loc, Unbound_type_var_exc (tv, ty)))
let transl_exception env excdecl =
reset_type_variables();
Ctype.begin_def();
let types = List.map (transl_closed_type env) excdecl in
Ctype.end_def();
List.iter Ctype.generalize types;
types
let transl_exn_rebind env loc lid =
let cdescr =
try
Env.lookup_constructor lid env
with Not_found ->
raise(Error(loc, Unbound_exception lid)) in
match cdescr.cstr_tag with
Cstr_exception path -> (path, cdescr.cstr_args)
| _ -> raise(Error(loc, Not_an_exception lid))
let transl_value_decl env valdecl =
let ty = Typetexp.transl_type_scheme env valdecl.pval_type in
match valdecl.pval_prim with
[] ->
{ val_type = ty; val_kind = Val_reg }
| decl ->
let arity = Ctype.arity ty in
if arity = 0 then
raise(Error(valdecl.pval_type.ptyp_loc, Null_arity_external));
let prim = Primitive.parse_declaration arity decl in
if !Clflags.native_code
&& prim.prim_arity > 5
&& prim.prim_native_name = ""
then raise(Error(valdecl.pval_type.ptyp_loc, Missing_native_external));
{ val_type = ty; val_kind = Val_prim prim }
Translate a " with " constraint -- much simplified version of
transl_type_decl .
transl_type_decl. *)
let transl_with_constraint env id row_path orig_decl sdecl =
reset_type_variables();
Ctype.begin_def();
let params =
try
List.map (enter_type_variable true sdecl.ptype_loc) sdecl.ptype_params
with Already_bound ->
raise(Error(sdecl.ptype_loc, Repeated_parameter)) in
let orig_decl = Ctype.instance_declaration orig_decl in
let arity_ok = List.length params = orig_decl.type_arity in
if arity_ok then
List.iter2 (Ctype.unify_var env) params orig_decl.type_params;
List.iter
(function (ty, ty', loc) ->
try
Ctype.unify env (transl_simple_type env false ty)
(transl_simple_type env false ty')
with Ctype.Unify tr ->
raise(Error(loc, Unconsistent_constraint tr)))
sdecl.ptype_cstrs;
let no_row = not (is_fixed_type sdecl) in
let decl =
{ type_params = params;
type_arity = List.length params;
type_kind = if arity_ok then orig_decl.type_kind else Type_abstract;
type_private = sdecl.ptype_private;
type_manifest =
begin match sdecl.ptype_manifest with
None -> None
| Some sty ->
Some(transl_simple_type env no_row sty)
end;
type_variance = [];
}
in
begin match row_path with None -> ()
| Some p -> set_fixed_row env sdecl.ptype_loc p decl
end;
begin match Ctype.closed_type_decl decl with None -> ()
| Some ty -> raise(Error(sdecl.ptype_loc, Unbound_type_var(ty,decl)))
end;
let decl = name_recursion sdecl id decl in
let decl =
{decl with type_variance =
compute_variance_decl env false decl
(sdecl.ptype_variance, sdecl.ptype_loc)} in
Ctype.end_def();
generalize_decl decl;
decl
let abstract_type_decl arity =
let rec make_params n =
if n <= 0 then [] else Ctype.newvar() :: make_params (n-1) in
Ctype.begin_def();
let decl =
{ type_params = make_params arity;
type_arity = arity;
type_kind = Type_abstract;
type_private = Public;
type_manifest = None;
type_variance = replicate_list (true, true, true) arity } in
Ctype.end_def();
generalize_decl decl;
decl
let approx_type_decl env name_sdecl_list =
List.map
(fun (name, sdecl) ->
(Ident.create name,
abstract_type_decl (List.length sdecl.ptype_params)))
name_sdecl_list
let check_recmod_typedecl env loc recmod_ids path decl =
recmod_ids is the list of recursively - defined module idents .
( path , ) is the type declaration to be checked .
(path, decl) is the type declaration to be checked. *)
check_recursion env loc path decl
(fun path -> List.exists (fun id -> Path.isfree id path) recmod_ids)
open Format
let explain_unbound ppf tv tl typ kwd lab =
try
let ti = List.find (fun ti -> Ctype.deep_occur tv (typ ti)) tl in
Btype.newgenty (Tobject(tv, ref None)) in
Printtyp.reset_and_mark_loops_list [typ ti; ty0];
fprintf ppf
".@.@[<hov2>In %s@ %s%a@;<1 -2>the variable %a is unbound@]"
kwd (lab ti) Printtyp.type_expr (typ ti) Printtyp.type_expr tv
with Not_found -> ()
let explain_unbound_single ppf tv ty =
let trivial ty =
explain_unbound ppf tv [ty] (fun t -> t) "type" (fun _ -> "") in
match (Ctype.repr ty).desc with
Tobject(fi,_) ->
let (tl, rv) = Ctype.flatten_fields fi in
if rv == tv then trivial ty else
explain_unbound ppf tv tl (fun (_,_,t) -> t)
"method" (fun (lab,_,_) -> lab ^ ": ")
| Tvariant row ->
let row = Btype.row_repr row in
if row.row_more == tv then trivial ty else
explain_unbound ppf tv row.row_fields
(fun (l,f) -> match Btype.row_field_repr f with
Rpresent (Some t) -> t
| Reither (_,[t],_,_) -> t
| Reither (_,tl,_,_) -> Btype.newgenty (Ttuple tl)
| _ -> Btype.newgenty (Ttuple[]))
"case" (fun (lab,_) -> "`" ^ lab ^ " of ")
| _ -> trivial ty
let report_error ppf = function
| Repeated_parameter ->
fprintf ppf "A type parameter occurs several times"
| Duplicate_constructor s ->
fprintf ppf "Two constructors are named %s" s
| Too_many_constructors ->
fprintf ppf
"@[Too many non-constant constructors@ -- maximum is %i %s@]"
(Config.max_tag + 1) "non-constant constructors"
| Duplicate_label s ->
fprintf ppf "Two labels are named %s" s
| Recursive_abbrev s ->
fprintf ppf "The type abbreviation %s is cyclic" s
| Definition_mismatch (ty, errs) ->
Printtyp.reset_and_mark_loops ty;
fprintf ppf "@[<v>@[<hov>%s@ %s@;<1 2>%a@]%a@]"
"This variant or record definition" "does not match that of type"
Printtyp.type_expr ty
(Includecore.report_type_mismatch "the original" "this" "definition")
errs
| Constraint_failed (ty, ty') ->
fprintf ppf "Constraints are not satisfied in this type.@.";
Printtyp.reset_and_mark_loops ty;
Printtyp.mark_loops ty';
fprintf ppf "@[<hv>Type@ %a@ should be an instance of@ %a@]"
Printtyp.type_expr ty Printtyp.type_expr ty'
| Parameters_differ (path, ty, ty') ->
Printtyp.reset_and_mark_loops ty;
Printtyp.mark_loops ty';
fprintf ppf
"@[<hv>In the definition of %s, type@ %a@ should be@ %a@]"
(Path.name path) Printtyp.type_expr ty Printtyp.type_expr ty'
| Unconsistent_constraint trace ->
fprintf ppf "The type constraints are not consistent.@.";
Printtyp.report_unification_error ppf trace
(fun ppf -> fprintf ppf "Type")
(fun ppf -> fprintf ppf "is not compatible with type")
| Type_clash trace ->
Printtyp.report_unification_error ppf trace
(function ppf ->
fprintf ppf "This type constructor expands to type")
(function ppf ->
fprintf ppf "but is used here with type")
| Null_arity_external ->
fprintf ppf "External identifiers must be functions"
| Missing_native_external ->
fprintf ppf "@[<hv>An external function with more than 5 arguments \
requires a second stub function@ \
for native-code compilation@]"
| Unbound_type_var (ty, decl) ->
fprintf ppf "A type variable is unbound in this type declaration";
let ty = Ctype.repr ty in
begin match decl.type_kind, decl.type_manifest with
Type_variant tl, _ ->
explain_unbound ppf ty tl (fun (_,tl) -> Btype.newgenty (Ttuple tl))
"case" (fun (lab,_) -> lab ^ " of ")
| Type_record (tl, _), _ ->
explain_unbound ppf ty tl (fun (_,_,t) -> t)
"field" (fun (lab,_,_) -> lab ^ ": ")
| Type_abstract, Some ty' ->
explain_unbound_single ppf ty ty'
| _ -> ()
end
| Unbound_type_var_exc (tv, ty) ->
fprintf ppf "A type variable is unbound in this exception declaration";
explain_unbound_single ppf (Ctype.repr tv) ty
| Unbound_exception lid ->
fprintf ppf "Unbound exception constructor@ %a" Printtyp.longident lid
| Not_an_exception lid ->
fprintf ppf "The constructor@ %a@ is not an exception"
Printtyp.longident lid
| Bad_variance (n, v1, v2) ->
let variance = function
(true, true) -> "invariant"
| (true, false) -> "covariant"
| (false,true) -> "contravariant"
| (false,false) -> "unrestricted"
in
let suffix n =
let teen = (n mod 100)/10 = 1 in
match n mod 10 with
| 1 when not teen -> "st"
| 2 when not teen -> "nd"
| 3 when not teen -> "rd"
| _ -> "th"
in
if n < 1 then
fprintf ppf "%s@ %s@ %s"
"In this definition, a type variable"
"has a variance that is not reflected"
"by its occurrence in type parameters."
else
fprintf ppf "%s@ %s@ %s %d%s %s %s,@ %s %s"
"In this definition, expected parameter"
"variances are not satisfied."
"The" n (suffix n)
"type parameter was expected to be" (variance v2)
"but it is" (variance v1)
| Unavailable_type_constructor p ->
fprintf ppf "The definition of type %a@ is unavailable" Printtyp.path p
| Bad_fixed_type r ->
fprintf ppf "This fixed type %s" r
|
b69d6a09f7348831d1af1f838a435636ef66cf3232367119e598fe48eae3398f | tomjridge/tjr_kv | pvt_pcache_with_blocks_limit.ml | * This is a pcache wrapper which automatically detaches after
a certain number of blocks ; used by pcache_thread
a certain number of blocks; used by pcache_thread *)
$ ( CONFIG("pvt_pcache_with_blocks_limit.ml : dont_log " ) )
let dont_log = true
* fidi = find insert delete insertmany
type ('k,'v,'t) map_fidi_ops = {
find : 'k -> ('v option,'t) m;
insert : 'k -> 'v -> (unit,'t) m;
delete : 'k -> (unit,'t)m;
insert_many : 'k -> 'v -> ('k*'v) list -> (('k*'v)list,'t) m
}
type ('k,'v,'t) pcache_with_lim_ops = ('k,'v,'t) map_fidi_ops
* NOTE bt_find and bt_handle_detach are named for the particular
application we envisage : a persistent cache which hands over to a
btree
Construct the PCACHE , which uses the pcache_ops and wraps it in a routine which occasionally executes a B - tree roll - up .
Parameters :
- [ monad_ops ]
- [ pcache_ops ] : pcache from tjr_pcache , with pcache interface
- [ pcache_blocks_limit ] : how many blocks in the pcache before
attempting a roll - up ; if the length of pcache is [ > =] this limit , we
attempt a roll - up ; NOTE that this limit should be > = 2 ( if we roll
up with 1 block , then in fact nothing gets rolled up because we roll
up " upto " the current block ; not a problem but probably pointless
for testing )
- [ bt_find ] : called if key not in pcache map FIXME do we need a
write - through cache here ? or just rely on the front - end LRU ? FIXME
note that even if a rollup is taking place , we can use the old
B - tree root for the [ bt_find ] operation .
- [ bt_handle_detach ] : called to detach the rollup into another thread ;
typically this operation puts a msg on a message queue which is then
received and acted upon by the dedicated rollup thread
application we envisage: a persistent cache which hands over to a
btree
Construct the PCACHE, which uses the pcache_ops and wraps it in a routine which occasionally executes a B-tree roll-up.
Parameters:
- [monad_ops]
- [pcache_ops]: pcache from tjr_pcache, with pcache interface
- [pcache_blocks_limit]: how many blocks in the pcache before
attempting a roll-up; if the length of pcache is [>=] this limit, we
attempt a roll-up; NOTE that this limit should be >= 2 (if we roll
up with 1 block, then in fact nothing gets rolled up because we roll
up "upto" the current block; not a problem but probably pointless
for testing)
- [bt_find]: called if key not in pcache map FIXME do we need a
write-through cache here? or just rely on the front-end LRU? FIXME
note that even if a rollup is taking place, we can use the old
B-tree root for the [bt_find] operation.
- [bt_handle_detach]: called to detach the rollup into another thread;
typically this operation puts a msg on a message queue which is then
received and acted upon by the dedicated rollup thread
*)
let make_ops
~monad_ops
~(pcache_ops:('k,'v,'ptr,'kvop_map,'t) pcache_ops)
~pcache_blocks_limit
~bt_find
~(bt_handle_detach:('k,'v,'ptr,'kvop_map) Detach_info.t -> (unit,'t)m)
=
(* let open Mref_plus in *)
let ( >>= ) = monad_ops.bind in
let return = monad_ops.return in
persistent cache ; another name for pcache
let find k =
pc.find k >>= fun v ->
match v with
| None -> bt_find k
| Some v -> return (Some v)
in
let maybe_roll_up () =
pc.blk_len () >>= fun n ->
match n >= pcache_blocks_limit with
| false -> return `No_roll_up_needed
| true ->
assert(dont_log || (
Printf.printf "%s: pcache_thread, maybe_roll_up\n%!" __FILE__; true));
pc.detach () >>= fun detach_result ->
bt_handle_detach detach_result >>= fun () ->
return `Ok
in
let insert k v =
pc.insert k v >>= fun () ->
maybe_roll_up () >>= fun _ ->
return ()
in
let delete k =
pc.delete k >>= fun () ->
maybe_roll_up () >>= fun _ ->
return ()
in
let insert_many k v kvs =
(* FIXME we should do something smarter here *)
insert k v >>= fun () -> return kvs
in
{find;insert;delete;insert_many}
let _ = make_ops
| null | https://raw.githubusercontent.com/tomjridge/tjr_kv/68b3d2fa0c7a144765a3720d78cffb96d5478b32/src/pvt_pcache_with_blocks_limit.ml | ocaml | let open Mref_plus in
FIXME we should do something smarter here | * This is a pcache wrapper which automatically detaches after
a certain number of blocks ; used by pcache_thread
a certain number of blocks; used by pcache_thread *)
$ ( CONFIG("pvt_pcache_with_blocks_limit.ml : dont_log " ) )
let dont_log = true
* fidi = find insert delete insertmany
type ('k,'v,'t) map_fidi_ops = {
find : 'k -> ('v option,'t) m;
insert : 'k -> 'v -> (unit,'t) m;
delete : 'k -> (unit,'t)m;
insert_many : 'k -> 'v -> ('k*'v) list -> (('k*'v)list,'t) m
}
type ('k,'v,'t) pcache_with_lim_ops = ('k,'v,'t) map_fidi_ops
* NOTE bt_find and bt_handle_detach are named for the particular
application we envisage : a persistent cache which hands over to a
btree
Construct the PCACHE , which uses the pcache_ops and wraps it in a routine which occasionally executes a B - tree roll - up .
Parameters :
- [ monad_ops ]
- [ pcache_ops ] : pcache from tjr_pcache , with pcache interface
- [ pcache_blocks_limit ] : how many blocks in the pcache before
attempting a roll - up ; if the length of pcache is [ > =] this limit , we
attempt a roll - up ; NOTE that this limit should be > = 2 ( if we roll
up with 1 block , then in fact nothing gets rolled up because we roll
up " upto " the current block ; not a problem but probably pointless
for testing )
- [ bt_find ] : called if key not in pcache map FIXME do we need a
write - through cache here ? or just rely on the front - end LRU ? FIXME
note that even if a rollup is taking place , we can use the old
B - tree root for the [ bt_find ] operation .
- [ bt_handle_detach ] : called to detach the rollup into another thread ;
typically this operation puts a msg on a message queue which is then
received and acted upon by the dedicated rollup thread
application we envisage: a persistent cache which hands over to a
btree
Construct the PCACHE, which uses the pcache_ops and wraps it in a routine which occasionally executes a B-tree roll-up.
Parameters:
- [monad_ops]
- [pcache_ops]: pcache from tjr_pcache, with pcache interface
- [pcache_blocks_limit]: how many blocks in the pcache before
attempting a roll-up; if the length of pcache is [>=] this limit, we
attempt a roll-up; NOTE that this limit should be >= 2 (if we roll
up with 1 block, then in fact nothing gets rolled up because we roll
up "upto" the current block; not a problem but probably pointless
for testing)
- [bt_find]: called if key not in pcache map FIXME do we need a
write-through cache here? or just rely on the front-end LRU? FIXME
note that even if a rollup is taking place, we can use the old
B-tree root for the [bt_find] operation.
- [bt_handle_detach]: called to detach the rollup into another thread;
typically this operation puts a msg on a message queue which is then
received and acted upon by the dedicated rollup thread
*)
let make_ops
~monad_ops
~(pcache_ops:('k,'v,'ptr,'kvop_map,'t) pcache_ops)
~pcache_blocks_limit
~bt_find
~(bt_handle_detach:('k,'v,'ptr,'kvop_map) Detach_info.t -> (unit,'t)m)
=
let ( >>= ) = monad_ops.bind in
let return = monad_ops.return in
persistent cache ; another name for pcache
let find k =
pc.find k >>= fun v ->
match v with
| None -> bt_find k
| Some v -> return (Some v)
in
let maybe_roll_up () =
pc.blk_len () >>= fun n ->
match n >= pcache_blocks_limit with
| false -> return `No_roll_up_needed
| true ->
assert(dont_log || (
Printf.printf "%s: pcache_thread, maybe_roll_up\n%!" __FILE__; true));
pc.detach () >>= fun detach_result ->
bt_handle_detach detach_result >>= fun () ->
return `Ok
in
let insert k v =
pc.insert k v >>= fun () ->
maybe_roll_up () >>= fun _ ->
return ()
in
let delete k =
pc.delete k >>= fun () ->
maybe_roll_up () >>= fun _ ->
return ()
in
let insert_many k v kvs =
insert k v >>= fun () -> return kvs
in
{find;insert;delete;insert_many}
let _ = make_ops
|
e56990166c7d516cbdb4dc288846288099945723f0023ca84949b06b491dc2e7 | qitab/cl-protobufs | symbol-test.lisp | Copyright 2020 Google LLC
;;;
Use of this source code is governed by an MIT - style
;;; license that can be found in the LICENSE file or at
;;; .
(defpackage #:cl-protobufs.test.symbol
(:use #:cl
#:clunit)
(:local-nicknames (#:test-pb #:cl-protobufs.test-proto)
(#:proto #:cl-protobufs))
(:export :run))
(in-package :cl-protobufs.test.symbol)
(defsuite symbol-suite (cl-protobufs.test:root-suite))
(defun run (&key use-debugger)
"Run all tests in the test suite.
Parameters
USE-DEBUGGER: On assert failure bring up the debugger."
(clunit:run-suite 'symbol-suite :use-debugger use-debugger
:signal-condition-on-fail t))
;; For a variety of symbols we verify that we can serialize them and deserialize them both in
;; text and wire formats.
(defvar *symbols*
'(t nil :t :nil :foo abc)
)
(deftest test-back-and-forth-text (symbol-suite)
(dolist (symbol *symbols*)
(let ((msg (test-pb:make-text-format-test :symbol-field (list symbol)
:symbol-field2 symbol))
(out-stream (make-string-output-stream)))
(proto:print-text-format msg :stream out-stream)
(let* ((text (get-output-stream-string out-stream))
(msg-new (proto:parse-text-format 'test-pb:text-format-test
:stream (make-string-input-stream text))))
(assert-equality #'proto:proto-equal msg-new msg)))))
(deftest test-back-and-forth-binary (symbol-suite)
(dolist (symbol *symbols*)
(let* ((msg (test-pb:make-text-format-test :symbol-field (list symbol)
:symbol-field2 symbol))
(bytes (proto:serialize-to-bytes msg))
(msg-new (proto:deserialize-from-bytes 'test-pb:text-format-test bytes)))
(assert-true (proto:proto-equal msg-new msg)))))
(deftest test-parse-bad-symbols-format (symbol-suite)
(dolist (string '( "cl:new-cl-symbol" ":foo:bar" "foo:bar:baz" "q\"uote" "q\'uote" "s\\lash"))
(let ((text-format (format nil "TextFormatTest { symbol_field: ~S }" string)))
(multiple-value-bind (result condition)
(ignore-errors
(proto:parse-text-format 'test-pb:text-format-test
:stream (make-string-input-stream text-format)))
(declare (ignore result))
(assert-true condition)))))
(deftest test-uninterned-symbol (symbol-suite)
(let* ((textproto "symbol_field2: '#:foo'")
(msg (proto:parse-text-format
'test-pb:text-format-test
:stream (make-string-input-stream textproto)))
(sym (test-pb:text-format-test.symbol-field2 msg)))
(assert-equal "FOO" (symbol-name sym))
(assert-eq nil (symbol-package sym))
;; Can't use proto-equal to test this because the new proto will be different
;; because (eq #:foo #:foo) is false.
(let* ((bytes (proto:serialize-to-bytes msg))
(msg-new (proto:deserialize-from-bytes 'test-pb:text-format-test bytes))
(sym-new (test-pb:text-format-test.symbol-field2 msg-new)))
(assert-equal "FOO" (symbol-name sym-new))
(assert-eq nil (symbol-package sym-new)))
(let ((out-stream (make-string-output-stream)))
(proto:print-text-format msg :stream out-stream)
(let* ((text (get-output-stream-string out-stream)))
(assert-true (search "\"#:FOO\"" text))))))
| null | https://raw.githubusercontent.com/qitab/cl-protobufs/ebb85f19a7613f9973e96912a900b60cb7e9adb6/tests/symbol-test.lisp | lisp |
license that can be found in the LICENSE file or at
.
For a variety of symbols we verify that we can serialize them and deserialize them both in
text and wire formats.
Can't use proto-equal to test this because the new proto will be different
because (eq #:foo #:foo) is false. | Copyright 2020 Google LLC
Use of this source code is governed by an MIT - style
(defpackage #:cl-protobufs.test.symbol
(:use #:cl
#:clunit)
(:local-nicknames (#:test-pb #:cl-protobufs.test-proto)
(#:proto #:cl-protobufs))
(:export :run))
(in-package :cl-protobufs.test.symbol)
(defsuite symbol-suite (cl-protobufs.test:root-suite))
(defun run (&key use-debugger)
"Run all tests in the test suite.
Parameters
USE-DEBUGGER: On assert failure bring up the debugger."
(clunit:run-suite 'symbol-suite :use-debugger use-debugger
:signal-condition-on-fail t))
(defvar *symbols*
'(t nil :t :nil :foo abc)
)
(deftest test-back-and-forth-text (symbol-suite)
(dolist (symbol *symbols*)
(let ((msg (test-pb:make-text-format-test :symbol-field (list symbol)
:symbol-field2 symbol))
(out-stream (make-string-output-stream)))
(proto:print-text-format msg :stream out-stream)
(let* ((text (get-output-stream-string out-stream))
(msg-new (proto:parse-text-format 'test-pb:text-format-test
:stream (make-string-input-stream text))))
(assert-equality #'proto:proto-equal msg-new msg)))))
(deftest test-back-and-forth-binary (symbol-suite)
(dolist (symbol *symbols*)
(let* ((msg (test-pb:make-text-format-test :symbol-field (list symbol)
:symbol-field2 symbol))
(bytes (proto:serialize-to-bytes msg))
(msg-new (proto:deserialize-from-bytes 'test-pb:text-format-test bytes)))
(assert-true (proto:proto-equal msg-new msg)))))
(deftest test-parse-bad-symbols-format (symbol-suite)
(dolist (string '( "cl:new-cl-symbol" ":foo:bar" "foo:bar:baz" "q\"uote" "q\'uote" "s\\lash"))
(let ((text-format (format nil "TextFormatTest { symbol_field: ~S }" string)))
(multiple-value-bind (result condition)
(ignore-errors
(proto:parse-text-format 'test-pb:text-format-test
:stream (make-string-input-stream text-format)))
(declare (ignore result))
(assert-true condition)))))
(deftest test-uninterned-symbol (symbol-suite)
(let* ((textproto "symbol_field2: '#:foo'")
(msg (proto:parse-text-format
'test-pb:text-format-test
:stream (make-string-input-stream textproto)))
(sym (test-pb:text-format-test.symbol-field2 msg)))
(assert-equal "FOO" (symbol-name sym))
(assert-eq nil (symbol-package sym))
(let* ((bytes (proto:serialize-to-bytes msg))
(msg-new (proto:deserialize-from-bytes 'test-pb:text-format-test bytes))
(sym-new (test-pb:text-format-test.symbol-field2 msg-new)))
(assert-equal "FOO" (symbol-name sym-new))
(assert-eq nil (symbol-package sym-new)))
(let ((out-stream (make-string-output-stream)))
(proto:print-text-format msg :stream out-stream)
(let* ((text (get-output-stream-string out-stream)))
(assert-true (search "\"#:FOO\"" text))))))
|
fe94c2a0457ee98679e742d170faa05d8b3f70f244a530ac74a118a9b48dcac5 | jubnzv/iec-checker | use_define.mli | Detect common errors in ' use ' occurrences of the local variables in POUs .
open IECCheckerCore
module S = Syntax
val run : S.iec_library_element list -> Warn.t list
| null | https://raw.githubusercontent.com/jubnzv/iec-checker/666cd8a28cb1211465a900862df84d70460c2742/src/analysis/use_define.mli | ocaml | Detect common errors in ' use ' occurrences of the local variables in POUs .
open IECCheckerCore
module S = Syntax
val run : S.iec_library_element list -> Warn.t list
| |
007a8dadb1332b7dad20773eed9cf1d1730d5c5f13eb121a1d0fee7be39d95bf | dpom/clj-duckling | convert-rules.clj | #!/usr/bin/env inlein
;; syntax: convert-rules dirpath
'{:dependencies [[org.clojure/clojure "1.9.0-RC1"]
[cljfmt "0.5.7"]]}
(require '[clojure.java.io :as io])
(require '[clojure.string :as str])
(require '[clojure.pprint :as pp])
(require '[cljfmt.core :refer [reformat-string]])
(defn build-rule [[name pattern production]]
{:name name
:pattern (pr-str pattern)
:production (pr-str production)})
(defn convert-file
[infile outfile]
(let [items (->> (read-string (slurp infile))
(partition 3)
(map build-rule))]
(with-open [w (io/writer outfile)]
(.write w "[\n")
(doseq [item items]
;; (printf "item: %s\n" item)
(.write w "#clj-duckling.engine/rule ")
(.write w (prn-str item)))
(.write w "]"))
(spit outfile (reformat-string (slurp outfile)))))
(defn convert-dir
[dirpath]
(let [grammar-matcher (.getPathMatcher
(java.nio.file.FileSystems/getDefault)
"glob:*.{clj}")
files (->> dirpath
io/file
file-seq
(filter #(.isFile %))
(filter #(.matches grammar-matcher (.getFileName (.toPath %))))
(map #(.getAbsolutePath %)))]
(doseq [f files]
(pp/pprint f)
(convert-file f (str/replace f #"\.clj" ".edn")))))
(convert-dir (first *command-line-args*))
| null | https://raw.githubusercontent.com/dpom/clj-duckling/8728f9a99b4b002e9ce2ea62b3a82a61b0cdac06/bin/convert-rules.clj | clojure | syntax: convert-rules dirpath
(printf "item: %s\n" item) | #!/usr/bin/env inlein
'{:dependencies [[org.clojure/clojure "1.9.0-RC1"]
[cljfmt "0.5.7"]]}
(require '[clojure.java.io :as io])
(require '[clojure.string :as str])
(require '[clojure.pprint :as pp])
(require '[cljfmt.core :refer [reformat-string]])
(defn build-rule [[name pattern production]]
{:name name
:pattern (pr-str pattern)
:production (pr-str production)})
(defn convert-file
[infile outfile]
(let [items (->> (read-string (slurp infile))
(partition 3)
(map build-rule))]
(with-open [w (io/writer outfile)]
(.write w "[\n")
(doseq [item items]
(.write w "#clj-duckling.engine/rule ")
(.write w (prn-str item)))
(.write w "]"))
(spit outfile (reformat-string (slurp outfile)))))
(defn convert-dir
[dirpath]
(let [grammar-matcher (.getPathMatcher
(java.nio.file.FileSystems/getDefault)
"glob:*.{clj}")
files (->> dirpath
io/file
file-seq
(filter #(.isFile %))
(filter #(.matches grammar-matcher (.getFileName (.toPath %))))
(map #(.getAbsolutePath %)))]
(doseq [f files]
(pp/pprint f)
(convert-file f (str/replace f #"\.clj" ".edn")))))
(convert-dir (first *command-line-args*))
|
75493721dc4378027613ebd798b84254312f10725f9b0d4605ecdbdc753c4642 | cubicle-model-checker/cubicle | term.mli | (**************************************************************************)
(* *)
Cubicle
(* *)
Copyright ( C ) 2011 - 2014
(* *)
and
Universite Paris - Sud 11
(* *)
(* *)
This file is distributed under the terms of the Apache Software
(* License version 2.0 *)
(* *)
(**************************************************************************)
type t
type view = private {f: Symbols.t ; xs: t list; ty: Ty.t; tag : int}
val view : t -> view
val make : Symbols.t -> t list -> Ty.t -> t
val vrai : t
val faux : t
val int : string -> t
val real : string -> t
val is_int : t -> bool
val is_real : t -> bool
val compare : t -> t -> int
val equal : t -> t -> bool
val hash : t -> int
val print : Format.formatter -> t -> unit
module Map : Map.S with type key = t
module Set : Set.S with type elt = t
| null | https://raw.githubusercontent.com/cubicle-model-checker/cubicle/00f09bb2d4bb496549775e770d7ada08bc1e4866/smt/term.mli | ocaml | ************************************************************************
License version 2.0
************************************************************************ | Cubicle
Copyright ( C ) 2011 - 2014
and
Universite Paris - Sud 11
This file is distributed under the terms of the Apache Software
type t
type view = private {f: Symbols.t ; xs: t list; ty: Ty.t; tag : int}
val view : t -> view
val make : Symbols.t -> t list -> Ty.t -> t
val vrai : t
val faux : t
val int : string -> t
val real : string -> t
val is_int : t -> bool
val is_real : t -> bool
val compare : t -> t -> int
val equal : t -> t -> bool
val hash : t -> int
val print : Format.formatter -> t -> unit
module Map : Map.S with type key = t
module Set : Set.S with type elt = t
|
56194de6c8e8e0a7f1d84b22eccc8e18ca6c4799f513bed7d17d74f8f7b0cd48 | ernius/plutus-cardano-samples | Signed.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeOperators #-}
module Policies.Signed where
import Control.Monad hiding (fmap)
import Data.Aeson (ToJSON, FromJSON)
import Data.Text (Text)
import Data.Void (Void)
import GHC.Generics (Generic)
import Plutus.Contract as Contract
import Plutus.Trace.Emulator as Emulator
import qualified PlutusTx
import PlutusTx.Prelude hiding (Semigroup(..), unless)
import Ledger hiding (mint, singleton)
import Ledger.Constraints as Constraints
import qualified Ledger.Typed.Scripts as Scripts
import Ledger.Value as Value
import Playground.Contract (printJson, printSchemas, ensureKnownCurrencies, stage, ToSchema)
import Playground.TH (mkKnownCurrencies, mkSchemaDefinitions)
import Playground.Types (KnownCurrency (..))
import Prelude (IO, Show (..), String)
import Text.Printf (printf)
import Wallet.Emulator.Wallet
{-# INLINABLE mkPolicy #-}
mkPolicy :: PaymentPubKeyHash -> () -> ScriptContext -> Bool
mkPolicy pkh () ctx = txSignedBy (scriptContextTxInfo ctx) $ unPaymentPubKeyHash pkh
policy :: PaymentPubKeyHash -> Scripts.MintingPolicy
policy pkh = mkMintingPolicyScript $
$$(PlutusTx.compile [|| Scripts.wrapMintingPolicy . mkPolicy ||])
`PlutusTx.applyCode`
PlutusTx.liftCode pkh
curSymbol :: PaymentPubKeyHash -> CurrencySymbol
curSymbol = scriptCurrencySymbol . policy
-- equivalent curSymbol pkh = scriptCurrencySymbol (policy pkh)
data MintParams = MintParams
{ mpTokenName :: !TokenName
, mpAmount :: !Integer
} deriving (Generic, ToJSON, FromJSON, ToSchema)
type FreeSchema = Endpoint "mint" MintParams
mint :: MintParams -> Contract w FreeSchema Text ()
mint mp = do
pkh <- Contract.ownPaymentPubKeyHash
let val = Value.singleton (curSymbol pkh) (mpTokenName mp) (mpAmount mp)
lookups = Constraints.mintingPolicy $ policy pkh
tx = Constraints.mustMintValue val
ledgerTx <- submitTxConstraintsWith @Void lookups tx
void $ awaitTxConfirmed $ getCardanoTxId ledgerTx
Contract.logInfo @String $ printf "forged %s" (show val)
endpoints :: Contract () FreeSchema Text ()
endpoints = mint' >> endpoints
where
mint' = awaitPromise $ endpoint @"mint" mint
mkSchemaDefinitions ''
--mkKnownCurrencies []
test :: IO ()
test = runEmulatorTraceIO $ do
let tn = "ABC"
h1 <- activateContractWallet (knownWallet 1) endpoints
h2 <- activateContractWallet (knownWallet 2) endpoints
callEndpoint @"mint" h1 $ MintParams
{ mpTokenName = tn
, mpAmount = 555
}
callEndpoint @"mint" h2 $ MintParams
{ mpTokenName = tn
, mpAmount = 444
}
void $ Emulator.waitNSlots 1
callEndpoint @"mint" h1 $ MintParams
{ mpTokenName = tn
, mpAmount = -222
}
void $ Emulator.waitNSlots 1
| null | https://raw.githubusercontent.com/ernius/plutus-cardano-samples/3b5476e45725578622889114e1b36d9a6cf56535/src/Policies/Signed.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE DeriveAnyClass #
# LANGUAGE FlexibleContexts #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeOperators #
# INLINABLE mkPolicy #
equivalent curSymbol pkh = scriptCurrencySymbol (policy pkh)
mkKnownCurrencies [] | # LANGUAGE DeriveGeneric #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
module Policies.Signed where
import Control.Monad hiding (fmap)
import Data.Aeson (ToJSON, FromJSON)
import Data.Text (Text)
import Data.Void (Void)
import GHC.Generics (Generic)
import Plutus.Contract as Contract
import Plutus.Trace.Emulator as Emulator
import qualified PlutusTx
import PlutusTx.Prelude hiding (Semigroup(..), unless)
import Ledger hiding (mint, singleton)
import Ledger.Constraints as Constraints
import qualified Ledger.Typed.Scripts as Scripts
import Ledger.Value as Value
import Playground.Contract (printJson, printSchemas, ensureKnownCurrencies, stage, ToSchema)
import Playground.TH (mkKnownCurrencies, mkSchemaDefinitions)
import Playground.Types (KnownCurrency (..))
import Prelude (IO, Show (..), String)
import Text.Printf (printf)
import Wallet.Emulator.Wallet
mkPolicy :: PaymentPubKeyHash -> () -> ScriptContext -> Bool
mkPolicy pkh () ctx = txSignedBy (scriptContextTxInfo ctx) $ unPaymentPubKeyHash pkh
policy :: PaymentPubKeyHash -> Scripts.MintingPolicy
policy pkh = mkMintingPolicyScript $
$$(PlutusTx.compile [|| Scripts.wrapMintingPolicy . mkPolicy ||])
`PlutusTx.applyCode`
PlutusTx.liftCode pkh
curSymbol :: PaymentPubKeyHash -> CurrencySymbol
curSymbol = scriptCurrencySymbol . policy
data MintParams = MintParams
{ mpTokenName :: !TokenName
, mpAmount :: !Integer
} deriving (Generic, ToJSON, FromJSON, ToSchema)
type FreeSchema = Endpoint "mint" MintParams
mint :: MintParams -> Contract w FreeSchema Text ()
mint mp = do
pkh <- Contract.ownPaymentPubKeyHash
let val = Value.singleton (curSymbol pkh) (mpTokenName mp) (mpAmount mp)
lookups = Constraints.mintingPolicy $ policy pkh
tx = Constraints.mustMintValue val
ledgerTx <- submitTxConstraintsWith @Void lookups tx
void $ awaitTxConfirmed $ getCardanoTxId ledgerTx
Contract.logInfo @String $ printf "forged %s" (show val)
endpoints :: Contract () FreeSchema Text ()
endpoints = mint' >> endpoints
where
mint' = awaitPromise $ endpoint @"mint" mint
mkSchemaDefinitions ''
test :: IO ()
test = runEmulatorTraceIO $ do
let tn = "ABC"
h1 <- activateContractWallet (knownWallet 1) endpoints
h2 <- activateContractWallet (knownWallet 2) endpoints
callEndpoint @"mint" h1 $ MintParams
{ mpTokenName = tn
, mpAmount = 555
}
callEndpoint @"mint" h2 $ MintParams
{ mpTokenName = tn
, mpAmount = 444
}
void $ Emulator.waitNSlots 1
callEndpoint @"mint" h1 $ MintParams
{ mpTokenName = tn
, mpAmount = -222
}
void $ Emulator.waitNSlots 1
|
3845abeedb7fa4bfa9d85d20dca7c1bd789ddcd548b26fd31c9e3aa100485bda | arthur-adjedj/proof_assistant | ext_props.ml | open Proof_build.Formule
open Stored_props
open Proof_build.Sequent
exception Incomplete_or_wrong_proof
exception Already_exists
exception Wrong_sequent of ((sequent list) * string)
ajoute une propriété str : f à a liste
let add str f =
load_stored_props false;
if Hashtbl.mem props str then (
print_endline "Formula is already saved !";
raise Already_exists
)
else
begin
let file = open_out_gen [Open_creat;Open_append] 0o640 "stored_props" in
Printf.fprintf file "%s\n" (str^" : "^(formule_to_string f));
close_out file;
print_endline ("Formula \""^str^"\" successfully saved")
end
tactique d'emploi d'une propriété déjà prouvée et stockée
(*O(n(name))*)
let ext name s =
let pattern = Hashtbl.find props name in
if not (is_s_equiv ([||],[|pattern|]) s) then
raise (Wrong_sequent ([s],"Wrong sequent"))
else ([],"<- "^name)
| null | https://raw.githubusercontent.com/arthur-adjedj/proof_assistant/a47ae2e1012e77ed810de0f8c99a957571ad5c27/pr_assistant/proof_store/ext_props.ml | ocaml | O(n(name)) | open Proof_build.Formule
open Stored_props
open Proof_build.Sequent
exception Incomplete_or_wrong_proof
exception Already_exists
exception Wrong_sequent of ((sequent list) * string)
ajoute une propriété str : f à a liste
let add str f =
load_stored_props false;
if Hashtbl.mem props str then (
print_endline "Formula is already saved !";
raise Already_exists
)
else
begin
let file = open_out_gen [Open_creat;Open_append] 0o640 "stored_props" in
Printf.fprintf file "%s\n" (str^" : "^(formule_to_string f));
close_out file;
print_endline ("Formula \""^str^"\" successfully saved")
end
tactique d'emploi d'une propriété déjà prouvée et stockée
let ext name s =
let pattern = Hashtbl.find props name in
if not (is_s_equiv ([||],[|pattern|]) s) then
raise (Wrong_sequent ([s],"Wrong sequent"))
else ([],"<- "^name)
|
717e03bfadcd868ff4a7228b8238b2b0879c438d455d8946df6be792ad4015a9 | Plisp/vico | graphemes.lisp | (defpackage :vico-core.graphemes
(:use :cl)
(:export #:make-grapheme-searcher
#:next-grapheme
#:list-graphemes))
(in-package :vico-core.graphemes)
;;; graphemes - portable version of the grapheme breaking algorithm in sb-unicode
(defun %binary-search (value seq key)
(declare (simple-vector seq))
(declare (function key))
(labels ((recurse (start end)
(when (< start end)
(let* ((i (+ start (truncate (- end start) 2)))
(elt (svref seq i))
(key-value (funcall key elt)))
(cond ((< value key-value)
(recurse start i))
((> value key-value)
(recurse (1+ i) end))
(t
i))))))
(recurse 0 (length seq))))
(defun binary-search (value seq &key (key #'identity))
"Binary search for simple vectors."
(let ((index (%binary-search value seq key)))
(if index
(svref seq index))))
(defun hangul-syllable-type (character)
"Returns the Hangul syllable type of CHARACTER.
The syllable type can be one of :L, :V, :T, :LV, or :LVT. If the character is not a
Hangul syllable or Jamo, returns NIL"
(let ((cp (char-code character)))
(cond
((or
(and (<= #x1100 cp) (<= cp #x115f))
(and (<= #xa960 cp) (<= cp #xa97c))) :L)
((or
(and (<= #x1160 cp) (<= cp #x11a7))
(and (<= #xd7B0 cp) (<= cp #xd7C6))) :V)
((or
(and (<= #x11a8 cp) (<= cp #x11ff))
(and (<= #xd7c8 cp) (<= cp #xd7fb))) :T)
((and (<= #xac00 cp) (<= cp #xd7a3))
(if (= 0 (rem (- cp #xac00) 28)) :LV :LVT)))))
(defun grapheme-break-class (char)
"Returns the grapheme breaking class of CHARACTER, as specified in UAX #29."
(let ((cp (when char (char-code char)))
(gc (when char (cl-unicode:general-category char)))
(not-spacing-mark
#(#x102B #x102C #x1038 #x1062 #x1063 #x1064 #x1067 #x1068 #x1069
#x106A #x106B #x106C #x106D #x1083 #x1087 #x1088 #x1089 #x108A
#x108B #x108C #x108F #x109A #x109B #x109C #x19B0 #x19B1 #x19B2
#x19B3 #x19B4 #x19B8 #x19B9 #x19BB #x19BC #x19BD #x19BE #x19BF
#x19C0 #x19C8 #x19C9 #x1A61 #x1A63 #x1A64 #xAA7B #xAA7D)))
(cond
((not char) nil)
((= cp 10) :LF)
((= cp 13) :CR)
((or (member gc '("Mn" "Me") :test #'string=)
(cl-unicode:has-binary-property char "Other_Grapheme_Extend"))
:extend)
((or (member gc '("Zl" "Zp" "Cc" "Cs" "Cf") :test #'string=)
From Cn and Default_Ignorable_Code_Point
(eql cp #x2065) (eql cp #xE0000)
(<= #xFFF0 cp #xFFF8)
(<= #xE0002 cp #xE001F)
(<= #xE0080 cp #xE00FF)
(<= #xE01F0 cp #xE0FFF)) :control)
((<= #x1F1E6 cp #x1F1FF) :regional-indicator)
((and (or (string= gc "Mc")
(eql cp #x0E33) (eql cp #x0EB3))
(not (binary-search cp not-spacing-mark))) :spacing-mark)
(t (hangul-syllable-type char)))))
adapted from CL - PPCRE source
(declaim (inline maybe-coerce-to-simple-string))
(defun maybe-coerce-to-simple-string (string)
"Coerces STRING to a simple STRING unless it already is one or if it isn't a string at
all."
(cond (#+:lispworks
(lw:simple-text-string-p string)
#-:lispworks
(simple-string-p string)
string)
((stringp string)
(coerce string
#+:lispworks 'lw:simple-text-string
#-:lispworks 'simple-string))
(t string)))
(defun make-grapheme-searcher (sequence &key (start 0) from-end
(length (length sequence))
(accessor #'schar))
"Returns a stateful object that can be used by calling NEXT-GRAPHEME to obtain indexes
of graphemes (clusters) starting from START (non-inclusive) and proceeding forwards if
FROM-END is NIL and backwards if FROM-END is T in the sequence SEQUENCE.
Elements will be accessed by calling ACCESSOR, which defaults to SCHAR (argument coerced
to SIMPLE-STRING). LENGTH defaults to (length sequence) and is used to determine when to
terminate."
(declare (optimize speed)
(type function accessor)
(type fixnum start length))
(let ((sequence (maybe-coerce-to-simple-string sequence))
(step (if from-end -1 1))
(c1 nil)
(c2 (and (>= start 0) (< start length) ; bounds check before accessing
(if from-end
(grapheme-break-class (funcall accessor sequence (1- length)))
(grapheme-break-class (funcall accessor sequence start)))))
(end (if from-end length start)))
(declare (type fixnum end))
(lambda ()
(loop
(incf end step)
(when (and from-end (< end 0))
(return 0))
(when (>= end length)
(return length))
(shiftf c1 c2 (grapheme-break-class (funcall accessor sequence end)))
(cond
((and (eql c1 :cr) (eql c2 :lf)))
((or (member c1 '(:control :cr :lf))
(member c2 '(:control :cr :lf)))
(setf start end) (return end))
((or (and (eql c1 :l) (member c2 '(:l :v :lv :lvt)))
(and (or (eql c1 :v) (eql c1 :lv))
(or (eql c2 :v) (eql c2 :t)))
(and (eql c2 :t) (or (eql c1 :lvt) (eql c1 :t)))))
((and (eql c1 :regional-indicator) (eql c2 :regional-indicator)))
((eql c2 :extend))
((or (eql c2 :spacing-mark) (eql c1 :prepend)))
(t (setf start end) (return end)))))))
(defun next-grapheme (searcher)
"Returns the next grapheme found by SEARCHER and step it forward one grapheme cluster
or if SEARCHER has already searched the entire string, NIL."
(funcall searcher))
(defun list-graphemes (string)
"Breaks STRING into graphemes according to the default grapheme breaking rules
specified in UAX #29, returning a list of strings. The last element might not be a
grapheme."
(loop :with string = (coerce string 'simple-string)
:with searcher = (make-grapheme-searcher string)
:with list = (list)
:with last = 0
:for idx = (funcall searcher)
:while idx
:do (push (subseq string last idx) list)
(setf last idx)
:finally (push (subseq string last) list)
(return (nreverse list))))
;; (defun map-grapheme-boundaries (function string)
;; (do ((length (length string))
( start 0 )
( end 1 ( 1 + end ) )
;; (c1 nil)
;; (c2 (and (> (length string) 0) (grapheme-break-class (char string 0)))))
;; ((>= end length)
( if (= end length ) ( progn ( funcall function string start end ) nil ) ) )
( flet ( ( brk ( ) ( funcall function string start end ) ( setf start end ) ) )
;; (declare (dynamic-extent #'brk))
;; (shiftf c1 c2 (grapheme-break-class (char string end)))
;; (cond
;; ((and (eql c1 :cr) (eql c2 :lf)))
;; ((or (member c1 '(:control :cr :lf))
;; (member c2 '(:control :cr :lf)))
( brk ) )
;; ((or (and (eql c1 :l) (member c2 '(:l :v :lv :lvt)))
;; (and (or (eql c1 :v) (eql c1 :lv))
;; (or (eql c2 :v) (eql c2 :t)))
;; (and (eql c2 :t) (or (eql c1 :lvt) (eql c1 :t)))))
;; ((and (eql c1 :regional-indicator) (eql c2 :regional-indicator)))
;; ((eql c2 :extend))
;; ((or (eql c2 :spacing-mark) (eql c1 :prepend)))
;; (t (brk))))))
;; (defun map-graphemes (function string)
;; (let ((array (make-array 0 :element-type (array-element-type string)
;; :adjustable t
;; :displaced-to string)))
;; (flet ((fun (string start end)
;; (declare (type string string))
;; (funcall function (adjust-array array (- end start) :displaced-to string
;; :displaced-index-offset start))))
;; (declare (dynamic-extent #'fun))
;; (map-grapheme-boundaries #'fun string))))
;; (defun graphemes (string)
;; "Breaks STRING into graphemes according to the default
grapheme breaking rules specified in UAX # 29 , returning a list of strings . "
;; (let (result)
( map - graphemes ( lambda ( a ) ( push ( subseq a 0 ) result ) ) string )
( nreverse result ) ) )
| null | https://raw.githubusercontent.com/Plisp/vico/40606aea583ef9db98941ee337feb47f10c9696c/src/core/graphemes.lisp | lisp | graphemes - portable version of the grapheme breaking algorithm in sb-unicode
bounds check before accessing
(defun map-grapheme-boundaries (function string)
(do ((length (length string))
(c1 nil)
(c2 (and (> (length string) 0) (grapheme-break-class (char string 0)))))
((>= end length)
(declare (dynamic-extent #'brk))
(shiftf c1 c2 (grapheme-break-class (char string end)))
(cond
((and (eql c1 :cr) (eql c2 :lf)))
((or (member c1 '(:control :cr :lf))
(member c2 '(:control :cr :lf)))
((or (and (eql c1 :l) (member c2 '(:l :v :lv :lvt)))
(and (or (eql c1 :v) (eql c1 :lv))
(or (eql c2 :v) (eql c2 :t)))
(and (eql c2 :t) (or (eql c1 :lvt) (eql c1 :t)))))
((and (eql c1 :regional-indicator) (eql c2 :regional-indicator)))
((eql c2 :extend))
((or (eql c2 :spacing-mark) (eql c1 :prepend)))
(t (brk))))))
(defun map-graphemes (function string)
(let ((array (make-array 0 :element-type (array-element-type string)
:adjustable t
:displaced-to string)))
(flet ((fun (string start end)
(declare (type string string))
(funcall function (adjust-array array (- end start) :displaced-to string
:displaced-index-offset start))))
(declare (dynamic-extent #'fun))
(map-grapheme-boundaries #'fun string))))
(defun graphemes (string)
"Breaks STRING into graphemes according to the default
(let (result) | (defpackage :vico-core.graphemes
(:use :cl)
(:export #:make-grapheme-searcher
#:next-grapheme
#:list-graphemes))
(in-package :vico-core.graphemes)
(defun %binary-search (value seq key)
(declare (simple-vector seq))
(declare (function key))
(labels ((recurse (start end)
(when (< start end)
(let* ((i (+ start (truncate (- end start) 2)))
(elt (svref seq i))
(key-value (funcall key elt)))
(cond ((< value key-value)
(recurse start i))
((> value key-value)
(recurse (1+ i) end))
(t
i))))))
(recurse 0 (length seq))))
(defun binary-search (value seq &key (key #'identity))
"Binary search for simple vectors."
(let ((index (%binary-search value seq key)))
(if index
(svref seq index))))
(defun hangul-syllable-type (character)
"Returns the Hangul syllable type of CHARACTER.
The syllable type can be one of :L, :V, :T, :LV, or :LVT. If the character is not a
Hangul syllable or Jamo, returns NIL"
(let ((cp (char-code character)))
(cond
((or
(and (<= #x1100 cp) (<= cp #x115f))
(and (<= #xa960 cp) (<= cp #xa97c))) :L)
((or
(and (<= #x1160 cp) (<= cp #x11a7))
(and (<= #xd7B0 cp) (<= cp #xd7C6))) :V)
((or
(and (<= #x11a8 cp) (<= cp #x11ff))
(and (<= #xd7c8 cp) (<= cp #xd7fb))) :T)
((and (<= #xac00 cp) (<= cp #xd7a3))
(if (= 0 (rem (- cp #xac00) 28)) :LV :LVT)))))
(defun grapheme-break-class (char)
"Returns the grapheme breaking class of CHARACTER, as specified in UAX #29."
(let ((cp (when char (char-code char)))
(gc (when char (cl-unicode:general-category char)))
(not-spacing-mark
#(#x102B #x102C #x1038 #x1062 #x1063 #x1064 #x1067 #x1068 #x1069
#x106A #x106B #x106C #x106D #x1083 #x1087 #x1088 #x1089 #x108A
#x108B #x108C #x108F #x109A #x109B #x109C #x19B0 #x19B1 #x19B2
#x19B3 #x19B4 #x19B8 #x19B9 #x19BB #x19BC #x19BD #x19BE #x19BF
#x19C0 #x19C8 #x19C9 #x1A61 #x1A63 #x1A64 #xAA7B #xAA7D)))
(cond
((not char) nil)
((= cp 10) :LF)
((= cp 13) :CR)
((or (member gc '("Mn" "Me") :test #'string=)
(cl-unicode:has-binary-property char "Other_Grapheme_Extend"))
:extend)
((or (member gc '("Zl" "Zp" "Cc" "Cs" "Cf") :test #'string=)
From Cn and Default_Ignorable_Code_Point
(eql cp #x2065) (eql cp #xE0000)
(<= #xFFF0 cp #xFFF8)
(<= #xE0002 cp #xE001F)
(<= #xE0080 cp #xE00FF)
(<= #xE01F0 cp #xE0FFF)) :control)
((<= #x1F1E6 cp #x1F1FF) :regional-indicator)
((and (or (string= gc "Mc")
(eql cp #x0E33) (eql cp #x0EB3))
(not (binary-search cp not-spacing-mark))) :spacing-mark)
(t (hangul-syllable-type char)))))
adapted from CL - PPCRE source
(declaim (inline maybe-coerce-to-simple-string))
(defun maybe-coerce-to-simple-string (string)
"Coerces STRING to a simple STRING unless it already is one or if it isn't a string at
all."
(cond (#+:lispworks
(lw:simple-text-string-p string)
#-:lispworks
(simple-string-p string)
string)
((stringp string)
(coerce string
#+:lispworks 'lw:simple-text-string
#-:lispworks 'simple-string))
(t string)))
(defun make-grapheme-searcher (sequence &key (start 0) from-end
(length (length sequence))
(accessor #'schar))
"Returns a stateful object that can be used by calling NEXT-GRAPHEME to obtain indexes
of graphemes (clusters) starting from START (non-inclusive) and proceeding forwards if
FROM-END is NIL and backwards if FROM-END is T in the sequence SEQUENCE.
Elements will be accessed by calling ACCESSOR, which defaults to SCHAR (argument coerced
to SIMPLE-STRING). LENGTH defaults to (length sequence) and is used to determine when to
terminate."
(declare (optimize speed)
(type function accessor)
(type fixnum start length))
(let ((sequence (maybe-coerce-to-simple-string sequence))
(step (if from-end -1 1))
(c1 nil)
(if from-end
(grapheme-break-class (funcall accessor sequence (1- length)))
(grapheme-break-class (funcall accessor sequence start)))))
(end (if from-end length start)))
(declare (type fixnum end))
(lambda ()
(loop
(incf end step)
(when (and from-end (< end 0))
(return 0))
(when (>= end length)
(return length))
(shiftf c1 c2 (grapheme-break-class (funcall accessor sequence end)))
(cond
((and (eql c1 :cr) (eql c2 :lf)))
((or (member c1 '(:control :cr :lf))
(member c2 '(:control :cr :lf)))
(setf start end) (return end))
((or (and (eql c1 :l) (member c2 '(:l :v :lv :lvt)))
(and (or (eql c1 :v) (eql c1 :lv))
(or (eql c2 :v) (eql c2 :t)))
(and (eql c2 :t) (or (eql c1 :lvt) (eql c1 :t)))))
((and (eql c1 :regional-indicator) (eql c2 :regional-indicator)))
((eql c2 :extend))
((or (eql c2 :spacing-mark) (eql c1 :prepend)))
(t (setf start end) (return end)))))))
(defun next-grapheme (searcher)
"Returns the next grapheme found by SEARCHER and step it forward one grapheme cluster
or if SEARCHER has already searched the entire string, NIL."
(funcall searcher))
(defun list-graphemes (string)
"Breaks STRING into graphemes according to the default grapheme breaking rules
specified in UAX #29, returning a list of strings. The last element might not be a
grapheme."
(loop :with string = (coerce string 'simple-string)
:with searcher = (make-grapheme-searcher string)
:with list = (list)
:with last = 0
:for idx = (funcall searcher)
:while idx
:do (push (subseq string last idx) list)
(setf last idx)
:finally (push (subseq string last) list)
(return (nreverse list))))
( start 0 )
( end 1 ( 1 + end ) )
( if (= end length ) ( progn ( funcall function string start end ) nil ) ) )
( flet ( ( brk ( ) ( funcall function string start end ) ( setf start end ) ) )
( brk ) )
grapheme breaking rules specified in UAX # 29 , returning a list of strings . "
( map - graphemes ( lambda ( a ) ( push ( subseq a 0 ) result ) ) string )
( nreverse result ) ) )
|
9d855d0191f23797f9ad209d65b692bb43c505bf777a65f5a0ba1204649bc953 | andrenth/ocaml-swagger | param.ml | open Util
let sprintf = Printf.sprintf
type t = Swagger_t.parameter
let rec item_kind_to_ptyp (items : Swagger_t.items option) kind =
let open Ppxlib in
match kind with
| `String -> [%type: string]
| `Number -> [%type: float]
| `Integer -> [%type: int]
| `Boolean -> [%type: bool]
| `Array -> (
match items with
| Some is ->
let t = item_kind_to_ptyp is.items is.kind in
[%type: [%t t] list]
| None ->
failwith
"Param.item_kind_to_ptyp: array type must have an 'items' field")
let kind_to_ptyp (p : t) =
let open Ppxlib in
match Option.get p.kind with
| `String -> [%type: string]
| `Number -> [%type: float]
| `Integer -> [%type: int]
| `Boolean -> [%type: bool]
| `File -> [%type: file]
| `Array -> (
match p.items with
| Some items ->
let t = item_kind_to_ptyp items.items items.kind in
[%type: [%t t] array]
| None ->
failwith "Param.kind_to_ptyp: array type must have an 'items' field")
let name n =
let n = if n.[0] = '$' then String.sub n 1 (String.length n - 1) else n in
let n = snake_case n |> String.lowercase_ascii in
if Ppxlib.Keyword.is_keyword n then n ^ "_" else n
let string_of_location = function
| `Query -> "query"
| `Header -> "header"
| `Path -> "path"
| `FormData -> "formData"
| `Body -> "body"
let create ?(duplicate = false) ~reference_base ~reference_root (p : t) =
let t =
match p.location with
| `Body ->
Codegen_schema.create ~reference_base ~reference_root
(Option.get p.schema)
|> Codegen_schema.to_type
| _ -> kind_to_ptyp p
in
let n =
let n = name p.name in
let loc = string_of_location p.location in
if duplicate && n <> loc then sprintf "%s_%s" loc n else n
in
let descr = p.description in
let create_sig, create_impl =
if p.required then (Val.Sig.labelled, Val.Impl.labelled)
else (Val.Sig.optional, Val.Impl.optional)
in
(create_sig ?descr n t, create_impl n t ~origin:(Val.Impl.origin p))
| null | https://raw.githubusercontent.com/andrenth/ocaml-swagger/60486bee7220a868b7e0b4507adb1edad44b98cd/lib/param.ml | ocaml | open Util
let sprintf = Printf.sprintf
type t = Swagger_t.parameter
let rec item_kind_to_ptyp (items : Swagger_t.items option) kind =
let open Ppxlib in
match kind with
| `String -> [%type: string]
| `Number -> [%type: float]
| `Integer -> [%type: int]
| `Boolean -> [%type: bool]
| `Array -> (
match items with
| Some is ->
let t = item_kind_to_ptyp is.items is.kind in
[%type: [%t t] list]
| None ->
failwith
"Param.item_kind_to_ptyp: array type must have an 'items' field")
let kind_to_ptyp (p : t) =
let open Ppxlib in
match Option.get p.kind with
| `String -> [%type: string]
| `Number -> [%type: float]
| `Integer -> [%type: int]
| `Boolean -> [%type: bool]
| `File -> [%type: file]
| `Array -> (
match p.items with
| Some items ->
let t = item_kind_to_ptyp items.items items.kind in
[%type: [%t t] array]
| None ->
failwith "Param.kind_to_ptyp: array type must have an 'items' field")
let name n =
let n = if n.[0] = '$' then String.sub n 1 (String.length n - 1) else n in
let n = snake_case n |> String.lowercase_ascii in
if Ppxlib.Keyword.is_keyword n then n ^ "_" else n
let string_of_location = function
| `Query -> "query"
| `Header -> "header"
| `Path -> "path"
| `FormData -> "formData"
| `Body -> "body"
let create ?(duplicate = false) ~reference_base ~reference_root (p : t) =
let t =
match p.location with
| `Body ->
Codegen_schema.create ~reference_base ~reference_root
(Option.get p.schema)
|> Codegen_schema.to_type
| _ -> kind_to_ptyp p
in
let n =
let n = name p.name in
let loc = string_of_location p.location in
if duplicate && n <> loc then sprintf "%s_%s" loc n else n
in
let descr = p.description in
let create_sig, create_impl =
if p.required then (Val.Sig.labelled, Val.Impl.labelled)
else (Val.Sig.optional, Val.Impl.optional)
in
(create_sig ?descr n t, create_impl n t ~origin:(Val.Impl.origin p))
| |
a3061ffbb102e1c802ee671af9a76f8167d04e896ab16e3b2a348ddc084ec37a | cedlemo/OCaml-GI-ctypes-bindings-generator | Mount_operation.mli | open Ctypes
type t
val t_typ : t typ
val create :
unit -> t ptr
val get_anonymous :
t -> bool
val get_choice :
t -> int32
val get_domain :
t -> string option
val get_password :
t -> string option
val get_password_save :
t -> Password_save.t
val get_username :
t -> string option
val reply :
t -> Mount_operation_result.t -> unit
val set_anonymous :
t -> bool -> unit
val set_choice :
t -> int32 -> unit
val set_domain :
t -> string -> unit
val set_password :
t -> string -> unit
val set_password_save :
t -> Password_save.t -> unit
val set_username :
t -> string -> unit
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gio/Mount_operation.mli | ocaml | open Ctypes
type t
val t_typ : t typ
val create :
unit -> t ptr
val get_anonymous :
t -> bool
val get_choice :
t -> int32
val get_domain :
t -> string option
val get_password :
t -> string option
val get_password_save :
t -> Password_save.t
val get_username :
t -> string option
val reply :
t -> Mount_operation_result.t -> unit
val set_anonymous :
t -> bool -> unit
val set_choice :
t -> int32 -> unit
val set_domain :
t -> string -> unit
val set_password :
t -> string -> unit
val set_password_save :
t -> Password_save.t -> unit
val set_username :
t -> string -> unit
| |
0762b0330c02645edbdff351e28ff4da253147d4558a06467f6ed0fde65429f1 | mzp/coq-ruby | rules.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
$ I d : rules.mli 6141 2004 - 09 - 27 14:55:34Z corbinea $
open Term
open Tacmach
open Names
open Libnames
type seqtac= (Sequent.t -> tactic) -> Sequent.t -> tactic
type lseqtac= global_reference -> seqtac
type 'a with_backtracking = tactic -> 'a
val wrap : int -> bool -> seqtac
val id_of_global: global_reference -> identifier
val clear_global: global_reference -> tactic
val axiom_tac : constr -> Sequent.t -> tactic
val ll_atom_tac : constr -> lseqtac with_backtracking
val and_tac : seqtac with_backtracking
val or_tac : seqtac with_backtracking
val arrow_tac : seqtac with_backtracking
val left_and_tac : inductive -> lseqtac with_backtracking
val left_or_tac : inductive -> lseqtac with_backtracking
val left_false_tac : global_reference -> tactic
val ll_ind_tac : inductive -> constr list -> lseqtac with_backtracking
val ll_arrow_tac : constr -> constr -> constr -> lseqtac with_backtracking
val forall_tac : seqtac with_backtracking
val left_exists_tac : inductive -> lseqtac with_backtracking
val ll_forall_tac : types -> lseqtac with_backtracking
val normalize_evaluables : tactic
| null | https://raw.githubusercontent.com/mzp/coq-ruby/99b9f87c4397f705d1210702416176b13f8769c1/contrib/firstorder/rules.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
********************************************************************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
$ I d : rules.mli 6141 2004 - 09 - 27 14:55:34Z corbinea $
open Term
open Tacmach
open Names
open Libnames
type seqtac= (Sequent.t -> tactic) -> Sequent.t -> tactic
type lseqtac= global_reference -> seqtac
type 'a with_backtracking = tactic -> 'a
val wrap : int -> bool -> seqtac
val id_of_global: global_reference -> identifier
val clear_global: global_reference -> tactic
val axiom_tac : constr -> Sequent.t -> tactic
val ll_atom_tac : constr -> lseqtac with_backtracking
val and_tac : seqtac with_backtracking
val or_tac : seqtac with_backtracking
val arrow_tac : seqtac with_backtracking
val left_and_tac : inductive -> lseqtac with_backtracking
val left_or_tac : inductive -> lseqtac with_backtracking
val left_false_tac : global_reference -> tactic
val ll_ind_tac : inductive -> constr list -> lseqtac with_backtracking
val ll_arrow_tac : constr -> constr -> constr -> lseqtac with_backtracking
val forall_tac : seqtac with_backtracking
val left_exists_tac : inductive -> lseqtac with_backtracking
val ll_forall_tac : types -> lseqtac with_backtracking
val normalize_evaluables : tactic
|
22c6e096f066b70ceacf135696eb2337ea1a259a59e8eba6d186bd0214e3ea2f | glguy/advent2021 | Day14.hs | # Language ImportQualifiedPost , QuasiQuotes #
|
Module : Main
Description : Day 14 solution
Copyright : ( c ) , 2021
License : ISC
Maintainer :
< >
Build a huge polymer chain and compute how many of
each element it contains .
This problem requires memoization as the size of the
resulting polymer would be humungous !
Module : Main
Description : Day 14 solution
Copyright : (c) Eric Mertens, 2021
License : ISC
Maintainer :
<>
Build a huge polymer chain and compute how many of
each element it contains.
This problem requires memoization as the size of the
resulting polymer would be humungous!
-}
module Main (main) where
import Advent (format, power, counts)
import Data.Map (Map)
import Data.Map.Strict qualified as Map
-- | >>> :main
2068
2158894777814
main :: IO ()
main =
do (seed, table) <- [format|14 %s%n%n(%c%c -> %c%n)*|]
let rule = tableToRule table
print (solve rule 10 seed)
print (solve rule 40 seed)
solve :: Ord a => Map (a,a) (Map (a,a) Int) -> Integer -> [a] -> Int
solve rule n seed = maximum occ - minimum occ
where
ruleN = power (fmap . applyRule) rule n
start = counts (zip seed (tail seed))
occ = Map.insertWith (+) (head seed) 1
$ Map.mapKeysWith (+) snd
$ applyRule ruleN start
-- | Generate a replacement rule map from a list of input productions
--
-- >>> tableToRule [('L','R','M')] -- LR -> M
-- fromList [(('L','R'),fromList [(('L','M'),1),(('M','R'),1)])]
tableToRule :: Ord a => [(a,a,a)] -> Map (a,a) (Map (a,a) Int)
tableToRule xs = Map.fromList [((l,r), counts [(l,m), (m,r)]) | (l,r,m) <- xs]
-- | Apply a replacement rule to a map of counts.
--
-- >>> :set -XOverloadedLists
-- >>> applyRule [('a', [('b',1),('c',2)]),('z',[('y',1)])] [('a',10)]
-- fromList [('b',10),('c',20)]
applyRule :: (Ord a, Ord b) => Map a (Map b Int) -> Map a Int -> Map b Int
applyRule r m = Map.unionsWith (+) [(v *) <$> r Map.! k | (k,v) <- Map.toList m]
| null | https://raw.githubusercontent.com/glguy/advent2021/0e007d07babce240047a9dabaf612cd3ebd42974/execs/Day14.hs | haskell | | >>> :main
| Generate a replacement rule map from a list of input productions
>>> tableToRule [('L','R','M')] -- LR -> M
fromList [(('L','R'),fromList [(('L','M'),1),(('M','R'),1)])]
| Apply a replacement rule to a map of counts.
>>> :set -XOverloadedLists
>>> applyRule [('a', [('b',1),('c',2)]),('z',[('y',1)])] [('a',10)]
fromList [('b',10),('c',20)] | # Language ImportQualifiedPost , QuasiQuotes #
|
Module : Main
Description : Day 14 solution
Copyright : ( c ) , 2021
License : ISC
Maintainer :
< >
Build a huge polymer chain and compute how many of
each element it contains .
This problem requires memoization as the size of the
resulting polymer would be humungous !
Module : Main
Description : Day 14 solution
Copyright : (c) Eric Mertens, 2021
License : ISC
Maintainer :
<>
Build a huge polymer chain and compute how many of
each element it contains.
This problem requires memoization as the size of the
resulting polymer would be humungous!
-}
module Main (main) where
import Advent (format, power, counts)
import Data.Map (Map)
import Data.Map.Strict qualified as Map
2068
2158894777814
main :: IO ()
main =
do (seed, table) <- [format|14 %s%n%n(%c%c -> %c%n)*|]
let rule = tableToRule table
print (solve rule 10 seed)
print (solve rule 40 seed)
solve :: Ord a => Map (a,a) (Map (a,a) Int) -> Integer -> [a] -> Int
solve rule n seed = maximum occ - minimum occ
where
ruleN = power (fmap . applyRule) rule n
start = counts (zip seed (tail seed))
occ = Map.insertWith (+) (head seed) 1
$ Map.mapKeysWith (+) snd
$ applyRule ruleN start
tableToRule :: Ord a => [(a,a,a)] -> Map (a,a) (Map (a,a) Int)
tableToRule xs = Map.fromList [((l,r), counts [(l,m), (m,r)]) | (l,r,m) <- xs]
applyRule :: (Ord a, Ord b) => Map a (Map b Int) -> Map a Int -> Map b Int
applyRule r m = Map.unionsWith (+) [(v *) <$> r Map.! k | (k,v) <- Map.toList m]
|
1959391b04197f932a7c472a02e97051f2f4d1d2c11dc282dae531954d9c441a | okuoku/nausicaa | programs.sps | Copyright ( c ) 2008
;;;
;;;This library is free software; you can redistribute it and/or modify
it under the terms of the GNU Library General Public License as
published by the Free Software Foundation ; either version 2 of the
;;;License, or (at your option) any later version.
;;;
;;;This library is distributed in the hope that it will be useful, but
;;;WITHOUT ANY WARRANTY; without even the implied warranty of
;;;MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details .
;;;
You should have received a copy of the GNU Library General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA
02110 - 1301 USA .
#!r6rs
(import (tests r6rs programs)
(tests r6rs test)
(rnrs io simple))
(display "Running tests for (rnrs programs)\n")
(run-programs-tests)
(report-test-results)
| null | https://raw.githubusercontent.com/okuoku/nausicaa/50e7b4d4141ad4d81051588608677223fe9fb715/scheme/tests/r6rs/run/programs.sps | scheme |
This library is free software; you can redistribute it and/or modify
either version 2 of the
License, or (at your option) any later version.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
if not , write to the Free Software | Copyright ( c ) 2008
it under the terms of the GNU Library General Public License as
Library General Public License for more details .
You should have received a copy of the GNU Library General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA
02110 - 1301 USA .
#!r6rs
(import (tests r6rs programs)
(tests r6rs test)
(rnrs io simple))
(display "Running tests for (rnrs programs)\n")
(run-programs-tests)
(report-test-results)
|
a8ac97c8a0171d3ada0225d9e356add93450605dd0615a99a426881d6610ecc3 | facebook/flow | socket.mli |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type addr
val with_addr : addr -> (Unix.sockaddr -> 'a) -> 'a
val addr_for_open : string -> addr
val init_unix_socket : string -> Unix.file_descr
| null | https://raw.githubusercontent.com/facebook/flow/741104e69c43057ebd32804dd6bcc1b5e97548ea/src/hack_forked/socket/socket.mli | ocaml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type addr
val with_addr : addr -> (Unix.sockaddr -> 'a) -> 'a
val addr_for_open : string -> addr
val init_unix_socket : string -> Unix.file_descr
| |
d0ffe18f7646e6a62e587b6e4200ab64b52b3874ee894b872ac81b74096c6148 | digitallyinduced/ihp | View.hs | module IHP.LoginSupport.Helper.View
( currentUser
, currentUserId
, currentUserOrNothing
, currentAdmin
, currentAdminOrNothing
)
where
import IHP.Prelude
import IHP.Controller.Context
import IHP.LoginSupport.Helper.Controller (CurrentUserRecord, CurrentAdminRecord)
currentUser :: (?context :: ControllerContext, user ~ CurrentUserRecord, Typeable user) => user
currentUser = fromMaybe (error "Application.Helper.View.currentUser: Not logged in") currentUserOrNothing
currentUserId :: forall user userId. (?context :: ControllerContext, HasField "id" user userId, Typeable user, user ~ CurrentUserRecord) => userId
currentUserId = currentUser @user |> get #id
currentUserOrNothing :: forall user. (?context :: ControllerContext, user ~ CurrentUserRecord, Typeable user) => Maybe user
currentUserOrNothing = fromFrozenContext @(Maybe user)
currentAdmin :: (?context :: ControllerContext, admin ~ CurrentAdminRecord, Typeable admin) => admin
currentAdmin = fromMaybe (error "Application.Helper.View.currentAdmin: Not logged in") currentAdminOrNothing
currentAdminOrNothing :: forall admin. (?context :: ControllerContext, admin ~ CurrentAdminRecord, Typeable admin) => Maybe admin
currentAdminOrNothing = fromFrozenContext @(Maybe admin)
| null | https://raw.githubusercontent.com/digitallyinduced/ihp/9c33451a0c36bf61cde659adaddcfad003bfed46/IHP/LoginSupport/Helper/View.hs | haskell | module IHP.LoginSupport.Helper.View
( currentUser
, currentUserId
, currentUserOrNothing
, currentAdmin
, currentAdminOrNothing
)
where
import IHP.Prelude
import IHP.Controller.Context
import IHP.LoginSupport.Helper.Controller (CurrentUserRecord, CurrentAdminRecord)
currentUser :: (?context :: ControllerContext, user ~ CurrentUserRecord, Typeable user) => user
currentUser = fromMaybe (error "Application.Helper.View.currentUser: Not logged in") currentUserOrNothing
currentUserId :: forall user userId. (?context :: ControllerContext, HasField "id" user userId, Typeable user, user ~ CurrentUserRecord) => userId
currentUserId = currentUser @user |> get #id
currentUserOrNothing :: forall user. (?context :: ControllerContext, user ~ CurrentUserRecord, Typeable user) => Maybe user
currentUserOrNothing = fromFrozenContext @(Maybe user)
currentAdmin :: (?context :: ControllerContext, admin ~ CurrentAdminRecord, Typeable admin) => admin
currentAdmin = fromMaybe (error "Application.Helper.View.currentAdmin: Not logged in") currentAdminOrNothing
currentAdminOrNothing :: forall admin. (?context :: ControllerContext, admin ~ CurrentAdminRecord, Typeable admin) => Maybe admin
currentAdminOrNothing = fromFrozenContext @(Maybe admin)
| |
c44a07c0015afbd29925ea95e818090d377f7b1f4a70e50ce75b4d0cd1b9b8c8 | erlang/otp | smoke_test_SUITE.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2011 - 2022 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(smoke_test_SUITE).
-include_lib("common_test/include/ct.hrl").
%-compile(export_all).
-export([all/0, suite/0,
init_per_testcase/2, end_per_testcase/2]).
-export([boot_combo/1, native_atomics/1, jump_table/1]).
%% @doc Common Test configuration: install the standard hooks and cap
%% every test case at two minutes.
suite() ->
    [{ct_hooks, [ts_install_cth]},
     {timetrap, {minutes, 2}}].

%% @doc The test cases this suite runs.
all() ->
    [boot_combo, native_atomics, jump_table].

%% @doc boot_combo restarts the emulator with different command-line
%% flags, which only makes sense on plain 'opt' builds where the beam.*
%% binaries exist; skip it on special builds.
init_per_testcase(boot_combo = Case, Config) when is_list(Config) ->
    case erlang:system_info(build_type) of
	opt ->
	    init_per_tc(Case, Config);
	_ ->
	    {skip, "Cannot test boot_combo in special builds since beam.* may not exist"}
    end;
init_per_testcase(Case, Config) when is_list(Config) ->
    init_per_tc(Case, Config).

%% Remember the test case name in the config for later use.
init_per_tc(Case, Config) ->
    [{testcase, Case} | Config].

%% @doc After each case, fail if the case leaked any peer nodes.
end_per_testcase(_Case, Config) when is_list(Config) ->
    erts_test_utils:ept_check_leaked_nodes(Config).
%%%
%%% The test cases -------------------------------------------------------------
%%%
%% @doc Boot peer emulators with various combinations of +K/+A and the
%% scheduler busy-wait/wakeup flags, checking that each combination at
%% least boots (and that +A42 really yields a 42-thread async pool when
%% threads are supported).
boot_combo(Config) when is_list(Config) ->
    NOOP = fun () -> ok end,
    A42 = fun () ->
		  case erlang:system_info(threads) of
		      true ->
			  42 = erlang:system_info(thread_pool_size);
		      false ->
			  ok
		  end
	  end,
    chk_boot(["+Ktrue"], NOOP),
    chk_boot(["+A42"], A42),
    chk_boot(["+Ktrue", "+A42"], A42),
    %% Cross product of all busy-wait-threshold and wakeup-threshold values,
    %% applied to normal, dirty-cpu and dirty-io schedulers alike.
    WBTArgs = ["very_short", "short", "medium", "long", "very_long"],
    WTArgs = ["very_low", "low", "medium", "high", "very_high"],
    [chk_boot(["+sbwt", WBT,
	       "+sbwtdcpu", WBT,
	       "+sbwtdio", WBT,
	       "+swt", WT,
	       "+swtdcpu", WT,
	       "+swtdio", WT], NOOP) || WBT <- WBTArgs, WT <- WTArgs],
    %% Both scheduler wakeup strategies must boot as well.
    WSArgs = ["legacy", "default"],
    [chk_boot(["+sws", WS], NOOP) || WS <- WSArgs].
%% @doc Smoke-check the ethread layer: an optimized SMP runtime built
%% without any native atomics support (neither 32- nor 64-bit) is a
%% build error; otherwise just report what was detected.
native_atomics(Config) when is_list(Config) ->
    NA32Key = "32-bit native atomics",
    NA64Key = "64-bit native atomics",
    DWNAKey = "Double word native atomics",
    EthreadInfo = erlang:system_info(ethread_info),
    io:format("~p~n", [EthreadInfo]),
    {value, {NA32Key, NA32, _}} = lists:keysearch(NA32Key, 1, EthreadInfo),
    {value, {NA64Key, NA64, _}} = lists:keysearch(NA64Key, 1, EthreadInfo),
    {value, {DWNAKey, DWNA, _}} = lists:keysearch(DWNAKey, 1, EthreadInfo),
    case {erlang:system_info(build_type), NA32, NA64, DWNA} of
	{opt, "no", "no", _} ->
	    ct:fail(optimized_smp_runtime_without_native_atomics);
	_ ->
	    {comment,
	     NA32 ++ " 32-bit, "
	     ++ NA64 ++ " 64-bit, and "
	     ++ DWNA ++ " double word native atomics"}
    end.
%% @doc An optimized build must use the BEAM jump table; other build
%% types are allowed to run without it, but we report that fact.
jump_table(Config) when is_list(Config) ->
    case erlang:system_info(beam_jump_table) of
	true ->
	    ok;
	false ->
	    case erlang:system_info(build_type) of
		opt ->
		    ct:fail(optimized_without_beam_jump_table);
		BT ->
		    {comment, "No beam jump table, but build type is " ++ atom_to_list(BT)}
	    end
    end.
%%%
%%% Aux functions --------------------------------------------------------------
%%%
%% @doc Boot a peer node with the given emulator arguments, run Fun on
%% it, and wait for a success message back. Blocks (and so hits the
%% suite timetrap) if the peer never reports success.
chk_boot(Args, Fun) ->
    Success = make_ref(),
    Parent = self(),
    io:format("--- Testing ~s~n", [lists:join(" ", Args)]),
    {ok, Peer, Node} = ?CT_PEER(Args),
    %% if spawn_link is used, race condition happens when
    %% remote process exits with 'noconnection' reason and fails
    %% the test case
    Pid = spawn(Node, fun() ->
                              Fun(),
                              Parent ! {self(), Success}
                      end),
    receive
        {Pid, Success} ->
            Node = node(Pid),
            peer:stop(Peer),
            io:format("--- Success!~n", []),
            ok
    end.
| null | https://raw.githubusercontent.com/erlang/otp/f02ae58612b257d187ad030d72b9e8b67ce471ef/erts/emulator/test/smoke_test_SUITE.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
-compile(export_all).
The test cases -------------------------------------------------------------
Aux functions --------------------------------------------------------------
if spawn_link is used, race condition happens when
remote process exits with 'noconnection' reason and fails
the test case | Copyright Ericsson AB 2011 - 2022 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(smoke_test_SUITE).
-include_lib("common_test/include/ct.hrl").
-export([all/0, suite/0,
init_per_testcase/2, end_per_testcase/2]).
-export([boot_combo/1, native_atomics/1, jump_table/1]).
suite() ->
[{ct_hooks, [ts_install_cth]},
{timetrap, {minutes, 2}}].
all() ->
[boot_combo, native_atomics, jump_table].
init_per_testcase(boot_combo = Case, Config) when is_list(Config) ->
case erlang:system_info(build_type) of
opt ->
init_per_tc(Case, Config);
_ ->
{skip, "Cannot test boot_combo in special builds since beam.* may not exist"}
end;
init_per_testcase(Case, Config) when is_list(Config) ->
init_per_tc(Case, Config).
init_per_tc(Case, Config) ->
[{testcase, Case} | Config].
end_per_testcase(_Case, Config) when is_list(Config) ->
erts_test_utils:ept_check_leaked_nodes(Config).
boot_combo(Config) when is_list(Config) ->
NOOP = fun() -> ok end,
A42 = fun() ->
case erlang:system_info(threads) of
true ->
42 = erlang:system_info(thread_pool_size);
false ->
ok
end
end,
chk_boot(["+Ktrue"], NOOP),
chk_boot(["+A42"], A42),
chk_boot(["+Ktrue", "+A42"], A42),
WBTArgs = ["very_short", "short", "medium", "long", "very_long"],
WTArgs = ["very_low", "low", "medium", "high", "very_high"],
[chk_boot(["+sbwt", WBT,
"+sbwtdcpu", WBT,
"+sbwtdio", WBT,
"+swt", WT,
"+swtdcpu", WT,
"+swtdio", WT], NOOP) || WBT <- WBTArgs, WT <- WTArgs],
WSArgs = ["legacy", "default"],
[chk_boot(["+sws", WS], NOOP) || WS <- WSArgs].
native_atomics(Config) when is_list(Config) ->
NA32Key = "32-bit native atomics",
NA64Key = "64-bit native atomics",
DWNAKey = "Double word native atomics",
EthreadInfo = erlang:system_info(ethread_info),
io:format("~p~n", [EthreadInfo]),
{value, {NA32Key, NA32, _}} = lists:keysearch(NA32Key, 1, EthreadInfo),
{value, {NA64Key, NA64, _}} = lists:keysearch(NA64Key, 1, EthreadInfo),
{value, {DWNAKey, DWNA, _}} = lists:keysearch(DWNAKey, 1, EthreadInfo),
case {erlang:system_info(build_type), NA32, NA64, DWNA} of
{opt, "no", "no", _} ->
ct:fail(optimized_smp_runtime_without_native_atomics);
_ ->
{comment,
NA32 ++ " 32-bit, "
++ NA64 ++ " 64-bit, and "
++ DWNA ++ " double word native atomics"}
end.
jump_table(Config) when is_list(Config) ->
case erlang:system_info(beam_jump_table) of
true ->
ok;
false ->
case erlang:system_info(build_type) of
opt ->
ct:fail(optimized_without_beam_jump_table);
BT ->
{comment, "No beam jump table, but build type is " ++ atom_to_list(BT)}
end
end.
chk_boot(Args, Fun) ->
Success = make_ref(),
Parent = self(),
io:format("--- Testing ~s~n", [lists:join(" ", Args)]),
{ok, Peer, Node} = ?CT_PEER(Args),
Pid = spawn(Node, fun() ->
Fun(),
Parent ! {self(), Success}
end),
receive
{Pid, Success} ->
Node = node(Pid),
peer:stop(Peer),
io:format("--- Success!~n", []),
ok
end.
|
0530005917d727f244de4ed20a5d916a83bd8189f289a424b5db0f78a111f3c9 | chenyukang/eopl | store.scm | (define instrument-newref (make-parameter #f))
;;;;;;;;;;;;;;;; references and the store ;;;;;;;;;;;;;;;;
;;; world's dumbest model of the store: the store is a list and a
;;; reference is number which denotes a position in the list.
;; the-store: a Scheme variable containing the current state of the
;; store. Initially set to a dummy variable.
(define the-store 'uninitialized)
;; empty-store : () -> Sto
;; Page 111
(define empty-store
(lambda () '()))
;; initialize-store! : () -> Sto
;; usage: (initialize-store!) sets the-store to the empty-store
;; Page 111
(define initialize-store!
(lambda ()
(set! the-store (empty-store))))
;; get-store : () -> Sto
;; Page 111
;; This is obsolete. Replaced by get-store-as-list below
(define get-store
(lambda () the-store))
;; reference? : SchemeVal -> Bool
;; Page 111
;; reference? : SchemeVal -> Bool
;; A reference is represented as a plain index into the store list,
;; so "being a reference" is simply "being an integer".
(define (reference? v)
  (integer? v))
;; newref : ExpVal -> Ref
;; Page 111
;; newref : ExpVal -> Ref
;; Allocates a fresh location holding val at the end of the-store and
;; returns its index. O(n) in the store size because of the append.
(define newref
  (lambda (val)
    ;; the new reference is the index one past the current last cell
    (let ((next-ref (length the-store)))
      (set! the-store
            (append the-store (list val)))
      ;; optional tracing, switched on via the instrument-newref parameter
      (if (instrument-newref)
          (printf
           "newref: allocating location ~s with initial contents ~s~%"
           next-ref val))
      next-ref)))
;; deref : Ref -> ExpVal
;; Page 111
(define deref
(lambda (ref)
(list-ref the-store ref)))
;; setref! : Ref * ExpVal -> Unspecified
;; Page 112
;; setref! : Ref * ExpVal -> Unspecified
;; Rebuilds the-store with position ref replaced by val; reports an
;; error for out-of-range references. The two comment lines inside the
;; letrec binding had lost their ";;" markers in this copy (an
;; extraction artifact that made the form unreadable); they are
;; restored here, with the code itself unchanged.
(define setref!
  (lambda (ref val)
    (set! the-store
          (letrec
              ((setref-inner
                ;; returns a list like store1, except that position ref1
                ;; contains val.
                (lambda (store1 ref1)
                  (cond
                    ((null? store1)
                     (report-invalid-reference ref the-store))
                    ((zero? ref1)
                     (cons val (cdr store1)))
                    (else
                     (cons
                      (car store1)
                      (setref-inner
                       (cdr store1) (- ref1 1))))))))
            (setref-inner the-store ref)))))
;; report-invalid-reference : Ref * Sto -> (raises error)
;; Signals an error for a reference outside the bounds of the store.
(define report-invalid-reference
  (lambda (ref the-store)
    (error 'setref
           "illegal reference ~s in store ~s"
           ref the-store)))
;; get-store-as-list : () -> (listof (list number expval))
;; Exports the current state of the store as a scheme list.
;; e.g. (get-store-as-list '(foo bar baz)) = ((0 foo) (1 bar) (2 baz))
;; where foo, bar, and baz are expvals.
;; If the store were represented in a different way, this would be
;; replaced by something cleverer.
;; Replaces get-store (p. 111)
;; Walks the store front to back, pairing each value with its index so
;; the result can be inspected or printed independently of the store's
;; internal representation.
(define get-store-as-list
  (lambda ()
    (letrec
        ((inner-loop
          ;; convert sto to list as if its car was location n
          (lambda (sto n)
            (if (null? sto)
                '()
                (cons
                 (list n (car sto))
                 (inner-loop (cdr sto) (+ n 1)))))))
      (inner-loop the-store 0))))
| null | https://raw.githubusercontent.com/chenyukang/eopl/0406ff23b993bfe020294fa70d2597b1ce4f9b78/ch9/base/typed-oo/store.scm | scheme | references and the store ;;;;;;;;;;;;;;;;
world's dumbest model of the store: the store is a list and a
reference is number which denotes a position in the list.
the-store: a Scheme variable containing the current state of the
store. Initially set to a dummy variable.
empty-store : () -> Sto
initialize-store! : () -> Sto
usage: (initialize-store!) sets the-store to the empty-store
Page 111
get-store : () -> Sto
This is obsolete. Replaced by get-store-as-list below
reference? : SchemeVal -> Bool
newref : ExpVal -> Ref
Page 111
setref! : Ref * ExpVal -> Unspecified
Exports the current state of the store as a scheme list.
where foo, bar, and baz are expvals.
If the store were represented in a different way, this would be
replaced by something cleverer.
convert sto to list as if its car was location n
| (define instrument-newref (make-parameter #f))
(define the-store 'uninitialized)
Page : 111
(define empty-store
(lambda () '()))
(define initialize-store!
(lambda ()
(set! the-store (empty-store))))
Page : 111
(define get-store
(lambda () the-store))
Page : 111
(define reference?
(lambda (v)
(integer? v)))
Page : 111
(define newref
(lambda (val)
(let ((next-ref (length the-store)))
(set! the-store
(append the-store (list val)))
(if (instrument-newref)
(printf
"newref: allocating location ~s with initial contents ~s~%"
next-ref val))
next-ref)))
deref : Ref - > ExpVal
(define deref
(lambda (ref)
(list-ref the-store ref)))
Page : 112
(define setref!
(lambda (ref val)
(set! the-store
(letrec
((setref-inner
returns a list like store1 , except that position ref1
contains .
(lambda (store1 ref1)
(cond
((null? store1)
(report-invalid-reference ref the-store))
((zero? ref1)
(cons val (cdr store1)))
(else
(cons
(car store1)
(setref-inner
(cdr store1) (- ref1 1))))))))
(setref-inner the-store ref)))))
(define report-invalid-reference
(lambda (ref the-store)
(error 'setref
"illegal reference ~s in store ~s"
ref the-store)))
get - store - as - list : ( ) - > , ) )
( get - store - as - list ' ( foo bar baz ) ) = ( ( 0 foo)(1 bar ) ( 2 baz ) )
Replaces get - store ( p. 111 )
(define get-store-as-list
(lambda ()
(letrec
((inner-loop
(lambda (sto n)
(if (null? sto)
'()
(cons
(list n (car sto))
(inner-loop (cdr sto) (+ n 1)))))))
(inner-loop the-store 0))))
|
856b69b83dc2819e79c4482c54948eedfbd1908ea326088a1e5c0b6b3ce58fc7 | roosta/herb | keyframes_use.cljs | (ns site.snippets.keyframes-use
(:require
[garden.units :refer [px]]
[herb.core :refer [<class defkeyframes]]))
(defkeyframes pulse-animation
[:from {:opacity 1}]
[:to {:opacity 0}])
;; In garden, if you use a single vector [arg1 arg1] you get a comma separated
;; string, if you add a second vector it gets space separated
(defn style
  "Herb style fn: a small black square whose opacity pulses forever via
  the pulse-animation keyframes."
  []
  ;; garden: the inner vector is comma separated, the outer nesting makes
  ;; it space separated, hence the double vector for the animation value
  {:animation [[pulse-animation "2s" :infinite :alternate]]
   :background-color "black"
   :transition "all 1s ease-out"
   :width (px 20)
   :height (px 20)})
(defn component
  "Renders a div carrying the pulsing `style` class (via herb's <class)."
  []
  [:div {:class (<class style)}])
| null | https://raw.githubusercontent.com/roosta/herb/64afb133a7bf51d7171a3c5260584c09dbe4e504/site/src/site/snippets/keyframes_use.cljs | clojure | In garden, if you use a single vector [arg1 arg1] you get a comma separated | (ns site.snippets.keyframes-use
(:require
[garden.units :refer [px]]
[herb.core :refer [<class defkeyframes]]))
(defkeyframes pulse-animation
[:from {:opacity 1}]
[:to {:opacity 0}])
string , if you add a second vector is gets space separated
(defn style
[]
{:animation [[pulse-animation "2s" :infinite :alternate]]
:background-color "black"
:transition "all 1s ease-out"
:width (px 20)
:height (px 20)})
(defn component
[]
[:div {:class (<class style)}])
|
8830d2d76193dc8971d512faf9b297db3521bc6f0078ba505b12564d05cb3b1a | VisionsGlobalEmpowerment/webchange | views.cljs | (ns webchange.ui.views
(:require
[re-frame.core :as re-frame]
[reagent.core :as r]
[webchange.ui.pages.index :refer [pages]]
[webchange.ui.pages.dashboard.views :as dashboard]
[webchange.ui.routes :as routes]
[webchange.ui.state :as state]))
(defn index
  "Root UI component: resolves the current page from the re-frame app
  state and renders it inside #tabschool-ui."
  []
  (r/create-class
   {:display-name "UI Index"
    :component-did-mount
    ;; initialise client-side routing once, from the path passed in props
    (fn [this]
      (let [{:keys [route]} (r/props this)]
        (routes/init! (:path route))))
    :reagent-render
    (fn []
      (let [{:keys [handler props] :as page-params} @(re-frame/subscribe [::state/current-page])
            ;; :dashboard lives in its own namespace; everything else is
            ;; looked up in the pages index, falling back to the 404 page
            page-component (if (= handler :dashboard)
                             dashboard/page
                             (get pages handler (:404 pages)))]
        (routes/set-title! page-params)
        [:div#tabschool-ui
         [page-component props]]))}))
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/e5747e187937d85e9c92c728d52a704f323f00ef/src/cljs/webchange/ui/views.cljs | clojure | (ns webchange.ui.views
(:require
[re-frame.core :as re-frame]
[reagent.core :as r]
[webchange.ui.pages.index :refer [pages]]
[webchange.ui.pages.dashboard.views :as dashboard]
[webchange.ui.routes :as routes]
[webchange.ui.state :as state]))
(defn index
[]
(r/create-class
{:display-name "UI Index"
:component-did-mount
(fn [this]
(let [{:keys [route]} (r/props this)]
(routes/init! (:path route))))
:reagent-render
(fn []
(let [{:keys [handler props] :as page-params} @(re-frame/subscribe [::state/current-page])
page-component (if (= handler :dashboard)
dashboard/page
(get pages handler (:404 pages)))]
(routes/set-title! page-params)
[:div#tabschool-ui
[page-component props]]))}))
| |
caa502e1fc8c8e17ec66fc4772cb1c647c6255a0f474cfa2f4d077d98f6f138a | tsloughter/kuberl | kuberl_v1beta1_custom_resource_conversion.erl | -module(kuberl_v1beta1_custom_resource_conversion).
-export([encode/1]).
-export_type([kuberl_v1beta1_custom_resource_conversion/0]).
-type kuberl_v1beta1_custom_resource_conversion() ::
#{ 'conversionReviewVersions' => list(),
'strategy' := binary(),
'webhookClientConfig' => kuberl_apiextensions_v1beta1_webhook_client_config:kuberl_apiextensions_v1beta1_webhook_client_config()
}.
%% @doc Serialise a custom-resource-conversion map for the Kubernetes API.
%% The type above marks 'conversionReviewVersions' and
%% 'webhookClientConfig' as optional ('=>') but the old clause pattern
%% required all three keys (':='), so encode/1 crashed with
%% function_clause on maps that only carried the mandatory 'strategy'.
%% We now demand only 'strategy' and pass through whichever known keys
%% are present; callers that supplied all three keys get the same result
%% as before.
encode(#{ 'strategy' := _ } = Params) ->
  maps:with(['conversionReviewVersions', 'strategy', 'webhookClientConfig'],
            Params).
| null | https://raw.githubusercontent.com/tsloughter/kuberl/f02ae6680d6ea5db6e8b6c7acbee8c4f9df482e2/gen/kuberl_v1beta1_custom_resource_conversion.erl | erlang | -module(kuberl_v1beta1_custom_resource_conversion).
-export([encode/1]).
-export_type([kuberl_v1beta1_custom_resource_conversion/0]).
-type kuberl_v1beta1_custom_resource_conversion() ::
#{ 'conversionReviewVersions' => list(),
'strategy' := binary(),
'webhookClientConfig' => kuberl_apiextensions_v1beta1_webhook_client_config:kuberl_apiextensions_v1beta1_webhook_client_config()
}.
encode(#{ 'conversionReviewVersions' := ConversionReviewVersions,
'strategy' := Strategy,
'webhookClientConfig' := WebhookClientConfig
}) ->
#{ 'conversionReviewVersions' => ConversionReviewVersions,
'strategy' => Strategy,
'webhookClientConfig' => WebhookClientConfig
}.
| |
84d8ccfe79bade892527c5072b61c1d1c29f1e8859a961de304320939914701b | brendanhay/amazonka | Internal.hs | -- |
-- Module : Test.Amazonka.WAFV2.Internal
-- Copyright   : (c) 2013-2023 Brendan Hay
-- License     : Mozilla Public License, v. 2.0.
-- Maintainer  : Brendan Hay <brendan.g.hay+amazonka@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
module Test.Amazonka.WAFV2.Internal where
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-wafv2/test/Test/Amazonka/WAFV2/Internal.hs | haskell | |
Module : Test.Amazonka.WAFV2.Internal
Stability : auto-generated | Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Test.Amazonka.WAFV2.Internal where
|
35f2968e51d5fa159879d00896d16f6ed3ae78c82410c57199a9bf1528df081d | m2ym/cl-syntax | clsql.lisp | (in-package :cl-user)
;; Reader syntax for the :clsql package: start from the standard read
;; table, then make #\[ open an embedded SQL expression (clsql's reader)
;; and give #\] the same behaviour as a closing parenthesis.
(syntax:define-package-syntax :clsql
  (:merge :standard)
  (:macro-char #\[ #'clsql-sys::sql-reader-open)
  (:macro-char #\] (cl:get-macro-character #\))))
| null | https://raw.githubusercontent.com/m2ym/cl-syntax/03f0c329bbd55b8622c37161e6278366525e2ccc/contrib/clsql.lisp | lisp | (in-package :cl-user)
(syntax:define-package-syntax :clsql
(:merge :standard)
(:macro-char #\[ #'clsql-sys::sql-reader-open)
(:macro-char #\] (cl:get-macro-character #\))))
| |
928c9373846e5b046674ff4f8ffbb884887df02806e2f943e9250940f4523156 | griffinbank/titan | migrate.clj | (ns titan.commands.db.migrate
(:gen-class)
(:require [titan.db.migrations :refer [migrate]]))
(defn -main
  "CLI entry point: run all pending database migrations."
  []
  (migrate))
| null | https://raw.githubusercontent.com/griffinbank/titan/990a7ab083ad1e6680f8f088ce43a095194b376a/titan/src/clj/titan/commands/db/migrate.clj | clojure | (ns titan.commands.db.migrate
(:gen-class)
(:require [titan.db.migrations :refer [migrate]]))
(defn -main
[]
(migrate))
| |
8864c3de0bed80d7f0d72db5b47de8da8c5b03be240c6414555694c964744614 | kmi/irs | publish-ontolingua-ontology.lisp | (in-package web-onto)
;; Logical pathname of the root directory under which generated
;; ontolingua pages are published.
(defvar *root-published-ontolingua-ontology-directory*
  "ocml:ontolingua-web;")
;; Public base URL of the published ontolingua ontologies, derived from
;; the configured default host.
(defvar *published-ontolingua-ontology-root-url*
  (concatenate 'string
               web-onto::*default-host*
               "/ontolingua/"))
;; Name of the per-ontology subdirectory holding the ontolingua sources.
(defvar *plain-default-ontolingua-sub-directory*
  "ontolingua")
(defun publish-ontolingua-ontology (ontology
ontology-type
documentation
ocml-source-directory
load-file
files
&optional
(root-directory
*root-published-ontolingua-ontology-directory*)
(root-url
*published-ontolingua-ontology-root-url*))
(ocml::translate-ocml-ontology-to-ontolingua ontology)
(let* ((web-directory
(translate-logical-pathname
(ontology-directory-name-from-type
ontology-type ontology
root-directory)))
(source-directory
(make-pathname :directory
(append
(pathname-directory ocml-source-directory)
(list
*plain-default-ontolingua-sub-directory*))
:host (pathname-host ocml-source-directory))))
(ensure-directories-exist web-directory)
(generate-published-ontology-index-page
:ontolingua
web-directory
ontology
ontology-type
documentation (cons load-file files)
root-url)
(mapc
#'(lambda (file)
(generate-published-ontology-page
ontology :ontolingua
documentation web-directory
source-directory file
ocml::*ontolingua-suffix*
root-url))
(cons load-file files)))) | null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/src/webonto/publish-ontolingua-ontology.lisp | lisp | (in-package web-onto)
(defvar *root-published-ontolingua-ontology-directory*
"ocml:ontolingua-web;")
(defvar *published-ontolingua-ontology-root-url*
(concatenate 'string
web-onto::*default-host*
"/ontolingua/"))
(defvar *plain-default-ontolingua-sub-directory*
"ontolingua")
(defun publish-ontolingua-ontology (ontology
ontology-type
documentation
ocml-source-directory
load-file
files
&optional
(root-directory
*root-published-ontolingua-ontology-directory*)
(root-url
*published-ontolingua-ontology-root-url*))
(ocml::translate-ocml-ontology-to-ontolingua ontology)
(let* ((web-directory
(translate-logical-pathname
(ontology-directory-name-from-type
ontology-type ontology
root-directory)))
(source-directory
(make-pathname :directory
(append
(pathname-directory ocml-source-directory)
(list
*plain-default-ontolingua-sub-directory*))
:host (pathname-host ocml-source-directory))))
(ensure-directories-exist web-directory)
(generate-published-ontology-index-page
:ontolingua
web-directory
ontology
ontology-type
documentation (cons load-file files)
root-url)
(mapc
#'(lambda (file)
(generate-published-ontology-page
ontology :ontolingua
documentation web-directory
source-directory file
ocml::*ontolingua-suffix*
root-url))
(cons load-file files)))) | |
92176ce1d984baadb0d973cdbf8b382ce0a5e098c60d126bfbc0a2b2a88b79b4 | elbrujohalcon/wxhnotepad | Step5.hs | -- | Like Step4 but with Find / Replace support
module Step5 (step5) where
import Graphics.UI.WX
import Graphics.UI.WXCore hiding (wxID_CUT, wxID_COPY, wxID_PASTE,
wxID_FIND, wxID_FORWARD, wxID_REPLACE, wxID_BACKWARD)
import Data.Bits
import Data.Char (toLower)
import Data.List
-- | /FRFlags/ represents what the user chose in the FindReplaceDialog
-- Options the user selected in the find/replace dialog.
data FRFlags = FRFlags {frfGoingDown :: Bool,   -- ^ search forward (towards the end)
                        frfMatchCase :: Bool,   -- ^ case-sensitive matching
                        frfWholeWord :: Bool,   -- ^ match whole words only
                        frfWrapSearch :: Bool}  -- ^ wrap around when the end is reached
  deriving (Eq, Show)
-- Everything the event handlers need, bundled in one record.
data GUIContext = GUICtx { guiWin    :: Frame (),             -- ^ the main window
                           guiEditor :: TextCtrl (),          -- ^ the text editor control
                           guiFile   :: Var (Maybe FilePath), -- ^ path of the open file, if any
                           guiTimer  :: TimerEx (),           -- ^ delays history snapshots while typing
                           guiPast   :: Var [String],         -- ^ undo history (newest first)
                           guiFuture :: Var [String],         -- ^ redo queue (next to redo first)
                           guiSearch :: FindReplaceData () -- ^ holds what the user is looking for
                           }
wxID_MYUNDO, wxID_MYREDO, wxID_CUT, wxID_COPY, wxID_PASTE,
wxID_FIND, wxID_FORWARD, wxID_BACKWARD, wxID_REPLACE :: Id
wxID_MYUNDO = 5108
wxID_MYREDO = 5109
wxID_CUT = 5031
wxID_COPY = 5032
wxID_PASTE = 5033
-- HACK: They're not correctly numbered in WxcDefs or they
wxID_REPLACE = 5038 -- don't even exist
wxID_FORWARD = 5106
wxID_BACKWARD = 5107
-- | Builds and shows the whole notepad window: editor, undo/redo
-- machinery, menus and the find/replace wiring added in this step.
step5 :: IO ()
step5 =
    do
      -- main window and the editor with an introductory text
      win <- frame [text := "wxhNotepad - Step 5", visible := False]
      editor <- textCtrl win [font := fontFixed,
                              text := "Find / Replace functionality is supported " ++
                                      "by wxHaskell but it's kinda hidden in " ++
                                      "WXCore. We'll need a little digging for " ++
                                      "this step.\n" ++
                                      "This step involved tons of code, so I " ++
                                      "think there must be a better way to do it." ++
                                      "If you find it, please post it on the " ++
                                      "wxhaskell-users mailing list :)"]
      -- mutable state: current file, snapshot timer, undo/redo stacks
      filePath <- varCreate Nothing
      refreshTimer <- timer win []
      past <- varCreate []
      future <- varCreate []
      -- We create a FindReplaceData that will hold the information about the
      -- last search
      search <- findReplaceDataCreate wxFR_DOWN
      let guiCtx = GUICtx win editor filePath refreshTimer past future search
      -- every keystroke re-arms the snapshot timer; record the initial text
      set editor [on keyboard := \_ -> restartTimer guiCtx >> propagateEvent]
      timerOnCommand refreshTimer $ updatePast guiCtx
      updatePast guiCtx
      -- We create a menu for the window with the same items from previous steps
      -- and a couple of new items in the edit menu
      mnuFile <- menuPane [text := "File"]
      mnuEdit <- menuPane [text := "Edit"]
      menuAppend mnuFile wxID_OPEN "&Open...\tCtrl-o" "Open Page" False
      menuAppend mnuFile wxID_SAVE "&Save\tCtrl-s" "Save Page" False
      menuAppend mnuFile wxID_SAVEAS "Save &as...\tCtrl-Shift-s" "Save Page as" False
      menuAppend mnuFile wxID_CLOSE "&Close\tCtrl-W" "Close Page" False
      menuAppend mnuEdit wxID_MYUNDO "&Undo\tCtrl-z" "Undo last action" False
      menuAppend mnuEdit wxID_MYREDO "&Redo\tCtrl-Shift-z" "Redo last undone action" False
      menuAppendSeparator mnuEdit
      menuAppend mnuEdit wxID_CUT "C&ut\tCtrl-x" "Cut" False
      menuAppend mnuEdit wxID_COPY "&Copy\tCtrl-c" "Copy" False
      menuAppend mnuEdit wxID_PASTE "&Paste\tCtrl-v" "Paste" False
      menuAppendSeparator mnuEdit
      menuAppend mnuEdit wxID_FIND "&Find...\tCtrl-f" "Find" False
      menuAppend mnuEdit wxID_FORWARD "Find &Next\tCtrl-g" "Find Next" False
      menuAppend mnuEdit wxID_BACKWARD "Find &Previous\tCtrl-Shift-g" "Find Previous" False
      menuAppend mnuEdit wxID_REPLACE "&Replace...\tCtrl-Shift-r" "Replace" False
      -- wire every menu item to its handler
      evtHandlerOnMenuCommand win wxID_OPEN $ openPage guiCtx
      evtHandlerOnMenuCommand win wxID_SAVE $ savePage guiCtx
      evtHandlerOnMenuCommand win wxID_SAVEAS $ savePageAs guiCtx
      evtHandlerOnMenuCommand win wxID_CLOSE $ windowClose win False >> return ()
      evtHandlerOnMenuCommand win wxID_MYUNDO $ undo guiCtx
      evtHandlerOnMenuCommand win wxID_MYREDO $ redo guiCtx
      evtHandlerOnMenuCommand win wxID_CUT $ cut guiCtx
      evtHandlerOnMenuCommand win wxID_COPY $ copy guiCtx
      evtHandlerOnMenuCommand win wxID_PASTE $ paste guiCtx
      evtHandlerOnMenuCommand win wxID_FIND $ justFind guiCtx
      evtHandlerOnMenuCommand win wxID_FORWARD $ justFindNext guiCtx
      evtHandlerOnMenuCommand win wxID_BACKWARD $ justFindPrev guiCtx
      evtHandlerOnMenuCommand win wxID_REPLACE $ findReplace guiCtx
      set win [menuBar := [mnuFile, mnuEdit]]
      -- final layout and show
      set win [layout := fill $ widget editor,
               clientSize := sz 640 480]
      focusOn editor
      set win [visible := True]
savePageAs, savePage, openPage,
undo, redo, restartTimer, killTimer,
updatePast, clearPast,
cut, copy, paste,
justFind, justFindNext, justFindPrev, findReplace :: GUIContext -> IO ()
-- Ask the user for a file, load it into the editor, reset the undo
-- history (edits of the previous file must not leak into the new one)
-- and remember the path for later saves.
openPage guiCtx@GUICtx{guiWin = win, guiEditor = editor, guiFile = filePath} =
    do
      maybePath <- fileOpenDialog win True True "Open file..." [("Haskells (*.hs)",["*.hs"]),
                                                                ("Texts (*.txt)", ["*.txt"]),
                                                                ("Any file (*.*)",["*.*"])] "" ""
      case maybePath of
        Nothing ->   -- the user cancelled the dialog
            return ()
        Just path ->
            do
              clearPast guiCtx
              textCtrlLoadFile editor path
              updatePast guiCtx
              set win [text := "wxhnotepad - " ++ path]
              varSet filePath $ Just path
-- Ask the user where to save, write the buffer there, and remember the
-- chosen path (it becomes the target of subsequent plain saves).
savePageAs GUICtx{guiWin = win, guiEditor = editor, guiFile = filePath} =
    do
      maybePath <- fileSaveDialog win True True "Save file..." [("Haskells (*.hs)",["*.hs"]),
                                                                ("Texts (*.txt)", ["*.txt"]),
                                                                ("Any file (*.*)",["*.*"])] "" ""
      case maybePath of
        Nothing ->   -- the user cancelled the dialog
            return ()
        Just path ->
            do
              textCtrlSaveFile editor path
              set win [text := "wxhnotepad - " ++ path]
              varSet filePath $ Just path
-- Save to the path we already know; if this buffer was never associated
-- with a file, fall back to the "Save as..." dialog to ask for one.
savePage guiCtx@GUICtx{guiEditor = editor, guiFile = filePath} =
    varGet filePath >>= maybe (savePageAs guiCtx) writeTo
  where
    -- textCtrlSaveFile returns a success flag we do not use, hence return ()
    writeTo path = textCtrlSaveFile editor path >> return ()
-- | Undo: move the current snapshot onto the redo queue and show the
-- previous one in the editor. The old code had two identical no-op
-- clauses ([] and [t], the latter binding an unused @t@); they are
-- collapsed into one catch-all, which also removes the unused-binding
-- warning without changing behaviour.
undo guiCtx@GUICtx{guiEditor = editor, guiPast = past, guiFuture = future} =
    do
      updatePast guiCtx   -- make sure the latest text is recorded first
      history <- varGet past
      case history of
        tnow:tlast:ts ->
            do
              _ <- varUpdate future (tnow:)   -- current text becomes redo-able
              varSet past $ tlast:ts
              set editor [text := tlast]
        _ ->   -- empty history, or only the current snapshot: nothing to undo
            return ()
-- Redo: take the most recently undone snapshot (head of guiFuture),
-- push it back onto the history and show it in the editor.
redo guiCtx@GUICtx{guiEditor = editor, guiPast = past, guiFuture = future} =
    do
      updatePast guiCtx
      varGet future >>= reapply
  where
    reapply []           = return ()   -- nothing was undone, nothing to redo
    reapply (t:upcoming) =
        do
          varSet future upcoming
          _ <- varUpdate past (t:)
          set editor [text := t]
-- Take a snapshot of the editor text: if it differs from the newest
-- history entry, push it and drop the redo queue (a fresh edit
-- invalidates anything that was undone). Always stops the pending
-- snapshot timer afterwards.
updatePast guiCtx@GUICtx{guiEditor = editor, guiPast = past, guiFuture = future} =
    do
      tnow <- get editor text
      history <- varGet past
      case history of
        [] ->   -- first snapshot ever
            varSet past [tnow]
        t:_ ->
            if t /= tnow
               then do
                 varUpdate past (tnow:)
                 varSet future []
               else   -- text unchanged since the last snapshot
                 return ()
      killTimer guiCtx
-- Drop both the undo history and the redo queue (used when a new file
-- is loaded, so the previous file's edits cannot be "undone" into it).
clearPast GUICtx{guiPast = past, guiFuture = future} =
    varSet past [] >> varSet future []
-- (Re)arm the 1-second one-shot snapshot timer; when it fires,
-- updatePast records the current text. If wx refuses to start the
-- timer we cannot keep the history consistent, so we bail out.
restartTimer guiCtx@GUICtx{guiWin = win, guiTimer = refreshTimer} =
    do
      started <- timerStart refreshTimer 1000 True
      if started
         then return ()
         else do
           errorDialog win "Error" "Can't start more timers"
           wxcAppExit
-- Stop the pending snapshot timer (it is re-armed on the next keystroke).
killTimer GUICtx{guiTimer = refreshTimer} = timerStop refreshTimer
-- Clipboard operations delegate to the wx text control; cut and paste
-- change the text, so they record a history snapshot right away.
copy GUICtx{guiEditor = editor} = textCtrlCopy editor
cut guiCtx@GUICtx{guiEditor = editor} = textCtrlCut editor >> updatePast guiCtx
paste guiCtx@GUICtx{guiEditor = editor} = textCtrlPaste editor >> updatePast guiCtx
-- We open a FindReplaceDialog with default style to let the user choose what to do
justFind guiCtx = openFindDialog guiCtx "Find..." dialogDefaultStyle
-- Repeat the last search towards the end of the document.
justFindNext guiCtx@GUICtx{guiSearch = search} =
    do
      -- We get the current search parameters
      curFlags <- findReplaceDataGetFlags search
      -- force the direction bit to "down" (forward)
      findReplaceDataSetFlags search $ curFlags .|. wxFR_DOWN
      -- and we proceed with the search
      findNextButton guiCtx
-- Repeat the last search towards the beginning of the document.
justFindPrev guiCtx@GUICtx{guiSearch = search} =
    do
      -- We get the current search parameters
      curFlags <- findReplaceDataGetFlags search
      -- clear the "down" bit, i.e. search upwards
      -- (the original comment wrongly said "down" here)
      findReplaceDataSetFlags search $ curFlags .&. complement wxFR_DOWN
      -- and we proceed with the search
      findNextButton guiCtx
-- We open a FindReplaceDialog with replace style
findReplace guiCtx = openFindDialog guiCtx "Find and Replace..." $ dialogDefaultStyle .|. wxFR_REPLACEDIALOG
-- | Auxiliary function to build a /FRFlags/
buildFRFlags :: Bool -- ^ Wrap the Search?
             -> Int -- ^ BitMask for Direction, Match Case and Whole Word flags
-> IO FRFlags
buildFRFlags wrap mask =
    return FRFlags { frfGoingDown  = isSet wxFR_DOWN
                   , frfMatchCase  = isSet wxFR_MATCHCASE
                   , frfWholeWord  = isSet wxFR_WHOLEWORD
                   , frfWrapSearch = wrap }
  where
    -- a flag is active iff its bit is present in the dialog's bitmask
    isSet flag = mask .&. flag /= 0
-- | Opens a FindReplace Dialog
openFindDialog :: GUIContext -- ^ The current GUIContext
-> String -- ^ The title of the dialog
-> Int -- ^ The style of the dialog
-> IO ()
-- In this extracted copy, one comment line inside the body had lost its
-- "--" marker, leaving bare prose in the middle of the do-block and
-- breaking compilation; the comment is restored below, the code itself
-- is unchanged.
openFindDialog guiCtx@GUICtx{guiWin = win,
                             guiSearch = search} title dlgStyle =
    do
      -- Create the dialog over the FindReplaceData we already have; the
      -- dialog mutates that shared record as the user changes options.
      frdialog <- findReplaceDialogCreate win search title $ dlgStyle + wxFR_NOWHOLEWORD
      -- One of the weirdest functions on wxHaskell is windowOnEvent.
      -- I did not really understand what are the parameters for exactly, but
      -- if we use it this way, we manage to get a certain event with id k to
      -- fire the function f... :)
      let winSet k f = let hnd _ = f guiCtx >> propagateEvent
                        in windowOnEvent frdialog [k] hnd hnd
      -- Using that magic trick, we associate our functions with the button
      -- pressing events in the dialog...
      winSet wxEVT_COMMAND_FIND findNextButton
      winSet wxEVT_COMMAND_FIND_NEXT findNextButton
      winSet wxEVT_COMMAND_FIND_REPLACE findReplaceButton
      winSet wxEVT_COMMAND_FIND_REPLACE_ALL findReplaceAllButton
      -- And... it's showtime!!
      set frdialog [visible := True]
-- These 3 functions handle the button events in the dialog but also handle the
-- menuitems when the dialog is not there
findNextButton, findReplaceButton, findReplaceAllButton :: GUIContext -> IO ()
-- | Finds the next occurrence of the current search string and selects it.
findNextButton guiCtx@GUICtx{guiEditor= editor,
                             guiWin = win,
                             guiSearch= search} =
    do
        -- We check what the user is trying to find
        s <- findReplaceDataGetFindString search
        -- We parse it, assuming that the user wants to wrap its search
        fs <- findReplaceDataGetFlags search >>= buildFRFlags True
        -- We try to find a match in the text
        mip <- findMatch s fs editor
        case mip of
            Nothing -> -- If there's no match, we inform that to the user
                infoDialog win "Find Results" $ s ++ " not found."
            Just ip -> -- If there's a match, we select that text
                do
                    textCtrlSetInsertionPoint editor ip
                    textCtrlSetSelection editor ip (length s + ip)
-- | Replaces the next occurrence of the search string and selects the
--   replacement text.
findReplaceButton guiCtx@GUICtx{guiEditor = editor,
                                guiWin = win,
                                guiSearch = search} =
    do
        -- We check what the user is trying to find
        s <- findReplaceDataGetFindString search
        -- and what is he wanting to replace it with
        r <- findReplaceDataGetReplaceString search
        -- We parse it, assuming that the user wants to wrap its search
        fs <- findReplaceDataGetFlags search >>= buildFRFlags True
        -- We try to find a match in the text
        mip <- findMatch s fs editor
        case mip of
            Nothing -> -- If there's no match, we inform that to the user
                infoDialog win "Find Results" $ s ++ " not found."
            Just ip ->
                do -- If there's a match, we replace that text
                    textCtrlReplace editor ip (length s + ip) r
                    -- select the result
                    textCtrlSetInsertionPoint editor ip
                    textCtrlSetSelection editor ip (length r + ip)
                    -- and finally update the history
                    updatePast guiCtx
-- | Replaces every occurrence from the top of the text. The search is
--   deliberately non-wrapping so the loop below terminates.
findReplaceAllButton guiCtx@GUICtx{guiEditor = editor,
                                   guiSearch = search} =
    do
        -- We check what the user is trying to find
        s <- findReplaceDataGetFindString search
        -- and what is he wanting to replace it with
        r <- findReplaceDataGetReplaceString search
        -- We parse it, assuming that the user wants to wrap its search
        -- Note that we're NOT wrapping our search, to avoid infinite looping
        fs <- findReplaceDataGetFlags search >>= buildFRFlags False
        -- We start at the beginning of the text
        textCtrlSetInsertionPoint editor 0
        -- And we go through the text replacing s by r until there's nothing
        -- more to replace
        replaceAllIn s r fs editor
        -- and finally update the history
        updatePast guiCtx
    where replaceAllIn s r fs editor =
            do
                mip <- findMatch s fs editor
                case mip of
                    Nothing ->
                        return () -- we're done here
                    Just ip ->
                        do
                            textCtrlReplace editor ip (length s + ip) r
                            textCtrlSetInsertionPoint editor $ length r + ip
                            replaceAllIn s r fs editor -- we look for the next match
-- | Tries to find a string in a text control
findMatch :: String -- ^ The string to find
          -> FRFlags -- ^ The flags to know how to look for it
          -> TextCtrl () -- ^ The textControl
          -> IO (Maybe Int) -- ^ Nothing or Just the position of the first match
findMatch query flags editor =
    do
        -- We get the current text
        txt <- get editor text
        -- and the insertion point (that's where the search begins)
        ip <- textCtrlGetInsertionPoint editor
        -- If we're not required to match the case we move everything to lower
        let (substring, string) = if frfMatchCase flags
                                    then (query, txt)
                                    else (map toLower query, map toLower txt)
            -- we choose what function to use depending on the direction
            funct = if frfGoingDown flags
                        then nextMatch (ip + 1)
                        else prevMatch ip
            (mip, wrapped) = funct substring string
        -- if it had to wrap around and that was 'forbidden', then the match didn't happen
        -- otherwise, the result is valid
        return $ if (not $ frfWrapSearch flags) && wrapped
                    then Nothing
                    else mip
-- These functions try to find a string contained in another
prevMatch, nextMatch :: Int -- ^ Starting point
                     -> String -- ^ What to find
                     -> String -- ^ Where to find it
                     -> (Maybe Int, Bool) -- ^ (Nothing or Just the point where it was found, It needed to wrap around?)
-- prevMatch searches backwards by reversing both needle and haystack,
-- delegating to nextMatch, and translating the resulting index back.
prevMatch _ [] _ = (Nothing, True) -- When looking for nothing, that's what you get
prevMatch from substring string | length string < from || from <= 0 = prevMatch (length string) substring string
                                | otherwise =
    case nextMatch (fromBack from) (reverse substring) (reverse string) of
        (Nothing, wrapped) -> (Nothing, wrapped)
        (Just ri, wrapped) -> (Just $ fromBack (ri + length substring), wrapped)
    where fromBack x = length string - x
-- nextMatch first looks in the region after the starting point; only when
-- that fails does it look from the beginning, reporting that it wrapped.
nextMatch _ [] _ = (Nothing, True) -- When looking for nothing, that's what you get
nextMatch from substring string | length substring > length string = (Nothing, True)
                                | length string <= from = nextMatch 0 substring string
                                | otherwise =
    let after = drop from string
        before = take (from + length substring) string
        aIndex = indexOf substring after
        bIndex = indexOf substring before
    in case aIndex of
           Just ai ->
               (Just $ from + ai, False)
           Nothing ->
               case bIndex of
                   Nothing -> (Nothing, True)
                   Just bi -> (Just bi, True)
-- | Index of the first occurrence of the needle inside the haystack,
--   or Nothing when it does not occur. The empty needle matches at 0.
indexOf :: String -> String -> Maybe Int
indexOf needle haystack = go 0 (tails haystack)
  where
    go _ [] = Nothing
    go i (rest : more)
        | needle `isPrefixOf` rest = Just i
        | otherwise                = go (i + 1) more
^ Needed to hold the what the user is looking for
don't even exist
We create a FindReplaceData that will hold the information about the
last search
We create a menu for the window with the same items from previous steps
and a couple of new items in the edit menu
We get the current search parameters
We set the finding direction to down
and we proceed with the search
We get the current search parameters
We set the finding direction to down
and we proceed with the search
| Auxiliary function to build a /FRFlags/
^ Wrap the Search?
| Opens a FindReplace Dialog
^ The title of the dialog
^ The style of the dialog
First we must create a dialog with the search parameters that we
already have. The dialog itself is going to modify them according
to the user selections
I did not really understand what are the parameters for exactly, but
if we use it this way, we manage to get a certain event with id k to
fire the function f... :)
Using that magic trick, we associate our functions with the button
pressing events in the dialog...
And... it's showtime!!
menuitems when the dialog is not there
We check what the user is trying to find
We parse it, assuming that the user wants to wrap its search
We try to find a match in the text
If there's no match, we inform that to the user
If there's a match, we select that text
We check what the user is trying to find
and what is he wanting to replace it with
We parse it, assuming that the user wants to wrap its search
We try to find a match in the text
If there's no match, we inform that to the user
If there's a match, we replace that text
select the result
and finally update the history
We check what the user is trying to find
and what is he wanting to replace it with
We parse it, assuming that the user wants to wrap its search
Note that we're NOT wrapping our search, to avoid infinite looping
We start at the beginning of the text
And we go through the text replacing s by r until there's nothing
more to replace
and finally update the history
we're done here
we look for the next match
| Tries to find a string in a text control
^ The string to find
^ The flags to know how to look for it
^ The textControl
We get the current text
and the insertion point (that's where the search begins)
If we're not required to match the case we move everything to lower
we choose what function to use depending on the direction
if it had to wrap around and that was 'forbbiden', then the match didn't happen
otherwise, the result is valid
These functions try to find a string contained in another
^ Starting point
^ What to find
^ Where to find it
^ (Nothing or Just the point where it was found, It needed to wrap around?)
When looking for nothing, that's what you get
When looking for nothing, that's what you get | module Step5 (step5) where
import Graphics.UI.WX
import Graphics.UI.WXCore hiding (wxID_CUT, wxID_COPY, wxID_PASTE,
wxID_FIND, wxID_FORWARD, wxID_REPLACE, wxID_BACKWARD)
import Data.Bits
import Data.Char (toLower)
import Data.List
| /FRFlags/ represents what the user choose in the FindReplaceDialog
data FRFlags = FRFlags {frfGoingDown :: Bool,
frfMatchCase :: Bool,
frfWholeWord :: Bool,
frfWrapSearch :: Bool}
deriving (Eq, Show)
data GUIContext = GUICtx { guiWin :: Frame (),
guiEditor :: TextCtrl (),
guiFile :: Var (Maybe FilePath),
guiTimer :: TimerEx (),
guiPast :: Var [String],
guiFuture :: Var [String],
}
wxID_MYUNDO, wxID_MYREDO, wxID_CUT, wxID_COPY, wxID_PASTE,
wxID_FIND, wxID_FORWARD, wxID_BACKWARD, wxID_REPLACE :: Id
wxID_MYUNDO = 5108
wxID_MYREDO = 5109
wxID_CUT = 5031
wxID_COPY = 5032
wxID_PASTE = 5033
HACK : They 're not correctly numbered in WxcDefs or they
wxID_FORWARD = 5106
wxID_BACKWARD = 5107
step5 :: IO ()
step5 =
do
win <- frame [text := "wxhNotepad - Step 5", visible := False]
editor <- textCtrl win [font := fontFixed,
text := "Find / Replace functionality is supported " ++
"by wxHaskell but it's kinda hidden in " ++
"WXCore. We'll need a little digging for " ++
"this step.\n" ++
"This step involved tons of code, so I " ++
"think there must be a better way to do it." ++
"If you find it, please post it on the " ++
"wxhaskell-users mailing list :)"]
filePath <- varCreate Nothing
refreshTimer <- timer win []
past <- varCreate []
future <- varCreate []
search <- findReplaceDataCreate wxFR_DOWN
let guiCtx = GUICtx win editor filePath refreshTimer past future search
set editor [on keyboard := \_ -> restartTimer guiCtx >> propagateEvent]
timerOnCommand refreshTimer $ updatePast guiCtx
updatePast guiCtx
mnuFile <- menuPane [text := "File"]
mnuEdit <- menuPane [text := "Edit"]
menuAppend mnuFile wxID_OPEN "&Open...\tCtrl-o" "Open Page" False
menuAppend mnuFile wxID_SAVE "&Save\tCtrl-s" "Save Page" False
menuAppend mnuFile wxID_SAVEAS "Save &as...\tCtrl-Shift-s" "Save Page as" False
menuAppend mnuFile wxID_CLOSE "&Close\tCtrl-W" "Close Page" False
menuAppend mnuEdit wxID_MYUNDO "&Undo\tCtrl-z" "Undo last action" False
menuAppend mnuEdit wxID_MYREDO "&Redo\tCtrl-Shift-z" "Redo last undone action" False
menuAppendSeparator mnuEdit
menuAppend mnuEdit wxID_CUT "C&ut\tCtrl-x" "Cut" False
menuAppend mnuEdit wxID_COPY "&Copy\tCtrl-c" "Copy" False
menuAppend mnuEdit wxID_PASTE "&Paste\tCtrl-v" "Paste" False
menuAppendSeparator mnuEdit
menuAppend mnuEdit wxID_FIND "&Find...\tCtrl-f" "Find" False
menuAppend mnuEdit wxID_FORWARD "Find &Next\tCtrl-g" "Find Next" False
menuAppend mnuEdit wxID_BACKWARD "Find &Previous\tCtrl-Shift-g" "Find Previous" False
menuAppend mnuEdit wxID_REPLACE "&Replace...\tCtrl-Shift-r" "Replace" False
evtHandlerOnMenuCommand win wxID_OPEN $ openPage guiCtx
evtHandlerOnMenuCommand win wxID_SAVE $ savePage guiCtx
evtHandlerOnMenuCommand win wxID_SAVEAS $ savePageAs guiCtx
evtHandlerOnMenuCommand win wxID_CLOSE $ windowClose win False >> return ()
evtHandlerOnMenuCommand win wxID_MYUNDO $ undo guiCtx
evtHandlerOnMenuCommand win wxID_MYREDO $ redo guiCtx
evtHandlerOnMenuCommand win wxID_CUT $ cut guiCtx
evtHandlerOnMenuCommand win wxID_COPY $ copy guiCtx
evtHandlerOnMenuCommand win wxID_PASTE $ paste guiCtx
evtHandlerOnMenuCommand win wxID_FIND $ justFind guiCtx
evtHandlerOnMenuCommand win wxID_FORWARD $ justFindNext guiCtx
evtHandlerOnMenuCommand win wxID_BACKWARD $ justFindPrev guiCtx
evtHandlerOnMenuCommand win wxID_REPLACE $ findReplace guiCtx
set win [menuBar := [mnuFile, mnuEdit]]
set win [layout := fill $ widget editor,
clientSize := sz 640 480]
focusOn editor
set win [visible := True]
savePageAs, savePage, openPage,
undo, redo, restartTimer, killTimer,
updatePast, clearPast,
cut, copy, paste,
justFind, justFindNext, justFindPrev, findReplace :: GUIContext -> IO ()
openPage guiCtx@GUICtx{guiWin = win, guiEditor = editor, guiFile = filePath} =
do
maybePath <- fileOpenDialog win True True "Open file..." [("Haskells (*.hs)",["*.hs"]),
("Texts (*.txt)", ["*.txt"]),
("Any file (*.*)",["*.*"])] "" ""
case maybePath of
Nothing ->
return ()
Just path ->
do
clearPast guiCtx
textCtrlLoadFile editor path
updatePast guiCtx
set win [text := "wxhnotepad - " ++ path]
varSet filePath $ Just path
savePageAs GUICtx{guiWin = win, guiEditor = editor, guiFile = filePath} =
do
maybePath <- fileSaveDialog win True True "Save file..." [("Haskells (*.hs)",["*.hs"]),
("Texts (*.txt)", ["*.txt"]),
("Any file (*.*)",["*.*"])] "" ""
case maybePath of
Nothing ->
return ()
Just path ->
do
textCtrlSaveFile editor path
set win [text := "wxhnotepad - " ++ path]
varSet filePath $ Just path
savePage guiCtx@GUICtx{guiWin = win, guiEditor = editor, guiFile = filePath} =
do
maybePath <- varGet filePath
case maybePath of
Nothing ->
savePageAs guiCtx
Just path ->
textCtrlSaveFile editor path >> return ()
undo guiCtx@GUICtx{guiEditor = editor, guiPast = past, guiFuture = future} =
do
updatePast guiCtx
history <- varGet past
case history of
[] ->
return ()
[t] ->
return ()
tnow:tlast:ts ->
do
varUpdate future (tnow:)
varSet past $ tlast:ts
set editor [text := tlast]
redo guiCtx@GUICtx{guiEditor = editor, guiPast = past, guiFuture = future} =
do
updatePast guiCtx
coming <- varGet future
case coming of
[] ->
return ()
t:ts ->
do
varSet future ts
varUpdate past (t:)
set editor [text := t]
updatePast guiCtx@GUICtx{guiEditor = editor, guiPast = past, guiFuture = future} =
do
tnow <- get editor text
history <- varGet past
case history of
[] ->
varSet past [tnow]
t:_ ->
if t /= tnow
then do
varUpdate past (tnow:)
varSet future []
else
return ()
killTimer guiCtx
clearPast GUICtx{guiPast = past, guiFuture = future} =
do
varSet past []
varSet future []
restartTimer guiCtx@GUICtx{guiWin = win, guiTimer = refreshTimer} =
do
started <- timerStart refreshTimer 1000 True
if started
then return ()
else do
errorDialog win "Error" "Can't start more timers"
wxcAppExit
killTimer GUICtx{guiTimer = refreshTimer} = timerStop refreshTimer
copy GUICtx{guiEditor = editor} = textCtrlCopy editor
cut guiCtx@GUICtx{guiEditor = editor} = textCtrlCut editor >> updatePast guiCtx
paste guiCtx@GUICtx{guiEditor = editor} = textCtrlPaste editor >> updatePast guiCtx
We open a FindReplaceDialog with default style to let the user choose what to do
justFind guiCtx = openFindDialog guiCtx "Find..." dialogDefaultStyle
justFindNext guiCtx@GUICtx{guiSearch = search} =
do
curFlags <- findReplaceDataGetFlags search
findReplaceDataSetFlags search $ curFlags .|. wxFR_DOWN
findNextButton guiCtx
justFindPrev guiCtx@GUICtx{guiSearch = search} =
do
curFlags <- findReplaceDataGetFlags search
findReplaceDataSetFlags search $ curFlags .&. complement wxFR_DOWN
findNextButton guiCtx
We open a FindReplaceDialog with replace style
findReplace guiCtx = openFindDialog guiCtx "Find and Replace..." $ dialogDefaultStyle .|. wxFR_REPLACEDIALOG
^ BitMask for Direction , Match Case and Whole Word flags
-> IO FRFlags
buildFRFlags w x = return FRFlags {frfGoingDown = (x .&. wxFR_DOWN) /= 0,
frfMatchCase = (x .&. wxFR_MATCHCASE) /= 0,
frfWholeWord = (x .&. wxFR_WHOLEWORD) /= 0,
frfWrapSearch = w}
^ The current GUIContext
-> IO ()
openFindDialog guiCtx@GUICtx{guiWin = win,
guiSearch = search} title dlgStyle =
do
frdialog <- findReplaceDialogCreate win search title $ dlgStyle + wxFR_NOWHOLEWORD
One of the weirdest functions on wxHaskell is windowOnEvent .
let winSet k f = let hnd _ = f guiCtx >> propagateEvent
in windowOnEvent frdialog [k] hnd hnd
winSet wxEVT_COMMAND_FIND findNextButton
winSet wxEVT_COMMAND_FIND_NEXT findNextButton
winSet wxEVT_COMMAND_FIND_REPLACE findReplaceButton
winSet wxEVT_COMMAND_FIND_REPLACE_ALL findReplaceAllButton
set frdialog [visible := True]
These 3 functions handle the button events in the dialog but also handle the
findNextButton, findReplaceButton, findReplaceAllButton :: GUIContext -> IO ()
findNextButton guiCtx@GUICtx{guiEditor= editor,
guiWin = win,
guiSearch= search} =
do
s <- findReplaceDataGetFindString search
fs <- findReplaceDataGetFlags search >>= buildFRFlags True
mip <- findMatch s fs editor
case mip of
infoDialog win "Find Results" $ s ++ " not found."
do
textCtrlSetInsertionPoint editor ip
textCtrlSetSelection editor ip (length s + ip)
findReplaceButton guiCtx@GUICtx{guiEditor = editor,
guiWin = win,
guiSearch = search} =
do
s <- findReplaceDataGetFindString search
r <- findReplaceDataGetReplaceString search
fs <- findReplaceDataGetFlags search >>= buildFRFlags True
mip <- findMatch s fs editor
case mip of
infoDialog win "Find Results" $ s ++ " not found."
Just ip ->
textCtrlReplace editor ip (length s + ip) r
textCtrlSetInsertionPoint editor ip
textCtrlSetSelection editor ip (length r + ip)
updatePast guiCtx
findReplaceAllButton guiCtx@GUICtx{guiEditor = editor,
guiSearch = search} =
do
s <- findReplaceDataGetFindString search
r <- findReplaceDataGetReplaceString search
fs <- findReplaceDataGetFlags search >>= buildFRFlags False
textCtrlSetInsertionPoint editor 0
replaceAllIn s r fs editor
updatePast guiCtx
where replaceAllIn s r fs editor =
do
mip <- findMatch s fs editor
case mip of
Nothing ->
Just ip ->
do
textCtrlReplace editor ip (length s + ip) r
textCtrlSetInsertionPoint editor $ length r + ip
^ Nothing or Just the position of the first match
findMatch query flags editor =
do
txt <- get editor text
ip <- textCtrlGetInsertionPoint editor
let (substring, string) = if frfMatchCase flags
then (query, txt)
else (map toLower query, map toLower txt)
funct = if frfGoingDown flags
then nextMatch (ip + 1)
else prevMatch ip
(mip, wrapped) = funct substring string
return $ if (not $ frfWrapSearch flags) && wrapped
then Nothing
else mip
prevMatch from substring string | length string < from || from <= 0 = prevMatch (length string) substring string
| otherwise =
case nextMatch (fromBack from) (reverse substring) (reverse string) of
(Nothing, wrapped) -> (Nothing, wrapped)
(Just ri, wrapped) -> (Just $ fromBack (ri + length substring), wrapped)
where fromBack x = length string - x
nextMatch from substring string | length substring > length string = (Nothing, True)
| length string <= from = nextMatch 0 substring string
| otherwise =
let after = drop from string
before = take (from + length substring) string
aIndex = indexOf substring after
bIndex = indexOf substring before
in case aIndex of
Just ai ->
(Just $ from + ai, False)
Nothing ->
case bIndex of
Nothing -> (Nothing, True)
Just bi -> (Just bi, True)
indexOf :: String -> String -> Maybe Int
indexOf substring string = findIndex (isPrefixOf substring) $ tails string |
d17149726de928c51ef6a39c7e68106db6824f8b186053f15072414adac2a899 | janestreet/core | date_intf.ml | module type Date = sig
type t = Date0.t
* @inline
val of_time : Time_float.t -> zone:Time_float.Zone.t -> t
val today : zone:Time_float.Zone.t -> t
*
val format : [ `Use_Date_unix ] [@@deprecated "[since 2021-03] Use [Date_unix]"]
val of_tm : [ `Use_Date_unix ] [@@deprecated "[since 2021-03] Use [Date_unix]"]
val parse : [ `Use_Date_unix ] [@@deprecated "[since 2021-03] Use [Date_unix]"]
end
| null | https://raw.githubusercontent.com/janestreet/core/8c81161d689ea21df9acfff1f267c41db0835f77/core/src/date_intf.ml | ocaml | module type Date = sig
type t = Date0.t
* @inline
val of_time : Time_float.t -> zone:Time_float.Zone.t -> t
val today : zone:Time_float.Zone.t -> t
*
val format : [ `Use_Date_unix ] [@@deprecated "[since 2021-03] Use [Date_unix]"]
val of_tm : [ `Use_Date_unix ] [@@deprecated "[since 2021-03] Use [Date_unix]"]
val parse : [ `Use_Date_unix ] [@@deprecated "[since 2021-03] Use [Date_unix]"]
end
| |
e8fff83cd9b149b809d3f03dbc5fa742ab6c958a31348a71dd2076b99828d575 | aiya000/haskell-examples | WeekDays.hs | data Days = Sun | Mon | Tue | Wed | Thu | Fri | Sut
deriving (Show, Enum)
days = [Sun .. Sut]
easyCal :: Int -> IO ()
easyCal 32 = return ()
easyCal x = do
putStr $ show x
putStr " | "
putStrLn $ show $ days !! (x `mod` 7)
easyCal $ x + 1
main = easyCal 1
| null | https://raw.githubusercontent.com/aiya000/haskell-examples/a337ba0e86be8bb1333e7eea852ba5fa1d177d8a/Room/0_Books/GreatH_Book/Fundament/room/WeekDays.hs | haskell | data Days = Sun | Mon | Tue | Wed | Thu | Fri | Sut
deriving (Show, Enum)
days = [Sun .. Sut]
easyCal :: Int -> IO ()
easyCal 32 = return ()
easyCal x = do
putStr $ show x
putStr " | "
putStrLn $ show $ days !! (x `mod` 7)
easyCal $ x + 1
main = easyCal 1
| |
653678b5b2b47bc758fcc8120e7ffe28b0bbd377a8b13cab89f20e91a4b06e46 | travelping/ergw | ergw_socket_reg.erl | Copyright 2015 - 2020 Travelping GmbH < >
%% This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version
2 of the License , or ( at your option ) any later version .
-module(ergw_socket_reg).

-behaviour(regine_server).

%% API
-export([start_link/0]).
-export([register/3, lookup/2, waitfor/2]).
-export([all/0]).

-ignore_xref([start_link/0]).

%% regine_server callbacks
-export([init/1, handle_register/4, handle_unregister/3, handle_pid_remove/3,
	 handle_death/3, handle_call/3, terminate/2]).

%% --------------------------------------------------------------------
%% Include files
%% --------------------------------------------------------------------

-define(SERVER, ?MODULE).

%% waitfor maps a registry key to the *list* of callers currently blocked
%% in waitfor/2 until that key gets registered.
-record(state, {waitfor = #{}}).

%%%===================================================================
%%% API
%%%===================================================================

%% @doc Start the registry; the backing ETS table is created in init/1.
start_link() ->
    regine_server:start_link({local, ?SERVER}, ?MODULE, []).

%% @doc Register Value under {Type, Name} for the calling process.
%% Returns {error, duplicate} when the key is already taken.
register(Type, Name, Value) ->
    regine_server:register(?SERVER, self(), {Type, Name}, Value).

%% unregister(Type, Name) ->
%%     regine_server:unregister(?SERVER, {Type, Name}, undefined).

%% @doc Look up the value registered under {Type, Name};
%% returns undefined when the key is not present.
lookup(Type, Name) ->
    Key = {Type, Name},
    case ets:lookup(?SERVER, Key) of
	[{Key, _Pid, Value}] ->
	    Value;
	_ ->
	    undefined
    end.

%% @doc Block until {Type, Name} is registered and return its value.
waitfor(Type, Name) ->
    regine_server:call(?SERVER, {waitfor, {Type, Name}}, infinity).

%% @doc Dump all registrations (debugging/introspection helper).
all() ->
    ets:tab2list(?SERVER).

%%%===================================================================
%%% regine callbacks
%%%===================================================================

init([]) ->
    ets:new(?SERVER, [ordered_set, named_table, public, {keypos, 1}]),
    {ok, #state{}}.

handle_register(Pid, Id, Value, State) ->
    case ets:insert_new(?SERVER, {Id, Pid, Value}) of
	true  -> {ok, [Id], notify(Id, Value, State)};
	false -> {error, duplicate}
    end.

handle_unregister(Key, _Value, State) ->
    Pids = [Pid || {_, Pid, _} <- ets:take(?SERVER, Key)],
    {Pids, State}.

handle_pid_remove(_Pid, Keys, State) ->
    lists:foreach(fun(Key) -> ets:delete(?SERVER, Key) end, Keys),
    State.

handle_death(_Pid, _Reason, State) ->
    State.

handle_call({waitfor, Key}, From, #state{waitfor = WF} = State) ->
    case ets:lookup(?SERVER, Key) of
	[{Key, _Pid, Value}] ->
	    {reply, Value, State};
	_ ->
	    %% Queue this caller. The previous implementation kept only a
	    %% single waiter per key (WF#{Key => From}), silently dropping
	    %% — and thereby deadlocking — any additional caller waiting
	    %% for the same key.
	    Waiters = maps:get(Key, WF, []),
	    {noreply, State#state{waitfor = WF#{Key => [From | Waiters]}}}
    end.

terminate(_Reason, _State) ->
    ok.

%%%===================================================================
%%% Internal functions
%%%===================================================================

%% Reply to every caller blocked on Key (if any) and drop them from state.
notify(Key, Value, #state{waitfor = WF0} = State) ->
    case maps:take(Key, WF0) of
	{Froms, WF} ->
	    lists:foreach(fun(From) -> gen_server:reply(From, Value) end, Froms),
	    State#state{waitfor = WF};
	error ->
	    State
    end.
| null | https://raw.githubusercontent.com/travelping/ergw/1b6cc3ee89eea4cf9df1d7de612744f0a850dfd9/apps/ergw_core/src/ergw_socket_reg.erl | erlang | This program is free software; you can redistribute it and/or
API
regine_server callbacks
--------------------------------------------------------------------
Include files
--------------------------------------------------------------------
===================================================================
API
===================================================================
regine_server:unregister(?SERVER, {Type, Name}, undefined).
===================================================================
regine callbacks
===================================================================
===================================================================
=================================================================== | Copyright 2015 - 2020 Travelping GmbH < >
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version
2 of the License , or ( at your option ) any later version .
-module(ergw_socket_reg).
-behaviour(regine_server).
-export([start_link/0]).
-export([register/3, lookup/2, waitfor/2]).
-export([all/0]).
-ignore_xref([start_link/0]).
-export([init/1, handle_register/4, handle_unregister/3, handle_pid_remove/3,
handle_death/3, handle_call/3, terminate/2]).
-define(SERVER, ?MODULE).
-record(state, {waitfor = #{}}).
start_link() ->
regine_server:start_link({local, ?SERVER}, ?MODULE, []).
register(Type, Name, Value) ->
regine_server:register(?SERVER, self(), {Type, Name}, Value).
unregister(Type , Name ) - >
lookup(Type, Name) ->
Key = {Type, Name},
case ets:lookup(?SERVER, Key) of
[{Key, _Pid, Value}] ->
Value;
_ ->
undefined
end.
waitfor(Type, Name) ->
regine_server:call(?SERVER, {waitfor, {Type, Name}}, infinity).
all() ->
ets:tab2list(?SERVER).
init([]) ->
ets:new(?SERVER, [ordered_set, named_table, public, {keypos, 1}]),
{ok, #state{}}.
handle_register(Pid, Id, Value, State) ->
case ets:insert_new(?SERVER, {Id, Pid, Value}) of
true -> {ok, [Id], notify(Id, Value, State)};
false -> {error, duplicate}
end.
handle_unregister(Key, _Value, State) ->
Pids = [Pid || {_, Pid, _} <- ets:take(?SERVER, Key)],
{Pids, State}.
handle_pid_remove(_Pid, Keys, State) ->
lists:foreach(fun(Key) -> ets:delete(?SERVER, Key) end, Keys),
State.
handle_death(_Pid, _Reason, State) ->
State.
handle_call({waitfor, Key}, From, #state{waitfor = WF} = State) ->
case ets:lookup(?SERVER, Key) of
[{Key, _Pid, Value}] ->
{reply, Value, State};
_ ->
{noreply, State#state{waitfor = WF#{Key => From}}}
end.
terminate(_Reason, _State) ->
ok.
Internal functions
notify(Key, Value, #state{waitfor = WF0} = State) ->
case maps:take(Key, WF0) of
{From, WF} ->
gen_server:reply(From, Value),
State#state{waitfor = WF};
_ ->
State
end.
|
c7369034e49afca82331777a8fa600179b9e8632ec3cb5417bb67af7406e3ad8 | motemen/jusk | JSType.hs | {-
JSType.hs
型変換
-}
module JSType where
import Prelude hiding (toInteger)
import DataTypes
import Parser (numericLiteral)
import ParserUtil (runLex)
import Internal
import Eval
-- | ECMA-262 ToPrimitive: converts a value to a non-object value.
--   Primitives are returned unchanged, references are dereferenced
--   first, and objects are converted through 'defaultValue' using the
--   given preferred-type hint (e.g. "Number" or "String").
toPrimitive :: Value -> String -> Evaluate Value
toPrimitive Undefined _ =
    return Undefined
toPrimitive Null _ =
    return Null
toPrimitive bool@(Boolean _) _ =
    return bool
toPrimitive num@(Number _) _ =
    return num
toPrimitive string@(String _) _ =
    return string
-- References resolve to their underlying value, then retry.
toPrimitive ref@(Reference { }) preferredType =
    flip toPrimitive preferredType =<< getValue ref
toPrimitive ref@(Ref _) preferredType =
    flip toPrimitive preferredType =<< readRef ref
-- Anything else (objects) goes through [[DefaultValue]].
toPrimitive object preferredType =
    defaultValue object preferredType
-- | ECMA-262 ToBoolean: undefined/null/NaN/0/"" are falsy,
--   everything else (including every object) is truthy.
toBoolean :: Value -> Evaluate Bool
toBoolean Undefined = return False
toBoolean Null = return False
toBoolean (Boolean bool) = return bool
toBoolean (Number NaN) = return False
-- A raw Double can still hold an IEEE NaN (e.g. from 0/0); ECMA-262
-- requires NaN to convert to False as well, so guard with isNaN.
toBoolean (Number (Double n)) = return $ n /= 0 && not (isNaN n)
toBoolean (Number (Integer n)) = return $ n /= 0
toBoolean (String string) = return $ not $ null string
toBoolean Object { } = return True
-- References: resolve, dereference and retry.
toBoolean o = getValue o >>= readRef >>= toBoolean
-- | ECMA-262 ToNumber. Strings are parsed with the interpreter's own
--   'numericLiteral' parser; unparsable strings become NaN.
--   NOTE(review): the Right branch only matches @Literal (Number n)@ —
--   presumably 'numericLiteral' can produce nothing else here, but any
--   other shape would be a pattern-match failure; confirm against the
--   parser definition.
toNumber :: Value -> Evaluate Number
toNumber Undefined =
    return NaN
toNumber Null =
    return $ Integer 0
toNumber (Boolean bool) =
    return $ if bool then Integer 1
                     else Double 0.0
toNumber (Number num) =
    return num
toNumber (String string) =
    case runLex numericLiteral string of
         Left _ -> return NaN
         Right (Literal (Number n)) -> return n
-- Objects convert via ToPrimitive with a "Number" hint, then retry.
toNumber object@Object { } =
    toPrimitive object "Number" >>= toNumber
toNumber ref@Ref { } =
    readRef ref >>= toNumber
toNumber ref@Reference { } =
    getValue ref >>= toNumber
-- Fallback: report the unsupported shape but still yield NaN so that
-- evaluation can continue.
toNumber o =
    (throw "NotImplemented" $ "toNumber: " ++ show o) >> return NaN
-- | ECMA-262 ToInteger: NaN becomes 0 and doubles are truncated toward
--   zero; any other value is first converted with 'toNumber'.
toInteger :: Value -> Evaluate Integer
toInteger (Number NaN) = return 0
toInteger (Number (Integer n)) = return n
toInteger (Number (Double n)) = return $ truncate n
toInteger value =
    do num <- toNumber value
       toInteger $ Number num
-- | Shared skeleton for the ToInt32/ToUint32/ToUint16 conversions:
--   converts to a number, maps NaN and infinities to 0, truncates
--   toward zero, then applies the given wrapping function.
toIntWith :: (Integer -> Int) -> Value -> Evaluate Int
toIntWith convert value =
    do num <- toNumber value
       return $ case num of
                     NaN -> 0
                     Double n | isInfinite n -> 0
                              | otherwise -> convert $ truncate n
                     Integer n -> convert n
-- | ECMA-262 ToInt32: reduce modulo 2^32 into [0, 2^32), then map
--   values >= 2^31 into the negative range.
toInt :: Value -> Evaluate Int
toInt = toIntWith integerToInt
    where integerToInt :: Integer -> Int
          integerToInt n =
              -- `mod` (unlike `rem`) always yields a value in [0, 2^32),
              -- so negative inputs are reduced correctly per the spec;
              -- integer exponentiation avoids round-tripping via Double.
              let intMax = 2 ^ (32 :: Int)
                  n' = n `mod` intMax
              in fromEnum $ if n' >= 2 ^ (31 :: Int)
                            then n' - intMax
                            else n'
-- | ECMA-262 ToUint32: reduce modulo 2^32 into [0, 2^32).
toUInt :: Value -> Evaluate Int
toUInt = toIntWith integerToUInt
    where integerToUInt :: Integer -> Int
          integerToUInt n =
              -- `mod` (unlike `rem`) maps negatives into [0, 2^32),
              -- e.g. ToUint32(-1) must be 2^32 - 1, not -1.
              fromEnum $ n `mod` (2 ^ (32 :: Int))
-- | ECMA-262 ToUint16: reduce modulo 2^16 into [0, 2^16).
toUInt16 :: Value -> Evaluate Int
toUInt16 = toIntWith integerToUInt16
    where integerToUInt16 :: Integer -> Int
          integerToUInt16 n =
              -- `mod` (unlike `rem`) maps negatives into [0, 2^16).
              fromEnum $ n `mod` (2 ^ (16 :: Int))
-- | ECMA-262 ToString. Primitives are rendered directly; references are
--   dereferenced and retried; objects are converted by calling their own
--   @toString@ method, raising a TypeError when that method does not
--   yield a primitive.
toString :: Value -> Evaluate String
toString Void = return ""
toString Undefined = return "undefined"
toString Null = return "null"
toString (Boolean False) = return "false"
toString (Boolean True) = return "true"
toString (String string) = return string
toString (Number (Integer n)) = return $ show n
toString (Number (Double n)) = return $ show n
toString (Number NaN) = return "NaN"
-- Native functions render like their JS counterparts.
toString Object { objName = name, objObject = NativeFunction { } } =
    return $ "function " ++ name ++ "() { [native code] }"
toString ref@Reference { } =
    do object <- getValue ref
       toString object
toString ref@(Ref _) =
    do object <- readRef ref
       toString object
-- Plain objects: delegate to the object's own toString method.
toString object =
    do s <- callMethod object "toString" []
       case s of
            String s -> return s
            -- Any other primitive result is rendered via show.
            _ | isPrimitive s -> return $ show s
            _ -> do throw "TypeError" $ getName object ++ ".toString did not return string: " ++ show s
                    return ""
-- | Renders a value as (approximate) source text, in the spirit of the
--   SpiderMonkey @toSource@ extension. Unlike 'toString', strings are
--   quoted (via show) and objects delegate to their @toSource@ method.
toSource :: Value -> Evaluate String
toSource Void = return ""
toSource Undefined = return "undefined"
toSource Null = return "null"
toSource (Boolean False) = return "false"
toSource (Boolean True) = return "true"
toSource (String string) = return $ show string
toSource (Number (Integer n)) = return $ show n
toSource (Number (Double n)) = return $ show n
toSource (Number NaN) = return "NaN"
-- User-defined functions rely on their Show instance.
toSource func@Object { objObject = Function { } } =
    return $ show func
toSource Object { objName = name, objObject = NativeFunction { } } =
    return $ "function " ++ name ++ "() { [native code] }"
toSource ref@Reference { } =
    do object <- getValue ref
       toSource object
toSource ref@(Ref _) =
    do object <- readRef ref
       toSource object
-- Plain objects: call their toSource method and stringify the result.
toSource object =
    toString =<< callMethod object "toSource" []
-- | ECMA-262 ToObject: undefined and null raise a TypeError; numbers
--   and strings are boxed through their wrapper constructors with the
--   primitive stored as the object's internal value; anything else is
--   assumed to already be object-like and returned unchanged.
toObject :: Value -> Evaluate Value
toObject Undefined =
    throw "TypeError" "undefined cannot be converted to object"
toObject Null =
    throw "TypeError" "null cannot be converted to object"
toObject num@(Number _) =
    do klass <- getVar "Number"
       object <- makeRef =<< construct klass []
       modifyValue object $ setObjValue num
       return object
toObject str@(String _) =
    do klass <- getVar "String"
       object <- makeRef =<< construct klass []
       modifyValue object $ setObjValue str
       return object
toObject x = return x
| null | https://raw.githubusercontent.com/motemen/jusk/4975915b8550aa09c452fb89dcad7bfcb1037c39/src/JSType.hs | haskell |
JSType.hs
型変換
|
module JSType where
import Prelude hiding (toInteger)
import DataTypes
import Parser (numericLiteral)
import ParserUtil (runLex)
import Internal
import Eval
toPrimitive :: Value -> String -> Evaluate Value
toPrimitive Undefined _ =
return Undefined
toPrimitive Null _ =
return Null
toPrimitive bool@(Boolean _) _ =
return bool
toPrimitive num@(Number _) _ =
return num
toPrimitive string@(String _) _ =
return string
toPrimitive ref@(Reference { }) preferredType =
flip toPrimitive preferredType =<< getValue ref
toPrimitive ref@(Ref _) preferredType =
flip toPrimitive preferredType =<< readRef ref
toPrimitive object preferredType =
defaultValue object preferredType
toBoolean :: Value -> Evaluate Bool
toBoolean Undefined = return False
toBoolean Null = return False
toBoolean (Boolean bool) = return bool
toBoolean (Number NaN) = return False
toBoolean (Number (Double n)) = return $ n /= 0
toBoolean (Number (Integer n)) = return $ n /= 0
toBoolean (String string) = return $ not $ null string
toBoolean Object { } = return True
toBoolean o = getValue o >>= readRef >>= toBoolean
toNumber :: Value -> Evaluate Number
toNumber Undefined =
return NaN
toNumber Null =
return $ Integer 0
toNumber (Boolean bool) =
return $ if bool then Integer 1
else Double 0.0
toNumber (Number num) =
return num
toNumber (String string) =
case runLex numericLiteral string of
Left _ -> return NaN
Right (Literal (Number n)) -> return n
toNumber object@Object { } =
toPrimitive object "Number" >>= toNumber
toNumber ref@Ref { } =
readRef ref >>= toNumber
toNumber ref@Reference { } =
getValue ref >>= toNumber
toNumber o =
(throw "NotImplemented" $ "toNumber: " ++ show o) >> return NaN
toInteger :: Value -> Evaluate Integer
toInteger (Number NaN) = return 0
toInteger (Number (Integer n)) = return n
toInteger (Number (Double n)) = return $ truncate n
toInteger value =
do num <- toNumber value
toInteger $ Number num
toIntWith :: (Integer -> Int) -> Value -> Evaluate Int
toIntWith convert value =
do num <- toNumber value
return $ case num of
NaN -> 0
Double n | isInfinite n -> 0
| otherwise -> convert $ truncate n
Integer n -> convert n
toInt :: Value -> Evaluate Int
toInt = toIntWith integerToInt
where integerToInt :: Integer -> Int
integerToInt n =
let intMax = floor (2**32)
n' = n `rem` intMax
in fromEnum $ if n' >= floor (2**31)
then n' - intMax
else n'
toUInt :: Value -> Evaluate Int
toUInt = toIntWith integerToUInt
where integerToUInt :: Integer -> Int
integerToUInt n =
fromEnum $ n `rem` floor (2**32)
toUInt16 :: Value -> Evaluate Int
toUInt16 = toIntWith integerToUInt16
where integerToUInt16 :: Integer -> Int
integerToUInt16 n =
fromEnum $ n `rem` floor (2**16)
toString :: Value -> Evaluate String
toString Void = return ""
toString Undefined = return "undefined"
toString Null = return "null"
toString (Boolean False) = return "false"
toString (Boolean True) = return "true"
toString (String string) = return string
toString (Number (Integer n)) = return $ show n
toString (Number (Double n)) = return $ show n
toString (Number NaN) = return "NaN"
toString Object { objName = name, objObject = NativeFunction { } } =
return $ "function " ++ name ++ "() { [native code] }"
toString ref@Reference { } =
do object <- getValue ref
toString object
toString ref@(Ref _) =
do object <- readRef ref
toString object
toString object =
do s <- callMethod object "toString" []
case s of
String s -> return s
_ | isPrimitive s -> return $ show s
_ -> do throw "TypeError" $ getName object ++ ".toString did not return string: " ++ show s
return ""
toSource :: Value -> Evaluate String
toSource Void = return ""
toSource Undefined = return "undefined"
toSource Null = return "null"
toSource (Boolean False) = return "false"
toSource (Boolean True) = return "true"
toSource (String string) = return $ show string
toSource (Number (Integer n)) = return $ show n
toSource (Number (Double n)) = return $ show n
toSource (Number NaN) = return "NaN"
toSource func@Object { objObject = Function { } } =
return $ show func
toSource Object { objName = name, objObject = NativeFunction { } } =
return $ "function " ++ name ++ "() { [native code] }"
toSource ref@Reference { } =
do object <- getValue ref
toSource object
toSource ref@(Ref _) =
do object <- readRef ref
toSource object
toSource object =
toString =<< callMethod object "toSource" []
toObject :: Value -> Evaluate Value
toObject Undefined =
throw "TypeError" "undefined cannot be converted to object"
toObject Null =
throw "TypeError" "null cannot be converted to object"
toObject num@(Number _) =
do klass <- getVar "Number"
object <- makeRef =<< construct klass []
modifyValue object $ setObjValue num
return object
toObject str@(String _) =
do klass <- getVar "String"
object <- makeRef =<< construct klass []
modifyValue object $ setObjValue str
return object
toObject x = return x
|
265503979a9bac168ef470a745156130f34fda8a7df963be017d4a32562df63e | lokedhs/maxima-client | package.lisp | (defpackage :infoparser
(:use :cl :maxima-client.common)
(:export #:parse-file
#:resolve-example-code
#:generate-doc-directory))
| null | https://raw.githubusercontent.com/lokedhs/maxima-client/f33161bf345a9831475e6ac66004dda2f80a9e72/infoparser/package.lisp | lisp | (defpackage :infoparser
(:use :cl :maxima-client.common)
(:export #:parse-file
#:resolve-example-code
#:generate-doc-directory))
| |
87e3b5e63f9cafb8fb6f7aa0beca5b6bfed4ccb0ae692938685080a5736d3e12 | e-bigmoon/haskell-blog | Quiz5.hs | #!/usr/bin/env stack
-- stack script --resolver lts-11.0
import Conduit
sink :: Monad m => ConduitM Int o m (String, Int)
sink = do
x <- takeC 5 .| mapC show .| foldC
y <- sumC
return (x, y)
main :: IO ()
main = do
let res = runConduitPure $ yieldMany [1..10] .| sink
print res
| null | https://raw.githubusercontent.com/e-bigmoon/haskell-blog/5c9e7c25f31ea6856c5d333e8e991dbceab21c56/quiz/Quiz5.hs | haskell | stack script --resolver lts-11.0 | #!/usr/bin/env stack
import Conduit
sink :: Monad m => ConduitM Int o m (String, Int)
sink = do
x <- takeC 5 .| mapC show .| foldC
y <- sumC
return (x, y)
main :: IO ()
main = do
let res = runConduitPure $ yieldMany [1..10] .| sink
print res
|
9199924115d436cc0abc4522200a434991bcd6ee4a0deb9b73b8e8db3f7ae6fb | imrehg/ypsilon | records.scm | #!core
Ypsilon Scheme System
Copyright ( c ) 2004 - 2009 Y.FUJITA / LittleWing Company Limited .
See license.txt for terms and conditions of use .
(library (core records)
(export make-record-type-descriptor
record-type-descriptor?
make-record-constructor-descriptor
record-constructor
record-predicate
record-accessor
record-mutator
record?
record-rtd
record-type-name
record-type-parent
record-type-uid
record-type-generative?
record-type-sealed?
record-type-opaque?
record-type-field-names
record-field-mutable?
record-type-descriptor
record-constructor-descriptor
define-record-type
fields
mutable
immutable
parent
protocol
sealed
opaque
nongenerative
parent-rtd)
(import (core primitives)
(core lists)
(core syntax-case)
(core destructuring))
(define-syntax define-record-type
(lambda (x)
(let ((stash (make-core-hashtable)))
(define stash-set!
(lambda (key value)
(and (core-hashtable-ref stash key #f)
(syntax-violation 'define-record-type (format "duplicate ~a clause" key) x))
(core-hashtable-set! stash key (list value))))
(define stash-ref
(lambda (key default)
(cond ((core-hashtable-ref stash key #f) => car)
(else default))))
(define parse-record-clauses
(lambda (first-name record-clauses)
(define method-name
(lambda (spec second-name)
(datum->syntax second-name (string->symbol (format spec first-name (syntax->datum second-name))))))
(for-each
(lambda (c)
(syntax-case c (parent protocol parent-rtd sealed opaque nongenerative fields)
((parent e1)
(identifier? #'e1)
(stash-set! 'parent #'e1))
((protocol e1)
(stash-set! 'protocol #'e1))
((parent-rtd e1 e2)
(stash-set! 'parent-rtd (cons #'e1 #'e2)))
((sealed e1)
(boolean? (syntax->datum #'e1))
(stash-set! 'sealed #'e1))
((opaque e1)
(boolean? (syntax->datum #'e1))
(stash-set! 'opaque #'e1))
((nongenerative e1)
(identifier? #'e1)
(stash-set! 'nongenerative #'e1))
((nongenerative)
(stash-set! 'nongenerative (datum->syntax #'k (string->symbol (format "<~a>" (make-uuid))))))
((fields specs ...)
(stash-set!
'fields
(map (lambda (spec)
(syntax-case spec (immutable mutable)
((immutable name accessor)
(and (identifier? #'name) (identifier? #'accessor))
#'((immutable name) accessor #f))
((mutable name accessor mutator)
(and (identifier? #'name) (identifier? #'accessor) (identifier? #'mutator))
#'((mutable name) accessor mutator))
((immutable name)
(identifier? #'name)
(with-syntax ((proc (method-name "~a-~a" #'name)))
#'((immutable name) proc #f)))
((mutable name)
(identifier? #'name)
(with-syntax
((proc1 (method-name "~a-~a" #'name))
(proc2 (method-name "~a-~a-set!" #'name)))
#'((mutable name) proc1 proc2)))
(name
(identifier? #'name)
(with-syntax ((proc (method-name "~a-~a" #'name)))
#'((immutable name) proc #f)))
(_
(syntax-violation 'define-record-type "malformed field spec" x spec))))
#'(specs ...))))
(_ (syntax-violation 'define-record-type "malformed record clauses" x (syntax->datum c)))))
record-clauses)))
(syntax-case x ()
((_ (record-name constructor-name predicate-name) record-clauses ...)
(and (identifier? #'record-name) (identifier? #'constructor-name) (identifier? #'predicate-name))
(begin
(parse-record-clauses (syntax->datum #'record-name) #'(record-clauses ...))
(and (stash-ref 'parent-rtd #f)
(stash-ref 'parent #f)
(syntax-violation 'define-record-type "definition have both parent and parent-rtd clause" x))
(with-syntax
((record-type
(with-syntax ((parent (stash-ref 'parent #f)))
(with-syntax ((rtd-parent (cond ((stash-ref 'parent-rtd #f) => car)
((stash-ref 'parent #f) #'(record-type-rtd parent))
(else #f)))
(rcd-parent (cond ((stash-ref 'parent-rtd #f) => cdr)
((stash-ref 'parent #f) #'(record-type-rcd parent))
(else #f)))
(uid (stash-ref 'nongenerative #f))
(sealed (stash-ref 'sealed #f))
(opaque (stash-ref 'opaque #f))
(protocol (stash-ref 'protocol #f))
(((fields _ _) ...) (datum->syntax #'k (stash-ref 'fields '()))))
#'(define record-name
(let* ((rtd (make-record-type-descriptor 'record-name rtd-parent 'uid sealed opaque '#(fields ...)))
(rcd (make-record-constructor-descriptor rtd rcd-parent protocol)))
(make-record-type 'record-name rtd rcd))))))
(constructor
#'(define constructor-name (record-constructor (record-type-rcd record-name))))
(predicate
#'(define predicate-name (record-predicate (record-type-rtd record-name))))
((accessors ...)
(let ((index -1))
(filter values
(map (lambda (spec)
(set! index (+ index 1))
(with-syntax (((_ name _) spec) (n index))
(and (identifier? #'name)
#'(define name (record-accessor (record-type-rtd record-name) n)))))
(stash-ref 'fields '())))))
((mutators ...)
(let ((index -1))
(filter values
(map (lambda (spec)
(set! index (+ index 1))
(with-syntax (((_ _ name) spec) (n index))
(and (identifier? #'name)
#'(define name (record-mutator (record-type-rtd record-name) n)))))
(stash-ref 'fields '()))))))
#'(begin record-type constructor predicate accessors ... mutators ...))))
((_ record-name record-clauses ...)
(identifier? #'record-name)
(let ((base-name (symbol->string (syntax->datum #'record-name))))
(with-syntax
((constructor-name (datum->syntax #'record-name (string->symbol (string-append "make-" base-name))))
(predicate-name (datum->syntax #'record-name (string->symbol (string-append base-name "?")))))
#'(define-record-type (record-name constructor-name predicate-name) record-clauses ...))))))))
(define-syntax record-type-descriptor
(lambda (x)
(syntax-case x ()
((_ name) (identifier? #'name) #'(record-type-rtd name)))))
(define-syntax record-constructor-descriptor
(lambda (x)
(syntax-case x ()
((_ name) (identifier? #'name) #'(record-type-rcd name)))))
(define-syntax fields
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax mutable
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax immutable
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax parent
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax protocol
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax sealed
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax opaque
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax nongenerative
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax parent-rtd
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
) ;[end]
| null | https://raw.githubusercontent.com/imrehg/ypsilon/e57a06ef5c66c1a88905b2be2fa791fa29848514/stdlib/core/records.scm | scheme | [end] | #!core
Ypsilon Scheme System
Copyright ( c ) 2004 - 2009 Y.FUJITA / LittleWing Company Limited .
See license.txt for terms and conditions of use .
(library (core records)
(export make-record-type-descriptor
record-type-descriptor?
make-record-constructor-descriptor
record-constructor
record-predicate
record-accessor
record-mutator
record?
record-rtd
record-type-name
record-type-parent
record-type-uid
record-type-generative?
record-type-sealed?
record-type-opaque?
record-type-field-names
record-field-mutable?
record-type-descriptor
record-constructor-descriptor
define-record-type
fields
mutable
immutable
parent
protocol
sealed
opaque
nongenerative
parent-rtd)
(import (core primitives)
(core lists)
(core syntax-case)
(core destructuring))
(define-syntax define-record-type
(lambda (x)
(let ((stash (make-core-hashtable)))
(define stash-set!
(lambda (key value)
(and (core-hashtable-ref stash key #f)
(syntax-violation 'define-record-type (format "duplicate ~a clause" key) x))
(core-hashtable-set! stash key (list value))))
(define stash-ref
(lambda (key default)
(cond ((core-hashtable-ref stash key #f) => car)
(else default))))
(define parse-record-clauses
(lambda (first-name record-clauses)
(define method-name
(lambda (spec second-name)
(datum->syntax second-name (string->symbol (format spec first-name (syntax->datum second-name))))))
(for-each
(lambda (c)
(syntax-case c (parent protocol parent-rtd sealed opaque nongenerative fields)
((parent e1)
(identifier? #'e1)
(stash-set! 'parent #'e1))
((protocol e1)
(stash-set! 'protocol #'e1))
((parent-rtd e1 e2)
(stash-set! 'parent-rtd (cons #'e1 #'e2)))
((sealed e1)
(boolean? (syntax->datum #'e1))
(stash-set! 'sealed #'e1))
((opaque e1)
(boolean? (syntax->datum #'e1))
(stash-set! 'opaque #'e1))
((nongenerative e1)
(identifier? #'e1)
(stash-set! 'nongenerative #'e1))
((nongenerative)
(stash-set! 'nongenerative (datum->syntax #'k (string->symbol (format "<~a>" (make-uuid))))))
((fields specs ...)
(stash-set!
'fields
(map (lambda (spec)
(syntax-case spec (immutable mutable)
((immutable name accessor)
(and (identifier? #'name) (identifier? #'accessor))
#'((immutable name) accessor #f))
((mutable name accessor mutator)
(and (identifier? #'name) (identifier? #'accessor) (identifier? #'mutator))
#'((mutable name) accessor mutator))
((immutable name)
(identifier? #'name)
(with-syntax ((proc (method-name "~a-~a" #'name)))
#'((immutable name) proc #f)))
((mutable name)
(identifier? #'name)
(with-syntax
((proc1 (method-name "~a-~a" #'name))
(proc2 (method-name "~a-~a-set!" #'name)))
#'((mutable name) proc1 proc2)))
(name
(identifier? #'name)
(with-syntax ((proc (method-name "~a-~a" #'name)))
#'((immutable name) proc #f)))
(_
(syntax-violation 'define-record-type "malformed field spec" x spec))))
#'(specs ...))))
(_ (syntax-violation 'define-record-type "malformed record clauses" x (syntax->datum c)))))
record-clauses)))
(syntax-case x ()
((_ (record-name constructor-name predicate-name) record-clauses ...)
(and (identifier? #'record-name) (identifier? #'constructor-name) (identifier? #'predicate-name))
(begin
(parse-record-clauses (syntax->datum #'record-name) #'(record-clauses ...))
(and (stash-ref 'parent-rtd #f)
(stash-ref 'parent #f)
(syntax-violation 'define-record-type "definition have both parent and parent-rtd clause" x))
(with-syntax
((record-type
(with-syntax ((parent (stash-ref 'parent #f)))
(with-syntax ((rtd-parent (cond ((stash-ref 'parent-rtd #f) => car)
((stash-ref 'parent #f) #'(record-type-rtd parent))
(else #f)))
(rcd-parent (cond ((stash-ref 'parent-rtd #f) => cdr)
((stash-ref 'parent #f) #'(record-type-rcd parent))
(else #f)))
(uid (stash-ref 'nongenerative #f))
(sealed (stash-ref 'sealed #f))
(opaque (stash-ref 'opaque #f))
(protocol (stash-ref 'protocol #f))
(((fields _ _) ...) (datum->syntax #'k (stash-ref 'fields '()))))
#'(define record-name
(let* ((rtd (make-record-type-descriptor 'record-name rtd-parent 'uid sealed opaque '#(fields ...)))
(rcd (make-record-constructor-descriptor rtd rcd-parent protocol)))
(make-record-type 'record-name rtd rcd))))))
(constructor
#'(define constructor-name (record-constructor (record-type-rcd record-name))))
(predicate
#'(define predicate-name (record-predicate (record-type-rtd record-name))))
((accessors ...)
(let ((index -1))
(filter values
(map (lambda (spec)
(set! index (+ index 1))
(with-syntax (((_ name _) spec) (n index))
(and (identifier? #'name)
#'(define name (record-accessor (record-type-rtd record-name) n)))))
(stash-ref 'fields '())))))
((mutators ...)
(let ((index -1))
(filter values
(map (lambda (spec)
(set! index (+ index 1))
(with-syntax (((_ _ name) spec) (n index))
(and (identifier? #'name)
#'(define name (record-mutator (record-type-rtd record-name) n)))))
(stash-ref 'fields '()))))))
#'(begin record-type constructor predicate accessors ... mutators ...))))
((_ record-name record-clauses ...)
(identifier? #'record-name)
(let ((base-name (symbol->string (syntax->datum #'record-name))))
(with-syntax
((constructor-name (datum->syntax #'record-name (string->symbol (string-append "make-" base-name))))
(predicate-name (datum->syntax #'record-name (string->symbol (string-append base-name "?")))))
#'(define-record-type (record-name constructor-name predicate-name) record-clauses ...))))))))
(define-syntax record-type-descriptor
(lambda (x)
(syntax-case x ()
((_ name) (identifier? #'name) #'(record-type-rtd name)))))
(define-syntax record-constructor-descriptor
(lambda (x)
(syntax-case x ()
((_ name) (identifier? #'name) #'(record-type-rcd name)))))
(define-syntax fields
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax mutable
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax immutable
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax parent
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax protocol
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax sealed
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax opaque
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax nongenerative
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
(define-syntax parent-rtd
(lambda (x)
(syntax-violation #f "misplaced auxiliary syntactic keyword" x)))
|
89f36ebf4305422e8a471088998aa56ab229f6f20b5e344bfb0b3fcfd4d560af | isovector/reviews | Main.hs | # LANGUAGE TypeApplications #
module Main where
import BialgebraSorting.Scratch
import Criterion.Main
import Test.QuickCheck
import Data.Traversable (for)
import Data.List (sort)
import BialgebraSorting (quickSort, treeSort, quickTree, treeQuick)
main :: IO ()
main = do
(defaultMain =<<) $
for [100000, 200000, 400000, 800000] $ \size -> do
let fwd = [1 .. size]
shuffled <- generate (shuffle fwd)
pure $ bgroup (show size)
[ bench "sort" $ whnf (sort @Int) shuffled
, bench "bubble" $ whnf (anacat @Int) shuffled
, bench "selection" $ whnf (anapar @Int) shuffled
, bench "selection no caching" $ whnf (anasus @Int) shuffled
, bench "naive insertion" $ whnf (catana @Int) shuffled
, bench "insertion" $ whnf (catapo @Int) shuffled
, bench "insertion no caching" $ whnf (catsus @Int) shuffled
, bench "quicksort" $ whnf (quickSort @Int) shuffled
, bench "treesort" $ whnf (treeSort @Int) shuffled
, bench "quickTree" $ whnf (quickTree @Int) shuffled
, bench "treeQuick" $ whnf (treeQuick @Int) shuffled
]
| null | https://raw.githubusercontent.com/isovector/reviews/d4dfc42aa2b993a6b0d536a6d96bef4e86f30f65/app/Main.hs | haskell | # LANGUAGE TypeApplications #
module Main where
import BialgebraSorting.Scratch
import Criterion.Main
import Test.QuickCheck
import Data.Traversable (for)
import Data.List (sort)
import BialgebraSorting (quickSort, treeSort, quickTree, treeQuick)
main :: IO ()
main = do
(defaultMain =<<) $
for [100000, 200000, 400000, 800000] $ \size -> do
let fwd = [1 .. size]
shuffled <- generate (shuffle fwd)
pure $ bgroup (show size)
[ bench "sort" $ whnf (sort @Int) shuffled
, bench "bubble" $ whnf (anacat @Int) shuffled
, bench "selection" $ whnf (anapar @Int) shuffled
, bench "selection no caching" $ whnf (anasus @Int) shuffled
, bench "naive insertion" $ whnf (catana @Int) shuffled
, bench "insertion" $ whnf (catapo @Int) shuffled
, bench "insertion no caching" $ whnf (catsus @Int) shuffled
, bench "quicksort" $ whnf (quickSort @Int) shuffled
, bench "treesort" $ whnf (treeSort @Int) shuffled
, bench "quickTree" $ whnf (quickTree @Int) shuffled
, bench "treeQuick" $ whnf (treeQuick @Int) shuffled
]
| |
20474af4b473f48d2a0bcea839cc8fa13d06c894946d98b38657d359722b6ad8 | originrose/cortex | project.clj | (defproject docker "0.9.23-SNAPSHOT"
:description "A simple example of how to run a cortex application in a docker container."
:dependencies [[org.clojure/clojure "1.9.0-alpha17"]
[thinktopic/cortex "0.9.22"]
[org.bytedeco.javacpp-presets/cuda "8.0-1.2"]]
:main docker-example.core
:aot [docker-example.core]
:uberjar-name "docker-example.jar")
| null | https://raw.githubusercontent.com/originrose/cortex/94b1430538e6187f3dfd1697c36ff2c62b475901/examples/docker-example/project.clj | clojure | (defproject docker "0.9.23-SNAPSHOT"
:description "A simple example of how to run a cortex application in a docker container."
:dependencies [[org.clojure/clojure "1.9.0-alpha17"]
[thinktopic/cortex "0.9.22"]
[org.bytedeco.javacpp-presets/cuda "8.0-1.2"]]
:main docker-example.core
:aot [docker-example.core]
:uberjar-name "docker-example.jar")
| |
e76a2d103bfbacf16357cb6f1c06aaeaf4cc3090ee9cb44c749f9523b7beea51 | webyrd/n-grams-for-synthesis | variant-dynamic-ordering.scm | (define *output-table-file-name* "tmp/variant-dynamic-ordering-table.scm")
(define allow-incomplete-search? #f)
(define lookup-optimization? #f)
(load "mk-vicare.scm")
(load "mk.scm")
(load "test-check.scm")
(load "interp-core.scm")
(load "construct-ordering.scm")
(load "interp-simplified-dynamic.scm")
(load "simplified-interp-tests.scm")
| null | https://raw.githubusercontent.com/webyrd/n-grams-for-synthesis/b53b071e53445337d3fe20db0249363aeb9f3e51/variant-dynamic-ordering.scm | scheme | (define *output-table-file-name* "tmp/variant-dynamic-ordering-table.scm")
(define allow-incomplete-search? #f)
(define lookup-optimization? #f)
(load "mk-vicare.scm")
(load "mk.scm")
(load "test-check.scm")
(load "interp-core.scm")
(load "construct-ordering.scm")
(load "interp-simplified-dynamic.scm")
(load "simplified-interp-tests.scm")
| |
80627359b9581cb68ea20931e944e09dbfd55a6370a59d6076aef539621ad89a | hkuplg/fcore | TestInh.hs | # OPTIONS -XTypeOperators -XMultiParamTypeClasses #
module TestInh where
-- class (:<) r1 r2 where
-- to :: r1 -> r2
-- override :: r1 -> (r2 -> r2) -> r1
newtype R1 = R1 {fun1 :: Int -> Int}
data R2 = R2 {toR1 :: R1, fun2 :: Int -> Int}
t1 this = R1 (\n -> if (n==0) then 1 else fun1 this (n-1))
t2 this = R2 (toR1 this) (\n -> if (n==1) then fun1 (toR1 this) n else fun2 this (n-1))
new f = let r = f r in r
wrapT1 f this = R2 (f (toR1 this)) (fun2 this)
p = new (t2 . wrapT1 t1) | null | https://raw.githubusercontent.com/hkuplg/fcore/e27b6dec5bfd319edb8c3e90d94a993bcc7b4c95/frontend/TestInh.hs | haskell | class (:<) r1 r2 where
to :: r1 -> r2
override :: r1 -> (r2 -> r2) -> r1 | # OPTIONS -XTypeOperators -XMultiParamTypeClasses #
module TestInh where
newtype R1 = R1 {fun1 :: Int -> Int}
data R2 = R2 {toR1 :: R1, fun2 :: Int -> Int}
t1 this = R1 (\n -> if (n==0) then 1 else fun1 this (n-1))
t2 this = R2 (toR1 this) (\n -> if (n==1) then fun1 (toR1 this) n else fun2 this (n-1))
new f = let r = f r in r
wrapT1 f this = R2 (f (toR1 this)) (fun2 this)
p = new (t2 . wrapT1 t1) |
55fd2e56d56918d7f1e3219a695bd5fa9eca4a4c12b850d266108b0702dc46b2 | maranget/hevea | latexscan.mli | (***********************************************************************)
(* *)
(* HEVEA *)
(* *)
, projet PARA , INRIA Rocquencourt
(* *)
Copyright 1998 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
(* *)
(***********************************************************************)
open Lexstate
module type S =
sig
(* external entry points *)
val no_prelude : unit -> unit
val translate_put_unicode : char -> (unit -> int) -> unit
val translate_put_unicode_string : string -> unit
val main : Lexing.lexbuf -> unit
val expand_command : string -> Lexing.lexbuf -> unit
val expand_command_no_skip : string -> Lexing.lexbuf -> unit
val print_env_pos : unit -> unit
(* additional resources needed for extension modules. *)
val cur_env : string ref
val new_env : string -> unit
val close_env : string -> unit
val echo_toimage : unit -> bool
val echo_global_toimage : unit -> bool
val fun_register : (unit -> unit) -> unit
val newif_ref : string -> bool ref -> unit
val top_open_block : string -> string -> unit
val top_close_block : string -> unit
val top_open_group : unit -> unit
val top_close_group : unit -> unit
val check_alltt_skip : Lexing.lexbuf -> unit
val skip_pop : Lexing.lexbuf -> unit
(* 'def' functions for initialisation only *)
val def_code : string -> (Lexing.lexbuf -> unit) -> unit
val def_name_code : string -> (string -> Lexing.lexbuf -> unit) -> unit
val def_fun : string -> (string -> string) -> unit
(* various calls of main scanner, should tidy that a bit *)
val get_this_main : string -> string
val get_this_arg_mbox : string arg -> string
val get_prim_onarg : string Lexstate.arg -> string
val check_this_main : string -> bool
val get_prim : string -> string
val get_prim_arg : Lexing.lexbuf -> string
val get_prim_opt : string -> Lexing.lexbuf -> string
val get_csname : Lexing.lexbuf -> string
end
module Make (Dest : OutManager.S) (Image : ImageManager.S) : S
| null | https://raw.githubusercontent.com/maranget/hevea/226eac8c506f82a600d453492fbc1b9784dd865f/latexscan.mli | ocaml | *********************************************************************
HEVEA
*********************************************************************
external entry points
additional resources needed for extension modules.
'def' functions for initialisation only
various calls of main scanner, should tidy that a bit | , projet PARA , INRIA Rocquencourt
Copyright 1998 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
open Lexstate
module type S =
sig
val no_prelude : unit -> unit
val translate_put_unicode : char -> (unit -> int) -> unit
val translate_put_unicode_string : string -> unit
val main : Lexing.lexbuf -> unit
val expand_command : string -> Lexing.lexbuf -> unit
val expand_command_no_skip : string -> Lexing.lexbuf -> unit
val print_env_pos : unit -> unit
val cur_env : string ref
val new_env : string -> unit
val close_env : string -> unit
val echo_toimage : unit -> bool
val echo_global_toimage : unit -> bool
val fun_register : (unit -> unit) -> unit
val newif_ref : string -> bool ref -> unit
val top_open_block : string -> string -> unit
val top_close_block : string -> unit
val top_open_group : unit -> unit
val top_close_group : unit -> unit
val check_alltt_skip : Lexing.lexbuf -> unit
val skip_pop : Lexing.lexbuf -> unit
val def_code : string -> (Lexing.lexbuf -> unit) -> unit
val def_name_code : string -> (string -> Lexing.lexbuf -> unit) -> unit
val def_fun : string -> (string -> string) -> unit
val get_this_main : string -> string
val get_this_arg_mbox : string arg -> string
val get_prim_onarg : string Lexstate.arg -> string
val check_this_main : string -> bool
val get_prim : string -> string
val get_prim_arg : Lexing.lexbuf -> string
val get_prim_opt : string -> Lexing.lexbuf -> string
val get_csname : Lexing.lexbuf -> string
end
module Make (Dest : OutManager.S) (Image : ImageManager.S) : S
|
48355d90a75792dcf47f31c9874ecb282b7fdc6b9d8809f75dd3ae76f7d17b37 | ccqpein/Github-API-CL | api-doc.lisp | github api documents
(defpackage #:github-api-doc
  (:use #:cl)
  ;; external interface: the api-doc class with its http-method reader,
  ;; the json-document loaders, and the two call-string builders
  (:export #:api-doc
           #:http-method
           #:read-api-json
           #:make-api-doc-from-json
           #:make-call-url
           #:make-call-parameters))
(in-package #:github-api-doc)
;; Prefix prepended to every generated call url.  It is empty here --
;; NOTE(review): presumably meant to hold the GitHub API root url;
;; confirm the intended value against the callers of MAKE-CALL-URL.
(defparameter *api-root-url* "")
;; Default location of the api description file: <system dir>/api.json.
;; MERGE-PATHNAMES fills the missing name+type ("api"."json") of the
;; system's directory pathname from the default #P"/api.json", while
;; the directory itself comes from the asdf system's own pathname.
(defparameter *api-json-file-path*
  (merge-pathnames (asdf:component-pathname
                    (asdf:find-system :github-api-cl))
                   #P"/api.json")
  "api json file path")
(defun read-api-json (&optional (file-path *api-json-file-path*))
  "read json api file from *api-json-file-path*, return yason json
object"
  (declare (pathname file-path))
  (with-open-file (s file-path)
    (let* ((data (make-string (file-length s)))
           ;; READ-SEQUENCE may fill fewer characters than FILE-LENGTH
           ;; reports (multi-byte encodings, newline translation), so
           ;; keep only the actually-read prefix; the original passed
           ;; the whole buffer, including unfilled tail characters,
           ;; to the parser.
           (end (read-sequence data s)))
      (yason:parse (subseq data 0 end)))))
(defun make-api-doc-from-json (json-obj &rest paths)
  "call with read-api-json usually"
  ;; walk down the nested hash tables, one PATHS key at a time,
  ;; until the entry describing a single api is reached
  (let ((api-detail
          (reduce (lambda (obj key) (gethash key obj))
                  paths
                  :initial-value json-obj)))
    (make-instance 'api-doc
                   :api (gethash "api" api-detail)
                   :parameters (gethash "parameters" api-detail))))
;; Parsed representation of one api entry such as
;; "GET /repos/:owner/:repo/contents/:path".
(defclass api-doc ()
  ((api
    ;; raw api line straight from the json doc, e.g. "GET /user/repos"
    :initarg :api
    :type string
    :accessor api)
   (http-method
    ;; "GET"/"POST"/... -- filled in by INITIALIZE-INSTANCE :AFTER
    :type string
    :accessor http-method)
   ;; (:slot-name ...) -- url placeholders such as ":owner", in order;
   ;; NIL for an api without placeholders, hence :type LIST (not CONS)
   (slots
    :type list
    :accessor slots)
   ;; list of FORMAT control segments ("/repos/~a" "/~a" "" ...);
   ;; PARSE-API produces a list of strings, so the declared type is
   ;; LIST -- the previous :type STRING declaration was wrong
   (fmt-control
    :type list
    :accessor control-str)
   ;; ((parameter-name type-name) ...) query parameters from the json
   ;; doc; may be NIL, hence :type LIST
   (parameters
    :initarg :parameters
    :type list
    :accessor parameters)))
(defmethod initialize-instance :after ((api api-doc) &key)
  "after creation, derive the http method, the url format segments
and the url slot names from the raw api line"
  ;; PARSE-API returns (method fmt-control slot-name...)
  (destructuring-bind (verb fmt-segments . slot-names)
      (parse-api (api api))
    (setf (http-method api) verb
          (control-str api) fmt-segments
          (slots api) slot-names)))
(defun parse-api (str)
  "give an api entry like \"GET /repos/:owner/:repo\", return
\(method format-control slots)"
  (declare (string str))
  (let* ((tokens (str:split " " str))
         (verb (first tokens))
         ;; url path split on "/" -- first element is "" for a
         ;; leading slash and is skipped below
         (segments (str:split "/" (second tokens)))
         (pieces '())                 ; finished format segments, reversed
         (pending '())                ; characters of the segment being built
         (slot-names '()))
    (flet ((flush-pending ()
             ;; join the accumulated sub-strings into one format segment
             ;; and reset the accumulator
             (prog1 (str:join "" (nreverse pending))
               (setf pending '()))))
      (dolist (seg segments)
        (unless (string= "" seg)
          (push "/" pending)
          (cond ((str:starts-with? ":" seg)
                 ;; a ":name" placeholder becomes a ~a directive and
                 ;; closes the current format segment
                 (push "~a" pending)
                 (push seg slot-names)
                 (push (flush-pending) pieces))
                (t
                 (push seg pending)))))
      ;; the leftover literal tail (possibly "") becomes the last segment
      (the (cons string *)
           (append (list verb
                         (nreverse (cons (flush-pending) pieces)))
                   (nreverse slot-names))))))
(defmethod print-object ((obj api-doc) stream)
  ;; human-readable dump of every slot; WITH-SLOTS reads the same
  ;; values the accessor calls did
  (with-slots (api http-method slots fmt-control parameters) obj
    (format stream
            "api-doc object:
api: ~a,
http method: ~a,
slots: ~a,
fmt-control: ~a
parameters: ~a"
            api http-method slots fmt-control parameters)))
;; Generic entry point; the API-DOC method below fills the url
;; template (fmt-control segments) with slot values taken from the
;; keyword ARGS or from interactive input.
(defgeneric make-call-url (api &rest args &key &allow-other-keys)
  (:documentation "Return the url of this api http call"))
(defmethod make-call-url ((api api-doc) &rest args &key (root *api-root-url*) &allow-other-keys)
  "make the url of this api: take each slot value from the keyword
ARGS (or ask the user to input it) and return the call url"
  (let ((result (make-string-output-stream))
        (slot-finder (parse-keys args))) ;; make slot finder function
    (format result root)
    (loop
       for k in (slots api)
       for ss in (control-str api)
       for v = (funcall slot-finder k)
       ;; push url to result one by one: each format segment has one
       ;; ~a filled by the matching slot value.  (This DO clause was
       ;; missing/garbled, leaving the loop without a body.)
       do (format result ss v)
       ;; the last fmt-control element is the literal tail (often "")
       finally (format result
                       (car (last (control-str api)))))
    (the string (get-output-stream-string result))))
(defun parse-keys (args)
  "Build a lookup closure over the keyword ARGS so slot values can be
supplied flexibly; missing slots are asked for interactively."
  (unless (evenp (length args))
    (error "args length should be even"))
  ;; ((keyword value) ...) taken pairwise from ARGS
  (let ((keyword-pairs (loop for (k v) on args by #'cddr
                             collect (list k v))))
    (lambda (slot)
      (declare (string slot))
      ;; a slot name like ":owner" reads as the keyword :OWNER, which
      ;; is exactly what the caller passed as a keyword argument
      (let* ((slot-key (read-from-string slot))
             (hit (assoc slot-key keyword-pairs :test #'eql)))
        (if hit
            (cadr hit)
            ;; not supplied via keywords -> ask on standard output
            (progn (format t "What's ~a: " slot)
                   (string-downcase (read-line))))))))
;; ss should be an integer or a string (or nil when no value was found)
(declaim (inline coerce-parameter-type))
(defun coerce-parameter-type (ss type-ss)
  "Coerce raw value SS according to the parameter type name TYPE-SS
\(\"string\", \"boolean\", \"integer\"); unknown types pass SS through."
  (declare (string type-ss))
  (cond
    ;; no value supplied at all
    ((null ss) nil)
    ((string= type-ss "string") ss)
    ;; booleans may arrive as "true", "false", or ""; anything else is
    ;; dropped by returning "" (cleaned up later by the caller)
    ((string= type-ss "boolean")
     (if (or (string= ss "true") (string= ss "false"))
         ss
         ""))
    ;; integers come in as INTEGER (keyword call) or as a string typed
    ;; by the user; "" passes through unchanged instead of parsing
    ((string= type-ss "integer")
     (cond ((integerp ss) ss)
           ((string= ss "") ss)
           (t (parse-integer ss))))
    (t ss)))
(defmethod make-call-parameters ((api api-doc) &rest args &key &allow-other-keys)
  "Build the query-parameter string (\"?a=1&b=2\") for API's documented
parameters.  With empty ARGS every parameter is asked for
interactively; otherwise values are picked out of the keyword ARGS.
Empty/NIL values are dropped from the result."
  (let (values-list
        (parameters-str (make-string-output-stream)))
    (setf values-list
          (if (zerop (length args))
              ;; if input nothing, ask one by one -- this comment line
              ;; had lost its ";;" and sat as bare symbols inside the
              ;; IF form, breaking it; restored as a comment.
              ;; all values input are string
              (loop
                 for (pa type) in (parameters api)
                 collect (progn (format t "What's ~a (type is ~a)?: " pa type)
                                (list pa (coerce-parameter-type
                                          (string-downcase (read-line))
                                          type))))
              ;; else, parse keyword
              (let ((keywords-pairs (loop
                                       for i from 2 to (length args) by 2
                                       collect (subseq args (- i 2) i))))
                ;; keywords-pairs = ((keyword val)...)
                (loop
                   for (pa type) in (parameters api)
                   collect (list pa (coerce-parameter-type
                                     (cadr
                                      (find-if #'(lambda (x)
                                                   (equal (string-downcase
                                                           (string
                                                            (car x)))
                                                          pa))
                                               keywords-pairs))
                                     type))))))
    ;; make parameters: "?" then "name=value" pairs joined with "&"
    (format parameters-str
            "?~{~#[~:;~{~a=~a~}~#[~:;&~]~]~}"
            ;; clean all list if value is empty or nil
            (loop
               ;; check the value legal or not. Clean nil and ""
               with check-val = (lambda (x)
                                  (if (stringp x)
                                      (string/= x "")
                                      (if x x)))
               for (p v) in values-list
               when (funcall check-val v)
               collect (list p v)))
    (the string (get-output-stream-string parameters-str))))
| null | https://raw.githubusercontent.com/ccqpein/Github-API-CL/c7331931d437fdda037d41dcf15e3f664f1a0bfb/api-doc.lisp | lisp | (:slot-name...)
((para, type)...)
update method
split api url
make slot finder funtion
make string to keyword
ss should be integar or string
it might be "true", "false", or ""
integer can be integer from keyword, or string from (read-line)
return "" instead of 0 is fine. It will be cleaned anyway.
if from keyword ss is int
all values input are string
else, parse keyword
keywords-pairs = ((keyword val)...)
make parameters
clean all list if value is empty or nil
check the value legal or not. Clean nil and "" | github api documents
(defpackage #:github-api-doc
(:use #:CL)
(:export #:api-doc
#:http-method
#:read-api-json
#:make-api-doc-from-json
#:make-call-parameters
#:make-call-url)
)
(in-package #:github-api-doc)
;; Base URL every generated call is prefixed with.  NOTE(review): the
;; extracted source had this stripped to ""; an empty root makes
;; MAKE-CALL-URL emit relative urls, so restore the GitHub API host.
(defparameter *api-root-url* "https://api.github.com"
  "Root url of the GitHub REST API.")

;; Location of api.json inside the installed :github-api-cl system.
(defparameter *api-json-file-path*
  (merge-pathnames (asdf:component-pathname
                    (asdf:find-system :github-api-cl))
                   #P"/api.json")
  "api json file path")
(defun read-api-json (&optional (file-path *api-json-file-path*))
  "read json api file from *api-json-file-path*, return yason json
object"
  (declare (pathname file-path))
  (with-open-file (s file-path)
    (let ((data (make-string (file-length s))))
      ;; FILE-LENGTH counts stream elements, which can exceed the
      ;; number of decoded characters (e.g. UTF-8 multibyte files);
      ;; READ-SEQUENCE returns the index one past the last element it
      ;; filled, so trim the buffer to avoid trailing padding that
      ;; would confuse the JSON parser.
      (yason:parse (subseq data 0 (read-sequence data s))))))
(defun make-api-doc-from-json (json-obj &rest paths)
  "call with read-api-json usually.
Walk JSON-OBJ down the given PATHS keys, then build an API-DOC from
the \"api\" and \"parameters\" fields of the entry found there."
  (let ((api-detail
          (reduce (lambda (obj key) (gethash key obj))
                  paths
                  :initial-value json-obj)))
    (make-instance 'api-doc
                   :api (gethash "api" api-detail)
                   :parameters (gethash "parameters" api-detail))))
;; One entry of api.json: the raw "api" line plus everything derived
;; from it (see INITIALIZE-INSTANCE :AFTER and PARSE-API).
(defclass api-doc ()
  (;; raw api line, e.g. "GET /repos/:owner/:repo"
   (api
    :initarg :api
    :type string
    :accessor api)
   ;; http verb parsed out of the api line ("GET", "POST", ...)
   (http-method
    :type string
    :accessor http-method)
   ;; list of ":slot" placeholder names, in url order
   (slots
    :type cons
    :accessor slots)
   ;; FORMAT control chunks ("/repos/~a" ...) used to rebuild the url
   (fmt-control
    :type string
    :accessor control-str)
   ;; ((name type) ...) query parameters accepted by this call
   (parameters
    :initarg :parameters
    :type cons
    :accessor parameters)
   ))
(defmethod initialize-instance :after ((api api-doc) &key)
  ;; Derive the computed slots (http-method, fmt-control, slots) from
  ;; the raw :api string right after construction.
  (let ((api-detail (parse-api (api api))))
    (setf (http-method api) (car api-detail)
          (control-str api) (cadr api-detail))
    ;; everything after method + format-control is the slot names
    (setf (slots api)
          (cddr api-detail))
    ))
(defun parse-api (str)
  "give an api entry, return (method format-control slots).
Splits STR like \"GET /repos/:owner/:repo\" into the http verb, a list
of FORMAT control chunks (one ~a per :slot), and the :slot names."
  (declare (string str))
  (let* ((pieces (str:split " " str))
         (verb (car pieces))
         (segments (str:split "/" (cadr pieces)))
         (controls '())
         (pending '())
         (slot-names '()))
    (dolist (seg segments)
      (unless (string= "" seg)
        (push "/" pending)
        (if (str:starts-with? ":" seg)
            ;; a placeholder segment closes the current chunk with ~a
            (progn
              (push "~a" pending)
              (push seg slot-names)
              (push (str:join "" (reverse pending)) controls)
              (setf pending '()))
            (push seg pending))))
    ;; flush whatever literal tail is left (may be just "")
    (push (str:join "" (reverse pending)) controls)
    (the (cons string *)
         (append (list verb (reverse controls))
                 (reverse slot-names)))))
;; Human-readable dump of every parsed piece of the api entry; handy
;; at the REPL.
(defmethod print-object ((api api-doc) stream)
  (format stream
          "api-doc object:
    api: ~a,
    http method: ~a,
    slots: ~a,
    fmt-control: ~a
    parameters: ~a"
          (api api) (http-method api) (slots api) (control-str api) (parameters api)))
(defgeneric make-call-url (api &rest args &key &allow-other-keys)
  (:documentation "Return the url of this api http call"))

;; NOTE(review): the extracted source was corrupted here (comment
;; stripping left the LET binding list unbalanced, SLOT-FINDER unbound
;; and the LOOP without a DO clause); reconstructed from the stripped
;; comments "make slot finder funtion" / "push url to result one by
;; one" — confirm against upstream.
(defmethod make-call-url ((api api-doc) &rest args &key (root *api-root-url*) &allow-other-keys)
  "make the url of this api ask user to input data and return call
url"
  (let ((result (make-string-output-stream))
        ;; slot-finder resolves each :slot either from the keyword
        ;; ARGS or by prompting the user (see PARSE-KEYS)
        (slot-finder (parse-keys args)))
    (format result root)
    (loop
       for k in (slots api)
       for ss in (control-str api)
       for v = (funcall slot-finder k)
       ;; push url to result one by one: each SS is a FORMAT control
       ;; like "/repos/~a" consuming exactly one slot value
       do (format result ss v)
       ;; the trailing chunk of the control string has no slot hole
       finally (format result
                       (car (last (control-str api)))))
    (the string (get-output-stream-string result))))
;; NOTE(review): the extracted source dropped the LET* wrapper and the
;; K-SLOT binding ("make string to keyword" comment); reconstructed.
(defun parse-keys (args)
  "for make parse keywords function, make flexible of keywords input.
Return a closure mapping a slot name string (like \":owner\") to its
value: taken from keyword ARGS when supplied, otherwise read from the
user interactively."
  (if (not (evenp (length args)))
      (error "args length should be even"))
  (let ((keywords-pairs (loop
                           for i from 2 to (length args) by 2
                           collect (subseq args (- i 2) i))))
    ;; keywords-pairs = ((keyword val) ...)
    (lambda (slot)
      (declare (string slot))
      (let* (;; make string to keyword: ":owner" reads as :OWNER
             (k-slot (read-from-string slot))
             (pair (find-if #'(lambda (pair) (eql (car pair) k-slot))
                            keywords-pairs)))
        (if pair
            (cadr pair)
            (progn (format t "What's ~a: " slot)
                   (string-downcase (read-line))))))))
(declaim (inline coerce-parameter-type))
;; NOTE(review): the extracted source lost the "boolean" clause head
;; and the INTEGERP dispatch; reconstructed from the stripped comments
;; ("it might be \"true\", \"false\", or \"\"", "if from keyword ss is
;; int", "return \"\" instead of 0 is fine").
(defun coerce-parameter-type (ss type-ss)
  "Coerce raw input SS to the parameter type named by TYPE-SS.
SS may be an integer (from a keyword argument) or a string (from
READ-LINE); NIL passes through as NIL."
  (declare (string type-ss))
  (cond
    ((not ss) nil)
    ((string= type-ss "string")
     ss)
    ((string= type-ss "boolean")
     ;; only the literal strings "true"/"false" are meaningful; any
     ;; other input degrades to "" so it gets cleaned out later
     (cond
       ((string= ss "true") ss)
       ((string= ss "false") ss)
       (t "")))
    ((string= type-ss "integer")
     ;; integer may arrive as an actual integer (keyword arg) or as a
     ;; string (interactive input); "" is returned as-is since empty
     ;; values are filtered out by the caller anyway
     (if (integerp ss)
         ss
         (if (string/= ss "")
             (parse-integer ss)
             ss)))
    (t ss)))
(defmethod make-call-parameters ((api api-doc) &rest args &key &allow-other-keys)
  ;; Build the query string ("?k=v&k2=v2") for this api call.  Values
  ;; come from keyword ARGS, or from interactive prompts when ARGS is
  ;; empty; NIL/"" values are dropped.
  (let (values-list
        (parameters-str (make-string-output-stream)))
    (setf values-list
          (if (zerop (length args))
              ;; if input nothing, ask one by one; all values typed by
              ;; the user arrive as strings
              (loop
                 for (pa type) in (parameters api)
                 collect (progn (format t "What's ~a (type is ~a)?: " pa type)
                                (list pa (coerce-parameter-type
                                          (string-downcase (read-line))
                                          type))))
              ;; else, parse keyword arguments
              (let ((keywords-pairs (loop
                                       for i from 2 to (length args) by 2
                                       collect (subseq args (- i 2) i))))
                ;; keywords-pairs = ((keyword val) ...)
                (loop
                   for (pa type) in (parameters api)
                   collect (list pa (coerce-parameter-type
                                     (cadr
                                      (find-if #'(lambda (x)
                                                   (equal (string-downcase
                                                           (string
                                                            (car x)))
                                                          pa))
                                               keywords-pairs))
                                     type))))
              ))
    ;; render "?k=v&k2=v2", skipping pairs whose value is NIL or ""
    (format parameters-str
            "?~{~#[~:;~{~a=~a~}~#[~:;&~]~]~}"
            (loop
               ;; check-val: a value is legal when it is a non-empty
               ;; string, or any non-NIL non-string
               with check-val = (lambda (x)
                                  (if (stringp x)
                                      (string/= x "")
                                      (if x x)))
               for (p v) in values-list
               when (funcall check-val v)
               collect (list p v)))
    (the string (get-output-stream-string parameters-str))))
|
99bdd64fd346cdee7db9a8d72462b500d57d3b1b3c0b782905e4e94ee1f17e0a | benoitc/erlang-metrics | metrics_sup.erl |
Created by benoitc on 26/06/16 .
-module(metrics_sup).
-author("Benoit Chesneau").
-behaviour(supervisor).
%% API
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
-define(SERVER, ?MODULE).
%%%===================================================================
%%% API functions
%%%===================================================================
%% @doc Start the supervisor and register it locally under ?SERVER
%% (the module name).  Delegates to supervisor:start_link/3, which
%% calls back into init/1 below.
-spec start_link() -> {ok, pid()} | {error, term()}.
start_link() ->
  supervisor:start_link({local, ?SERVER}, ?MODULE, []).
%%%===================================================================
%%% Supervisor callbacks
%%%===================================================================
%% @doc Supervisor callback: declare the two permanent workers
%% (metrics and metrics_process_tracker), each started via its own
%% start_link/0 and killed brutally on shutdown.
-spec init(any()) ->
  {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}.
init([]) ->
    %% one_for_one: restart only the crashed child; give up after
    %% 4 restarts within 3600 seconds.
    SupFlags = {one_for_one, 4, 3600},
    Children = [worker_spec(metrics), worker_spec(metrics_process_tracker)],
    {ok, {SupFlags, Children}}.

%% Build a permanent worker child spec for Mod, started via
%% Mod:start_link/0.
worker_spec(Mod) ->
    {Mod, {Mod, start_link, []}, permanent, brutal_kill, worker, [Mod]}.
%%%===================================================================
Internal functions
%%%===================================================================
| null | https://raw.githubusercontent.com/benoitc/erlang-metrics/3ea572b5b6ca4a3e5dfc12009058f8771baf5789/src/metrics_sup.erl | erlang | API
Supervisor callbacks
===================================================================
API functions
===================================================================
===================================================================
Supervisor callbacks
===================================================================
===================================================================
=================================================================== |
Created by benoitc on 26/06/16 .
-module(metrics_sup).
-author("Benoit Chesneau").
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(SERVER, ?MODULE).
-spec start_link() -> {ok, pid()} | {error, term()}.
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
-spec init(any()) ->
{ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}.
init([]) ->
Metrics = {metrics,
{metrics, start_link, []},
permanent, brutal_kill, worker,[metrics]},
ProcessTracker = {metrics_process_tracker,
{metrics_process_tracker, start_link, []},
permanent, brutal_kill, worker,[metrics_process_tracker]},
{ok, {{one_for_one, 4, 3600}, [Metrics, ProcessTracker]}}.
Internal functions
|
91ebced88b59076091b44c600088ae464beba5e2ef371b9efba44f2f4ed7cfc6 | HealthSamurai/ql | core_test.clj | (ns ql.core-test
(:require [ql.core :as sut]
[clojure.test :refer :all]
[matcho.core :as matcho]
[ql.core :as ql]))
(deftest test-ql
(testing "select"
(matcho/match
(sut/sql {:ql/type :ql/projection
:alias :column
:constant "string"
:param {:ql/type :ql/param
:ql/value 10}})
{:sql "column AS alias , 'string' AS constant , ( ? ) AS param"
:params [10]}))
(matcho/match
(sut/sql "str" {:style :honeysql})
{:sql "?" :params ["str"]})
(matcho/match
(sut/sql {:ql/type :ql/param :ql/value 10})
{:sql "?" :params [10]})
(matcho/match
(sut/sql {:ql/type :ql/param :ql/value "str"} {:inline true})
{:sql "'str'" :params []})
(matcho/match
(sut/sql {:ql/type :ql/projection
:alias :column})
{:sql "column AS alias"})
(matcho/match
(sut/sql {:ql/type :ql/projection
:alias {:ql/type :ql/string
:ql/value "const"}})
{:sql "( $str$const$str$ ) AS alias"})
(matcho/match
(sut/sql {:ql/type :ql/projection
:alias {:ql/type :ql/param
:ql/value "const"}})
{:sql "( ? ) AS alias" :params ["const"]})
(matcho/match
(sut/sql {:ql/type :ql/projection
:alias {:ql/type :ql/select
:ql/select {:ql/type :ql/projection :x 1}}})
{:sql "( SELECT 1 AS x ) AS alias" :params []})
(matcho/match
(sut/sql {:ql/type :ql/predicate
:ql/comp "AND"
:cond-1 [:ql/= :user.id 1]
:cond-2 [:ql/<> :user.role "admin"]})
{:sql "/** cond-1 **/ ( user.id = 1 ) AND /** cond-2 **/ ( user.role <> 'admin' )"})
(matcho/match
(sut/sql {:ql/type :ql/predicate
:cond-1 [:ql/= :user.id 1]
:cond-2 [:ql/<> :user.role "admin"]})
{:sql "/** cond-1 **/ ( user.id = 1 ) AND /** cond-2 **/ ( user.role <> 'admin' )"})
(matcho/match
(sut/sql {:ql/type :ql/predicate
:ql/comp "OR"
:cond-1 [:ql/= :user.id 1]
:cond-2 [:ql/<> :user.role "admin"]})
{:sql "/** cond-1 **/ ( user.id = 1 ) OR /** cond-2 **/ ( user.role <> 'admin' )"})
(matcho/match
(sut/sql {:ql/type :ql/select
:ql/select {:name :name
:bd :birthDate}
:ql/from {:ql/type :ql/from
:user :user}
:ql/where {:user-ids [:ql/= :user.id 5]}
:ql/limit 10})
{:sql "SELECT name AS name , birthDate AS bd FROM user user WHERE /** user-ids **/ ( user.id = 5 ) LIMIT 10" :params []})
(matcho/match
(sut/sql {:ql/type :ql/select
:ql/select {:name :name :bd :birthDate}
:ql/from {:user :user}
:ql/where {:user-ids [:ql/= :user.id 5]}
:ql/limit 10})
{:sql "SELECT name AS name , birthDate AS bd FROM user user WHERE /** user-ids **/ ( user.id = 5 ) LIMIT 10" :params []})
(matcho/match
(sut/sql {:ql/select :*
:ql/from {:u :user
:g :group}
:ql/where {:user-ids [:ql/= :u.id :g.user_id]
:group-type [:ql/= :g.name "admin"]}})
{:sql "SELECT * FROM user u , group g WHERE /** user-ids **/ ( u.id = g.user_id ) AND /** group-type **/ ( g.name = 'admin' )", :params []})
(matcho/match
(sut/sql
{:ql/select :*
:ql/from {:post :post}
:ql/joins {:u {:ql/join-type "LEFT"
:ql/rel :user
:ql/on {:by-ids [:ql/= :u.id :post.user_id]}}}})
{:sql "SELECT * FROM post post \n LEFT JOIN user u ON /** by-ids **/ ( u.id = post.user_id )"})
(matcho/match
(sut/sql
{:ql/select :*
:ql/from {:post :post}
:ql/joins {:u {:ql/join-type "LEFT"
:ql/rel :user
:ql/on [:ql/= :u.id :post.user_id]}}})
{:sql "SELECT * FROM post post \n LEFT JOIN user u ON u.id = post.user_id"})
(matcho/match
(sut/sql
{:ql/type :ql/projection
:resource {:ql/type :jsonb/build-object
:name :user.name
:address [:jsonb/||
[:jsonb/-> :resource :address]
{:ql/type :jsonb/build-object
:city "NY"
:zip :address.zip}]}})
{:sql "( jsonb_build_object( 'name' , user.name , 'address' , resource ->'address' || jsonb_build_object( 'city' , 'NY' , 'zip' , address.zip ) ) ) AS resource"})
(matcho/match
(sut/sql
{:ql/type :ql/projection
:resource {:ql/type :jsonb/build-object
:name :user.name
:address [:jsonb/||
[:jsonb/-> :resource :address]
{:ql/type :jsonb/build-object
:city {:ql/type :ql/param
:ql/value "NY"}
:zip :address.zip}]}})
{:sql "( jsonb_build_object( 'name' , user.name , 'address' , resource ->'address' || jsonb_build_object( 'city' , ? , 'zip' , address.zip ) ) ) AS resource"
:params ["NY"]})
(matcho/match
(sut/sql {:ql/with {:users {:ql/weight 0
:ql/select {:name :name}
:ql/from {:users :users}}
:roles {:ql/weight 1
:ql/select {:name :name}
:ql/from {:group :group}
:ql/joins {:u {:ql/rel :users
:ql/on {:join-cond [:ql/= :u.id :g.user_id]}}}}}
:ql/select {:u :u.name :g :g.name}
:ql/from {:u :user
:r :roles}})
{:sql
"WITH users AS ( SELECT name AS name FROM users users ) \n , roles AS ( SELECT name AS name FROM group group \n JOIN users u ON /** join-cond **/ ( u.id = g.user_id ) ) \n SELECT u.name AS u , g.name AS g FROM user u , roles r",
:params []})
(testing "group-by"
(matcho/match
(sut/sql {:ql/select {:a :expr :b :other}
:ql/from {:t :t}
:ql/group-by [:ql/list :expr :other]})
{:sql "SELECT expr AS a , other AS b FROM t t GROUP BY expr , other"}))
)
(sut/sql
#:ql{:select {:name :u.name}
:from {:u :user}
:where {:by-id [:ql/= :u.id [:ql/param 5]]}})
(sut/sql
{:ql/select {:name :u.name}
:ql/from {:u :user}
:ql/where {:by-id {:ql/type :ql/=
0 :u.id
1 {:ql/type :ql/param
:ql/value 5}}}})
(sut/sql
(merge-with
merge
{:ql/from {:u :user}}
{:ql/from {:g :group}}
{:ql/select {:name :u.name}}
{:ql/select {:group :g.name}}
{:ql/where {:join [:ql/= :g.user_id :u.id]}}
{:ql/where {:by-role [:ql/= :u.role "admin"]}}
{:ql/where {:by-id [:ql/= :u.id [:ql/param 5]]}}))
{:ql/type :ql/fn
:ql/fn "lower"
0 "param-1"
1 "param-2"}
[:ql/fn "lower" "param-1" "param-2"]
{:ql/type :ql/cast
:ql/cast :pg/timestamptz
:ql/expression "2011-01-01"}
[:ql/cast :pg/timestamptz "2011-01-01"]
(comment
)
| null | https://raw.githubusercontent.com/HealthSamurai/ql/145241ee7484a9e8a6c106c271505f2e3f0788ee/test/ql/core_test.clj | clojure | (ns ql.core-test
(:require [ql.core :as sut]
[clojure.test :refer :all]
[matcho.core :as matcho]
[ql.core :as ql]))
(deftest test-ql
(testing "select"
(matcho/match
(sut/sql {:ql/type :ql/projection
:alias :column
:constant "string"
:param {:ql/type :ql/param
:ql/value 10}})
{:sql "column AS alias , 'string' AS constant , ( ? ) AS param"
:params [10]}))
(matcho/match
(sut/sql "str" {:style :honeysql})
{:sql "?" :params ["str"]})
(matcho/match
(sut/sql {:ql/type :ql/param :ql/value 10})
{:sql "?" :params [10]})
(matcho/match
(sut/sql {:ql/type :ql/param :ql/value "str"} {:inline true})
{:sql "'str'" :params []})
(matcho/match
(sut/sql {:ql/type :ql/projection
:alias :column})
{:sql "column AS alias"})
(matcho/match
(sut/sql {:ql/type :ql/projection
:alias {:ql/type :ql/string
:ql/value "const"}})
{:sql "( $str$const$str$ ) AS alias"})
(matcho/match
(sut/sql {:ql/type :ql/projection
:alias {:ql/type :ql/param
:ql/value "const"}})
{:sql "( ? ) AS alias" :params ["const"]})
(matcho/match
(sut/sql {:ql/type :ql/projection
:alias {:ql/type :ql/select
:ql/select {:ql/type :ql/projection :x 1}}})
{:sql "( SELECT 1 AS x ) AS alias" :params []})
(matcho/match
(sut/sql {:ql/type :ql/predicate
:ql/comp "AND"
:cond-1 [:ql/= :user.id 1]
:cond-2 [:ql/<> :user.role "admin"]})
{:sql "/** cond-1 **/ ( user.id = 1 ) AND /** cond-2 **/ ( user.role <> 'admin' )"})
(matcho/match
(sut/sql {:ql/type :ql/predicate
:cond-1 [:ql/= :user.id 1]
:cond-2 [:ql/<> :user.role "admin"]})
{:sql "/** cond-1 **/ ( user.id = 1 ) AND /** cond-2 **/ ( user.role <> 'admin' )"})
(matcho/match
(sut/sql {:ql/type :ql/predicate
:ql/comp "OR"
:cond-1 [:ql/= :user.id 1]
:cond-2 [:ql/<> :user.role "admin"]})
{:sql "/** cond-1 **/ ( user.id = 1 ) OR /** cond-2 **/ ( user.role <> 'admin' )"})
(matcho/match
(sut/sql {:ql/type :ql/select
:ql/select {:name :name
:bd :birthDate}
:ql/from {:ql/type :ql/from
:user :user}
:ql/where {:user-ids [:ql/= :user.id 5]}
:ql/limit 10})
{:sql "SELECT name AS name , birthDate AS bd FROM user user WHERE /** user-ids **/ ( user.id = 5 ) LIMIT 10" :params []})
(matcho/match
(sut/sql {:ql/type :ql/select
:ql/select {:name :name :bd :birthDate}
:ql/from {:user :user}
:ql/where {:user-ids [:ql/= :user.id 5]}
:ql/limit 10})
{:sql "SELECT name AS name , birthDate AS bd FROM user user WHERE /** user-ids **/ ( user.id = 5 ) LIMIT 10" :params []})
(matcho/match
(sut/sql {:ql/select :*
:ql/from {:u :user
:g :group}
:ql/where {:user-ids [:ql/= :u.id :g.user_id]
:group-type [:ql/= :g.name "admin"]}})
{:sql "SELECT * FROM user u , group g WHERE /** user-ids **/ ( u.id = g.user_id ) AND /** group-type **/ ( g.name = 'admin' )", :params []})
(matcho/match
(sut/sql
{:ql/select :*
:ql/from {:post :post}
:ql/joins {:u {:ql/join-type "LEFT"
:ql/rel :user
:ql/on {:by-ids [:ql/= :u.id :post.user_id]}}}})
{:sql "SELECT * FROM post post \n LEFT JOIN user u ON /** by-ids **/ ( u.id = post.user_id )"})
(matcho/match
(sut/sql
{:ql/select :*
:ql/from {:post :post}
:ql/joins {:u {:ql/join-type "LEFT"
:ql/rel :user
:ql/on [:ql/= :u.id :post.user_id]}}})
{:sql "SELECT * FROM post post \n LEFT JOIN user u ON u.id = post.user_id"})
(matcho/match
(sut/sql
{:ql/type :ql/projection
:resource {:ql/type :jsonb/build-object
:name :user.name
:address [:jsonb/||
[:jsonb/-> :resource :address]
{:ql/type :jsonb/build-object
:city "NY"
:zip :address.zip}]}})
{:sql "( jsonb_build_object( 'name' , user.name , 'address' , resource ->'address' || jsonb_build_object( 'city' , 'NY' , 'zip' , address.zip ) ) ) AS resource"})
(matcho/match
(sut/sql
{:ql/type :ql/projection
:resource {:ql/type :jsonb/build-object
:name :user.name
:address [:jsonb/||
[:jsonb/-> :resource :address]
{:ql/type :jsonb/build-object
:city {:ql/type :ql/param
:ql/value "NY"}
:zip :address.zip}]}})
{:sql "( jsonb_build_object( 'name' , user.name , 'address' , resource ->'address' || jsonb_build_object( 'city' , ? , 'zip' , address.zip ) ) ) AS resource"
:params ["NY"]})
(matcho/match
(sut/sql {:ql/with {:users {:ql/weight 0
:ql/select {:name :name}
:ql/from {:users :users}}
:roles {:ql/weight 1
:ql/select {:name :name}
:ql/from {:group :group}
:ql/joins {:u {:ql/rel :users
:ql/on {:join-cond [:ql/= :u.id :g.user_id]}}}}}
:ql/select {:u :u.name :g :g.name}
:ql/from {:u :user
:r :roles}})
{:sql
"WITH users AS ( SELECT name AS name FROM users users ) \n , roles AS ( SELECT name AS name FROM group group \n JOIN users u ON /** join-cond **/ ( u.id = g.user_id ) ) \n SELECT u.name AS u , g.name AS g FROM user u , roles r",
:params []})
(testing "group-by"
(matcho/match
(sut/sql {:ql/select {:a :expr :b :other}
:ql/from {:t :t}
:ql/group-by [:ql/list :expr :other]})
{:sql "SELECT expr AS a , other AS b FROM t t GROUP BY expr , other"}))
)
(sut/sql
#:ql{:select {:name :u.name}
:from {:u :user}
:where {:by-id [:ql/= :u.id [:ql/param 5]]}})
(sut/sql
{:ql/select {:name :u.name}
:ql/from {:u :user}
:ql/where {:by-id {:ql/type :ql/=
0 :u.id
1 {:ql/type :ql/param
:ql/value 5}}}})
(sut/sql
(merge-with
merge
{:ql/from {:u :user}}
{:ql/from {:g :group}}
{:ql/select {:name :u.name}}
{:ql/select {:group :g.name}}
{:ql/where {:join [:ql/= :g.user_id :u.id]}}
{:ql/where {:by-role [:ql/= :u.role "admin"]}}
{:ql/where {:by-id [:ql/= :u.id [:ql/param 5]]}}))
{:ql/type :ql/fn
:ql/fn "lower"
0 "param-1"
1 "param-2"}
[:ql/fn "lower" "param-1" "param-2"]
{:ql/type :ql/cast
:ql/cast :pg/timestamptz
:ql/expression "2011-01-01"}
[:ql/cast :pg/timestamptz "2011-01-01"]
(comment
)
| |
91a02eef2d81d66d5b8afdfcc035476771cb0ace4ad90cf4e2f2ac5f3437ce5f | achirkin/qua-view | UserAction.hs | # LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# OPTIONS_GHC -fno - warn - orphans #
module Program.UserAction
( QEventTag (..)
, ScId (..)
) where
import Commons.NoReflex
import Model.Scenario.Properties (PropName)
import Model.Scenario.Object (ObjectId)
-- | Represents a scenario id.
--   The wrapped 'Int' is the identifier a scenario is referred to by
--   in the events below ('AskSelectScenario').
--   TODO: this datatype should be in luci module;
newtype ScId = ScId Int
  deriving (Eq, Show, Ord)
-- | Event types fired by user actions
data instance QEventTag UserAction evArg where
-- | User wants to save scenario with this name.
AskSaveScenario :: QEventTag UserAction Text
-- | User selects a scenario in the scenario list.
AskSelectScenario :: QEventTag UserAction ScId
-- | User wants to clear all geometry.
AskClearGeometry :: QEventTag UserAction ()
-- | User wants to reset camera to its default position.
AskResetCamera :: QEventTag UserAction ()
-- | User selected a property so that we can colorize all objects according to prop value
PropertyClicked :: QEventTag UserAction PropName
-- | Programmatically select or unselect an object.
-- This event does not fire when a user click on an object!
-- If you want to listen to object seletion events, use global dynamic `selectedObjectIdD`.
AskSelectObject :: QEventTag UserAction (Maybe ObjectId)
deriveEvent ''UserAction
| null | https://raw.githubusercontent.com/achirkin/qua-view/62626ead828889a1c7ef1fdba4d84324eb5420b3/src/Program/UserAction.hs | haskell | # LANGUAGE GADTs #
| Represents a scenario id.
| Event types fired by user actions
| User wants to save scenario with this name.
| User selects a scenario in the scenario list.
| User wants to clear all geometry.
| User wants to reset camera to its default position.
| User selected a property so that we can colorize all objects according to prop value
| Programmatically select or unselect an object.
This event does not fire when a user click on an object!
If you want to listen to object seletion events, use global dynamic `selectedObjectIdD`. | # LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleInstances #
# OPTIONS_GHC -fno - warn - orphans #
module Program.UserAction
( QEventTag (..)
, ScId (..)
) where
import Commons.NoReflex
import Model.Scenario.Properties (PropName)
import Model.Scenario.Object (ObjectId)
TODO : this datatype should be in luci module ;
newtype ScId = ScId Int
deriving (Eq, Show, Ord)
data instance QEventTag UserAction evArg where
AskSaveScenario :: QEventTag UserAction Text
AskSelectScenario :: QEventTag UserAction ScId
AskClearGeometry :: QEventTag UserAction ()
AskResetCamera :: QEventTag UserAction ()
PropertyClicked :: QEventTag UserAction PropName
AskSelectObject :: QEventTag UserAction (Maybe ObjectId)
deriveEvent ''UserAction
|
104ad04e50c529ad13c0f9243d43dfe18f850299156c7092f284447fff8800c5 | mstksg/functor-combinators | Step.hs | {-# LANGUAGE EmptyDataDeriving #-}
-- |
-- Module : Control.Applicative.Step
Copyright : ( c ) 2019
-- License : BSD3
--
Maintainer :
-- Stability : experimental
-- Portability : non-portable
--
-- This module provides functor combinators that are the fixed points of
-- applications of ':+:' and 'Data.Functor.These.These1'. They are useful
-- for their 'Data.HFunctor.Interpret.Interpret' instances, along with
-- their relationship to the 'Data.HBifunctor.Tensor.Monoidal' instances of
-- ':+:' and 'Data.Functor.These.These1'.
module Control.Applicative.Step (
-- * Fixed Points
Step(..)
, Steps(..)
, Flagged(..)
-- ** Steppers
, stepUp
, stepDown
, stepping
, stepsUp
, stepsDown
, steppings
-- * Void
, absurd1
, Void2
, absurd2
, Void3
, absurd3
) where
import Control.Natural
import Control.Natural.IsoF
import Data.Bifunctor
import Data.Data
import Data.Deriving
import Data.Functor.Alt
import Data.Functor.Bind
import Data.Functor.Contravariant
import Data.Functor.Contravariant.Conclude
import Data.Functor.Contravariant.Decide
import Data.Functor.Contravariant.Divise
import Data.Functor.Contravariant.Divisible
import Data.Functor.Invariant
import Data.Functor.These
import Data.Map.NonEmpty (NEMap)
import Data.Pointed
import Data.Semigroup
import Data.Semigroup.Foldable
import Data.Semigroup.Traversable
import Data.These
import GHC.Generics
import GHC.Natural
import qualified Data.Map.NonEmpty as NEM
-- | An @f a@, along with a 'Natural' index.
--
-- @
-- 'Step' f a ~ ('Natural', f a)
-- Step f ~ ((,) Natural) ':.:' f -- functor composition
-- @
--
-- It is the fixed point of infinite applications of ':+:' (functor sums).
--
-- Intuitively, in an infinite @f :+: f :+: f :+: f ...@, you have
exactly one /somewhere/. A @'Step ' f a@ has that , with
a ' Natural ' giving you " where " the @f@ is in the long chain .
--
-- Can be useful for using with the 'Data.HBifunctor.Tensor.Monoidal'
-- instance of ':+:'.
--
-- 'Data.HFunctor.Interpret.interpret'ing it requires no constraint on the
-- target context.
--
-- Note that this type and its instances equivalent to
@'Control . Comonad . Trans . Env . EnvT ' ( ' Data . Semigroup . Sum ' ' Natural')@.
data Step f a = Step { stepPos :: Natural, stepVal :: f a }
deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable, Typeable, Generic, Data)
deriveShow1 ''Step
deriveRead1 ''Step
deriveEq1 ''Step
deriveOrd1 ''Step
| @since 0.3.0.0
instance Apply f => Apply (Step f) where
Step n f <.> Step m x = Step (n + m) (f <.> x)
instance Applicative f => Applicative (Step f) where
pure = Step 0 . pure
Step n f <*> Step m x = Step (n + m) (f <*> x)
| @since 0.3.0.0
instance Contravariant f => Contravariant (Step f) where
contramap f (Step x y) = Step x (contramap f y)
| @since 0.3.0.0
instance Divisible f => Divisible (Step f) where
divide f (Step n x) (Step m y) = Step (n + m) (divide f x y)
conquer = Step 0 conquer
| @since 0.3.0.0
instance Divise f => Divise (Step f) where
divise f (Step n x) (Step m y) = Step (n + m) (divise f x y)
| @since 0.3.0.0
instance Decide f => Decide (Step f) where
decide f (Step n x) (Step m y) = Step (n + m) (decide f x y)
| @since 0.3.0.0
instance Conclude f => Conclude (Step f) where
conclude = Step 0 . conclude
| @since 0.3.0.0
instance Decidable f => Decidable (Step f) where
choose f (Step n x) (Step m y) = Step (n + m) (choose f x y)
lose = Step 0 . lose
| @since 0.3.0.0
instance Invariant f => Invariant (Step f) where
invmap f g (Step x y) = Step x (invmap f g y)
instance Pointed f => Pointed (Step f) where
point = Step 0 . point
instance Foldable1 f => Foldable1 (Step f) where
fold1 = fold1 . stepVal
foldMap1 f = foldMap1 f . stepVal
toNonEmpty = toNonEmpty . stepVal
instance Traversable1 f => Traversable1 (Step f) where
traverse1 f (Step n x) = Step n <$> traverse1 f x
sequence1 (Step n x) = Step n <$> sequence1 x
| " Uncons and cons " an @f@ branch before a ' Step ' . This is basically
-- a witness that 'stepDown' and 'stepUp' form an isomorphism.
stepping :: Step f <~> f :+: Step f
stepping = isoF stepDown stepUp
| Pop off the first item in a ' Step ' . Because a @'Step ' f@ is @f : + :
f : + : f : + : ... @ forever , this matches on the first branch .
--
-- You can think of it as reassociating
--
-- @
-- f :+: f :+: f :+: f :+: ...
-- @
--
-- into
--
-- @
-- f :+: ( f :+: f :+: f :+: ...)
-- @
--
-- @
-- 'stepDown' ('Step' 2 "hello")
-- ' R1 ' ( Step 1 " hello " )
-- stepDown (Step 0 "hello")
-- -- 'L1' "hello"
-- @
--
-- Forms an isomorphism with 'stepUp' (see 'stepping').
stepDown :: Step f ~> f :+: Step f
stepDown (Step n x) =
    -- index 0 means the payload lives in the head position (L1);
    -- otherwise shift the index down by one and keep stepping (R1)
    maybe (L1 x) (\m -> R1 (Step m x)) (minusNaturalMaybe n 1)
-- | Unshift an item into a 'Step'. Because a @'Step' f@ is @f :+: f :+:
-- f :+: f :+: ...@ forever, this basically conses an additional
possibility of @f@ to the beginning of it all .
--
-- You can think of it as reassociating
--
-- @
-- f :+: ( f :+: f :+: f :+: ...)
-- @
--
-- into
--
-- @
-- f :+: f :+: f :+: f :+: ...
-- @
--
-- @
-- 'stepUp' ('L1' "hello")
-- -- 'Step' 0 "hello"
stepUp ( ' R1 ' ( Step 1 " hello " ) )
-- Step 2 " hello "
-- @
--
-- Forms an isomorphism with 'stepDown' (see 'stepping').
stepUp :: f :+: Step f ~> Step f
-- a bare payload goes to index 0; an existing 'Step' moves one index up
stepUp (L1 x)          = Step 0 x
stepUp (R1 (Step n y)) = Step (succ n) y
-- | We have a natural transformation between 'V1' and any other
functor with no constraints .
absurd1 :: V1 a -> f a
-- 'V1' has no constructors, so the empty case is exhaustive
absurd1 v = case v of {}
-- | A non-empty map of 'Natural' to @f a@. Basically, contains multiple
-- @f a@s, each at a given 'Natural' index.
--
-- @
-- Steps f a ~ 'M.Map' 'Natural' (f a)
-- Steps f ~ 'M.Map' 'Natural' ':.:' f -- functor composition
-- @
--
-- It is the fixed point of applications of 'Data.Functor.These.TheseT'.
--
-- You can think of this as an infinite sparse array of @f a@s.
--
-- Intuitively, in an infinite @f \`TheseT\` f \`TheseT\` f \`TheseT\` f ...@,
-- each of those infinite positions may have an @f@ in them. However,
-- because of the at-least-one nature of 'Data.Functor.These.TheseT', we know we have at least
one f at one position /somewhere/.
--
A @'Steps ' f a@ has potentially many @f@s , each stored at a different
' Natural ' position , with the guaruntee that at least one @f@ exists .
--
-- Can be useful for using with the 'Data.HBifunctor.Tensor.Monoidal' instance
-- of 'Data.Functor.These.TheseT'.
--
-- 'Data.HFunctor.interpret'ing it requires at least an 'Alt'
-- instance in the target context, since we have to handle potentially more
than one @f@.
--
-- This type is essentailly the same as @'Control.Applicative.ListF.NEMapF'
-- ('Sum' 'Natural')@ (except with a different 'Semigroup' instance).
newtype Steps f a = Steps { getSteps :: NEMap Natural (f a) }
deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable, Typeable, Generic, Data)
deriveShow1 ''Steps
deriveRead1 ''Steps
deriveEq1 ''Steps
deriveOrd1 ''Steps
instance Foldable1 f => Foldable1 (Steps f) where
fold1 = foldMap1 fold1 . getSteps
foldMap1 f = (foldMap1 . foldMap1) f . getSteps
toNonEmpty = foldMap1 toNonEmpty . getSteps
instance Traversable1 f => Traversable1 (Steps f) where
traverse1 f = fmap Steps . (traverse1 . traverse1) f . getSteps
sequence1 = fmap Steps . traverse1 sequence1 . getSteps
-- | Appends the items back-to-back, shifting all of the items in the
second map . Matches the behavior as the fixed - point of ' These1 ' .
instance Semigroup (Steps f a) where
Steps xs <> Steps ys = Steps $
let (k, _) = NEM.findMax xs
in xs <> NEM.mapKeysMonotonic (+ (k + 1)) ys
| @since 0.3.0.0
instance Contravariant f => Contravariant (Steps f) where
contramap f (Steps xs) = Steps ((fmap . contramap) f xs)
TODO : consider what Divisible / should be . Maybe no need to
-- rush into this.
| @since 0.3.0.0
instance Invariant f => Invariant (Steps f) where
invmap f g (Steps xs) = Steps (fmap (invmap f g) xs)
-- | Left-biased untion
instance Functor f => Alt (Steps f) where
Steps xs <!> Steps ys = Steps $ NEM.union xs ys
instance Pointed f => Pointed (Steps f) where
point = Steps . NEM.singleton 0 . point
| " Uncons and cons " an @f@ branch before a ' Steps ' . This is basically
-- a witness that 'stepsDown' and 'stepsUp' form an isomorphism.
steppings :: Steps f <~> These1 f (Steps f)
steppings = isoF stepsDown stepsUp
| Pop off the first item in a ' Steps ' . Because a @'Steps ' f@ is @f
` These1 ` f ` These1 ` f ` These1 ` ... @ forever , this matches on the first branch .
--
-- You can think of it as reassociating
--
-- @
-- f `These1` f `These1` f `These1` f `These1` ...
-- @
--
-- into
--
-- @
f ` These1 ` ( f ` These1 ` f ` These1 ` f ` These1 ` ... )
-- @
--
-- It returns:
--
* ' This1 ' if the first item is the /only/ item in the ' Steps '
* ' ' if the first item in the ' Steps ' is empty , but there are more
-- items left. The extra items are all shfited down.
* ' These1 ' if the first item in the ' Steps ' exists , and there are also
-- more items left. The extra items are all shifted down.
--
-- Forms an isomorphism with 'stepsUp' (see 'steppings').
stepsDown :: Steps f ~> These1 f (Steps f)
-- fold every entry through 'decr'; the 'These' semigroup merges the
-- per-entry classifications, and the final 'these' eliminator picks
-- the matching 'These1' constructor
stepsDown = these This1 That1 These1
          . bimap getFirst Steps
          . NEM.foldMapWithKey decr
          . getSteps

-- | Helper for 'stepsDown': classify one map entry by its index.
-- Index 0 goes to the 'This' side (wrapped in 'First' so only one
-- zero-index entry survives the semigroup merge); any other index is
-- shifted down by one into a singleton map on the 'That' side.
decr :: Natural -> f a -> These (First (f a)) (NEMap Natural (f a))
decr i x = case minusNaturalMaybe i 1 of
    Nothing -> This $ First x
    Just i' -> That $ NEM.singleton i' x
-- | Unshift an item into a 'Steps'. Because a @'Steps' f@ is @f `These1`
-- f `These1` f `These1` f `These1` ...@ forever, this basically conses an
additional possibility of @f@ to the beginning of it all .
--
-- You can think of it as reassociating
--
-- @
f ` These1 ` ( f ` These1 ` f ` These1 ` f ` These1 ` ... )
-- @
--
-- into
--
-- @
-- f `These1` f `These1` f `These1` f `These1` ...
-- @
--
-- If you give:
--
* ' This1 ' , then it returns a singleton ' Steps ' with one item at
index 0
* ' That1 ' , then it shifts every item in the given ' Steps ' up one
-- index.
* ' These1 ' , then it shifts every item in the given ' Steps ' up one
index , and adds the given item ( the @f@ ) at index zero .
--
-- Forms an isomorphism with 'stepDown' (see 'stepping').
stepsUp :: These1 f (Steps f) ~> Steps f
stepsUp t = case t of
    -- a lone payload becomes a singleton at index 0
    This1 x     -> Steps (NEM.singleton 0 x)
    -- no new head: just shift every existing index up by one
    That1 xs    -> Steps (shifted xs)
    -- new head at index 0 in front of the shifted remainder
    These1 x xs -> Steps (NEM.insertMapMin 0 x (NEM.toMap (shifted xs)))
  where
    shifted = NEM.mapKeysMonotonic (+ 1) . getSteps
-- | An @f a@, along with a 'Bool' flag
--
-- @
-- 'Flagged' f a ~ ('Bool', f a)
Flagged f ~ ( ( , ) ) ' : . : ' f -- functor composition
-- @
--
Creation with ' Data.HFunctor.inject ' or ' pure ' uses ' False ' as the
-- boolean.
--
-- You can think of it as an @f a@ that is "flagged" with a boolean value,
-- and that value can indicuate whether or not it is "pure" (made with
' Data.HFunctor.inject ' or ' pure ' ) as ' False ' , or " impure "
-- (made from some other source) as 'True'. However, 'False' may be always
-- created directly, of course, using the constructor.
--
You can think of it like a ' Step ' that is either 0 or 1 , as well .
--
-- 'Data.HFunctor.Interpret.interpret'ing it requires no constraint on the
-- target context.
--
-- This type is equivalent (along with its instances) to:
--
* . HFunctor . HLift ' ' Control . . Trans . Identity . IdentityT'@
-- * @'Control.COmonad.Trans.Env.EnvT' 'Data.Semigroup.Any'@
data Flagged f a = Flagged { flaggedFlag :: Bool, flaggedVal :: f a }
deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable, Typeable, Generic, Data)
deriveShow1 ''Flagged
deriveRead1 ''Flagged
deriveEq1 ''Flagged
deriveOrd1 ''Flagged
-- | Uses 'False' for 'pure', and '||' for '<*>'.
instance Applicative f => Applicative (Flagged f) where
pure = Flagged False . pure
Flagged n f <*> Flagged m x = Flagged (n || m) (f <*> x)
-- | Uses 'False' for 'point'.
instance Pointed f => Pointed (Flagged f) where
point = Flagged False . point
instance Foldable1 f => Foldable1 (Flagged f) where
fold1 = fold1 . flaggedVal
foldMap1 f = foldMap1 f . flaggedVal
toNonEmpty = toNonEmpty . flaggedVal
instance Traversable1 f => Traversable1 (Flagged f) where
traverse1 f (Flagged n x) = Flagged n <$> traverse1 f x
sequence1 (Flagged n x) = Flagged n <$> sequence1 x
| @'Void2 ' a b@ is uninhabited for all @a@ and
data Void2 a b
deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable, Typeable, Generic, Data)
deriveShow1 ''Void2
deriveRead1 ''Void2
deriveEq1 ''Void2
deriveOrd1 ''Void2
instance Semigroup (Void2 a b) where
x <> _ = case x of {}
instance Alt (Void2 a) where
x <!> _ = absurd2 x
instance Bind (Void2 a) where
x >>- _ = case x of {}
instance Apply (Void2 a) where
x <.> _ = case x of {}
| @since 0.3.0.0
instance Contravariant (Void2 a) where
contramap _ = \case {}
| @since 0.3.0.0
instance Invariant (Void2 a) where
invmap _ _ = \case {}
| If you treat a @'Void2 ' f a@ as a functor combinator , then ' absurd2 '
lets you convert from a @'Void2 ' f a@ into a @t f a@ for any functor
combinator
absurd2 :: Void2 f a -> t f a
absurd2 = \case {}
| @'Void3 ' a b@ is uninhabited for all @a@ and
data Void3 a b c
deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable, Typeable, Generic, Data)
deriveShow1 ''Void3
deriveRead1 ''Void3
deriveEq1 ''Void3
deriveOrd1 ''Void3
instance Semigroup (Void3 a b c) where
x <> _ = case x of {}
instance Alt (Void3 a b) where
x <!> _ = absurd3 x
instance Bind (Void3 a b) where
x >>- _ = case x of {}
instance Apply (Void3 a b) where
x <.> _ = case x of {}
| @since 0.3.0.0
instance Contravariant (Void3 a b) where
contramap _ = \case {}
| @since 0.3.0.0
instance Invariant (Void3 a b) where
invmap _ _ = \case {}
-- | If you treat a @'Void3' f a@ as a binary functor combinator, then
' absurd3 ' lets you convert from a @'Void3 ' f a@ into a @t f a@ for any
functor combinator
absurd3 :: Void3 f g a -> t f g a
absurd3 = \case {}
| null | https://raw.githubusercontent.com/mstksg/functor-combinators/e8158b6055be08f37525414068884a9d94b3bcf9/src/Control/Applicative/Step.hs | haskell | # LANGUAGE EmptyDataDeriving #
|
Module : Control.Applicative.Step
License : BSD3
Stability : experimental
Portability : non-portable
This module provides functor combinators that are the fixed points of
applications of ':+:' and 'Data.Functor.These.These1'. They are useful
for their 'Data.HFunctor.Interpret.Interpret' instances, along with
their relationship to the 'Data.HBifunctor.Tensor.Monoidal' instances of
':+:' and 'Data.Functor.These.These1'.
* Fixed Points
** Steppers
* Void
| An @f a@, along with a 'Natural' index.
@
'Step' f a ~ ('Natural', f a)
Step f ~ ((,) Natural) ':.:' f -- functor composition
@
It is the fixed point of infinite applications of ':+:' (functor sums).
Intuitively, in an infinite @f :+: f :+: f :+: f ...@, you have
Can be useful for using with the 'Data.HBifunctor.Tensor.Monoidal'
instance of ':+:'.
'Data.HFunctor.Interpret.interpret'ing it requires no constraint on the
target context.
Note that this type and its instances equivalent to
a witness that 'stepDown' and 'stepUp' form an isomorphism.
You can think of it as reassociating
@
f :+: f :+: f :+: f :+: ...
@
into
@
f :+: ( f :+: f :+: f :+: ...)
@
@
'stepDown' ('Step' 2 "hello")
-- 'R1' ('Step' 1 "hello")
'stepDown' ('Step' 0 "hello")
-- 'L1' "hello"
@
Forms an isomorphism with 'stepUp' (see 'stepping').
| Unshift an item into a 'Step'. Because a @'Step' f@ is @f :+: f :+:
f :+: f :+: ...@ forever, this basically conses an additional
You can think of it as reassociating
@
f :+: ( f :+: f :+: f :+: ...)
@
into
@
f :+: f :+: f :+: f :+: ...
@
@
'stepUp' ('L1' "hello")
-- 'Step' 0 "hello"
'stepUp' ('R1' ('Step' 1 "hello"))
-- 'Step' 2 "hello"
@
Forms an isomorphism with 'stepDown' (see 'stepping').
| We have a natural transformation between 'V1' and any other
| A non-empty map of 'Natural' to @f a@. Basically, contains multiple
@f a@s, each at a given 'Natural' index.
@
Steps f a ~ 'M.Map' 'Natural' (f a)
Steps f ~ 'M.Map' 'Natural' ':.:' f -- functor composition
@
It is the fixed point of applications of 'Data.Functor.These.TheseT'.
You can think of this as an infinite sparse array of @f a@s.
Intuitively, in an infinite @f \`TheseT\` f \`TheseT\` f \`TheseT\` f ...@,
each of those infinite positions may have an @f@ in them. However,
because of the at-least-one nature of 'Data.Functor.These.TheseT', we know we have at least
Can be useful for using with the 'Data.HBifunctor.Tensor.Monoidal' instance
of 'Data.Functor.These.TheseT'.
'Data.HFunctor.interpret'ing it requires at least an 'Alt'
instance in the target context, since we have to handle potentially more
This type is essentailly the same as @'Control.Applicative.ListF.NEMapF'
('Sum' 'Natural')@ (except with a different 'Semigroup' instance).
| Appends the items back-to-back, shifting all of the items in the
rush into this.
| Left-biased untion
a witness that 'stepsDown' and 'stepsUp' form an isomorphism.
You can think of it as reassociating
@
f `These1` f `These1` f `These1` f `These1` ...
@
into
@
@
It returns:
items left. The extra items are all shfited down.
more items left. The extra items are all shifted down.
Forms an isomorphism with 'stepsUp' (see 'steppings').
| Unshift an item into a 'Steps'. Because a @'Steps' f@ is @f `These1`
f `These1` f `These1` f `These1` ...@ forever, this basically conses an
You can think of it as reassociating
@
@
into
@
f `These1` f `These1` f `These1` f `These1` ...
@
If you give:
index.
Forms an isomorphism with 'stepDown' (see 'stepping').
| An @f a@, along with a 'Bool' flag
@
'Flagged' f a ~ ('Bool', f a)
functor composition
@
boolean.
You can think of it as an @f a@ that is "flagged" with a boolean value,
and that value can indicuate whether or not it is "pure" (made with
(made from some other source) as 'True'. However, 'False' may be always
created directly, of course, using the constructor.
'Data.HFunctor.Interpret.interpret'ing it requires no constraint on the
target context.
This type is equivalent (along with its instances) to:
* @'Control.COmonad.Trans.Env.EnvT' 'Data.Semigroup.Any'@
| Uses 'False' for 'pure', and '||' for '<*>'.
| Uses 'False' for 'point'.
| If you treat a @'Void3' f a@ as a binary functor combinator, then |
-- |
-- Module      : Control.Applicative.Step
-- Copyright   : (c) 2019
-- License     : BSD3
-- Stability   : experimental
-- Portability : non-portable
--
-- Functor combinators that are the fixed points of applications of ':+:'
-- and 'Data.Functor.These.These1'.  They are useful for their
-- 'Data.HFunctor.Interpret.Interpret' instances, along with their
-- relationship to the 'Data.HBifunctor.Tensor.Monoidal' instances of
-- ':+:' and 'Data.Functor.These.These1'.
module Control.Applicative.Step (
  -- * Fixed Points
    Step(..)
  , Steps(..)
  , Flagged(..)
  -- ** Steppers
  , stepUp
  , stepDown
  , stepping
  , stepsUp
  , stepsDown
  , steppings
  -- * Void
  , absurd1
  , Void2
  , absurd2
  , Void3
  , absurd3
  ) where
import Control.Natural
import Control.Natural.IsoF
import Data.Bifunctor
import Data.Data
import Data.Deriving
import Data.Functor.Alt
import Data.Functor.Bind
import Data.Functor.Contravariant
import Data.Functor.Contravariant.Conclude
import Data.Functor.Contravariant.Decide
import Data.Functor.Contravariant.Divise
import Data.Functor.Contravariant.Divisible
import Data.Functor.Invariant
import Data.Functor.These
import Data.Map.NonEmpty (NEMap)
import Data.Pointed
import Data.Semigroup
import Data.Semigroup.Foldable
import Data.Semigroup.Traversable
import Data.These
import GHC.Generics
import GHC.Natural
import qualified Data.Map.NonEmpty as NEM
-- | An @f a@, along with a 'Natural' index: in the infinite chain
-- @f :+: f :+: f :+: ...@ you have exactly one @f@ /somewhere/, and a
-- @'Step' f a@ records that @f@ together with a 'Natural' saying /where/
-- it sits in the chain.
--
-- This type and its instances are equivalent to
-- @'Control.Comonad.Trans.Env.EnvT' ('Data.Semigroup.Sum' 'Natural')@.
data Step f a = Step { stepPos :: Natural, stepVal :: f a }
  deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable, Typeable, Generic, Data)

deriveShow1 ''Step
deriveRead1 ''Step
deriveEq1 ''Step
deriveOrd1 ''Step
-- | @since 0.3.0.0
instance Apply f => Apply (Step f) where
    Step n f <.> Step m x = Step (n + m) (f <.> x)  -- indices combine additively

instance Applicative f => Applicative (Step f) where
    pure = Step 0 . pure                            -- pure lives at index 0
    Step n f <*> Step m x = Step (n + m) (f <*> x)

-- | @since 0.3.0.0
instance Contravariant f => Contravariant (Step f) where
    contramap f (Step x y) = Step x (contramap f y)

-- | @since 0.3.0.0
instance Divisible f => Divisible (Step f) where
    divide f (Step n x) (Step m y) = Step (n + m) (divide f x y)
    conquer = Step 0 conquer

-- | @since 0.3.0.0
instance Divise f => Divise (Step f) where
    divise f (Step n x) (Step m y) = Step (n + m) (divise f x y)

-- | @since 0.3.0.0
instance Decide f => Decide (Step f) where
    decide f (Step n x) (Step m y) = Step (n + m) (decide f x y)

-- | @since 0.3.0.0
instance Conclude f => Conclude (Step f) where
    conclude = Step 0 . conclude

-- | @since 0.3.0.0
instance Decidable f => Decidable (Step f) where
    choose f (Step n x) (Step m y) = Step (n + m) (choose f x y)
    lose = Step 0 . lose

-- | @since 0.3.0.0
instance Invariant f => Invariant (Step f) where
    invmap f g (Step x y) = Step x (invmap f g y)

instance Pointed f => Pointed (Step f) where
    point = Step 0 . point

-- Folding and traversing defer to the single contained @f a@, carrying
-- the index through unchanged.
instance Foldable1 f => Foldable1 (Step f) where
    fold1 = fold1 . stepVal
    foldMap1 f = foldMap1 f . stepVal
    toNonEmpty = toNonEmpty . stepVal

instance Traversable1 f => Traversable1 (Step f) where
    traverse1 f (Step n x) = Step n <$> traverse1 f x
    sequence1 (Step n x) = Step n <$> sequence1 x
| " Uncons and cons " an @f@ branch before a ' Step ' . This is basically
-- | "Uncons and cons" an @f@ branch before a 'Step'.  This is basically
-- a witness that 'stepDown' and 'stepUp' form an isomorphism.
stepping :: Step f <~> f :+: Step f
stepping = isoF stepDown stepUp
-- | Pop off the first item in a 'Step'.  Because a @'Step' f@ is
-- @f :+: f :+: f :+: ...@ forever, this matches on the first branch:
-- an item at index 0 is the head ('L1'); anything deeper is the tail,
-- shifted down one index ('R1').
--
-- Forms an isomorphism with 'stepUp' (see 'stepping').
stepDown :: Step f ~> f :+: Step f
stepDown (Step n x)
    | n == 0    = L1 x
    | otherwise = R1 (Step (n - 1) x)  -- n > 0, so Natural subtraction is safe
-- | Unshift an item into a 'Step'.  Because a @'Step' f@ is
-- @f :+: f :+: f :+: ...@ forever, this conses an additional possibility
-- of @f@ onto the front: the new item goes to index 0, and anything
-- already inside is pushed up one index.
--
-- Forms an isomorphism with 'stepDown' (see 'stepping').
stepUp :: f :+: Step f ~> Step f
stepUp (L1 x)          = Step 0 x
stepUp (R1 (Step n y)) = Step (n + 1) y
-- | We have a natural transformation between 'V1' and any other functor
-- @f@ with no constraints: 'V1' is uninhabited, so this can never
-- actually be applied to a value.
absurd1 :: V1 a -> f a
absurd1 v = case v of {}
-- | A non-empty map of 'Natural' to @f a@: potentially many @f@s, each
-- stored at a different 'Natural' index, with the guarantee that at
-- least one @f@ exists.  It is the fixed point of applications of
-- 'Data.Functor.These.These1' — an infinite sparse array of @f a@s
-- that is non-empty somewhere.
--
-- 'Data.HFunctor.Interpret.interpret'ing it requires at least an 'Alt'
-- instance in the target context, since there may be more than one @f@.
newtype Steps f a = Steps { getSteps :: NEMap Natural (f a) }
  deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable, Typeable, Generic, Data)

deriveShow1 ''Steps
deriveRead1 ''Steps
deriveEq1 ''Steps
deriveOrd1 ''Steps

-- Fold over / traverse every contained @f a@, via the non-empty map.
instance Foldable1 f => Foldable1 (Steps f) where
    fold1 = foldMap1 fold1 . getSteps
    foldMap1 f = (foldMap1 . foldMap1) f . getSteps
    toNonEmpty = foldMap1 toNonEmpty . getSteps

instance Traversable1 f => Traversable1 (Steps f) where
    traverse1 f = fmap Steps . (traverse1 . traverse1) f . getSteps
    sequence1 = fmap Steps . traverse1 sequence1 . getSteps
-- | Appends the items back-to-back: every key in the second map is
-- shifted past the largest key of the first.  Matches the behavior of
-- the fixed point of 'These1'.
instance Semigroup (Steps f a) where
    Steps xs <> Steps ys = Steps $
      let (k, _) = NEM.findMax xs
      in  xs <> NEM.mapKeysMonotonic (+ (k + 1)) ys  -- shift ys past xs's max key

-- | @since 0.3.0.0
instance Contravariant f => Contravariant (Steps f) where
    contramap f (Steps xs) = Steps ((fmap . contramap) f xs)

-- TODO: consider what Divisible/Decidable instances should be, if any.

-- | @since 0.3.0.0
instance Invariant f => Invariant (Steps f) where
    invmap f g (Steps xs) = Steps (fmap (invmap f g) xs)

-- | Left-biased union: on a key collision, the item from the left map wins.
instance Functor f => Alt (Steps f) where
    Steps xs <!> Steps ys = Steps $ NEM.union xs ys

instance Pointed f => Pointed (Steps f) where
    point = Steps . NEM.singleton 0 . point
| " Uncons and cons " an @f@ branch before a ' Steps ' . This is basically
-- | "Uncons and cons" an @f@ branch before a 'Steps'.  This is basically
-- a witness that 'stepsDown' and 'stepsUp' form an isomorphism.
steppings :: Steps f <~> These1 f (Steps f)
steppings = isoF stepsDown stepsUp

-- | Pop off the first item in a 'Steps'.  Because a @'Steps' f@ is
-- @f `These1` f `These1` f `These1` ...@ forever, this matches on the
-- first branch.  It returns:
--
-- *  'This1' if the item at index 0 is the /only/ item in the 'Steps'
-- *  'That1' if index 0 is empty but there are items left; they are all
--    shifted down one index
-- *  'These1' if index 0 holds an item and there are more items left;
--    the rest are all shifted down one index
--
-- Forms an isomorphism with 'stepsUp' (see 'steppings').
stepsDown :: Steps f ~> These1 f (Steps f)
stepsDown = these This1 That1 These1
          . bimap getFirst Steps
          . NEM.foldMapWithKey decr  -- These-monoid merges the index-0 hit with the shifted rest
          . getSteps

-- Shift one keyed item down an index; an item already at index 0 goes to
-- the 'This' ("head") side instead.
decr :: Natural -> f a -> These (First (f a)) (NEMap Natural (f a))
decr i x = case minusNaturalMaybe i 1 of
    Nothing -> This $ First x
    Just i' -> That $ NEM.singleton i' x
-- | Unshift an item into a 'Steps'.  Because a @'Steps' f@ is
-- @f `These1` f `These1` f `These1` ...@ forever, this conses an
-- additional possibility of @f@ onto the front.  If you give:
--
-- *  'This1', it returns a singleton 'Steps' with one item at index 0
-- *  'That1', it shifts every item in the given 'Steps' up one index
-- *  'These1', it shifts every item in the given 'Steps' up one index
--    and adds the given item (the @f@) at index 0
--
-- Forms an isomorphism with 'stepsDown' (see 'steppings').
stepsUp :: These1 f (Steps f) ~> Steps f
stepsUp = \case
    This1  x    -> Steps $ NEM.singleton 0 x
    That1    xs -> Steps . NEM.mapKeysMonotonic (+ 1)
                 . getSteps
                 $ xs
    -- after the (+ 1) shift, 0 is strictly below every key, so
    -- insertMapMin is safe here
    These1 x xs -> Steps . NEM.insertMapMin 0 x
                 . NEM.toMap
                 . NEM.mapKeysMonotonic (+ 1)
                 . getSteps
                 $ xs
-- | An @f a@, along with a 'Bool' flag.  Creation with
-- 'Data.HFunctor.inject' or 'pure' uses 'False' as the flag, so the flag
-- can indicate whether a value is "pure" ('False') or "impure" (made
-- from some other source, 'True').  'False' may of course also be
-- assigned directly using the constructor.
--
-- You can think of it like a 'Step' whose index is either 0 or 1.
--
-- This type is equivalent (along with its instances) to
-- @'Data.HFunctor.HLift' 'Control.Monad.Trans.Identity.IdentityT'@ and
-- @'Control.Comonad.Trans.Env.EnvT' 'Data.Semigroup.Any'@.
data Flagged f a = Flagged { flaggedFlag :: Bool, flaggedVal :: f a }
  deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable, Typeable, Generic, Data)

deriveShow1 ''Flagged
deriveRead1 ''Flagged
deriveEq1 ''Flagged
deriveOrd1 ''Flagged

-- | Uses 'False' for 'pure', and '||' for '<*>'.
instance Applicative f => Applicative (Flagged f) where
    pure = Flagged False . pure
    Flagged n f <*> Flagged m x = Flagged (n || m) (f <*> x)

-- | Uses 'False' for 'point'.
instance Pointed f => Pointed (Flagged f) where
    point = Flagged False . point

-- Folding and traversing defer to the contained @f a@, keeping the flag.
instance Foldable1 f => Foldable1 (Flagged f) where
    fold1 = fold1 . flaggedVal
    foldMap1 f = foldMap1 f . flaggedVal
    toNonEmpty = toNonEmpty . flaggedVal

instance Traversable1 f => Traversable1 (Flagged f) where
    traverse1 f (Flagged n x) = Flagged n <$> traverse1 f x
    sequence1 (Flagged n x) = Flagged n <$> sequence1 x
-- | @'Void2' a b@ is uninhabited for all @a@ and @b@.
data Void2 a b
  deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable, Typeable, Generic, Data)

deriveShow1 ''Void2
deriveRead1 ''Void2
deriveEq1 ''Void2
deriveOrd1 ''Void2

-- Instances for the uninhabited 'Void2': each method can only ever be
-- given a value that cannot exist, so each one matches on the empty case
-- (or defers to 'absurd2').
instance Semigroup (Void2 a b) where
    x <> _ = case x of {}
instance Alt (Void2 a) where
    x <!> _ = absurd2 x
instance Bind (Void2 a) where
    x >>- _ = case x of {}
instance Apply (Void2 a) where
    x <.> _ = case x of {}
-- | @since 0.3.0.0
instance Contravariant (Void2 a) where
    contramap _ = \case {}
-- | @since 0.3.0.0
instance Invariant (Void2 a) where
    invmap _ _ = \case {}

-- | If you treat a @'Void2' f a@ as a functor combinator, then 'absurd2'
-- lets you convert from a @'Void2' f a@ into a @t f a@ for any functor
-- combinator @t@.
absurd2 :: Void2 f a -> t f a
absurd2 = \case {}
-- | @'Void3' a b c@ is uninhabited for all @a@, @b@, and @c@.
data Void3 a b c
  deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable, Typeable, Generic, Data)

deriveShow1 ''Void3
deriveRead1 ''Void3
deriveEq1 ''Void3
deriveOrd1 ''Void3

-- Instances for the uninhabited 'Void3': each method matches on the
-- empty case (or defers to 'absurd3'), since no value can be supplied.
instance Semigroup (Void3 a b c) where
    x <> _ = case x of {}
instance Alt (Void3 a b) where
    x <!> _ = absurd3 x
instance Bind (Void3 a b) where
    x >>- _ = case x of {}
instance Apply (Void3 a b) where
    x <.> _ = case x of {}
-- | @since 0.3.0.0
instance Contravariant (Void3 a b) where
    contramap _ = \case {}
-- | @since 0.3.0.0
instance Invariant (Void3 a b) where
    invmap _ _ = \case {}

-- | If you treat a @'Void3' f g a@ as a binary functor combinator, then
-- 'absurd3' lets you convert from a @'Void3' f g a@ into a @t f g a@ for
-- any binary functor combinator @t@.
absurd3 :: Void3 f g a -> t f g a
absurd3 = \case {}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.